path: root/gcc/df-problems.c
author     dberlin <dberlin@138bc75d-0d04-0410-961f-82ee72b054a4>   2007-06-11 18:02:15 +0000
committer  dberlin <dberlin@138bc75d-0d04-0410-961f-82ee72b054a4>   2007-06-11 18:02:15 +0000
commit     3072d30e7983a3ca5ad030f1f98a5c39bcc2c07b (patch)
tree       fdb9e9f8a0700a2713dc690fed1a2cf20dae8392 /gcc/df-problems.c
parent     8ceb1bfd33bc40bf0cbe1fab8903c2c31efd10ee (diff)
download   gcc-3072d30e7983a3ca5ad030f1f98a5c39bcc2c07b.tar.gz
Merge dataflow branch into mainline
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@125624 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/df-problems.c')
-rw-r--r--  gcc/df-problems.c  3666
1 files changed, 2104 insertions, 1562 deletions
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index 016ea466902..67a8cb0a916 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -1,5 +1,5 @@
/* Standard problems for dataflow support routines.
- Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006
+ Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
Free Software Foundation, Inc.
Originally contributed by Michael P. Hayes
(m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
@@ -42,8 +42,9 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
#include "bitmap.h"
#include "timevar.h"
#include "df.h"
-#include "vecprim.h"
#include "except.h"
+#include "dce.h"
+#include "vecprim.h"
#if 0
#define REG_DEAD_DEBUGGING
@@ -53,123 +54,61 @@ Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
static bitmap seen_in_block = NULL;
static bitmap seen_in_insn = NULL;
-static void df_ri_dump (struct dataflow *, FILE *);
/*----------------------------------------------------------------------------
Public functions access functions for the dataflow problems.
----------------------------------------------------------------------------*/
+/* Get the live at out set for BB no matter what problem happens to be
+ defined. This function is used by the register allocators who
+ choose different dataflow problems depending on the optimization
+ level. */
-/* Create a du or ud chain from SRC to DST and link it into SRC. */
-
-struct df_link *
-df_chain_create (struct dataflow *dflow, struct df_ref *src, struct df_ref *dst)
-{
- struct df_link *head = DF_REF_CHAIN (src);
- struct df_link *link = pool_alloc (dflow->block_pool);;
-
- DF_REF_CHAIN (src) = link;
- link->next = head;
- link->ref = dst;
- return link;
-}
-
-
-/* Delete a du or ud chain for REF. If LINK is NULL, delete all
- chains for ref and check to see if the reverse chains can also be
- deleted. If LINK is not NULL it must be a link off of ref. In
- this case, the other end is not deleted. */
-
-void
-df_chain_unlink (struct dataflow *dflow, struct df_ref *ref, struct df_link *link)
+bitmap
+df_get_live_out (basic_block bb)
{
- struct df_link *chain = DF_REF_CHAIN (ref);
- if (link)
- {
- /* Link was the first element in the chain. */
- if (chain == link)
- DF_REF_CHAIN (ref) = link->next;
- else
- {
- /* Link is an internal element in the chain. */
- struct df_link *prev = chain;
- while (chain)
- {
- if (chain == link)
- {
- prev->next = chain->next;
- break;
- }
- prev = chain;
- chain = chain->next;
- }
- }
- pool_free (dflow->block_pool, link);
- }
- else
- {
- /* If chain is NULL here, it was because of a recursive call
- when the other flavor of chains was not built. Just run thru
- the entire chain calling the other side and then deleting the
- link. */
- while (chain)
- {
- struct df_link *next = chain->next;
- /* Delete the other side if it exists. */
- df_chain_unlink (dflow, chain->ref, chain);
- chain = next;
- }
- }
-}
-
-
-/* Copy the du or ud chain starting at FROM_REF and attach it to
- TO_REF. */
+ gcc_assert (df_lr);
-void
-df_chain_copy (struct dataflow *dflow,
- struct df_ref *to_ref,
- struct df_link *from_ref)
-{
- while (from_ref)
- {
- df_chain_create (dflow, to_ref, from_ref->ref);
- from_ref = from_ref->next;
- }
+ if (df_urec)
+ return DF_RA_LIVE_OUT (bb);
+ else if (df_live)
+ return DF_LIVE_OUT (bb);
+ else
+ return DF_LR_OUT (bb);
}
-
-/* Get the live in set for BB no matter what problem happens to be
- defined. */
+/* Get the live at in set for BB no matter what problem happens to be
+ defined. This function is used by the register allocators who
+ choose different dataflow problems depending on the optimization
+ level. */
bitmap
-df_get_live_in (struct df *df, basic_block bb)
+df_get_live_in (basic_block bb)
{
- gcc_assert (df->problems_by_index[DF_LR]);
+ gcc_assert (df_lr);
- if (df->problems_by_index[DF_UREC])
- return DF_RA_LIVE_IN (df, bb);
- else if (df->problems_by_index[DF_UR])
- return DF_LIVE_IN (df, bb);
+ if (df_urec)
+ return DF_RA_LIVE_IN (bb);
+ else if (df_live)
+ return DF_LIVE_IN (bb);
else
- return DF_UPWARD_LIVE_IN (df, bb);
+ return DF_LR_IN (bb);
}
-
-/* Get the live out set for BB no matter what problem happens to be
- defined. */
+/* Get the live at top set for BB no matter what problem happens to be
+ defined. This function is used by the register allocators who
+ choose different dataflow problems depending on the optimization
+ level. */
bitmap
-df_get_live_out (struct df *df, basic_block bb)
+df_get_live_top (basic_block bb)
{
- gcc_assert (df->problems_by_index[DF_LR]);
+ gcc_assert (df_lr);
- if (df->problems_by_index[DF_UREC])
- return DF_RA_LIVE_OUT (df, bb);
- else if (df->problems_by_index[DF_UR])
- return DF_LIVE_OUT (df, bb);
+ if (df_urec)
+ return DF_RA_LIVE_TOP (bb);
else
- return DF_UPWARD_LIVE_OUT (df, bb);
+ return DF_LR_TOP (bb);
}
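/* Editor's sketch (not part of this patch): a hypothetical caller of the
   accessors above.  With the problem instances (df_lr, df_live, df_urec)
   now global, a pass queries liveness without threading a struct df *
   through, and the accessor picks whichever problem the current
   optimization level registered.  The helper name is made up.  */

static void
note_block_liveness (basic_block bb, FILE *file)
{
  bitmap live_in = df_get_live_in (bb);    /* DF_RA_LIVE_IN, DF_LIVE_IN or DF_LR_IN.  */
  bitmap live_out = df_get_live_out (bb);  /* DF_RA_LIVE_OUT, DF_LIVE_OUT or DF_LR_OUT.  */

  fprintf (file, ";; bb %d: %d live in, %d live out\n", bb->index,
	   (int) bitmap_count_bits (live_in),
	   (int) bitmap_count_bits (live_out));
}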
@@ -223,42 +162,22 @@ df_print_bb_index (basic_block bb, FILE *file)
edge e;
edge_iterator ei;
- fprintf (file, "( ");
+ fprintf (file, "\n( ");
FOR_EACH_EDGE (e, ei, bb->preds)
{
basic_block pred = e->src;
- fprintf (file, "%d ", pred->index);
+ fprintf (file, "%d%s ", pred->index, e->flags & EDGE_EH ? "(EH)" : "");
}
fprintf (file, ")->[%d]->( ", bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
{
basic_block succ = e->dest;
- fprintf (file, "%d ", succ->index);
+ fprintf (file, "%d%s ", succ->index, e->flags & EDGE_EH ? "(EH)" : "");
}
fprintf (file, ")\n");
}
-/* Return a bitmap for REGNO from the cache MAPS. The bitmap is to
- contain COUNT bits starting at START. These bitmaps are not to be
- changed since there is a cache of them. */
-
-static inline bitmap
-df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
-{
- bitmap ids = maps[regno];
- if (!ids)
- {
- unsigned int i;
- unsigned int end = start + count;;
- ids = BITMAP_ALLOC (NULL);
- maps[regno] = ids;
- for (i = start; i < end; i++)
- bitmap_set_bit (ids, i);
- }
- return ids;
-}
-
/* Make sure that the seen_in_insn and seen_in_block sbitmaps are set
up correctly. */
@@ -266,8 +185,8 @@ df_ref_bitmap (bitmap *maps, unsigned int regno, int start, int count)
static void
df_set_seen (void)
{
- seen_in_block = BITMAP_ALLOC (NULL);
- seen_in_insn = BITMAP_ALLOC (NULL);
+ seen_in_block = BITMAP_ALLOC (&df_bitmap_obstack);
+ seen_in_insn = BITMAP_ALLOC (&df_bitmap_obstack);
}
@@ -300,9 +219,7 @@ df_unset_seen (void)
2) There are two kill sets, one if the number of uses is less or
equal to DF_SPARSE_THRESHOLD and another if it is greater.
- <= : There is a bitmap for each register, uses_sites[N], that is
- built on demand. This bitvector contains a 1 for each use or reg
- N.
+ <= : Data is built directly in the kill set.
> : One level of indirection is used to keep from generating long
strings of 1 bits in the kill sets. Bitvectors that are indexed
@@ -312,45 +229,35 @@ df_unset_seen (void)
bits without actually generating a knockout vector.
The kill and sparse_kill and the dense_invalidated_by_call and
- sparse_invalidated_by call both play this game. */
+ sparse_invalidated_by_call both play this game. */
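/* Editor's sketch (not part of this patch): the two kill-set cases
   described above, as they appear later in this file.  A register with
   at most DF_SPARSE_THRESHOLD uses has its use-id range written straight
   into the kill set; a heavily used register is only noted in
   sparse_kill and its range is knocked out lazily at transfer time.
   REGNO and BB_INFO are assumed to be in scope.  */
#if 0
  if (DF_USES_COUNT (regno) > DF_SPARSE_THRESHOLD)
    bitmap_set_bit (bb_info->sparse_kill, regno);
  else
    bitmap_set_range (bb_info->kill, DF_USES_BEGIN (regno),
		      DF_USES_COUNT (regno));
#endif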
/* Private data used to compute the solution for this problem. These
data structures are not accessible outside of this module. */
struct df_ru_problem_data
{
-
- bitmap *use_sites; /* Bitmap of uses for each pseudo. */
- unsigned int use_sites_size; /* Size of use_sites. */
/* The set of defs to regs invalidated by call. */
bitmap sparse_invalidated_by_call;
/* The set of defs to regs invalidated by call for ru. */
- bitmap dense_invalidated_by_call;
+ bitmap dense_invalidated_by_call;
+ /* An obstack for the bitmaps we need for this problem. */
+ bitmap_obstack ru_bitmaps;
};
-/* Get basic block info. */
-
-struct df_ru_bb_info *
-df_ru_get_bb_info (struct dataflow *dflow, unsigned int index)
-{
- return (struct df_ru_bb_info *) dflow->block_info[index];
-}
-
-
/* Set basic block info. */
static void
-df_ru_set_bb_info (struct dataflow *dflow, unsigned int index,
- struct df_ru_bb_info *bb_info)
+df_ru_set_bb_info (unsigned int index, struct df_ru_bb_info *bb_info)
{
- dflow->block_info[index] = bb_info;
+ gcc_assert (df_ru);
+ gcc_assert (index < df_ru->block_info_size);
+ df_ru->block_info[index] = bb_info;
}
/* Free basic block info. */
static void
-df_ru_free_bb_info (struct dataflow *dflow,
- basic_block bb ATTRIBUTE_UNUSED,
+df_ru_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
@@ -361,67 +268,44 @@ df_ru_free_bb_info (struct dataflow *dflow,
BITMAP_FREE (bb_info->gen);
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
- pool_free (dflow->block_pool, bb_info);
+ pool_free (df_ru->block_pool, bb_info);
}
}
-/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
+/* Allocate or reset bitmaps for DF_RU blocks. The solution bits are
not touched unless the block is new. */
static void
-df_ru_alloc (struct dataflow *dflow,
- bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
- bitmap all_blocks)
+df_ru_alloc (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
- unsigned int reg_size = max_reg_num ();
+ struct df_ru_problem_data *problem_data;
- if (!dflow->block_pool)
- dflow->block_pool = create_alloc_pool ("df_ru_block pool",
+ if (!df_ru->block_pool)
+ df_ru->block_pool = create_alloc_pool ("df_ru_block pool",
sizeof (struct df_ru_bb_info), 50);
- if (dflow->problem_data)
+ if (df_ru->problem_data)
{
- unsigned int i;
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) dflow->problem_data;
-
- for (i = 0; i < problem_data->use_sites_size; i++)
- {
- bitmap bm = problem_data->use_sites[i];
- if (bm)
- {
- BITMAP_FREE (bm);
- problem_data->use_sites[i] = NULL;
- }
- }
-
- if (problem_data->use_sites_size < reg_size)
- {
- problem_data->use_sites
- = xrealloc (problem_data->use_sites, reg_size * sizeof (bitmap));
- memset (problem_data->use_sites + problem_data->use_sites_size, 0,
- (reg_size - problem_data->use_sites_size) * sizeof (bitmap));
- problem_data->use_sites_size = reg_size;
- }
-
+ problem_data = (struct df_ru_problem_data *) df_ru->problem_data;
bitmap_clear (problem_data->sparse_invalidated_by_call);
bitmap_clear (problem_data->dense_invalidated_by_call);
}
else
{
- struct df_ru_problem_data *problem_data = XNEW (struct df_ru_problem_data);
- dflow->problem_data = problem_data;
-
- problem_data->use_sites = XCNEWVEC (bitmap, reg_size);
- problem_data->use_sites_size = reg_size;
- problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
- problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
+ problem_data = XNEW (struct df_ru_problem_data);
+ df_ru->problem_data = problem_data;
+
+ bitmap_obstack_initialize (&problem_data->ru_bitmaps);
+ problem_data->sparse_invalidated_by_call
+ = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ problem_data->dense_invalidated_by_call
+ = BITMAP_ALLOC (&problem_data->ru_bitmaps);
}
- df_grow_bb_info (dflow);
+ df_grow_bb_info (df_ru);
/* Because of the clustering of all def sites for the same pseudo,
we have to process all of the blocks before doing the
@@ -429,7 +313,7 @@ df_ru_alloc (struct dataflow *dflow,
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
+ struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
if (bb_info)
{
bitmap_clear (bb_info->kill);
@@ -438,13 +322,13 @@ df_ru_alloc (struct dataflow *dflow,
}
else
{
- bb_info = (struct df_ru_bb_info *) pool_alloc (dflow->block_pool);
- df_ru_set_bb_info (dflow, bb_index, bb_info);
- bb_info->kill = BITMAP_ALLOC (NULL);
- bb_info->sparse_kill = BITMAP_ALLOC (NULL);
- bb_info->gen = BITMAP_ALLOC (NULL);
- bb_info->in = BITMAP_ALLOC (NULL);
- bb_info->out = BITMAP_ALLOC (NULL);
+ bb_info = (struct df_ru_bb_info *) pool_alloc (df_ru->block_pool);
+ df_ru_set_bb_info (bb_index, bb_info);
+ bb_info->kill = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ bb_info->gen = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ bb_info->in = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+ bb_info->out = BITMAP_ALLOC (&problem_data->ru_bitmaps);
}
}
}
@@ -453,22 +337,22 @@ df_ru_alloc (struct dataflow *dflow,
/* Process a list of DEFs for df_ru_bb_local_compute. */
static void
-df_ru_bb_local_compute_process_def (struct dataflow *dflow,
- struct df_ru_bb_info *bb_info,
- struct df_ref *def,
+df_ru_bb_local_compute_process_def (struct df_ru_bb_info *bb_info,
+ struct df_ref **def_rec,
enum df_ref_flags top_flag)
{
- struct df *df = dflow->df;
- while (def)
+ while (*def_rec)
{
+ struct df_ref *def = *def_rec;
if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
/* If the def is to only part of the reg, it is as if it did
not happen, since some of the bits may get thru. */
- && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
+ && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
{
unsigned int regno = DF_REF_REGNO (def);
- unsigned int begin = DF_REG_USE_GET (df, regno)->begin;
- unsigned int n_uses = DF_REG_USE_GET (df, regno)->n_refs;
+ unsigned int begin = DF_USES_BEGIN (regno);
+ unsigned int n_uses = DF_USES_COUNT (regno);
+
if (!bitmap_bit_p (seen_in_block, regno))
{
/* The first def for regno in the insn, causes the kill
@@ -481,19 +365,12 @@ df_ru_bb_local_compute_process_def (struct dataflow *dflow,
if (n_uses > DF_SPARSE_THRESHOLD)
bitmap_set_bit (bb_info->sparse_kill, regno);
else
- {
- struct df_ru_problem_data * problem_data
- = (struct df_ru_problem_data *)dflow->problem_data;
- bitmap uses
- = df_ref_bitmap (problem_data->use_sites, regno,
- begin, n_uses);
- bitmap_ior_into (bb_info->kill, uses);
- }
+ bitmap_set_range (bb_info->kill, begin, n_uses);
}
bitmap_set_bit (seen_in_insn, regno);
}
}
- def = def->next_ref;
+ def_rec++;
}
}
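/* Editor's sketch (not part of this patch): the iteration idiom
   introduced above.  Refs now come as a NULL-terminated array of
   struct df_ref pointers rather than a next_ref chain, so every walker
   in this patch follows this shape (PROCESS stands for the per-ref work
   and is not a real macro).  */
#if 0
  while (*def_rec)
    {
      struct df_ref *def = *def_rec;
      PROCESS (def);
      def_rec++;
    }
#endif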
@@ -502,11 +379,12 @@ df_ru_bb_local_compute_process_def (struct dataflow *dflow,
static void
df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
- struct df_ref *use,
+ struct df_ref **use_rec,
enum df_ref_flags top_flag)
{
- while (use)
+ while (*use_rec)
{
+ struct df_ref *use = *use_rec;
if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
{
/* Add use to set of gens in this BB unless we have seen a
@@ -515,18 +393,17 @@ df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
if (!bitmap_bit_p (seen_in_block, regno))
bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
}
- use = use->next_ref;
+ use_rec++;
}
}
/* Compute local reaching use (upward exposed use) info for basic
block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
static void
-df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
+df_ru_bb_local_compute (unsigned int bb_index)
{
- struct df *df = dflow->df;
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
+ struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
rtx insn;
/* Set when a def for regno is seen. */
@@ -537,11 +414,11 @@ df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
/* Variables defined in the prolog that are used by the exception
handler. */
df_ru_bb_local_compute_process_use (bb_info,
- df_get_artificial_uses (df, bb_index),
+ df_get_artificial_uses (bb_index),
DF_REF_AT_TOP);
#endif
- df_ru_bb_local_compute_process_def (dflow, bb_info,
- df_get_artificial_defs (df, bb_index),
+ df_ru_bb_local_compute_process_def (bb_info,
+ df_get_artificial_defs (bb_index),
DF_REF_AT_TOP);
FOR_BB_INSNS (bb, insn)
@@ -551,10 +428,14 @@ df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
continue;
df_ru_bb_local_compute_process_use (bb_info,
- DF_INSN_UID_USES (df, uid), 0);
+ DF_INSN_UID_USES (uid), 0);
+
+ if (df->changeable_flags & DF_EQ_NOTES)
+ df_ru_bb_local_compute_process_use (bb_info,
+ DF_INSN_UID_EQ_USES (uid), 0);
- df_ru_bb_local_compute_process_def (dflow, bb_info,
- DF_INSN_UID_DEFS (df, uid), 0);
+ df_ru_bb_local_compute_process_def (bb_info,
+ DF_INSN_UID_DEFS (uid), 0);
bitmap_ior_into (seen_in_block, seen_in_insn);
bitmap_clear (seen_in_insn);
@@ -562,49 +443,45 @@ df_ru_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
/* Process the hardware registers that are always live. */
df_ru_bb_local_compute_process_use (bb_info,
- df_get_artificial_uses (df, bb_index), 0);
+ df_get_artificial_uses (bb_index), 0);
- df_ru_bb_local_compute_process_def (dflow, bb_info,
- df_get_artificial_defs (df, bb_index), 0);
+ df_ru_bb_local_compute_process_def (bb_info,
+ df_get_artificial_defs (bb_index), 0);
}
/* Compute local reaching use (upward exposed use) info for each basic
block within BLOCKS. */
static void
-df_ru_local_compute (struct dataflow *dflow,
- bitmap all_blocks,
- bitmap rescan_blocks ATTRIBUTE_UNUSED)
+df_ru_local_compute (bitmap all_blocks)
{
- struct df *df = dflow->df;
unsigned int bb_index;
bitmap_iterator bi;
unsigned int regno;
struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) dflow->problem_data;
+ = (struct df_ru_problem_data *) df_ru->problem_data;
bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
df_set_seen ();
- df_reorganize_refs (&df->use_info);
+
+ df_maybe_reorganize_use_refs (df->changeable_flags & DF_EQ_NOTES ?
+ DF_REF_ORDER_BY_REG_WITH_NOTES : DF_REF_ORDER_BY_REG);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- df_ru_bb_local_compute (dflow, bb_index);
+ df_ru_bb_local_compute (bb_index);
}
/* Set up the knockout bit vectors to be applied across EH_EDGES. */
EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
{
- struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
- if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
+ if (DF_USES_COUNT (regno) > DF_SPARSE_THRESHOLD)
bitmap_set_bit (sparse_invalidated, regno);
else
- {
- bitmap defs = df_ref_bitmap (problem_data->use_sites, regno,
- reg_info->begin, reg_info->n_refs);
- bitmap_ior_into (dense_invalidated, defs);
- }
+ bitmap_set_range (dense_invalidated,
+ DF_USES_BEGIN (regno),
+ DF_USES_COUNT (regno));
}
df_unset_seen ();
@@ -614,14 +491,14 @@ df_ru_local_compute (struct dataflow *dflow,
/* Initialize the solution bit vectors for problem. */
static void
-df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
+df_ru_init_solution (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
+ struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
bitmap_copy (bb_info->in, bb_info->gen);
bitmap_clear (bb_info->out);
}
@@ -631,21 +508,20 @@ df_ru_init_solution (struct dataflow *dflow, bitmap all_blocks)
/* Out of target gets or of in of source. */
static void
-df_ru_confluence_n (struct dataflow *dflow, edge e)
+df_ru_confluence_n (edge e)
{
- bitmap op1 = df_ru_get_bb_info (dflow, e->src->index)->out;
- bitmap op2 = df_ru_get_bb_info (dflow, e->dest->index)->in;
+ bitmap op1 = df_ru_get_bb_info (e->src->index)->out;
+ bitmap op2 = df_ru_get_bb_info (e->dest->index)->in;
if (e->flags & EDGE_EH)
{
struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) dflow->problem_data;
+ = (struct df_ru_problem_data *) df_ru->problem_data;
bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
- struct df *df = dflow->df;
bitmap_iterator bi;
unsigned int regno;
- bitmap tmp = BITMAP_ALLOC (NULL);
+ bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
bitmap_copy (tmp, op2);
bitmap_and_compl_into (tmp, dense_invalidated);
@@ -653,8 +529,8 @@ df_ru_confluence_n (struct dataflow *dflow, edge e)
EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
{
bitmap_clear_range (tmp,
- DF_REG_USE_GET (df, regno)->begin,
- DF_REG_USE_GET (df, regno)->n_refs);
+ DF_USES_BEGIN (regno),
+ DF_USES_COUNT (regno));
}
bitmap_ior_into (op1, tmp);
BITMAP_FREE (tmp);
@@ -667,9 +543,9 @@ df_ru_confluence_n (struct dataflow *dflow, edge e)
/* Transfer function. */
static bool
-df_ru_transfer_function (struct dataflow *dflow, int bb_index)
+df_ru_transfer_function (int bb_index)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb_index);
+ struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
unsigned int regno;
bitmap_iterator bi;
bitmap in = bb_info->in;
@@ -682,15 +558,21 @@ df_ru_transfer_function (struct dataflow *dflow, int bb_index)
return bitmap_ior_and_compl (in, gen, out, kill);
else
{
- struct df *df = dflow->df;
+ struct df_ru_problem_data *problem_data;
+ bitmap tmp;
bool changed = false;
- bitmap tmp = BITMAP_ALLOC (NULL);
+
+ /* Note that TMP is _not_ a temporary bitmap if we end up replacing
+ IN with TMP. Therefore, allocate TMP in the RU bitmaps obstack. */
+ problem_data = (struct df_ru_problem_data *) df_ru->problem_data;
+ tmp = BITMAP_ALLOC (&problem_data->ru_bitmaps);
+
bitmap_copy (tmp, out);
EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
{
bitmap_clear_range (tmp,
- DF_REG_USE_GET (df, regno)->begin,
- DF_REG_USE_GET (df, regno)->n_refs);
+ DF_USES_BEGIN (regno),
+ DF_USES_COUNT (regno));
}
bitmap_and_compl_into (tmp, kill);
bitmap_ior_into (tmp, gen);
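/* Editor's note (not part of this patch): when there are no sparse
   kills, the transfer function above is just IN = GEN | (OUT & ~KILL),
   computed in one call to bitmap_ior_and_compl.  The slow path reaches
   the same result after first clearing, from a copy of OUT, the use-id
   range [DF_USES_BEGIN (regno), DF_USES_BEGIN (regno)
   + DF_USES_COUNT (regno)) for every regno in sparse_kill; e.g. a
   sparse-killed regno whose uses occupy ids 100..102 simply has bits
   100-102 cleared instead of being listed in KILL.  */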
@@ -710,17 +592,17 @@ df_ru_transfer_function (struct dataflow *dflow, int bb_index)
/* Free all storage associated with the problem. */
static void
-df_ru_free (struct dataflow *dflow)
+df_ru_free (void)
{
unsigned int i;
struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) dflow->problem_data;
+ = (struct df_ru_problem_data *) df_ru->problem_data;
if (problem_data)
{
- for (i = 0; i < dflow->block_info_size; i++)
+ for (i = 0; i < df_ru->block_info_size; i++)
{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, i);
+ struct df_ru_bb_info *bb_info = df_ru_get_bb_info (i);
if (bb_info)
{
BITMAP_FREE (bb_info->kill);
@@ -731,75 +613,80 @@ df_ru_free (struct dataflow *dflow)
}
}
- free_alloc_pool (dflow->block_pool);
-
- for (i = 0; i < problem_data->use_sites_size; i++)
- {
- bitmap bm = problem_data->use_sites[i];
- if (bm)
- BITMAP_FREE (bm);
- }
-
- free (problem_data->use_sites);
+ free_alloc_pool (df_ru->block_pool);
BITMAP_FREE (problem_data->sparse_invalidated_by_call);
BITMAP_FREE (problem_data->dense_invalidated_by_call);
+ bitmap_obstack_release (&problem_data->ru_bitmaps);
- dflow->block_info_size = 0;
- free (dflow->block_info);
- free (dflow->problem_data);
+ df_ru->block_info_size = 0;
+ free (df_ru->block_info);
+ free (df_ru->problem_data);
}
- free (dflow);
+ free (df_ru);
}
/* Debugging info. */
static void
-df_ru_dump (struct dataflow *dflow, FILE *file)
+df_ru_start_dump (FILE *file)
{
- basic_block bb;
- struct df *df = dflow->df;
struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) dflow->problem_data;
- unsigned int m = max_reg_num ();
+ = (struct df_ru_problem_data *) df_ru->problem_data;
+ unsigned int m = DF_REG_SIZE(df);
unsigned int regno;
- if (!dflow->block_info)
+ if (!df_ru->block_info)
return;
- fprintf (file, "Reaching uses:\n");
+ fprintf (file, ";; Reaching uses:\n");
- fprintf (file, " sparse invalidated \t");
+ fprintf (file, ";; sparse invalidated \t");
dump_bitmap (file, problem_data->sparse_invalidated_by_call);
- fprintf (file, " dense invalidated \t");
+ fprintf (file, " dense invalidated \t");
dump_bitmap (file, problem_data->dense_invalidated_by_call);
for (regno = 0; regno < m; regno++)
- if (DF_REG_USE_GET (df, regno)->n_refs)
+ if (DF_USES_COUNT (regno))
fprintf (file, "%d[%d,%d] ", regno,
- DF_REG_USE_GET (df, regno)->begin,
- DF_REG_USE_GET (df, regno)->n_refs);
+ DF_USES_BEGIN (regno),
+ DF_USES_COUNT (regno));
fprintf (file, "\n");
-
- FOR_ALL_BB (bb)
- {
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (dflow, bb->index);
- df_print_bb_index (bb, file);
-
- if (!bb_info->in)
- continue;
-
- fprintf (file, " in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
- dump_bitmap (file, bb_info->in);
- fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
- dump_bitmap (file, bb_info->gen);
- fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
- dump_bitmap (file, bb_info->kill);
- fprintf (file, " out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
- dump_bitmap (file, bb_info->out);
- }
}
+
+/* Debugging info at top of bb. */
+
+static void
+df_ru_top_dump (basic_block bb, FILE *file)
+{
+ struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->in)
+ return;
+
+ fprintf (file, ";; ru in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
+ dump_bitmap (file, bb_info->in);
+ fprintf (file, ";; ru gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
+ dump_bitmap (file, bb_info->gen);
+ fprintf (file, ";; ru kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
+ dump_bitmap (file, bb_info->kill);
+}
+
+
+/* Debugging info at bottom of bb. */
+
+static void
+df_ru_bottom_dump (basic_block bb, FILE *file)
+{
+ struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->out)
+ return;
+
+ fprintf (file, ";; ru out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
+ dump_bitmap (file, bb_info->out);
+}
+
+
/* All of the information associated with every instance of the problem. */
static struct df_problem problem_RU =
@@ -811,15 +698,20 @@ static struct df_problem problem_RU =
df_ru_free_bb_info, /* Free basic block info. */
df_ru_local_compute, /* Local compute function. */
df_ru_init_solution, /* Init the solution specific data. */
- df_iterative_dataflow, /* Iterative solver. */
+ df_worklist_dataflow, /* Worklist solver. */
NULL, /* Confluence operator 0. */
df_ru_confluence_n, /* Confluence operator n. */
df_ru_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_ru_free, /* Free all of the problem information. */
- df_ru_dump, /* Debugging. */
+ df_ru_free, /* Remove this problem from the stack of dataflow problems. */
+ df_ru_start_dump, /* Debugging. */
+ df_ru_top_dump, /* Debugging start block. */
+ df_ru_bottom_dump, /* Debugging end block. */
+ NULL, /* Incremental solution verify start. */
+ NULL, /* Incremental solution verfiy end. */
NULL, /* Dependent problem. */
- 0 /* Changeable flags. */
+ TV_DF_RU /* Timing variable. */
};
@@ -828,10 +720,10 @@ static struct df_problem problem_RU =
of DF. The returned structure is what is used to get at the
solution. */
-struct dataflow *
-df_ru_add_problem (struct df *df, int flags)
+void
+df_ru_add_problem (void)
{
- return df_add_problem (df, &problem_RU, flags);
+ df_add_problem (&problem_RU);
}
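/* Editor's sketch (not part of this patch): with the new interface a
   pass no longer holds a struct dataflow pointer; it registers the
   problem and asks the core to solve it, roughly:

       df_ru_add_problem ();
       df_analyze ();
       bb_info = df_ru_get_bb_info (bb->index);

   df_analyze and the df_ru_get_bb_info accessor are defined outside
   this file (df-core.c and, presumably, df.h) and are shown here only
   as assumed context.  */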
@@ -852,43 +744,30 @@ df_ru_add_problem (struct df *df, int flags)
data structures are not accessible outside of this module. */
struct df_rd_problem_data
{
- /* If the number of defs for regnum N is less than
- DF_SPARSE_THRESHOLD, uses_sites[N] contains a mask of the all of
- the defs of reg N indexed by the id in the ref structure. If
- there are more than DF_SPARSE_THRESHOLD defs for regnum N a
- different mechanism is used to mask the def. */
- bitmap *def_sites; /* Bitmap of defs for each pseudo. */
- unsigned int def_sites_size; /* Size of def_sites. */
/* The set of defs to regs invalidated by call. */
bitmap sparse_invalidated_by_call;
/* The set of defs to regs invalidate by call for rd. */
- bitmap dense_invalidated_by_call;
+ bitmap dense_invalidated_by_call;
+ /* An obstack for the bitmaps we need for this problem. */
+ bitmap_obstack rd_bitmaps;
};
-/* Get basic block info. */
-
-struct df_rd_bb_info *
-df_rd_get_bb_info (struct dataflow *dflow, unsigned int index)
-{
- return (struct df_rd_bb_info *) dflow->block_info[index];
-}
-
-
/* Set basic block info. */
static void
-df_rd_set_bb_info (struct dataflow *dflow, unsigned int index,
+df_rd_set_bb_info (unsigned int index,
struct df_rd_bb_info *bb_info)
{
- dflow->block_info[index] = bb_info;
+ gcc_assert (df_rd);
+ gcc_assert (index < df_rd->block_info_size);
+ df_rd->block_info[index] = bb_info;
}
/* Free basic block info. */
static void
-df_rd_free_bb_info (struct dataflow *dflow,
- basic_block bb ATTRIBUTE_UNUSED,
+df_rd_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_rd_bb_info *bb_info = (struct df_rd_bb_info *) vbb_info;
@@ -899,67 +778,44 @@ df_rd_free_bb_info (struct dataflow *dflow,
BITMAP_FREE (bb_info->gen);
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
- pool_free (dflow->block_pool, bb_info);
+ pool_free (df_rd->block_pool, bb_info);
}
}
-/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
+/* Allocate or reset bitmaps for DF_RD blocks. The solution bits are
not touched unless the block is new. */
static void
-df_rd_alloc (struct dataflow *dflow,
- bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
- bitmap all_blocks)
+df_rd_alloc (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
- unsigned int reg_size = max_reg_num ();
+ struct df_rd_problem_data *problem_data;
- if (!dflow->block_pool)
- dflow->block_pool = create_alloc_pool ("df_rd_block pool",
+ if (!df_rd->block_pool)
+ df_rd->block_pool = create_alloc_pool ("df_rd_block pool",
sizeof (struct df_rd_bb_info), 50);
- if (dflow->problem_data)
+ if (df_rd->problem_data)
{
- unsigned int i;
- struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) dflow->problem_data;
-
- for (i = 0; i < problem_data->def_sites_size; i++)
- {
- bitmap bm = problem_data->def_sites[i];
- if (bm)
- {
- BITMAP_FREE (bm);
- problem_data->def_sites[i] = NULL;
- }
- }
-
- if (problem_data->def_sites_size < reg_size)
- {
- problem_data->def_sites
- = xrealloc (problem_data->def_sites, reg_size *sizeof (bitmap));
- memset (problem_data->def_sites + problem_data->def_sites_size, 0,
- (reg_size - problem_data->def_sites_size) *sizeof (bitmap));
- problem_data->def_sites_size = reg_size;
- }
-
+ problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap_clear (problem_data->sparse_invalidated_by_call);
bitmap_clear (problem_data->dense_invalidated_by_call);
}
else
{
- struct df_rd_problem_data *problem_data = XNEW (struct df_rd_problem_data);
- dflow->problem_data = problem_data;
-
- problem_data->def_sites = XCNEWVEC (bitmap, reg_size);
- problem_data->def_sites_size = reg_size;
- problem_data->sparse_invalidated_by_call = BITMAP_ALLOC (NULL);
- problem_data->dense_invalidated_by_call = BITMAP_ALLOC (NULL);
+ problem_data = XNEW (struct df_rd_problem_data);
+ df_rd->problem_data = problem_data;
+
+ bitmap_obstack_initialize (&problem_data->rd_bitmaps);
+ problem_data->sparse_invalidated_by_call
+ = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ problem_data->dense_invalidated_by_call
+ = BITMAP_ALLOC (&problem_data->rd_bitmaps);
}
- df_grow_bb_info (dflow);
+ df_grow_bb_info (df_rd);
/* Because of the clustering of all use sites for the same pseudo,
we have to process all of the blocks before doing the
@@ -967,7 +823,7 @@ df_rd_alloc (struct dataflow *dflow,
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
if (bb_info)
{
bitmap_clear (bb_info->kill);
@@ -976,13 +832,13 @@ df_rd_alloc (struct dataflow *dflow,
}
else
{
- bb_info = (struct df_rd_bb_info *) pool_alloc (dflow->block_pool);
- df_rd_set_bb_info (dflow, bb_index, bb_info);
- bb_info->kill = BITMAP_ALLOC (NULL);
- bb_info->sparse_kill = BITMAP_ALLOC (NULL);
- bb_info->gen = BITMAP_ALLOC (NULL);
- bb_info->in = BITMAP_ALLOC (NULL);
- bb_info->out = BITMAP_ALLOC (NULL);
+ bb_info = (struct df_rd_bb_info *) pool_alloc (df_rd->block_pool);
+ df_rd_set_bb_info (bb_index, bb_info);
+ bb_info->kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->gen = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->in = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+ bb_info->out = BITMAP_ALLOC (&problem_data->rd_bitmaps);
}
}
}
@@ -991,75 +847,76 @@ df_rd_alloc (struct dataflow *dflow,
/* Process a list of DEFs for df_rd_bb_local_compute. */
static void
-df_rd_bb_local_compute_process_def (struct dataflow *dflow,
- struct df_rd_bb_info *bb_info,
- struct df_ref *def,
+df_rd_bb_local_compute_process_def (struct df_rd_bb_info *bb_info,
+ struct df_ref **def_rec,
enum df_ref_flags top_flag)
{
- struct df *df = dflow->df;
- while (def)
+ while (*def_rec)
{
+ struct df_ref *def = *def_rec;
if (top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
{
unsigned int regno = DF_REF_REGNO (def);
- unsigned int begin = DF_REG_DEF_GET (df, regno)->begin;
- unsigned int n_defs = DF_REG_DEF_GET (df, regno)->n_refs;
+ unsigned int begin = DF_DEFS_BEGIN (regno);
+ unsigned int n_defs = DF_DEFS_COUNT (regno);
- /* Only the last def(s) for a regno in the block has any
- effect. */
- if (!bitmap_bit_p (seen_in_block, regno))
+ if ((!(df->changeable_flags & DF_NO_HARD_REGS))
+ || (regno >= FIRST_PSEUDO_REGISTER))
{
- /* The first def for regno in insn gets to knock out the
- defs from other instructions. */
- if ((!bitmap_bit_p (seen_in_insn, regno))
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- && (!((DF_REF_FLAGS (def) & DF_REF_PARTIAL)
- || (DF_REF_FLAGS (def) & DF_REF_MAY_CLOBBER))))
+ /* Only the last def(s) for a regno in the block has any
+ effect. */
+ if (!bitmap_bit_p (seen_in_block, regno))
{
- if (n_defs > DF_SPARSE_THRESHOLD)
+ /* The first def for regno in insn gets to knock out the
+ defs from other instructions. */
+ if ((!bitmap_bit_p (seen_in_insn, regno))
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ && (!(DF_REF_FLAGS (def) &
+ (DF_REF_PARTIAL | DF_REF_CONDITIONAL | DF_REF_MAY_CLOBBER))))
{
- bitmap_set_bit (bb_info->sparse_kill, regno);
- bitmap_clear_range(bb_info->gen, begin, n_defs);
- }
- else
- {
- struct df_rd_problem_data * problem_data
- = (struct df_rd_problem_data *)dflow->problem_data;
- bitmap defs = df_ref_bitmap (problem_data->def_sites,
- regno, begin, n_defs);
- bitmap_ior_into (bb_info->kill, defs);
- bitmap_and_compl_into (bb_info->gen, defs);
+ if (n_defs > DF_SPARSE_THRESHOLD)
+ {
+ bitmap_set_bit (bb_info->sparse_kill, regno);
+ bitmap_clear_range(bb_info->gen, begin, n_defs);
+ }
+ else
+ {
+ bitmap_set_range (bb_info->kill, begin, n_defs);
+ bitmap_clear_range (bb_info->gen, begin, n_defs);
+ }
}
+
+ bitmap_set_bit (seen_in_insn, regno);
+ /* All defs for regno in the instruction may be put into
+ the gen set. */
+ if (!(DF_REF_FLAGS (def)
+ & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+ bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
}
-
- bitmap_set_bit (seen_in_insn, regno);
- /* All defs for regno in the instruction may be put into
- the gen set. */
- if (!(DF_REF_FLAGS (def)
- & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (bb_info->gen, DF_REF_ID (def));
}
}
- def = def->next_ref;
+ def_rec++;
}
}
/* Compute local reaching def info for basic block BB. */
static void
-df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
+df_rd_bb_local_compute (unsigned int bb_index)
{
- struct df *df = dflow->df;
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
bitmap_clear (seen_in_block);
bitmap_clear (seen_in_insn);
- df_rd_bb_local_compute_process_def (dflow, bb_info,
- df_get_artificial_defs (df, bb_index), 0);
+ /* Artificials are only hard regs. */
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_rd_bb_local_compute_process_def (bb_info,
+ df_get_artificial_defs (bb_index),
+ 0);
FOR_BB_INSNS_REVERSE (bb, insn)
{
@@ -1068,8 +925,8 @@ df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
if (!INSN_P (insn))
continue;
- df_rd_bb_local_compute_process_def (dflow, bb_info,
- DF_INSN_UID_DEFS (df, uid), 0);
+ df_rd_bb_local_compute_process_def (bb_info,
+ DF_INSN_UID_DEFS (uid), 0);
/* This complex dance with the two bitmaps is required because
instructions can assign twice to the same pseudo. This
@@ -1084,50 +941,44 @@ df_rd_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
/* Process the artificial defs at the top of the block last since we
are going backwards through the block and these are logically at
the start. */
- df_rd_bb_local_compute_process_def (dflow, bb_info,
- df_get_artificial_defs (df, bb_index),
- DF_REF_AT_TOP);
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_rd_bb_local_compute_process_def (bb_info,
+ df_get_artificial_defs (bb_index),
+ DF_REF_AT_TOP);
}
/* Compute local reaching def info for each basic block within BLOCKS. */
static void
-df_rd_local_compute (struct dataflow *dflow,
- bitmap all_blocks,
- bitmap rescan_blocks ATTRIBUTE_UNUSED)
+df_rd_local_compute (bitmap all_blocks)
{
- struct df *df = dflow->df;
unsigned int bb_index;
bitmap_iterator bi;
unsigned int regno;
struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) dflow->problem_data;
+ = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
df_set_seen ();
- df_reorganize_refs (&df->def_info);
+
+ df_maybe_reorganize_def_refs (DF_REF_ORDER_BY_REG);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- df_rd_bb_local_compute (dflow, bb_index);
+ df_rd_bb_local_compute (bb_index);
}
/* Set up the knockout bit vectors to be applied across EH_EDGES. */
EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
{
- struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
- if (reg_info->n_refs > DF_SPARSE_THRESHOLD)
- {
- bitmap_set_bit (sparse_invalidated, regno);
- }
+ if (DF_DEFS_COUNT (regno) > DF_SPARSE_THRESHOLD)
+ bitmap_set_bit (sparse_invalidated, regno);
else
- {
- bitmap defs = df_ref_bitmap (problem_data->def_sites, regno,
- reg_info->begin, reg_info->n_refs);
- bitmap_ior_into (dense_invalidated, defs);
- }
+ bitmap_set_range (dense_invalidated,
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
}
df_unset_seen ();
}
@@ -1136,14 +987,14 @@ df_rd_local_compute (struct dataflow *dflow,
/* Initialize the solution bit vectors for problem. */
static void
-df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
+df_rd_init_solution (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
bitmap_copy (bb_info->out, bb_info->gen);
bitmap_clear (bb_info->in);
@@ -1153,21 +1004,20 @@ df_rd_init_solution (struct dataflow *dflow, bitmap all_blocks)
/* In of target gets or of out of source. */
static void
-df_rd_confluence_n (struct dataflow *dflow, edge e)
+df_rd_confluence_n (edge e)
{
- bitmap op1 = df_rd_get_bb_info (dflow, e->dest->index)->in;
- bitmap op2 = df_rd_get_bb_info (dflow, e->src->index)->out;
+ bitmap op1 = df_rd_get_bb_info (e->dest->index)->in;
+ bitmap op2 = df_rd_get_bb_info (e->src->index)->out;
if (e->flags & EDGE_EH)
{
struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) dflow->problem_data;
+ = (struct df_rd_problem_data *) df_rd->problem_data;
bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
- struct df *df = dflow->df;
bitmap_iterator bi;
unsigned int regno;
- bitmap tmp = BITMAP_ALLOC (NULL);
+ bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
bitmap_copy (tmp, op2);
bitmap_and_compl_into (tmp, dense_invalidated);
@@ -1175,8 +1025,8 @@ df_rd_confluence_n (struct dataflow *dflow, edge e)
EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
{
bitmap_clear_range (tmp,
- DF_REG_DEF_GET (df, regno)->begin,
- DF_REG_DEF_GET (df, regno)->n_refs);
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
}
bitmap_ior_into (op1, tmp);
BITMAP_FREE (tmp);
@@ -1189,9 +1039,9 @@ df_rd_confluence_n (struct dataflow *dflow, edge e)
/* Transfer function. */
static bool
-df_rd_transfer_function (struct dataflow *dflow, int bb_index)
+df_rd_transfer_function (int bb_index)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
unsigned int regno;
bitmap_iterator bi;
bitmap in = bb_info->in;
@@ -1204,15 +1054,21 @@ df_rd_transfer_function (struct dataflow *dflow, int bb_index)
return bitmap_ior_and_compl (out, gen, in, kill);
else
{
- struct df *df = dflow->df;
+ struct df_rd_problem_data *problem_data;
bool changed = false;
- bitmap tmp = BITMAP_ALLOC (NULL);
+ bitmap tmp;
+
+ /* Note that TMP is _not_ a temporary bitmap if we end up replacing
+ OUT with TMP. Therefore, allocate TMP in the RD bitmaps obstack. */
+ problem_data = (struct df_rd_problem_data *) df_rd->problem_data;
+ tmp = BITMAP_ALLOC (&problem_data->rd_bitmaps);
+
bitmap_copy (tmp, in);
EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
{
bitmap_clear_range (tmp,
- DF_REG_DEF_GET (df, regno)->begin,
- DF_REG_DEF_GET (df, regno)->n_refs);
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
}
bitmap_and_compl_into (tmp, kill);
bitmap_ior_into (tmp, gen);
@@ -1232,17 +1088,17 @@ df_rd_transfer_function (struct dataflow *dflow, int bb_index)
/* Free all storage associated with the problem. */
static void
-df_rd_free (struct dataflow *dflow)
+df_rd_free (void)
{
unsigned int i;
struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) dflow->problem_data;
+ = (struct df_rd_problem_data *) df_rd->problem_data;
if (problem_data)
{
- for (i = 0; i < dflow->block_info_size; i++)
+ for (i = 0; i < df_rd->block_info_size; i++)
{
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, i);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (i);
if (bb_info)
{
BITMAP_FREE (bb_info->kill);
@@ -1253,43 +1109,33 @@ df_rd_free (struct dataflow *dflow)
}
}
- free_alloc_pool (dflow->block_pool);
-
- for (i = 0; i < problem_data->def_sites_size; i++)
- {
- bitmap bm = problem_data->def_sites[i];
- if (bm)
- BITMAP_FREE (bm);
- }
-
- free (problem_data->def_sites);
+ free_alloc_pool (df_rd->block_pool);
BITMAP_FREE (problem_data->sparse_invalidated_by_call);
BITMAP_FREE (problem_data->dense_invalidated_by_call);
+ bitmap_obstack_release (&problem_data->rd_bitmaps);
- dflow->block_info_size = 0;
- free (dflow->block_info);
- free (dflow->problem_data);
+ df_rd->block_info_size = 0;
+ free (df_rd->block_info);
+ free (df_rd->problem_data);
}
- free (dflow);
+ free (df_rd);
}
/* Debugging info. */
static void
-df_rd_dump (struct dataflow *dflow, FILE *file)
+df_rd_start_dump (FILE *file)
{
- struct df *df = dflow->df;
- basic_block bb;
struct df_rd_problem_data *problem_data
- = (struct df_rd_problem_data *) dflow->problem_data;
- unsigned int m = max_reg_num ();
+ = (struct df_rd_problem_data *) df_rd->problem_data;
+ unsigned int m = DF_REG_SIZE(df);
unsigned int regno;
- if (!dflow->block_info)
+ if (!df_rd->block_info)
return;
- fprintf (file, "Reaching defs:\n\n");
+ fprintf (file, ";; Reaching defs:\n\n");
fprintf (file, " sparse invalidated \t");
dump_bitmap (file, problem_data->sparse_invalidated_by_call);
@@ -1297,29 +1143,44 @@ df_rd_dump (struct dataflow *dflow, FILE *file)
dump_bitmap (file, problem_data->dense_invalidated_by_call);
for (regno = 0; regno < m; regno++)
- if (DF_REG_DEF_GET (df, regno)->n_refs)
+ if (DF_DEFS_COUNT (regno))
fprintf (file, "%d[%d,%d] ", regno,
- DF_REG_DEF_GET (df, regno)->begin,
- DF_REG_DEF_GET (df, regno)->n_refs);
+ DF_DEFS_BEGIN (regno),
+ DF_DEFS_COUNT (regno));
fprintf (file, "\n");
- FOR_ALL_BB (bb)
- {
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (dflow, bb->index);
- df_print_bb_index (bb, file);
-
- if (!bb_info->in)
- continue;
-
- fprintf (file, " in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
- dump_bitmap (file, bb_info->in);
- fprintf (file, " gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
- dump_bitmap (file, bb_info->gen);
- fprintf (file, " kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
- dump_bitmap (file, bb_info->kill);
- fprintf (file, " out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
- dump_bitmap (file, bb_info->out);
- }
+}
+
+
+/* Debugging info at top of bb. */
+
+static void
+df_rd_top_dump (basic_block bb, FILE *file)
+{
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->in)
+ return;
+
+ fprintf (file, ";; rd in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
+ dump_bitmap (file, bb_info->in);
+ fprintf (file, ";; rd gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
+ dump_bitmap (file, bb_info->gen);
+ fprintf (file, ";; rd kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
+ dump_bitmap (file, bb_info->kill);
+}
+
+
+/* Debugging info at top of bb. */
+
+static void
+df_rd_bottom_dump (basic_block bb, FILE *file)
+{
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->out)
+ return;
+
+ fprintf (file, ";; rd out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
+ dump_bitmap (file, bb_info->out);
}
/* All of the information associated with every instance of the problem. */
@@ -1333,15 +1194,20 @@ static struct df_problem problem_RD =
df_rd_free_bb_info, /* Free basic block info. */
df_rd_local_compute, /* Local compute function. */
df_rd_init_solution, /* Init the solution specific data. */
- df_iterative_dataflow, /* Iterative solver. */
+ df_worklist_dataflow, /* Worklist solver. */
NULL, /* Confluence operator 0. */
df_rd_confluence_n, /* Confluence operator n. */
df_rd_transfer_function, /* Transfer function. */
NULL, /* Finalize function. */
df_rd_free, /* Free all of the problem information. */
- df_rd_dump, /* Debugging. */
+ df_rd_free, /* Remove this problem from the stack of dataflow problems. */
+ df_rd_start_dump, /* Debugging. */
+ df_rd_top_dump, /* Debugging start block. */
+ df_rd_bottom_dump, /* Debugging end block. */
+ NULL, /* Incremental solution verify start. */
+ NULL, /* Incremental solution verfiy end. */
NULL, /* Dependent problem. */
- 0 /* Changeable flags. */
+ TV_DF_RD /* Timing variable. */
};
@@ -1350,10 +1216,10 @@ static struct df_problem problem_RD =
of DF. The returned structure is what is used to get at the
solution. */
-struct dataflow *
-df_rd_add_problem (struct df *df, int flags)
+void
+df_rd_add_problem (void)
{
- return df_add_problem (df, &problem_RD, flags);
+ df_add_problem (&problem_RD);
}
@@ -1367,30 +1233,30 @@ df_rd_add_problem (struct df *df, int flags)
See df.h for details.
----------------------------------------------------------------------------*/
-/* Get basic block info. */
-
-struct df_lr_bb_info *
-df_lr_get_bb_info (struct dataflow *dflow, unsigned int index)
+/* Private data used to verify the solution for this problem. */
+struct df_lr_problem_data
{
- return (struct df_lr_bb_info *) dflow->block_info[index];
-}
+ bitmap *in;
+ bitmap *out;
+};
/* Set basic block info. */
static void
-df_lr_set_bb_info (struct dataflow *dflow, unsigned int index,
+df_lr_set_bb_info (unsigned int index,
struct df_lr_bb_info *bb_info)
{
- dflow->block_info[index] = bb_info;
+ gcc_assert (df_lr);
+ gcc_assert (index < df_lr->block_info_size);
+ df_lr->block_info[index] = bb_info;
}
/* Free basic block info. */
static void
-df_lr_free_bb_info (struct dataflow *dflow,
- basic_block bb ATTRIBUTE_UNUSED,
+df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_lr_bb_info *bb_info = (struct df_lr_bb_info *) vbb_info;
@@ -1398,77 +1264,115 @@ df_lr_free_bb_info (struct dataflow *dflow,
{
BITMAP_FREE (bb_info->use);
BITMAP_FREE (bb_info->def);
+ if (bb_info->in == bb_info->top)
+ bb_info->top = NULL;
+ else
+ {
+ BITMAP_FREE (bb_info->top);
+ BITMAP_FREE (bb_info->ause);
+ BITMAP_FREE (bb_info->adef);
+ }
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
- pool_free (dflow->block_pool, bb_info);
+ pool_free (df_lr->block_pool, bb_info);
}
}
-/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
+/* Allocate or reset bitmaps for DF_LR blocks. The solution bits are
not touched unless the block is new. */
static void
-df_lr_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
- bitmap all_blocks ATTRIBUTE_UNUSED)
+df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
- if (!dflow->block_pool)
- dflow->block_pool = create_alloc_pool ("df_lr_block pool",
+ if (!df_lr->block_pool)
+ df_lr->block_pool = create_alloc_pool ("df_lr_block pool",
sizeof (struct df_lr_bb_info), 50);
- df_grow_bb_info (dflow);
+ df_grow_bb_info (df_lr);
- EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
if (bb_info)
{
bitmap_clear (bb_info->def);
bitmap_clear (bb_info->use);
+ if (bb_info->adef)
+ {
+ bitmap_clear (bb_info->adef);
+ bitmap_clear (bb_info->ause);
+ }
}
else
{
- bb_info = (struct df_lr_bb_info *) pool_alloc (dflow->block_pool);
- df_lr_set_bb_info (dflow, bb_index, bb_info);
+ bb_info = (struct df_lr_bb_info *) pool_alloc (df_lr->block_pool);
+ df_lr_set_bb_info (bb_index, bb_info);
bb_info->use = BITMAP_ALLOC (NULL);
bb_info->def = BITMAP_ALLOC (NULL);
bb_info->in = BITMAP_ALLOC (NULL);
bb_info->out = BITMAP_ALLOC (NULL);
+ bb_info->top = bb_info->in;
+ bb_info->adef = NULL;
+ bb_info->ause = NULL;
}
}
}
+/* Reset the global solution for recalculation. */
+
+static void
+df_lr_reset (bitmap all_blocks)
+{
+ unsigned int bb_index;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+ {
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
+ gcc_assert (bb_info);
+ bitmap_clear (bb_info->in);
+ bitmap_clear (bb_info->out);
+ bitmap_clear (bb_info->top);
+ }
+}
+
+
/* Compute local live register info for basic block BB. */
static void
-df_lr_bb_local_compute (struct dataflow *dflow,
- struct df *df, unsigned int bb_index)
+df_lr_bb_local_compute (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
rtx insn;
- struct df_ref *def;
- struct df_ref *use;
+ struct df_ref **def_rec;
+ struct df_ref **use_rec;
/* Process the registers set in an exception handler. */
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if (((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
+ }
+ }
/* Process the hardware registers that are always live. */
- for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
- /* Add use to set of uses in this BB. */
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ struct df_ref *use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ }
FOR_BB_INSNS_REVERSE (bb, insn)
{
@@ -1479,73 +1383,100 @@ df_lr_bb_local_compute (struct dataflow *dflow,
if (CALL_P (insn))
{
- for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
+ struct df_ref *def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
- if (dregno >= FIRST_PSEUDO_REGISTER
- || !(SIBLING_CALL_P (insn)
- && bitmap_bit_p (df->exit_block_uses, dregno)
- && !refers_to_regno_p (dregno, dregno+1,
- current_function_return_rtx,
- (rtx *)0)))
+ if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
{
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
+ if (dregno >= FIRST_PSEUDO_REGISTER
+ || !(SIBLING_CALL_P (insn)
+ && bitmap_bit_p (df->exit_block_uses, dregno)
+ && !refers_to_regno_p (dregno, dregno+1,
+ current_function_return_rtx,
+ (rtx *)0)))
{
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ {
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
+ }
}
}
+ else
+ /* This is the return value. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ {
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
+ }
}
}
else
{
- for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- unsigned int dregno = DF_REF_REGNO (def);
-
- if (DF_INSN_CONTAINS_ASM (df, insn)
- && dregno < FIRST_PSEUDO_REGISTER)
- {
- unsigned int i;
- unsigned int end = dregno
- + hard_regno_nregs[dregno][GET_MODE (DF_REF_REG (def))] - 1;
- for (i = dregno; i <= end; ++i)
- regs_asm_clobbered[i] = 1;
- }
+ struct df_ref *def = *def_rec;
/* If the def is to only part of the reg, it does
not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
+ unsigned int dregno = DF_REF_REGNO (def);
bitmap_set_bit (bb_info->def, dregno);
bitmap_clear_bit (bb_info->use, dregno);
}
}
}
- for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
- /* Add use to set of uses in this BB. */
- bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ {
+ struct df_ref *use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ }
}
-
/* Process the registers set in an exception handler. */
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- && (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL)))
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ if (bb_info->adef == NULL)
+ {
+ gcc_assert (bb_info->ause == NULL);
+ gcc_assert (bb_info->top == bb_info->in);
+ bb_info->adef = BITMAP_ALLOC (NULL);
+ bb_info->ause = BITMAP_ALLOC (NULL);
+ bb_info->top = BITMAP_ALLOC (NULL);
+ }
+ bitmap_set_bit (bb_info->adef, dregno);
+ }
+ }
#ifdef EH_USES
/* Process the uses that are live into an exception handler. */
- for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
- /* Add use to set of uses in this BB. */
- if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
- bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ struct df_ref *use = *use_rec;
+ /* Add use to set of uses in this BB. */
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
+ {
+ if (bb_info->adef == NULL)
+ {
+ gcc_assert (bb_info->ause == NULL);
+ gcc_assert (bb_info->top == bb_info->in);
+ bb_info->adef = BITMAP_ALLOC (NULL);
+ bb_info->ause = BITMAP_ALLOC (NULL);
+ bb_info->top = BITMAP_ALLOC (NULL);
+ }
+ bitmap_set_bit (bb_info->ause, DF_REF_REGNO (use));
+ }
+ }
#endif
}
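
   To make the local step above concrete, here is a minimal standalone
   sketch of how a block's use/def transfer sets are built by a backward
   scan.  It uses a 64-bit mask as a stand-in for GCC's bitmaps and toy
   structures in place of the df_ref machinery, so every name here is
   illustrative only, not part of the df API:

   #include <stdint.h>
   #include <stddef.h>

   typedef uint64_t regset;	/* toy register set, one bit per regno */

   struct toy_insn
   {
     regset full_defs;		/* full, unconditional defs: these kill */
     regset uses;		/* registers read by the insn */
   };

   /* Build the block-local USE and DEF sets by walking the instructions
      from the end of the block toward the start.  A full def of R both
      records R in DEF and removes R from USE; a use of R records R in
      USE.  Partial or conditional defs are simply left out of full_defs,
      matching the DF_REF_PARTIAL | DF_REF_CONDITIONAL test above.  */
   static void
   toy_lr_bb_local_compute (const struct toy_insn *insns, size_t n,
			    regset *use, regset *def)
   {
     *use = 0;
     *def = 0;
     for (size_t i = n; i-- > 0; )
       {
	 *def |= insns[i].full_defs;
	 *use &= ~insns[i].full_defs;
	 *use |= insns[i].uses;
       }
   }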
@@ -1553,19 +1484,11 @@ df_lr_bb_local_compute (struct dataflow *dflow,
/* Compute local live register info for each basic block within BLOCKS. */
static void
-df_lr_local_compute (struct dataflow *dflow,
- bitmap all_blocks,
- bitmap rescan_blocks)
+df_lr_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
- struct df *df = dflow->df;
unsigned int bb_index;
bitmap_iterator bi;
- /* Assume that the stack pointer is unchanging if alloca hasn't
- been used. */
- if (bitmap_equal_p (all_blocks, rescan_blocks))
- memset (regs_asm_clobbered, 0, sizeof (regs_asm_clobbered));
-
bitmap_clear (df->hardware_regs_used);
/* The all-important stack pointer must always be live. */
@@ -1594,34 +1517,34 @@ df_lr_local_compute (struct dataflow *dflow,
bitmap_set_bit (df->hardware_regs_used, PIC_OFFSET_TABLE_REGNUM);
}
- if (bitmap_bit_p (rescan_blocks, EXIT_BLOCK))
- {
- /* The exit block is special for this problem and its bits are
- computed from thin air. */
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, EXIT_BLOCK);
- bitmap_copy (bb_info->use, df->exit_block_uses);
- }
-
- EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (df_lr->out_of_date_transfer_functions, 0, bb_index, bi)
{
if (bb_index == EXIT_BLOCK)
- continue;
- df_lr_bb_local_compute (dflow, df, bb_index);
+ {
+ /* The exit block is special for this problem and its bits are
+ computed from thin air. */
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (EXIT_BLOCK);
+ bitmap_copy (bb_info->use, df->exit_block_uses);
+ }
+ else
+ df_lr_bb_local_compute (bb_index);
}
+
+ bitmap_clear (df_lr->out_of_date_transfer_functions);
}
/* Initialize the solution vectors. */
static void
-df_lr_init (struct dataflow *dflow, bitmap all_blocks)
+df_lr_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
bitmap_copy (bb_info->in, bb_info->use);
bitmap_clear (bb_info->out);
}
@@ -1632,11 +1555,9 @@ df_lr_init (struct dataflow *dflow, bitmap all_blocks)
noreturn function that throws. And even if it isn't, getting the
unwind info right helps debugging. */
static void
-df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
+df_lr_confluence_0 (basic_block bb)
{
- struct df *df = dflow->df;
-
- bitmap op1 = df_lr_get_bb_info (dflow, bb->index)->out;
+ bitmap op1 = df_lr_get_bb_info (bb->index)->out;
if (bb != EXIT_BLOCK_PTR)
bitmap_copy (op1, df->hardware_regs_used);
}
@@ -1645,10 +1566,10 @@ df_lr_confluence_0 (struct dataflow *dflow, basic_block bb)
/* Confluence function that ignores fake edges. */
static void
-df_lr_confluence_n (struct dataflow *dflow, edge e)
+df_lr_confluence_n (edge e)
{
- bitmap op1 = df_lr_get_bb_info (dflow, e->src->index)->out;
- bitmap op2 = df_lr_get_bb_info (dflow, e->dest->index)->in;
+ bitmap op1 = df_lr_get_bb_info (e->src->index)->out;
+ bitmap op2 = df_lr_get_bb_info (e->dest->index)->in;
/* Call-clobbered registers die across exception and call edges. */
/* ??? Abnormal call edges ignored for the moment, as this gets
@@ -1658,85 +1579,221 @@ df_lr_confluence_n (struct dataflow *dflow, edge e)
else
bitmap_ior_into (op1, op2);
- bitmap_ior_into (op1, dflow->df->hardware_regs_used);
+ bitmap_ior_into (op1, df->hardware_regs_used);
}
/* Transfer function. */
static bool
-df_lr_transfer_function (struct dataflow *dflow, int bb_index)
+df_lr_transfer_function (int bb_index)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb_index);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
bitmap use = bb_info->use;
bitmap def = bb_info->def;
+ bitmap top = bb_info->top;
+ bitmap ause = bb_info->ause;
+ bitmap adef = bb_info->adef;
+ bool changed;
- return bitmap_ior_and_compl (in, use, out, def);
+ changed = bitmap_ior_and_compl (top, use, out, def);
+ if (in != top)
+ {
+ gcc_assert (ause && adef);
+ changed |= bitmap_ior_and_compl (in, ause, top, adef);
+ }
+
+ return changed;
+}
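
   The transfer function above is the usual backward liveness equation,
   in = use | (out & ~def), extended with a second step for blocks that
   carry artificial defs or uses at their top: in = ause | (top & ~adef).
   A standalone sketch with toy types (a 64-bit mask rather than GCC's
   bitmap API; every name below is hypothetical):

   #include <stdint.h>
   #include <stdbool.h>

   typedef uint64_t regset;	/* toy register set, one bit per regno */

   struct toy_lr_bb
   {
     regset use, def;		/* transfer function of the block body */
     regset ause, adef;		/* artificial refs at the top, if any */
     bool has_artificial_top;
     regset in, top, out;	/* current solution */
   };

   /* Returns true if IN changed, the convention a worklist solver needs
      to decide whether predecessors must be revisited.  */
   static bool
   toy_lr_transfer (struct toy_lr_bb *bb)
   {
     regset old_in = bb->in;
     bb->top = bb->use | (bb->out & ~bb->def);
     bb->in = bb->has_artificial_top
	      ? (bb->ause | (bb->top & ~bb->adef))
	      : bb->top;
     return bb->in != old_in;
   }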
+
+
+/* Run the fast dce as a side effect of building LR. */
+
+static void
+df_lr_local_finalize (bitmap all_blocks ATTRIBUTE_UNUSED)
+{
+ if (df->changeable_flags & DF_LR_RUN_DCE)
+ {
+ run_fast_df_dce ();
+ if (df_lr->problem_data && df_lr->solutions_dirty)
+ {
+	  /* If we are here, it is because we are both verifying the
+	     solution and the DCE changed the function.  In that case
+	     the verification info that was built will be wrong.  So we
+	     leave the dirty flag true so that the verifier will skip
+	     the checking part and just clean up.  */
+ df_lr->solutions_dirty = true;
+ }
+ else
+ df_lr->solutions_dirty = false;
+ }
+ else
+ df_lr->solutions_dirty = false;
}
/* Free all storage associated with the problem. */
static void
-df_lr_free (struct dataflow *dflow)
+df_lr_free (void)
{
- if (dflow->block_info)
+ if (df_lr->block_info)
{
unsigned int i;
- for (i = 0; i < dflow->block_info_size; i++)
+ for (i = 0; i < df_lr->block_info_size; i++)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, i);
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (i);
if (bb_info)
{
BITMAP_FREE (bb_info->use);
BITMAP_FREE (bb_info->def);
+ if (bb_info->in == bb_info->top)
+ bb_info->top = NULL;
+ else
+ {
+ BITMAP_FREE (bb_info->top);
+ BITMAP_FREE (bb_info->ause);
+ BITMAP_FREE (bb_info->adef);
+ }
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
}
}
- free_alloc_pool (dflow->block_pool);
+ free_alloc_pool (df_lr->block_pool);
- dflow->block_info_size = 0;
- free (dflow->block_info);
+ df_lr->block_info_size = 0;
+ free (df_lr->block_info);
}
- free (dflow->problem_data);
- free (dflow);
+ BITMAP_FREE (df_lr->out_of_date_transfer_functions);
+ free (df_lr);
}
-/* Debugging info. */
+/* Debugging info at top of bb. */
static void
-df_lr_dump (struct dataflow *dflow, FILE *file)
+df_lr_top_dump (basic_block bb, FILE *file)
{
- basic_block bb;
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
+ struct df_lr_problem_data *problem_data;
+ if (!bb_info || !bb_info->in)
+ return;
+
+ fprintf (file, ";; lr in \t");
+ df_print_regset (file, bb_info->in);
+ if (df_lr->problem_data)
+ {
+ problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
+ fprintf (file, ";; old in \t");
+ df_print_regset (file, problem_data->in[bb->index]);
+ }
+ fprintf (file, ";; lr use \t");
+ df_print_regset (file, bb_info->use);
+ fprintf (file, ";; lr def \t");
+ df_print_regset (file, bb_info->def);
+}
+
+
+/* Debugging info at bottom of bb. */
+
+static void
+df_lr_bottom_dump (basic_block bb, FILE *file)
+{
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
+ struct df_lr_problem_data *problem_data;
+ if (!bb_info || !bb_info->out)
+ return;
- if (!dflow->block_info)
+ fprintf (file, ";; lr out \t");
+ df_print_regset (file, bb_info->out);
+ if (df_lr->problem_data)
+ {
+ problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
+ fprintf (file, ";; old out \t");
+ df_print_regset (file, problem_data->out[bb->index]);
+ }
+}
+
+
+/* Build the datastructure to verify that the solution to the dataflow
+ equations is not dirty. */
+
+static void
+df_lr_verify_solution_start (void)
+{
+ basic_block bb;
+ struct df_lr_problem_data *problem_data;
+ if (df_lr->solutions_dirty)
+ {
+ df_lr->problem_data = NULL;
+ return;
+ }
+
+ /* Set it true so that the solution is recomputed. */
+ df_lr->solutions_dirty = true;
+
+ problem_data = XNEW (struct df_lr_problem_data);
+ df_lr->problem_data = problem_data;
+ problem_data->in = XNEWVEC (bitmap, last_basic_block);
+ problem_data->out = XNEWVEC (bitmap, last_basic_block);
+
+ FOR_ALL_BB (bb)
+ {
+ problem_data->in[bb->index] = BITMAP_ALLOC (NULL);
+ problem_data->out[bb->index] = BITMAP_ALLOC (NULL);
+ bitmap_copy (problem_data->in[bb->index], DF_LR_IN (bb));
+ bitmap_copy (problem_data->out[bb->index], DF_LR_OUT (bb));
+ }
+}
+
+
+/* Compare the saved datastructure and the new solution to the dataflow
+ equations. */
+
+static void
+df_lr_verify_solution_end (void)
+{
+ struct df_lr_problem_data *problem_data;
+ basic_block bb;
+
+ if (df_lr->problem_data == NULL)
return;
- fprintf (file, "Live Registers:\n");
+ problem_data = (struct df_lr_problem_data *)df_lr->problem_data;
+
+ if (df_lr->solutions_dirty)
+ /* Do not check if the solution is still dirty. See the comment
+ in df_lr_local_finalize for details. */
+ df_lr->solutions_dirty = false;
+ else
+ FOR_ALL_BB (bb)
+ {
+ if ((!bitmap_equal_p (problem_data->in[bb->index], DF_LR_IN (bb)))
+ || (!bitmap_equal_p (problem_data->out[bb->index], DF_LR_OUT (bb))))
+ {
+ /*df_dump (stderr);*/
+ gcc_unreachable ();
+ }
+ }
+
+ /* Cannot delete them immediately because you may want to dump them
+ if the comparison fails. */
FOR_ALL_BB (bb)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (dflow, bb->index);
- df_print_bb_index (bb, file);
-
- if (!bb_info->in)
- continue;
-
- fprintf (file, " in \t");
- dump_bitmap (file, bb_info->in);
- fprintf (file, " use \t");
- dump_bitmap (file, bb_info->use);
- fprintf (file, " def \t");
- dump_bitmap (file, bb_info->def);
- fprintf (file, " out \t");
- dump_bitmap (file, bb_info->out);
+ BITMAP_FREE (problem_data->in[bb->index]);
+ BITMAP_FREE (problem_data->out[bb->index]);
}
+
+ free (problem_data->in);
+ free (problem_data->out);
+ free (problem_data);
+ df_lr->problem_data = NULL;
}
+
/* All of the information associated with every instance of the problem. */
static struct df_problem problem_LR =
@@ -1744,19 +1801,24 @@ static struct df_problem problem_LR =
DF_LR, /* Problem id. */
DF_BACKWARD, /* Direction. */
df_lr_alloc, /* Allocate the problem specific data. */
- NULL, /* Reset global information. */
+ df_lr_reset, /* Reset global information. */
df_lr_free_bb_info, /* Free basic block info. */
df_lr_local_compute, /* Local compute function. */
df_lr_init, /* Init the solution specific data. */
- df_iterative_dataflow, /* Iterative solver. */
+ df_worklist_dataflow, /* Worklist solver. */
df_lr_confluence_0, /* Confluence operator 0. */
df_lr_confluence_n, /* Confluence operator n. */
df_lr_transfer_function, /* Transfer function. */
- NULL, /* Finalize function. */
+ df_lr_local_finalize, /* Finalize function. */
df_lr_free, /* Free all of the problem information. */
- df_lr_dump, /* Debugging. */
+ NULL, /* Remove this problem from the stack of dataflow problems. */
+ NULL, /* Debugging. */
+ df_lr_top_dump, /* Debugging start block. */
+ df_lr_bottom_dump, /* Debugging end block. */
+ df_lr_verify_solution_start,/* Incremental solution verify start. */
+ df_lr_verify_solution_end, /* Incremental solution verify end. */
NULL, /* Dependent problem. */
- 0
+ TV_DF_LR /* Timing variable. */
};
@@ -1764,80 +1826,181 @@ static struct df_problem problem_LR =
of DF. The returned structure is what is used to get at the
solution. */
-struct dataflow *
-df_lr_add_problem (struct df *df, int flags)
+void
+df_lr_add_problem (void)
{
- return df_add_problem (df, &problem_LR, flags);
+ df_add_problem (&problem_LR);
+ /* These will be initialized when df_scan_blocks processes each
+ block. */
+ df_lr->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
+}
+
+
+/* Verify that all of the lr related info is consistent and
+ correct. */
+
+void
+df_lr_verify_transfer_functions (void)
+{
+ basic_block bb;
+ bitmap saved_def;
+ bitmap saved_use;
+ bitmap saved_adef;
+ bitmap saved_ause;
+ bitmap all_blocks;
+ bool need_as;
+
+ if (!df)
+ return;
+
+ saved_def = BITMAP_ALLOC (NULL);
+ saved_use = BITMAP_ALLOC (NULL);
+ saved_adef = BITMAP_ALLOC (NULL);
+ saved_ause = BITMAP_ALLOC (NULL);
+ all_blocks = BITMAP_ALLOC (NULL);
+
+ FOR_ALL_BB (bb)
+ {
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb->index);
+ bitmap_set_bit (all_blocks, bb->index);
+
+ if (bb_info)
+ {
+ /* Make a copy of the transfer functions and then compute
+ new ones to see if the transfer functions have
+ changed. */
+ if (!bitmap_bit_p (df_lr->out_of_date_transfer_functions,
+ bb->index))
+ {
+ bitmap_copy (saved_def, bb_info->def);
+ bitmap_copy (saved_use, bb_info->use);
+ bitmap_clear (bb_info->def);
+ bitmap_clear (bb_info->use);
+
+ if (bb_info->adef)
+ {
+ need_as = true;
+ bitmap_copy (saved_adef, bb_info->adef);
+ bitmap_copy (saved_ause, bb_info->ause);
+ bitmap_clear (bb_info->adef);
+ bitmap_clear (bb_info->ause);
+ }
+ else
+ need_as = false;
+
+ df_lr_bb_local_compute (bb->index);
+ gcc_assert (bitmap_equal_p (saved_def, bb_info->def));
+ gcc_assert (bitmap_equal_p (saved_use, bb_info->use));
+
+ if (need_as)
+ {
+ gcc_assert (bb_info->adef);
+ gcc_assert (bb_info->ause);
+ gcc_assert (bitmap_equal_p (saved_adef, bb_info->adef));
+ gcc_assert (bitmap_equal_p (saved_ause, bb_info->ause));
+ }
+ else
+ {
+ gcc_assert (!bb_info->adef);
+ gcc_assert (!bb_info->ause);
+ }
+ }
+ }
+ else
+ {
+ /* If we do not have basic block info, the block must be in
+	     the list of dirty blocks or else someone has added a
+ block behind our backs. */
+ gcc_assert (bitmap_bit_p (df_lr->out_of_date_transfer_functions,
+ bb->index));
+ }
+ /* Make sure no one created a block without following
+ procedures. */
+ gcc_assert (df_scan_get_bb_info (bb->index));
+ }
+
+ /* Make sure there are no dirty bits in blocks that have been deleted. */
+ gcc_assert (!bitmap_intersect_compl_p (df_lr->out_of_date_transfer_functions,
+ all_blocks));
+
+ BITMAP_FREE (saved_def);
+ BITMAP_FREE (saved_use);
+ BITMAP_FREE (saved_adef);
+ BITMAP_FREE (saved_ause);
+ BITMAP_FREE (all_blocks);
}
/*----------------------------------------------------------------------------
- UNINITIALIZED REGISTERS
+ COMBINED LIVE REGISTERS AND UNINITIALIZED REGISTERS.
- Find the set of uses for registers that are reachable from the entry
- block without passing thru a definition. In and out bitvectors are built
- for each basic block. The regnum is used to index into these sets.
- See df.h for details.
-----------------------------------------------------------------------------*/
+ First find the set of uses for registers that are reachable from
+ the entry block without passing thru a definition. In and out
+ bitvectors are built for each basic block. The regnum is used to
+ index into these sets. See df.h for details.
-/* Get basic block info. */
+   Then the in and out sets here are the anded results of the in and
+   out sets from the lr and ur problems.
+----------------------------------------------------------------------------*/
-struct df_ur_bb_info *
-df_ur_get_bb_info (struct dataflow *dflow, unsigned int index)
+/* Private data used to verify the solution for this problem. */
+struct df_live_problem_data
{
- return (struct df_ur_bb_info *) dflow->block_info[index];
-}
+ bitmap *in;
+ bitmap *out;
+};
/* Set basic block info. */
static void
-df_ur_set_bb_info (struct dataflow *dflow, unsigned int index,
- struct df_ur_bb_info *bb_info)
+df_live_set_bb_info (unsigned int index,
+ struct df_live_bb_info *bb_info)
{
- dflow->block_info[index] = bb_info;
+ gcc_assert (df_live);
+ gcc_assert (index < df_live->block_info_size);
+ df_live->block_info[index] = bb_info;
}
/* Free basic block info. */
static void
-df_ur_free_bb_info (struct dataflow *dflow,
- basic_block bb ATTRIBUTE_UNUSED,
+df_live_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
- struct df_ur_bb_info *bb_info = (struct df_ur_bb_info *) vbb_info;
+ struct df_live_bb_info *bb_info = (struct df_live_bb_info *) vbb_info;
if (bb_info)
{
BITMAP_FREE (bb_info->gen);
BITMAP_FREE (bb_info->kill);
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
- pool_free (dflow->block_pool, bb_info);
+ pool_free (df_live->block_pool, bb_info);
}
}
-/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
+/* Allocate or reset bitmaps for DF_LIVE blocks. The solution bits are
not touched unless the block is new. */
static void
-df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
- bitmap all_blocks ATTRIBUTE_UNUSED)
+df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
- if (!dflow->block_pool)
- dflow->block_pool = create_alloc_pool ("df_ur_block pool",
- sizeof (struct df_ur_bb_info), 100);
+ if (!df_live->block_pool)
+ df_live->block_pool = create_alloc_pool ("df_live_block pool",
+ sizeof (struct df_live_bb_info), 100);
- df_grow_bb_info (dflow);
+ df_grow_bb_info (df_live);
- EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions, 0, bb_index, bi)
{
- struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
if (bb_info)
{
bitmap_clear (bb_info->kill);
@@ -1845,8 +2008,8 @@ df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
}
else
{
- bb_info = (struct df_ur_bb_info *) pool_alloc (dflow->block_pool);
- df_ur_set_bb_info (dflow, bb_index, bb_info);
+ bb_info = (struct df_live_bb_info *) pool_alloc (df_live->block_pool);
+ df_live_set_bb_info (bb_index, bb_info);
bb_info->kill = BITMAP_ALLOC (NULL);
bb_info->gen = BITMAP_ALLOC (NULL);
bb_info->in = BITMAP_ALLOC (NULL);
@@ -1856,160 +2019,132 @@ df_ur_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
}
+/* Reset the global solution for recalculation. */
+
+static void
+df_live_reset (bitmap all_blocks)
+{
+ unsigned int bb_index;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+ {
+ struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
+ gcc_assert (bb_info);
+ bitmap_clear (bb_info->in);
+ bitmap_clear (bb_info->out);
+ }
+}
+
+
/* Compute local uninitialized register info for basic block BB. */
static void
-df_ur_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
+df_live_bb_local_compute (unsigned int bb_index)
{
- struct df *df = dflow->df;
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
rtx insn;
- struct df_ref *def;
-
- bitmap_clear (seen_in_block);
- bitmap_clear (seen_in_insn);
+ struct df_ref **def_rec;
+ int luid = 0;
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- {
- unsigned int regno = DF_REF_REGNO (def);
- if (!bitmap_bit_p (seen_in_block, regno))
- {
- bitmap_set_bit (seen_in_block, regno);
- bitmap_set_bit (bb_info->gen, regno);
- }
- }
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
+ }
- FOR_BB_INSNS_REVERSE (bb, insn)
+ FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
+ struct df_insn_info *insn_info = DF_INSN_UID_GET (uid);
+
+ /* Inserting labels does not always trigger the incremental
+ rescanning. */
+ if (!insn_info)
+ {
+ gcc_assert (!INSN_P (insn));
+ df_insn_create_insn_record (insn);
+ }
+
+ DF_INSN_LUID (insn) = luid;
if (!INSN_P (insn))
continue;
- for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
+ luid++;
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
+ struct df_ref *def = *def_rec;
unsigned int regno = DF_REF_REGNO (def);
- /* Only the last def counts. */
- if (!bitmap_bit_p (seen_in_block, regno))
- {
- bitmap_set_bit (seen_in_insn, regno);
-
- if (DF_REF_FLAGS (def)
- & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER))
- {
- /* Only must clobbers for the entire reg destroy the
- value. */
- if ((DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
- && (!DF_REF_FLAGS (def) & DF_REF_PARTIAL))
- bitmap_set_bit (bb_info->kill, regno);
- }
- else
- bitmap_set_bit (bb_info->gen, regno);
- }
+
+ if (DF_REF_FLAGS_IS_SET (def,
+ DF_REF_PARTIAL | DF_REF_CONDITIONAL))
+	    /* All partial or conditional defs
+	       seen are included in the gen set.  */
+ bitmap_set_bit (bb_info->gen, regno);
+ else if (DF_REF_FLAGS_IS_SET (def, DF_REF_MUST_CLOBBER))
+ /* Only must clobbers for the entire reg destroy the
+ value. */
+ bitmap_set_bit (bb_info->kill, regno);
+ else if (! DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
+ bitmap_set_bit (bb_info->gen, regno);
}
- bitmap_ior_into (seen_in_block, seen_in_insn);
- bitmap_clear (seen_in_insn);
}
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int regno = DF_REF_REGNO (def);
- if (!bitmap_bit_p (seen_in_block, regno))
- {
- bitmap_set_bit (seen_in_block, regno);
- bitmap_set_bit (bb_info->gen, regno);
- }
- }
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
+ }
}
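
   The def classification above is the heart of the forward half of the
   LIVE problem: partial or conditional defs only gen, a full must-clobber
   kills, a may-clobber does neither, and an ordinary full def gens.  A
   hedged sketch of that decision with toy flag values (the real DF_REF_*
   flags live in df.h; the values below are made up for illustration):

   #include <stdbool.h>

   /* Toy stand-ins for the relevant DF_REF_* flags; values are arbitrary.  */
   enum toy_def_flags
   {
     TOY_PARTIAL	= 1 << 0,
     TOY_CONDITIONAL	= 1 << 1,
     TOY_MUST_CLOBBER	= 1 << 2,
     TOY_MAY_CLOBBER	= 1 << 3
   };

   enum toy_def_effect { TOY_GEN, TOY_KILL, TOY_NEITHER };

   /* Classify one def for the gen/kill sets of the forward problem.  */
   static enum toy_def_effect
   toy_classify_def (unsigned int flags)
   {
     if (flags & (TOY_PARTIAL | TOY_CONDITIONAL))
       return TOY_GEN;		/* sets part of the value, kills nothing */
     if (flags & TOY_MUST_CLOBBER)
       return TOY_KILL;		/* a full must-clobber destroys the value */
     if (flags & TOY_MAY_CLOBBER)
       return TOY_NEITHER;	/* might or might not clobber: do nothing */
     return TOY_GEN;		/* an ordinary full def */
   }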
/* Compute local uninitialized register info. */
static void
-df_ur_local_compute (struct dataflow *dflow,
- bitmap all_blocks ATTRIBUTE_UNUSED,
- bitmap rescan_blocks)
+df_live_local_compute (bitmap all_blocks ATTRIBUTE_UNUSED)
{
unsigned int bb_index;
bitmap_iterator bi;
- df_set_seen ();
+ df_grow_insn_info ();
- EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (df_live->out_of_date_transfer_functions,
+ 0, bb_index, bi)
{
- df_ur_bb_local_compute (dflow, bb_index);
+ df_live_bb_local_compute (bb_index);
}
- df_unset_seen ();
+ bitmap_clear (df_live->out_of_date_transfer_functions);
}
/* Initialize the solution vectors. */
static void
-df_ur_init (struct dataflow *dflow, bitmap all_blocks)
+df_live_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
bitmap_copy (bb_info->out, bb_info->gen);
bitmap_clear (bb_info->in);
}
}
-
-/* Or in the stack regs, hard regs and early clobber regs into the
- ur_in sets of all of the blocks. */
-
-static void
-df_ur_local_finalize (struct dataflow *dflow, bitmap all_blocks)
-{
- struct df *df = dflow->df;
- struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
- bitmap tmp = BITMAP_ALLOC (NULL);
- bitmap_iterator bi;
- unsigned int bb_index;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
- struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
-
- /* No register may reach a location where it is not used. Thus
- we trim the rr result to the places where it is used. */
- bitmap_and_into (bb_info->in, bb_lr_info->in);
- bitmap_and_into (bb_info->out, bb_lr_info->out);
-
-#if 1
- /* Hard registers may still stick in the ur_out set, but not
- be in the ur_in set, if their only mention was in a call
- in this block. This is because a call kills in the lr
- problem but does not kill in the ur problem. To clean
- this up, we execute the transfer function on the lr_in
- set and then use that to knock bits out of ur_out. */
- bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
- bb_info->kill);
- bitmap_and_into (bb_info->out, tmp);
-#endif
- }
-
- BITMAP_FREE (tmp);
-}
-
-
/* Confluence function that ignores fake edges. */
static void
-df_ur_confluence_n (struct dataflow *dflow, edge e)
+df_live_confluence_n (edge e)
{
- bitmap op1 = df_ur_get_bb_info (dflow, e->dest->index)->in;
- bitmap op2 = df_ur_get_bb_info (dflow, e->src->index)->out;
+ bitmap op1 = df_live_get_bb_info (e->dest->index)->in;
+ bitmap op2 = df_live_get_bb_info (e->src->index)->out;
if (e->flags & EDGE_FAKE)
return;
@@ -2021,9 +2156,9 @@ df_ur_confluence_n (struct dataflow *dflow, edge e)
/* Transfer function. */
static bool
-df_ur_transfer_function (struct dataflow *dflow, int bb_index)
+df_live_transfer_function (int bb_index)
{
- struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb_index);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
bitmap gen = bb_info->gen;
@@ -2033,18 +2168,45 @@ df_ur_transfer_function (struct dataflow *dflow, int bb_index)
}
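
   The body of this transfer function falls outside the hunk, but the
   locals above (in, out, gen, kill) show its shape.  Assuming the
   standard forward gen/kill equation out = gen | (in & ~kill), a toy
   version looks like this (uint64_t masks in place of bitmaps; all names
   hypothetical):

   #include <stdint.h>
   #include <stdbool.h>

   typedef uint64_t regset;	/* toy register set, one bit per regno */

   /* Standard forward gen/kill step.  Returns true if OUT changed.  */
   static bool
   toy_forward_transfer (regset in, regset gen, regset kill, regset *out)
   {
     regset new_out = gen | (in & ~kill);
     bool changed = (new_out != *out);
     *out = new_out;
     return changed;
   }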
+/* And the LR and UR info to produce the LIVE info. */
+
+static void
+df_live_local_finalize (bitmap all_blocks)
+{
+
+ if (df_live->solutions_dirty)
+ {
+ bitmap_iterator bi;
+ unsigned int bb_index;
+
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
+ {
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
+ struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
+
+ /* No register may reach a location where it is not used. Thus
+ we trim the rr result to the places where it is used. */
+ bitmap_and_into (bb_live_info->in, bb_lr_info->in);
+ bitmap_and_into (bb_live_info->out, bb_lr_info->out);
+ }
+
+ df_live->solutions_dirty = false;
+ }
+}
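
   The finalize step is just a per-block intersection: a register stays in
   the LIVE solution only if the backward LR problem says it is live and
   the forward problem says some def of it reaches the point.  For
   example, if LR says {r1, r2} are live into a block but only r2 has any
   reaching def, the combined LIVE in set is {r2}.  A toy illustration
   (uint64_t masks in place of bitmaps; names are made up):

   #include <stdint.h>

   typedef uint64_t regset;	/* toy register set, one bit per regno */

   /* Trim the forward solution by the LR solution, the shape of the
      bitmap_and_into calls above.  */
   static void
   toy_live_finalize (regset lr_in, regset lr_out,
		      regset *live_in, regset *live_out)
   {
     *live_in &= lr_in;
     *live_out &= lr_out;
   }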
+
+
/* Free all storage associated with the problem. */
static void
-df_ur_free (struct dataflow *dflow)
+df_live_free (void)
{
- if (dflow->block_info)
+ if (df_live->block_info)
{
unsigned int i;
- for (i = 0; i < dflow->block_info_size; i++)
+ for (i = 0; i < df_live->block_info_size; i++)
{
- struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, i);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (i);
if (bb_info)
{
BITMAP_FREE (bb_info->gen);
@@ -2054,65 +2216,159 @@ df_ur_free (struct dataflow *dflow)
}
}
- free_alloc_pool (dflow->block_pool);
- dflow->block_info_size = 0;
- free (dflow->block_info);
+ free_alloc_pool (df_live->block_pool);
+ df_live->block_info_size = 0;
+ free (df_live->block_info);
}
- free (dflow);
+ BITMAP_FREE (df_live->out_of_date_transfer_functions);
+ free (df_live);
}
-/* Debugging info. */
+/* Debugging info at top of bb. */
static void
-df_ur_dump (struct dataflow *dflow, FILE *file)
+df_live_top_dump (basic_block bb, FILE *file)
+{
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
+ struct df_live_problem_data *problem_data;
+
+ if (!bb_info || !bb_info->in)
+ return;
+
+ fprintf (file, ";; live in \t");
+ df_print_regset (file, bb_info->in);
+ if (df_live->problem_data)
+ {
+ problem_data = (struct df_live_problem_data *)df_live->problem_data;
+ fprintf (file, ";; old in \t");
+ df_print_regset (file, problem_data->in[bb->index]);
+ }
+ fprintf (file, ";; live gen \t");
+ df_print_regset (file, bb_info->gen);
+ fprintf (file, ";; live kill\t");
+ df_print_regset (file, bb_info->kill);
+}
+
+
+/* Debugging info at bottom of bb. */
+
+static void
+df_live_bottom_dump (basic_block bb, FILE *file)
+{
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
+ struct df_live_problem_data *problem_data;
+
+ if (!bb_info || !bb_info->out)
+ return;
+
+ fprintf (file, ";; live out \t");
+ df_print_regset (file, bb_info->out);
+ if (df_live->problem_data)
+ {
+ problem_data = (struct df_live_problem_data *)df_live->problem_data;
+ fprintf (file, ";; old out \t");
+ df_print_regset (file, problem_data->out[bb->index]);
+ }
+}
+
+
+/* Build the datastructure to verify that the solution to the dataflow
+ equations is not dirty. */
+
+static void
+df_live_verify_solution_start (void)
{
basic_block bb;
-
- if (!dflow->block_info)
+ struct df_live_problem_data *problem_data;
+ if (df_live->solutions_dirty)
+ {
+ df_live->problem_data = NULL;
+ return;
+ }
+
+ /* Set it true so that the solution is recomputed. */
+ df_live->solutions_dirty = true;
+
+ problem_data = XNEW (struct df_live_problem_data);
+ df_live->problem_data = problem_data;
+ problem_data->in = XNEWVEC (bitmap, last_basic_block);
+ problem_data->out = XNEWVEC (bitmap, last_basic_block);
+
+ FOR_ALL_BB (bb)
+ {
+ problem_data->in[bb->index] = BITMAP_ALLOC (NULL);
+ problem_data->out[bb->index] = BITMAP_ALLOC (NULL);
+ bitmap_copy (problem_data->in[bb->index], DF_LIVE_IN (bb));
+ bitmap_copy (problem_data->out[bb->index], DF_LIVE_OUT (bb));
+ }
+}
+
+
+/* Compare the saved datastructure and the new solution to the dataflow
+ equations. */
+
+static void
+df_live_verify_solution_end (void)
+{
+ struct df_live_problem_data *problem_data;
+ basic_block bb;
+
+ if (df_live->problem_data == NULL)
return;
- fprintf (file, "Undefined regs:\n");
-
+ problem_data = (struct df_live_problem_data *)df_live->problem_data;
+
FOR_ALL_BB (bb)
{
- struct df_ur_bb_info *bb_info = df_ur_get_bb_info (dflow, bb->index);
- df_print_bb_index (bb, file);
-
- if (!bb_info->in)
- continue;
-
- fprintf (file, " in \t");
- dump_bitmap (file, bb_info->in);
- fprintf (file, " gen \t");
- dump_bitmap (file, bb_info->gen);
- fprintf (file, " kill\t");
- dump_bitmap (file, bb_info->kill);
- fprintf (file, " out \t");
- dump_bitmap (file, bb_info->out);
+ if ((!bitmap_equal_p (problem_data->in[bb->index], DF_LIVE_IN (bb)))
+ || (!bitmap_equal_p (problem_data->out[bb->index], DF_LIVE_OUT (bb))))
+ {
+ /*df_dump (stderr);*/
+ gcc_unreachable ();
+ }
+ }
+
+ /* Cannot delete them immediately because you may want to dump them
+ if the comparison fails. */
+ FOR_ALL_BB (bb)
+ {
+ BITMAP_FREE (problem_data->in[bb->index]);
+ BITMAP_FREE (problem_data->out[bb->index]);
}
+
+ free (problem_data->in);
+ free (problem_data->out);
+ free (problem_data);
+ df_live->problem_data = NULL;
}
+
/* All of the information associated with every instance of the problem. */
-static struct df_problem problem_UR =
+static struct df_problem problem_LIVE =
{
- DF_UR, /* Problem id. */
- DF_FORWARD, /* Direction. */
- df_ur_alloc, /* Allocate the problem specific data. */
- NULL, /* Reset global information. */
- df_ur_free_bb_info, /* Free basic block info. */
- df_ur_local_compute, /* Local compute function. */
- df_ur_init, /* Init the solution specific data. */
- df_iterative_dataflow, /* Iterative solver. */
- NULL, /* Confluence operator 0. */
- df_ur_confluence_n, /* Confluence operator n. */
- df_ur_transfer_function, /* Transfer function. */
- df_ur_local_finalize, /* Finalize function. */
- df_ur_free, /* Free all of the problem information. */
- df_ur_dump, /* Debugging. */
- df_lr_add_problem, /* Dependent problem. */
- 0 /* Changeable flags. */
+ DF_LIVE, /* Problem id. */
+ DF_FORWARD, /* Direction. */
+ df_live_alloc, /* Allocate the problem specific data. */
+ df_live_reset, /* Reset global information. */
+ df_live_free_bb_info, /* Free basic block info. */
+ df_live_local_compute, /* Local compute function. */
+ df_live_init, /* Init the solution specific data. */
+ df_worklist_dataflow, /* Worklist solver. */
+ NULL, /* Confluence operator 0. */
+ df_live_confluence_n, /* Confluence operator n. */
+ df_live_transfer_function, /* Transfer function. */
+ df_live_local_finalize, /* Finalize function. */
+ df_live_free, /* Free all of the problem information. */
+ df_live_free, /* Remove this problem from the stack of dataflow problems. */
+ NULL, /* Debugging. */
+ df_live_top_dump, /* Debugging start block. */
+ df_live_bottom_dump, /* Debugging end block. */
+ df_live_verify_solution_start,/* Incremental solution verify start. */
+ df_live_verify_solution_end, /* Incremental solution verify end. */
+ &problem_LR, /* Dependent problem. */
+ TV_DF_LIVE /* Timing variable. */
};
@@ -2120,10 +2376,78 @@ static struct df_problem problem_UR =
of DF. The returned structure is what is used to get at the
solution. */
-struct dataflow *
-df_ur_add_problem (struct df *df, int flags)
+void
+df_live_add_problem (void)
{
- return df_add_problem (df, &problem_UR, flags);
+ df_add_problem (&problem_LIVE);
+ /* These will be initialized when df_scan_blocks processes each
+ block. */
+ df_live->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
+}
+
+
+/* Verify that all of the live related info is consistent and
+ correct. */
+
+void
+df_live_verify_transfer_functions (void)
+{
+ basic_block bb;
+ bitmap saved_gen;
+ bitmap saved_kill;
+ bitmap all_blocks;
+
+ if (!df)
+ return;
+
+ saved_gen = BITMAP_ALLOC (NULL);
+ saved_kill = BITMAP_ALLOC (NULL);
+ all_blocks = BITMAP_ALLOC (NULL);
+
+ df_grow_insn_info ();
+
+ FOR_ALL_BB (bb)
+ {
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb->index);
+ bitmap_set_bit (all_blocks, bb->index);
+
+ if (bb_info)
+ {
+ /* Make a copy of the transfer functions and then compute
+ new ones to see if the transfer functions have
+ changed. */
+ if (!bitmap_bit_p (df_live->out_of_date_transfer_functions,
+ bb->index))
+ {
+ bitmap_copy (saved_gen, bb_info->gen);
+ bitmap_copy (saved_kill, bb_info->kill);
+ bitmap_clear (bb_info->gen);
+ bitmap_clear (bb_info->kill);
+
+ df_live_bb_local_compute (bb->index);
+ gcc_assert (bitmap_equal_p (saved_gen, bb_info->gen));
+ gcc_assert (bitmap_equal_p (saved_kill, bb_info->kill));
+ }
+ }
+ else
+ {
+ /* If we do not have basic block info, the block must be in
+	     the list of dirty blocks or else someone has added a
+ block behind our backs. */
+ gcc_assert (bitmap_bit_p (df_live->out_of_date_transfer_functions,
+ bb->index));
+ }
+ /* Make sure no one created a block without following
+ procedures. */
+ gcc_assert (df_scan_get_bb_info (bb->index));
+ }
+
+ /* Make sure there are no dirty bits in blocks that have been deleted. */
+ gcc_assert (!bitmap_intersect_compl_p (df_live->out_of_date_transfer_functions,
+ all_blocks));
+ BITMAP_FREE (saved_gen);
+ BITMAP_FREE (saved_kill);
+ BITMAP_FREE (all_blocks);
}
@@ -2154,30 +2478,22 @@ struct df_urec_problem_data
};
-/* Get basic block info. */
-
-struct df_urec_bb_info *
-df_urec_get_bb_info (struct dataflow *dflow, unsigned int index)
-{
- return (struct df_urec_bb_info *) dflow->block_info[index];
-}
-
-
/* Set basic block info. */
static void
-df_urec_set_bb_info (struct dataflow *dflow, unsigned int index,
- struct df_urec_bb_info *bb_info)
+df_urec_set_bb_info (unsigned int index,
+ struct df_urec_bb_info *bb_info)
{
- dflow->block_info[index] = bb_info;
+ gcc_assert (df_urec);
+ gcc_assert (index < df_urec->block_info_size);
+ df_urec->block_info[index] = bb_info;
}
/* Free basic block info. */
static void
-df_urec_free_bb_info (struct dataflow *dflow,
- basic_block bb ATTRIBUTE_UNUSED,
+df_urec_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
void *vbb_info)
{
struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
@@ -2188,40 +2504,39 @@ df_urec_free_bb_info (struct dataflow *dflow,
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
BITMAP_FREE (bb_info->earlyclobber);
- pool_free (dflow->block_pool, bb_info);
+ pool_free (df_urec->block_pool, bb_info);
}
}
-/* Allocate or reset bitmaps for DFLOW blocks. The solution bits are
+/* Allocate or reset bitmaps for DF_UREC blocks. The solution bits are
not touched unless the block is new. */
static void
-df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
- bitmap all_blocks ATTRIBUTE_UNUSED)
+df_urec_alloc (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) dflow->problem_data;
+ = (struct df_urec_problem_data *) df_urec->problem_data;
- if (!dflow->block_pool)
- dflow->block_pool = create_alloc_pool ("df_urec_block pool",
+ if (!df_urec->block_pool)
+ df_urec->block_pool = create_alloc_pool ("df_urec_block pool",
sizeof (struct df_urec_bb_info), 50);
- if (!dflow->problem_data)
+ if (!df_urec->problem_data)
{
problem_data = XNEW (struct df_urec_problem_data);
- dflow->problem_data = problem_data;
+ df_urec->problem_data = problem_data;
}
problem_data->earlyclobbers_found = false;
- df_grow_bb_info (dflow);
+ df_grow_bb_info (df_urec);
- EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
if (bb_info)
{
bitmap_clear (bb_info->kill);
@@ -2230,12 +2545,13 @@ df_urec_alloc (struct dataflow *dflow, bitmap blocks_to_rescan,
}
else
{
- bb_info = (struct df_urec_bb_info *) pool_alloc (dflow->block_pool);
- df_urec_set_bb_info (dflow, bb_index, bb_info);
+ bb_info = (struct df_urec_bb_info *) pool_alloc (df_urec->block_pool);
+ df_urec_set_bb_info (bb_index, bb_info);
bb_info->kill = BITMAP_ALLOC (NULL);
bb_info->gen = BITMAP_ALLOC (NULL);
bb_info->in = BITMAP_ALLOC (NULL);
bb_info->out = BITMAP_ALLOC (NULL);
+ bb_info->top = BITMAP_ALLOC (NULL);
bb_info->earlyclobber = BITMAP_ALLOC (NULL);
}
}
@@ -2433,20 +2749,22 @@ df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
/* Compute local uninitialized register info for basic block BB. */
static void
-df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
+df_urec_bb_local_compute (unsigned int bb_index)
{
- struct df *df = dflow->df;
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
rtx insn;
- struct df_ref *def;
+ struct df_ref **def_rec;
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int regno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->gen, regno);
- }
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int regno = DF_REF_REGNO (def);
+ bitmap_set_bit (bb_info->gen, regno);
+ }
+ }
FOR_BB_INSNS (bb, insn)
{
@@ -2456,7 +2774,7 @@ df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
if (df_urec_check_earlyclobber (insn))
{
struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) dflow->problem_data;
+ = (struct df_urec_problem_data *) df_urec->problem_data;
problem_data->earlyclobbers_found = true;
note_uses (&PATTERN (insn),
df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
@@ -2464,22 +2782,22 @@ df_urec_bb_local_compute (struct dataflow *dflow, unsigned int bb_index)
}
}
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- {
- unsigned int regno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->gen, regno);
- }
-
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int regno = DF_REF_REGNO (def);
+ bitmap_set_bit (bb_info->gen, regno);
+ }
+ }
}
/* Compute local uninitialized register info. */
static void
-df_urec_local_compute (struct dataflow *dflow,
- bitmap all_blocks ATTRIBUTE_UNUSED,
- bitmap rescan_blocks)
+df_urec_local_compute (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
@@ -2487,7 +2805,7 @@ df_urec_local_compute (struct dataflow *dflow,
int i;
HARD_REG_SET stack_hard_regs, used;
struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) dflow->problem_data;
+ = (struct df_urec_problem_data *) df_urec->problem_data;
/* Any register that MAY be allocated to a register stack (like the
387) is treated poorly. Each such register is marked as being
@@ -2514,9 +2832,9 @@ df_urec_local_compute (struct dataflow *dflow,
N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
- EXECUTE_IF_SET_IN_BITMAP (rescan_blocks, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- df_urec_bb_local_compute (dflow, bb_index);
+ df_urec_bb_local_compute (bb_index);
}
VEC_free (int, heap, earlyclobber_regclass);
@@ -2526,14 +2844,14 @@ df_urec_local_compute (struct dataflow *dflow,
/* Initialize the solution vectors. */
static void
-df_urec_init (struct dataflow *dflow, bitmap all_blocks)
+df_urec_init (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
bitmap_copy (bb_info->out, bb_info->gen);
bitmap_clear (bb_info->in);
@@ -2542,23 +2860,22 @@ df_urec_init (struct dataflow *dflow, bitmap all_blocks)
/* Or in the stack regs, hard regs and early clobber regs into the
- ur_in sets of all of the blocks. */
+ urec_in sets of all of the blocks. */
+
static void
-df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
+df_urec_local_finalize (bitmap all_blocks)
{
- struct df *df = dflow->df;
- struct dataflow *lr_dflow = df->problems_by_index[DF_LR];
bitmap tmp = BITMAP_ALLOC (NULL);
bitmap_iterator bi;
unsigned int bb_index;
struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) dflow->problem_data;
+ = (struct df_urec_problem_data *) df_urec->problem_data;
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
- struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (lr_dflow, bb_index);
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
{
@@ -2580,8 +2897,11 @@ df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
we trim the rr result to the places where it is used. */
bitmap_and_into (bb_info->in, bb_lr_info->in);
bitmap_and_into (bb_info->out, bb_lr_info->out);
-
-#if 1
+ bitmap_copy (bb_info->top, bb_info->in);
+ if (bb_lr_info->adef)
+ bitmap_ior_into (bb_info->top, bb_lr_info->adef);
+ bitmap_and_into (bb_info->top, bb_lr_info->top);
+#if 0
/* Hard registers may still stick in the ur_out set, but not
be in the ur_in set, if their only mention was in a call
in this block. This is because a call kills in the lr
@@ -2604,10 +2924,10 @@ df_urec_local_finalize (struct dataflow *dflow, bitmap all_blocks)
/* Confluence function that ignores fake edges. */
static void
-df_urec_confluence_n (struct dataflow *dflow, edge e)
+df_urec_confluence_n (edge e)
{
- bitmap op1 = df_urec_get_bb_info (dflow, e->dest->index)->in;
- bitmap op2 = df_urec_get_bb_info (dflow, e->src->index)->out;
+ bitmap op1 = df_urec_get_bb_info (e->dest->index)->in;
+ bitmap op2 = df_urec_get_bb_info (e->src->index)->out;
if (e->flags & EDGE_FAKE)
return;
@@ -2619,9 +2939,9 @@ df_urec_confluence_n (struct dataflow *dflow, edge e)
/* Transfer function. */
static bool
-df_urec_transfer_function (struct dataflow *dflow, int bb_index)
+df_urec_transfer_function (int bb_index)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb_index);
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
bitmap gen = bb_info->gen;
@@ -2634,15 +2954,15 @@ df_urec_transfer_function (struct dataflow *dflow, int bb_index)
/* Free all storage associated with the problem. */
static void
-df_urec_free (struct dataflow *dflow)
+df_urec_free (void)
{
- if (dflow->block_info)
+ if (df_urec->block_info)
{
unsigned int i;
- for (i = 0; i < dflow->block_info_size; i++)
+ for (i = 0; i < df_urec->block_info_size; i++)
{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, i);
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (i);
if (bb_info)
{
BITMAP_FREE (bb_info->gen);
@@ -2650,52 +2970,53 @@ df_urec_free (struct dataflow *dflow)
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
BITMAP_FREE (bb_info->earlyclobber);
+ BITMAP_FREE (bb_info->top);
}
}
- free_alloc_pool (dflow->block_pool);
+ free_alloc_pool (df_urec->block_pool);
- dflow->block_info_size = 0;
- free (dflow->block_info);
- free (dflow->problem_data);
+ df_urec->block_info_size = 0;
+ free (df_urec->block_info);
+ free (df_urec->problem_data);
}
- free (dflow);
+ free (df_urec);
}
-/* Debugging info. */
+/* Debugging info at top of bb. */
static void
-df_urec_dump (struct dataflow *dflow, FILE *file)
+df_urec_top_dump (basic_block bb, FILE *file)
{
- basic_block bb;
-
- if (!dflow->block_info)
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->in)
return;
-
- fprintf (file, "Undefined regs:\n");
-
- FOR_ALL_BB (bb)
- {
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (dflow, bb->index);
- df_print_bb_index (bb, file);
-
- if (!bb_info->in)
- continue;
- fprintf (file, " in \t");
- dump_bitmap (file, bb_info->in);
- fprintf (file, " gen \t");
- dump_bitmap (file, bb_info->gen);
- fprintf (file, " kill\t");
- dump_bitmap (file, bb_info->kill);
- fprintf (file, " ec\t");
- dump_bitmap (file, bb_info->earlyclobber);
- fprintf (file, " out \t");
- dump_bitmap (file, bb_info->out);
- }
+ fprintf (file, ";; urec in \t");
+ df_print_regset (file, bb_info->in);
+ fprintf (file, ";; urec gen \t");
+ df_print_regset (file, bb_info->gen);
+ fprintf (file, ";; urec kill\t");
+ df_print_regset (file, bb_info->kill);
+ fprintf (file, ";; urec ec\t");
+ df_print_regset (file, bb_info->earlyclobber);
}
+
+/* Debugging info at bottom of bb. */
+
+static void
+df_urec_bottom_dump (basic_block bb, FILE *file)
+{
+ struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb->index);
+ if (!bb_info || !bb_info->out)
+ return;
+ fprintf (file, ";; urec out \t");
+ df_print_regset (file, bb_info->out);
+}
+
+
/* All of the information associated with every instance of the problem. */
static struct df_problem problem_UREC =
@@ -2707,15 +3028,20 @@ static struct df_problem problem_UREC =
df_urec_free_bb_info, /* Free basic block info. */
df_urec_local_compute, /* Local compute function. */
df_urec_init, /* Init the solution specific data. */
- df_iterative_dataflow, /* Iterative solver. */
+ df_worklist_dataflow, /* Worklist solver. */
NULL, /* Confluence operator 0. */
df_urec_confluence_n, /* Confluence operator n. */
df_urec_transfer_function, /* Transfer function. */
df_urec_local_finalize, /* Finalize function. */
df_urec_free, /* Free all of the problem information. */
- df_urec_dump, /* Debugging. */
- df_lr_add_problem, /* Dependent problem. */
- 0 /* Changeable flags. */
+ df_urec_free, /* Remove this problem from the stack of dataflow problems. */
+ NULL, /* Debugging. */
+ df_urec_top_dump, /* Debugging start block. */
+ df_urec_bottom_dump, /* Debugging end block. */
+ NULL, /* Incremental solution verify start. */
+  NULL,                       /* Incremental solution verify end. */
+ &problem_LR, /* Dependent problem. */
+ TV_DF_UREC /* Timing variable. */
};
@@ -2723,10 +3049,10 @@ static struct df_problem problem_UREC =
of DF. The returned structure is what is used to get at the
solution. */
-struct dataflow *
-df_urec_add_problem (struct df *df, int flags)
+void
+df_urec_add_problem (void)
{
- return df_add_problem (df, &problem_UREC, flags);
+ df_add_problem (&problem_UREC);
}
@@ -2742,212 +3068,223 @@ df_urec_add_problem (struct df *df, int flags)
the reaching defs information (the dependent problem).
----------------------------------------------------------------------------*/
-/* Create def-use or use-def chains. */
+#define df_chain_problem_p(FLAG) (((enum df_chain_flags)df_chain->local_flags)&(FLAG))
-static void
-df_chain_alloc (struct dataflow *dflow,
- bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
- bitmap all_blocks ATTRIBUTE_UNUSED)
+/* Create a du or ud chain from SRC to DST and link it into SRC. */
+struct df_link *
+df_chain_create (struct df_ref *src, struct df_ref *dst)
{
- struct df *df = dflow->df;
- unsigned int i;
+ struct df_link *head = DF_REF_CHAIN (src);
+  struct df_link *link = pool_alloc (df_chain->block_pool);
+
+ DF_REF_CHAIN (src) = link;
+ link->next = head;
+ link->ref = dst;
+ return link;
+}
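
   df_chain_create is a plain prepend onto a singly linked list hanging
   off the source reference.  A standalone sketch with malloc standing in
   for the alloc pool and toy structs in place of df_ref/df_link (all
   names hypothetical):

   #include <stdlib.h>

   struct toy_ref;

   struct toy_link
   {
     struct toy_ref *ref;	/* the other end of the chain link */
     struct toy_link *next;
   };

   struct toy_ref
   {
     struct toy_link *chain;	/* head of this ref's du or ud chain */
   };

   /* Prepend a link from SRC to DST onto SRC's chain.  */
   static struct toy_link *
   toy_chain_create (struct toy_ref *src, struct toy_ref *dst)
   {
     struct toy_link *link = malloc (sizeof *link);
     if (!link)
       return NULL;
     link->ref = dst;
     link->next = src->chain;	/* old head becomes the tail */
     src->chain = link;
     return link;
   }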
- /* Wholesale destruction of the old chains. */
- if (dflow->block_pool)
- free_alloc_pool (dflow->block_pool);
- dflow->block_pool = create_alloc_pool ("df_chain_chain_block pool",
- sizeof (struct df_link), 100);
+/* Delete any du or ud chains that start at REF and point to
+ TARGET. */
+static void
+df_chain_unlink_1 (struct df_ref *ref, struct df_ref *target)
+{
+ struct df_link *chain = DF_REF_CHAIN (ref);
+ struct df_link *prev = NULL;
- if (dflow->flags & DF_DU_CHAIN)
+ while (chain)
{
- df_reorganize_refs (&df->def_info);
-
- /* Clear out the pointers from the refs. */
- for (i = 0; i < DF_DEFS_SIZE (df); i++)
+ if (chain->ref == target)
{
- struct df_ref *ref = df->def_info.refs[i];
- DF_REF_CHAIN (ref) = NULL;
+ if (prev)
+ prev->next = chain->next;
+ else
+ DF_REF_CHAIN (ref) = chain->next;
+ pool_free (df_chain->block_pool, chain);
+ return;
}
+ prev = chain;
+ chain = chain->next;
}
-
- if (dflow->flags & DF_UD_CHAIN)
+}
+
+
+/* Delete all du or ud chains that leave or point to REF. */
+
+void
+df_chain_unlink (struct df_ref *ref)
+{
+ struct df_link *chain = DF_REF_CHAIN (ref);
+ while (chain)
{
- df_reorganize_refs (&df->use_info);
- for (i = 0; i < DF_USES_SIZE (df); i++)
- {
- struct df_ref *ref = df->use_info.refs[i];
- DF_REF_CHAIN (ref) = NULL;
- }
+ struct df_link *next = chain->next;
+ /* Delete the other side if it exists. */
+ df_chain_unlink_1 (chain->ref, ref);
+ pool_free (df_chain->block_pool, chain);
+ chain = next;
}
+ DF_REF_CHAIN (ref) = NULL;
}
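
   df_chain_unlink walks REF's own chain, asks the other end to drop its
   back link via df_chain_unlink_1, frees each link, and finally clears
   REF's chain head.  The removal of a single back link is the classic
   delete-from-a-singly-linked-list-with-a-prev-pointer pattern; a sketch
   using the same toy layout as the df_chain_create example (repeated
   here so the sketch stands alone; free replaces the pool):

   #include <stdlib.h>

   struct toy_ref;

   struct toy_link
   {
     struct toy_ref *ref;
     struct toy_link *next;
   };

   struct toy_ref
   {
     struct toy_link *chain;
   };

   /* Remove from REF's chain the link (if any) that points at TARGET,
      the shape of df_chain_unlink_1.  */
   static void
   toy_chain_unlink_1 (struct toy_ref *ref, struct toy_ref *target)
   {
     struct toy_link *chain = ref->chain;
     struct toy_link *prev = NULL;

     while (chain)
       {
	 if (chain->ref == target)
	   {
	     if (prev)
	       prev->next = chain->next;
	     else
	       ref->chain = chain->next;
	     free (chain);
	     return;
	   }
	 prev = chain;
	 chain = chain->next;
       }
   }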
-/* Reset all def_use and use_def chains in INSN. */
+/* Copy the du or ud chain starting at FROM_REF and attach it to
+ TO_REF. */
-static void
-df_chain_insn_reset (struct dataflow *dflow, rtx insn)
+void
+df_chain_copy (struct df_ref *to_ref,
+ struct df_link *from_ref)
{
- struct df *df = dflow->df;
- unsigned int uid = INSN_UID (insn);
- struct df_insn_info *insn_info = NULL;
- struct df_ref *ref;
+ while (from_ref)
+ {
+ df_chain_create (to_ref, from_ref->ref);
+ from_ref = from_ref->next;
+ }
+}
- if (uid < df->insns_size)
- insn_info = DF_INSN_UID_GET (df, uid);
- if (insn_info)
- {
- if (dflow->flags & DF_DU_CHAIN)
- {
- ref = insn_info->defs;
- while (ref)
- {
- ref->chain = NULL;
- ref = ref->next_ref;
- }
- }
+/* Remove this problem from the stack of dataflow problems. */
+
+static void
+df_chain_remove_problem (void)
+{
+ bitmap_iterator bi;
+ unsigned int bb_index;
+
+ /* Wholesale destruction of the old chains. */
+ if (df_chain->block_pool)
+ free_alloc_pool (df_chain->block_pool);
- if (dflow->flags & DF_UD_CHAIN)
+ EXECUTE_IF_SET_IN_BITMAP (df_chain->out_of_date_transfer_functions, 0, bb_index, bi)
+ {
+ rtx insn;
+ struct df_ref **def_rec;
+ struct df_ref **use_rec;
+ basic_block bb = BASIC_BLOCK (bb_index);
+
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ for (def_rec = df_get_artificial_defs (bb->index); *def_rec; def_rec++)
+ DF_REF_CHAIN (*def_rec) = NULL;
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ for (use_rec = df_get_artificial_uses (bb->index); *use_rec; use_rec++)
+ DF_REF_CHAIN (*use_rec) = NULL;
+
+ FOR_BB_INSNS (bb, insn)
{
- ref = insn_info->uses;
- while (ref)
+ unsigned int uid = INSN_UID (insn);
+
+ if (INSN_P (insn))
{
- ref->chain = NULL;
- ref = ref->next_ref;
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ DF_REF_CHAIN (*def_rec) = NULL;
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ {
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ DF_REF_CHAIN (*use_rec) = NULL;
+ for (use_rec = DF_INSN_UID_EQ_USES (uid); *use_rec; use_rec++)
+ DF_REF_CHAIN (*use_rec) = NULL;
+ }
}
}
}
+
+ bitmap_clear (df_chain->out_of_date_transfer_functions);
+ df_chain->block_pool = NULL;
}
-/* Reset all def_use and use_def chains in basic block. */
+/* Remove the chain problem completely. */
-static void
-df_chain_bb_reset (struct dataflow *dflow, unsigned int bb_index)
+static void
+df_chain_fully_remove_problem (void)
{
- struct df *df = dflow->df;
- rtx insn;
- basic_block bb = BASIC_BLOCK (bb_index);
+ df_chain_remove_problem ();
+ BITMAP_FREE (df_chain->out_of_date_transfer_functions);
+ free (df_chain);
+}
- /* Some one deleted the basic block out from under us. */
- if (!bb)
- return;
- FOR_BB_INSNS (bb, insn)
- {
- if (INSN_P (insn))
- {
- /* Record defs within INSN. */
- df_chain_insn_reset (dflow, insn);
- }
- }
-
- /* Get rid of any chains in artificial uses or defs. */
- if (dflow->flags & DF_DU_CHAIN)
- {
- struct df_ref *def;
- def = df_get_artificial_defs (df, bb_index);
- while (def)
- {
- def->chain = NULL;
- def = def->next_ref;
- }
- }
+/* Create def-use or use-def chains. */
- if (dflow->flags & DF_UD_CHAIN)
- {
- struct df_ref *use;
- use = df_get_artificial_uses (df, bb_index);
- while (use)
- {
- use->chain = NULL;
- use = use->next_ref;
- }
- }
+static void
+df_chain_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
+{
+ df_chain_remove_problem ();
+ df_chain->block_pool = create_alloc_pool ("df_chain_block pool",
+ sizeof (struct df_link), 50);
}
/* Reset all of the chains when the set of basic blocks changes. */
-
static void
-df_chain_reset (struct dataflow *dflow, bitmap blocks_to_clear)
+df_chain_reset (bitmap blocks_to_clear ATTRIBUTE_UNUSED)
{
- bitmap_iterator bi;
- unsigned int bb_index;
-
- EXECUTE_IF_SET_IN_BITMAP (blocks_to_clear, 0, bb_index, bi)
- {
- df_chain_bb_reset (dflow, bb_index);
- }
-
- free_alloc_pool (dflow->block_pool);
- dflow->block_pool = NULL;
+ df_chain_remove_problem ();
}
/* Create the chains for a list of USEs. */
static void
-df_chain_create_bb_process_use (struct dataflow *dflow,
- bitmap local_rd,
- struct df_ref *use,
+df_chain_create_bb_process_use (bitmap local_rd,
+ struct df_ref **use_rec,
enum df_ref_flags top_flag)
{
- struct df *df = dflow->df;
bitmap_iterator bi;
unsigned int def_index;
- while (use)
+ while (*use_rec)
{
- /* Do not want to go through this for an uninitialized var. */
+ struct df_ref *use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
- int count = DF_REG_DEF_GET (df, uregno)->n_refs;
- if (count)
+ if ((!(df->changeable_flags & DF_NO_HARD_REGS))
+ || (uregno >= FIRST_PSEUDO_REGISTER))
{
- if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
+	  /* We do not want to go through this for an uninitialized var. */
+ int count = DF_DEFS_COUNT (uregno);
+ if (count)
{
- unsigned int first_index = DF_REG_DEF_GET (df, uregno)->begin;
- unsigned int last_index = first_index + count - 1;
-
- EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
+ if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
{
- struct df_ref *def;
- if (def_index > last_index)
- break;
+ unsigned int first_index = DF_DEFS_BEGIN (uregno);
+ unsigned int last_index = first_index + count - 1;
- def = DF_DEFS_GET (df, def_index);
- if (dflow->flags & DF_DU_CHAIN)
- df_chain_create (dflow, def, use);
- if (dflow->flags & DF_UD_CHAIN)
- df_chain_create (dflow, use, def);
+ EXECUTE_IF_SET_IN_BITMAP (local_rd, first_index, def_index, bi)
+ {
+ struct df_ref *def;
+ if (def_index > last_index)
+ break;
+
+ def = DF_DEFS_GET (def_index);
+ if (df_chain_problem_p (DF_DU_CHAIN))
+ df_chain_create (def, use);
+ if (df_chain_problem_p (DF_UD_CHAIN))
+ df_chain_create (use, def);
+ }
}
}
}
- use = use->next_ref;
+
+ use_rec++;
}
}
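
   The loop above relies on the fact that all defs of a given regno are
   numbered contiguously: DF_DEFS_BEGIN and DF_DEFS_COUNT give the id
   range, and the local reaching-defs bitmap is indexed by those ids.  A
   toy sketch of that lookup (a bool array stands in for the bitmap, the
   callback for df_chain_create; all names are made up):

   #include <stdbool.h>
   #include <stddef.h>

   /* All defs of one regno occupy the contiguous id range
      [begin, begin + count).  */
   struct toy_def_range
   {
     size_t begin;
     size_t count;
   };

   /* For every def id of the use's regno that is present in the local
      reaching-defs set RD, link that def to USE_ID.  */
   static void
   toy_link_use_to_reaching_defs (const bool *rd,
				  struct toy_def_range range,
				  size_t use_id,
				  void (*link) (size_t def_id, size_t use_id))
   {
     for (size_t i = 0; i < range.count; i++)
       if (rd[range.begin + i])
	 link (range.begin + i, use_id);
   }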
-/* Reset the storage pool that the def-use or use-def chains have been
- allocated in. We do not need to re adjust the pointers in the refs,
- these have already been clean out.*/
/* Create chains from reaching defs bitmaps for basic block BB. */
+
static void
-df_chain_create_bb (struct dataflow *dflow,
- struct dataflow *rd_dflow,
- unsigned int bb_index)
+df_chain_create_bb (unsigned int bb_index)
{
basic_block bb = BASIC_BLOCK (bb_index);
- struct df_rd_bb_info *bb_info = df_rd_get_bb_info (rd_dflow, bb_index);
+ struct df_rd_bb_info *bb_info = df_rd_get_bb_info (bb_index);
rtx insn;
bitmap cpy = BITMAP_ALLOC (NULL);
- struct df *df = dflow->df;
- struct df_ref *def;
+ struct df_ref **def_rec;
bitmap_copy (cpy, bb_info->in);
+ bitmap_set_bit (df_chain->out_of_date_transfer_functions, bb_index);
/* Since we are going forwards, process the artificial uses first
then the artificial defs second. */
@@ -2955,26 +3292,32 @@ df_chain_create_bb (struct dataflow *dflow,
#ifdef EH_USES
/* Create the chains for the artificial uses from the EH_USES at the
beginning of the block. */
- df_chain_create_bb_process_use (dflow, cpy,
- df_get_artificial_uses (df, bb->index),
- DF_REF_AT_TOP);
+
+ /* Artificials are only hard regs. */
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_chain_create_bb_process_use (cpy,
+ df_get_artificial_uses (bb->index),
+ DF_REF_AT_TOP);
#endif
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int dregno = DF_REF_REGNO (def);
- if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
- bitmap_clear_range (cpy,
- DF_REG_DEF_GET (df, dregno)->begin,
- DF_REG_DEF_GET (df, dregno)->n_refs);
- bitmap_set_bit (cpy, DF_REF_ID (def));
- }
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ {
+ unsigned int dregno = DF_REF_REGNO (def);
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_range (cpy,
+ DF_DEFS_BEGIN (dregno),
+ DF_DEFS_COUNT (dregno));
+ bitmap_set_bit (cpy, DF_REF_ID (def));
+ }
+ }
/* Process the regular instructions next. */
FOR_BB_INSNS (bb, insn)
{
- struct df_ref *def;
+ struct df_ref **def_rec;
unsigned int uid = INSN_UID (insn);
if (!INSN_P (insn))
@@ -2983,44 +3326,54 @@ df_chain_create_bb (struct dataflow *dflow,
/* Now scan the uses and link them up with the defs that remain
in the cpy vector. */
- df_chain_create_bb_process_use (dflow, cpy,
- DF_INSN_UID_USES (df, uid), 0);
+ df_chain_create_bb_process_use (cpy, DF_INSN_UID_USES (uid), 0);
+
+ if (df->changeable_flags & DF_EQ_NOTES)
+ df_chain_create_bb_process_use (cpy, DF_INSN_UID_EQ_USES (uid), 0);
+
/* Since we are going forwards, process the defs second. This
pass only changes the bits in cpy. */
- for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
+ struct df_ref *def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
- if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
- bitmap_clear_range (cpy,
- DF_REG_DEF_GET (df, dregno)->begin,
- DF_REG_DEF_GET (df, dregno)->n_refs);
- if (!(DF_REF_FLAGS (def)
- & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- bitmap_set_bit (cpy, DF_REF_ID (def));
+ if ((!(df->changeable_flags & DF_NO_HARD_REGS))
+ || (dregno >= FIRST_PSEUDO_REGISTER))
+ {
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_range (cpy,
+ DF_DEFS_BEGIN (dregno),
+ DF_DEFS_COUNT (dregno));
+ if (!(DF_REF_FLAGS (def)
+ & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+ bitmap_set_bit (cpy, DF_REF_ID (def));
+ }
}
}
/* Create the chains for the artificial uses of the hard registers
at the end of the block. */
- df_chain_create_bb_process_use (dflow, cpy,
- df_get_artificial_uses (df, bb->index), 0);
+ if (!(df->changeable_flags & DF_NO_HARD_REGS))
+ df_chain_create_bb_process_use (cpy,
+ df_get_artificial_uses (bb->index),
+ 0);
+
+ BITMAP_FREE (cpy);
}
/* Create def-use chains from reaching use bitmaps for basic blocks
in BLOCKS. */
static void
-df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
+df_chain_finalize (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
- struct df *df = dflow->df;
- struct dataflow *rd_dflow = df->problems_by_index [DF_RD];
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- df_chain_create_bb (dflow, rd_dflow, bb_index);
+ df_chain_create_bb (bb_index);
}
}
@@ -3028,69 +3381,117 @@ df_chain_finalize (struct dataflow *dflow, bitmap all_blocks)
/* Free all storage associated with the problem. */
static void
-df_chain_free (struct dataflow *dflow)
+df_chain_free (void)
{
- free_alloc_pool (dflow->block_pool);
- free (dflow);
+ free_alloc_pool (df_chain->block_pool);
+ BITMAP_FREE (df_chain->out_of_date_transfer_functions);
+ free (df_chain);
}
/* Debugging info. */
static void
-df_chains_dump (struct dataflow *dflow, FILE *file)
+df_chain_top_dump (basic_block bb, FILE *file)
{
- struct df *df = dflow->df;
- unsigned int j;
-
- if (dflow->flags & DF_DU_CHAIN)
+ if (df_chain_problem_p (DF_DU_CHAIN))
{
- fprintf (file, "Def-use chains:\n");
- for (j = 0; j < df->def_info.bitmap_size; j++)
+ rtx insn;
+ struct df_ref **def_rec = df_get_artificial_defs (bb->index);
+ if (*def_rec)
{
- struct df_ref *def = DF_DEFS_GET (df, j);
- if (def)
+
+ fprintf (file, ";; DU chains for artificial defs\n");
+ while (*def_rec)
{
- fprintf (file, "d%d bb %d luid %d insn %d reg %d ",
- j, DF_REF_BBNO (def),
- DF_REF_INSN (def) ?
- DF_INSN_LUID (df, DF_REF_INSN (def)):
- -1,
- DF_REF_INSN (def) ? DF_REF_INSN_UID (def) : -1,
- DF_REF_REGNO (def));
- if (def->flags & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
+ struct df_ref *def = *def_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
df_chain_dump (DF_REF_CHAIN (def), file);
fprintf (file, "\n");
+ def_rec++;
+ }
+ }
+
+ FOR_BB_INSNS (bb, insn)
+ {
+ unsigned int uid = INSN_UID (insn);
+ if (INSN_P (insn))
+ {
+ def_rec = DF_INSN_UID_DEFS (uid);
+ if (*def_rec)
+ {
+ fprintf (file, ";; DU chains for insn luid %d uid %d\n",
+ DF_INSN_LUID (insn), uid);
+
+ while (*def_rec)
+ {
+ struct df_ref *def = *def_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (def));
+ if (def->flags & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (def), file);
+ fprintf (file, "\n");
+ def_rec++;
+ }
+ }
}
}
}
+}
+
- if (dflow->flags & DF_UD_CHAIN)
+static void
+df_chain_bottom_dump (basic_block bb, FILE *file)
+{
+ if (df_chain_problem_p (DF_UD_CHAIN))
{
- fprintf (file, "Use-def chains:\n");
- for (j = 0; j < df->use_info.bitmap_size; j++)
+ rtx insn;
+ struct df_ref **use_rec = df_get_artificial_uses (bb->index);
+
+ if (*use_rec)
{
- struct df_ref *use = DF_USES_GET (df, j);
- if (use)
+ fprintf (file, ";; UD chains for artificial uses\n");
+ while (*use_rec)
{
- fprintf (file, "u%d bb %d luid %d insn %d reg %d ",
- j, DF_REF_BBNO (use),
- DF_REF_INSN (use) ?
- DF_INSN_LUID (df, DF_REF_INSN (use))
- : -1,
- DF_REF_INSN (DF_USES_GET (df, j)) ?
- DF_REF_INSN_UID (DF_USES_GET (df,j))
- : -1,
- DF_REF_REGNO (use));
- if (use->flags & DF_REF_READ_WRITE)
- fprintf (file, "read/write ");
- if (use->flags & DF_REF_STRIPPED)
- fprintf (file, "stripped ");
- if (use->flags & DF_REF_IN_NOTE)
- fprintf (file, "note ");
+ struct df_ref *use = *use_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
df_chain_dump (DF_REF_CHAIN (use), file);
fprintf (file, "\n");
+ use_rec++;
+ }
+ }
+
+ FOR_BB_INSNS (bb, insn)
+ {
+ unsigned int uid = INSN_UID (insn);
+ if (INSN_P (insn))
+ {
+ struct df_ref **eq_use_rec = DF_INSN_UID_EQ_USES (uid);
+ use_rec = DF_INSN_UID_USES (uid);
+ if (*use_rec || *eq_use_rec)
+ {
+ fprintf (file, ";; UD chains for insn luid %d uid %d\n",
+ DF_INSN_LUID (insn), uid);
+
+ while (*use_rec)
+ {
+ struct df_ref *use = *use_rec;
+ fprintf (file, ";; reg %d ", DF_REF_REGNO (use));
+ if (use->flags & DF_REF_READ_WRITE)
+ fprintf (file, "read/write ");
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ use_rec++;
+ }
+ while (*eq_use_rec)
+ {
+ struct df_ref *use = *eq_use_rec;
+ fprintf (file, ";; eq_note reg %d ", DF_REF_REGNO (use));
+ df_chain_dump (DF_REF_CHAIN (use), file);
+ fprintf (file, "\n");
+ eq_use_rec++;
+ }
+ }
}
}
}
@@ -3112,9 +3513,14 @@ static struct df_problem problem_CHAIN =
NULL, /* Transfer function. */
df_chain_finalize, /* Finalize function. */
df_chain_free, /* Free all of the problem information. */
- df_chains_dump, /* Debugging. */
- df_rd_add_problem, /* Dependent problem. */
- 0 /* Changeable flags. */
+ df_chain_fully_remove_problem,/* Remove this problem from the stack of dataflow problems. */
+ NULL, /* Debugging. */
+ df_chain_top_dump, /* Debugging start block. */
+ df_chain_bottom_dump, /* Debugging end block. */
+ NULL, /* Incremental solution verify start. */
+ NULL, /* Incremental solution verify end. */
+ &problem_RD, /* Dependent problem. */
+ TV_DF_CHAIN /* Timing variable. */
};
@@ -3122,108 +3528,106 @@ static struct df_problem problem_CHAIN =
of DF. The returned structure is what is used to get at the
solution. */
-struct dataflow *
-df_chain_add_problem (struct df *df, int flags)
+void
+df_chain_add_problem (enum df_chain_flags chain_flags)
{
- return df_add_problem (df, &problem_CHAIN, flags);
+ df_add_problem (&problem_CHAIN);
+ df_chain->local_flags = (unsigned int)chain_flags;
+ df_chain->out_of_date_transfer_functions = BITMAP_ALLOC (NULL);
}
+#undef df_chain_problem_p
+
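(Editorial sketch, not part of the patch: after this change a client pass asks for chains with df_chain_add_problem and reaches them through DF_REF_CHAIN.  The helper below uses an invented name and is shown only to illustrate the merged df.h interface; it assumes DU chains have already been built and df_analyze has been run.)

static void
walk_du_chains_for_insn (rtx insn, FILE *file)
{
  struct df_ref **def_rec;

  for (def_rec = DF_INSN_UID_DEFS (INSN_UID (insn)); *def_rec; def_rec++)
    {
      struct df_ref *def = *def_rec;
      struct df_link *link;

      /* The chain hangs off each def and lists every use it reaches.  */
      for (link = DF_REF_CHAIN (def); link; link = link->next)
        if (DF_REF_INSN (link->ref))
          fprintf (file, "def of reg %d reaches insn %d\n",
                   DF_REF_REGNO (def), DF_REF_INSN_UID (link->ref));
    }
}

(A pass would typically call df_chain_add_problem (DF_DU_CHAIN) and df_analyze () before walking the chains, and drop them again with df_remove_problem (df_chain) once they are no longer needed.)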
/*----------------------------------------------------------------------------
- REGISTER INFORMATION
-
- This pass properly computes REG_DEAD and REG_UNUSED notes.
-
- If the DF_RI_LIFE flag is set the following vectors containing
- information about register usage are properly set: REG_N_REFS,
- REG_N_DEATHS, REG_N_SETS, REG_LIVE_LENGTH, REG_N_CALLS_CROSSED,
- REG_N_THROWING_CALLS_CROSSED and REG_BASIC_BLOCK.
-
+ This pass computes REG_DEAD and REG_UNUSED notes.
----------------------------------------------------------------------------*/
#ifdef REG_DEAD_DEBUGGING
static void
-print_note (char *prefix, rtx insn, rtx note)
-{
- fprintf (stderr, "%s %d ", prefix, INSN_UID (insn));
- print_rtl (stderr, note);
- fprintf (stderr, "\n");
-}
-#endif
-
-/* Allocate the lifetime information. */
-
-static void
-df_ri_alloc (struct dataflow *dflow,
- bitmap blocks_to_rescan ATTRIBUTE_UNUSED,
- bitmap all_blocks ATTRIBUTE_UNUSED)
+df_print_note (const char *prefix, rtx insn, rtx note)
{
- int i;
- struct df *df = dflow->df;
-
- if (dflow->flags & DF_RI_LIFE)
+ if (dump_file)
{
- max_regno = max_reg_num ();
- allocate_reg_info (max_regno, FALSE, FALSE);
-
- /* Reset all the data we'll collect. */
- for (i = 0; i < max_regno; i++)
- {
- REG_N_SETS (i) = DF_REG_DEF_COUNT (df, i);
- REG_N_REFS (i) = DF_REG_USE_COUNT (df, i) + REG_N_SETS (i);
- REG_N_DEATHS (i) = 0;
- REG_N_CALLS_CROSSED (i) = 0;
- REG_N_THROWING_CALLS_CROSSED (i) = 0;
- REG_LIVE_LENGTH (i) = 0;
- REG_FREQ (i) = 0;
- REG_BASIC_BLOCK (i) = REG_BLOCK_UNKNOWN;
- }
+ fprintf (dump_file, "%s %d ", prefix, INSN_UID (insn));
+ print_rtl (dump_file, note);
+ fprintf (dump_file, "\n");
}
}
+#endif
/* After reg-stack, the x86 floating point stack regs are difficult to
analyze because of all of the pushes, pops and rotations. Thus, we
just leave the notes alone. */
+#ifdef STACK_REGS
static inline bool
-df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
+df_ignore_stack_reg (int regno)
{
-#ifdef STACK_REGS
- return (regstack_completed
- && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG));
+ return regstack_completed
+ && IN_RANGE (regno, FIRST_STACK_REG, LAST_STACK_REG);
+}
#else
+static inline bool
+df_ignore_stack_reg (int regno ATTRIBUTE_UNUSED)
+{
return false;
-#endif
}
+#endif
-/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN. */
+/* Remove all of the REG_DEAD or REG_UNUSED notes from INSN and add
+ them to OLD_DEAD_NOTES and OLD_UNUSED_NOTES. */
static void
-df_kill_notes (rtx insn, int flags)
+df_kill_notes (rtx insn, rtx *old_dead_notes, rtx *old_unused_notes)
{
rtx *pprev = &REG_NOTES (insn);
rtx link = *pprev;
-
+ rtx dead = NULL;
+ rtx unused = NULL;
+
while (link)
{
switch (REG_NOTE_KIND (link))
{
case REG_DEAD:
- if (flags & DF_RI_LIFE)
- if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
- REG_N_DEATHS (REGNO (XEXP (link, 0)))++;
+ /* After reg-stack, we need to ignore any REG_DEAD notes
+ for the stack registers. */
+ if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
+ {
+ pprev = &XEXP (link, 1);
+ link = *pprev;
+ }
+ else
+ {
+ rtx next = XEXP (link, 1);
+#ifdef REG_DEAD_DEBUGGING
+ df_print_note ("deleting: ", insn, link);
+#endif
+ XEXP (link, 1) = dead;
+ dead = link;
+ *pprev = link = next;
+ }
+ break;
- /* Fallthru */
case REG_UNUSED:
- if (!df_ignore_stack_reg (REGNO (XEXP (link, 0))))
+ /* After reg-stack, we need to ignore any unused notes
+ for the stack registers. */
+ if (df_ignore_stack_reg (REGNO (XEXP (link, 0))))
+ {
+ pprev = &XEXP (link, 1);
+ link = *pprev;
+ }
+ else
{
rtx next = XEXP (link, 1);
#ifdef REG_DEAD_DEBUGGING
- print_note ("deleting: ", insn, link);
+ df_print_note ("deleting: ", insn, link);
#endif
- free_EXPR_LIST_node (link);
+ XEXP (link, 1) = unused;
+ unused = link;
*pprev = link = next;
}
break;
@@ -3234,9 +3638,43 @@ df_kill_notes (rtx insn, int flags)
break;
}
}
+
+ *old_dead_notes = dead;
+ *old_unused_notes = unused;
}
+/* Set a NOTE_TYPE note for REG in INSN. Try to pull it from the OLD
+ list, otherwise create a new one. */
+
+static inline rtx
+df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg)
+{
+ rtx this = old;
+ rtx prev = NULL;
+
+ while (this)
+ if (XEXP (this, 0) == reg)
+ {
+ if (prev)
+ XEXP (prev, 1) = XEXP (this, 1);
+ else
+ old = XEXP (this, 1);
+ XEXP (this, 1) = REG_NOTES (insn);
+ REG_NOTES (insn) = this;
+ return old;
+ }
+ else
+ {
+ prev = this;
+ this = XEXP (this, 1);
+ }
+
+ /* Did not find the note. */
+ REG_NOTES (insn) = alloc_EXPR_LIST (note_type, reg, REG_NOTES (insn));
+ return old;
+}
+
/* Set the REG_UNUSED notes for the multiword hardreg defs in INSN
based on the bits in LIVE. Do not generate notes for registers in
artificial uses. DO_NOT_GEN is updated so that REG_DEAD notes are
@@ -3244,70 +3682,53 @@ df_kill_notes (rtx insn, int flags)
instruction.
*/
-static void
-df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
+static rtx
+df_set_unused_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws,
bitmap live, bitmap do_not_gen,
- bitmap artificial_uses, int flags)
+ bitmap artificial_uses)
{
bool all_dead = true;
- struct df_link *regs = mws->regs;
- unsigned int regno = DF_REF_REGNO (regs->ref);
+ unsigned int r;
#ifdef REG_DEAD_DEBUGGING
- fprintf (stderr, "mw unused looking at %d\n", DF_REF_REGNO (regs->ref));
- df_ref_debug (regs->ref, stderr);
+ if (dump_file)
+ fprintf (dump_file, "mw_set_unused looking at mws[%d..%d]\n",
+ mws->start_regno, mws->end_regno);
#endif
- while (regs)
- {
- unsigned int regno = DF_REF_REGNO (regs->ref);
- if ((bitmap_bit_p (live, regno))
- || bitmap_bit_p (artificial_uses, regno))
- {
- all_dead = false;
- break;
- }
- regs = regs->next;
- }
+ for (r = mws->start_regno; r <= mws->end_regno; r++)
+ if ((bitmap_bit_p (live, r))
+ || bitmap_bit_p (artificial_uses, r))
+ {
+ all_dead = false;
+ break;
+ }
if (all_dead)
{
- struct df_link *regs = mws->regs;
- rtx note = alloc_EXPR_LIST (REG_UNUSED, *DF_REF_LOC (regs->ref),
- REG_NOTES (insn));
- REG_NOTES (insn) = note;
+ unsigned int regno = mws->start_regno;
+ old = df_set_note (REG_UNUSED, insn, old, *(mws->loc));
+
#ifdef REG_DEAD_DEBUGGING
- print_note ("adding 1: ", insn, note);
+ df_print_note ("adding 1: ", insn, REG_NOTES (insn));
#endif
bitmap_set_bit (do_not_gen, regno);
/* Only do this if the value is totally dead. */
- if (flags & DF_RI_LIFE)
- {
- REG_N_DEATHS (regno) ++;
- REG_LIVE_LENGTH (regno)++;
- }
}
else
- {
- struct df_link *regs = mws->regs;
- while (regs)
- {
- struct df_ref *ref = regs->ref;
-
- regno = DF_REF_REGNO (ref);
- if ((!bitmap_bit_p (live, regno))
- && (!bitmap_bit_p (artificial_uses, regno)))
- {
- rtx note = alloc_EXPR_LIST (REG_UNUSED, regno_reg_rtx[regno],
- REG_NOTES (insn));
- REG_NOTES (insn) = note;
+ for (r = mws->start_regno; r <= mws->end_regno; r++)
+ {
+
+ if ((!bitmap_bit_p (live, r))
+ && (!bitmap_bit_p (artificial_uses, r)))
+ {
+ old = df_set_note (REG_UNUSED, insn, old, regno_reg_rtx[r]);
#ifdef REG_DEAD_DEBUGGING
- print_note ("adding 2: ", insn, note);
+ df_print_note ("adding 2: ", insn, REG_NOTES (insn));
#endif
- }
- bitmap_set_bit (do_not_gen, regno);
- regs = regs->next;
- }
- }
+ }
+ bitmap_set_bit (do_not_gen, r);
+ }
+ return old;
}
@@ -3316,82 +3737,63 @@ df_set_unused_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
from being set if the instruction both reads and writes the
register. */
-static void
-df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
+static rtx
+df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws,
bitmap live, bitmap do_not_gen,
- bitmap artificial_uses, int flags)
+ bitmap artificial_uses)
{
bool all_dead = true;
- struct df_link *regs = mws->regs;
- unsigned int regno = DF_REF_REGNO (regs->ref);
+ unsigned int r;
#ifdef REG_DEAD_DEBUGGING
- fprintf (stderr, "mw looking at %d\n", DF_REF_REGNO (regs->ref));
- df_ref_debug (regs->ref, stderr);
-#endif
- while (regs)
+ if (dump_file)
{
- unsigned int regno = DF_REF_REGNO (regs->ref);
- if ((bitmap_bit_p (live, regno))
- || bitmap_bit_p (artificial_uses, regno))
- {
- all_dead = false;
- break;
- }
- regs = regs->next;
+ fprintf (dump_file, "mw_set_dead looking at mws[%d..%d]\n do_not_gen =",
+ mws->start_regno, mws->end_regno);
+ df_print_regset (dump_file, do_not_gen);
+ fprintf (dump_file, " live =");
+ df_print_regset (dump_file, live);
+ fprintf (dump_file, " artificial uses =");
+ df_print_regset (dump_file, artificial_uses);
}
+#endif
+
+ for (r = mws->start_regno; r <= mws->end_regno; r++)
+ if ((bitmap_bit_p (live, r))
+ || bitmap_bit_p (artificial_uses, r)
+ || bitmap_bit_p (do_not_gen, r))
+ {
+ all_dead = false;
+ break;
+ }
if (all_dead)
{
- if (!bitmap_bit_p (do_not_gen, regno))
+ if (!bitmap_bit_p (do_not_gen, mws->start_regno))
{
/* Add a dead note for the entire multi word register. */
- struct df_link *regs = mws->regs;
- rtx note = alloc_EXPR_LIST (REG_DEAD, *DF_REF_LOC (regs->ref),
- REG_NOTES (insn));
- REG_NOTES (insn) = note;
+ old = df_set_note (REG_DEAD, insn, old, *(mws->loc));
#ifdef REG_DEAD_DEBUGGING
- print_note ("adding 1: ", insn, note);
+ df_print_note ("adding 1: ", insn, REG_NOTES (insn));
#endif
-
- if (flags & DF_RI_LIFE)
- {
- struct df_link *regs = mws->regs;
- while (regs)
- {
- struct df_ref *ref = regs->ref;
- regno = DF_REF_REGNO (ref);
- REG_N_DEATHS (regno)++;
- regs = regs->next;
- }
- }
}
}
else
{
- struct df_link *regs = mws->regs;
- while (regs)
+ for (r = mws->start_regno; r <= mws->end_regno; r++)
{
- struct df_ref *ref = regs->ref;
-
- regno = DF_REF_REGNO (ref);
- if ((!bitmap_bit_p (live, regno))
- && (!bitmap_bit_p (artificial_uses, regno))
- && (!bitmap_bit_p (do_not_gen, regno)))
+ if ((!bitmap_bit_p (live, r))
+ && (!bitmap_bit_p (artificial_uses, r))
+ && (!bitmap_bit_p (do_not_gen, r)))
{
- rtx note = alloc_EXPR_LIST (REG_DEAD, regno_reg_rtx[regno],
- REG_NOTES (insn));
- REG_NOTES (insn) = note;
- if (flags & DF_RI_LIFE)
- REG_N_DEATHS (regno)++;
+ old = df_set_note (REG_DEAD, insn, old, regno_reg_rtx[r]);
#ifdef REG_DEAD_DEBUGGING
- print_note ("adding 2: ", insn, note);
+ df_print_note ("adding 2: ", insn, REG_NOTES (insn));
#endif
}
-
- regs = regs->next;
}
}
+ return old;
}
@@ -3399,72 +3801,40 @@ df_set_dead_notes_for_mw (rtx insn, struct df_mw_hardreg *mws,
and DO_NOT_GEN. Do not generate notes for registers in artificial
uses. */
-static void
-df_create_unused_note (basic_block bb, rtx insn, struct df_ref *def,
- bitmap live, bitmap do_not_gen, bitmap artificial_uses,
- bitmap local_live, bitmap local_processed,
- int flags, int luid)
+static rtx
+df_create_unused_note (rtx insn, rtx old, struct df_ref *def,
+ bitmap live, bitmap do_not_gen, bitmap artificial_uses)
{
unsigned int dregno = DF_REF_REGNO (def);
#ifdef REG_DEAD_DEBUGGING
- fprintf (stderr, " regular looking at def ");
- df_ref_debug (def, stderr);
-#endif
-
- if (bitmap_bit_p (live, dregno))
+ if (dump_file)
{
- if (flags & DF_RI_LIFE)
- {
- /* If we have seen this regno, then it has already been
- processed correctly with the per insn increment. If we
- have not seen it we need to add the length from here to
- the end of the block to the live length. */
- if (bitmap_bit_p (local_processed, dregno))
- {
- if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
- bitmap_clear_bit (local_live, dregno);
- }
- else
- {
- bitmap_set_bit (local_processed, dregno);
- REG_LIVE_LENGTH (dregno) += luid;
- }
- }
+ fprintf (dump_file, " regular looking at def ");
+ df_ref_debug (def, dump_file);
}
- else if ((!(DF_REF_FLAGS (def) & DF_REF_MW_HARDREG))
- && (!bitmap_bit_p (artificial_uses, dregno))
- && (!df_ignore_stack_reg (dregno)))
+#endif
+
+ if (!(bitmap_bit_p (live, dregno)
+ || (DF_REF_FLAGS (def) & DF_REF_MW_HARDREG)
+ || bitmap_bit_p (artificial_uses, dregno)
+ || df_ignore_stack_reg (dregno)))
{
- rtx reg = GET_CODE (*DF_REF_LOC (def)) == SUBREG ?
- SUBREG_REG (*DF_REF_LOC (def)) : *DF_REF_LOC (def);
- rtx note = alloc_EXPR_LIST (REG_UNUSED, reg, REG_NOTES (insn));
- REG_NOTES (insn) = note;
+ rtx reg = (DF_REF_LOC (def))
+ ? *DF_REF_REAL_LOC (def): DF_REF_REG (def);
+ old = df_set_note (REG_UNUSED, insn, old, reg);
#ifdef REG_DEAD_DEBUGGING
- print_note ("adding 3: ", insn, note);
+ df_print_note ("adding 3: ", insn, REG_NOTES (insn));
#endif
- if (flags & DF_RI_LIFE)
- {
- REG_N_DEATHS (dregno) ++;
- REG_LIVE_LENGTH (dregno)++;
- }
}
- if ((flags & DF_RI_LIFE) && (dregno >= FIRST_PSEUDO_REGISTER))
- {
- REG_FREQ (dregno) += REG_FREQ_FROM_BB (bb);
- if (REG_BASIC_BLOCK (dregno) == REG_BLOCK_UNKNOWN)
- REG_BASIC_BLOCK (dregno) = bb->index;
- else if (REG_BASIC_BLOCK (dregno) != bb->index)
- REG_BASIC_BLOCK (dregno) = REG_BLOCK_GLOBAL;
- }
-
if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER + DF_REF_MAY_CLOBBER)))
bitmap_set_bit (do_not_gen, dregno);
- /* Kill this register if it is not a subreg store. */
- if (!(DF_REF_FLAGS (def) & DF_REF_PARTIAL))
+ /* Kill this register if it is not a subreg store or conditional store. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
bitmap_clear_bit (live, dregno);
+ return old;
}
@@ -3473,163 +3843,159 @@ df_create_unused_note (basic_block bb, rtx insn, struct df_ref *def,
BB. The three bitvectors are scratch regs used here. */
static void
-df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index,
- bitmap live, bitmap do_not_gen, bitmap artificial_uses,
- bitmap local_live, bitmap local_processed, bitmap setjumps_crossed)
+df_note_bb_compute (unsigned int bb_index,
+ bitmap live, bitmap do_not_gen, bitmap artificial_uses)
{
- struct df *df = dflow->df;
basic_block bb = BASIC_BLOCK (bb_index);
rtx insn;
- struct df_ref *def;
- struct df_ref *use;
- int luid = 0;
+ struct df_ref **def_rec;
+ struct df_ref **use_rec;
- bitmap_copy (live, df_get_live_out (df, bb));
+ bitmap_copy (live, df_get_live_out (bb));
bitmap_clear (artificial_uses);
- if (dflow->flags & DF_RI_LIFE)
+#ifdef REG_DEAD_DEBUGGING
+ if (dump_file)
{
- /* Process the regs live at the end of the block. Mark them as
- not local to any one basic block. */
- bitmap_iterator bi;
- unsigned int regno;
- EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
- REG_BASIC_BLOCK (regno) = REG_BLOCK_GLOBAL;
+ fprintf (dump_file, "live at bottom ");
+ df_print_regset (dump_file, live);
}
+#endif
/* Process the artificial defs and uses at the bottom of the block
to begin processing. */
- for (def = df_get_artificial_defs (df, bb_index); def; def = def->next_ref)
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_clear_bit (live, DF_REF_REGNO (def));
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if (dump_file)
+ fprintf (dump_file, "artificial def %d\n", DF_REF_REGNO (def));
- for (use = df_get_artificial_uses (df, bb_index); use; use = use->next_ref)
- if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
- {
- unsigned int regno = DF_REF_REGNO (use);
- bitmap_set_bit (live, regno);
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
+ }
- /* Notes are not generated for any of the artificial registers
- at the bottom of the block. */
- bitmap_set_bit (artificial_uses, regno);
- }
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ struct df_ref *use = *use_rec;
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ {
+ unsigned int regno = DF_REF_REGNO (use);
+ bitmap_set_bit (live, regno);
+
+ /* Notes are not generated for any of the artificial registers
+ at the bottom of the block. */
+ bitmap_set_bit (artificial_uses, regno);
+ }
+ }
+#ifdef REG_DEAD_DEBUGGING
+ if (dump_file)
+ {
+ fprintf (dump_file, "live before artificials out ");
+ df_print_regset (dump_file, live);
+ }
+#endif
+
FOR_BB_INSNS_REVERSE (bb, insn)
{
unsigned int uid = INSN_UID (insn);
- unsigned int regno;
- bitmap_iterator bi;
- struct df_mw_hardreg *mws;
-
+ struct df_mw_hardreg **mws_rec;
+ rtx old_dead_notes;
+ rtx old_unused_notes;
+
if (!INSN_P (insn))
continue;
- if (dflow->flags & DF_RI_LIFE)
- {
- /* Increment the live_length for all of the registers that
- are are referenced in this block and live at this
- particular point. */
- bitmap_iterator bi;
- unsigned int regno;
- EXECUTE_IF_SET_IN_BITMAP (local_live, 0, regno, bi)
- {
- REG_LIVE_LENGTH (regno)++;
- }
- luid++;
- }
-
bitmap_clear (do_not_gen);
- df_kill_notes (insn, dflow->flags);
+ df_kill_notes (insn, &old_dead_notes, &old_unused_notes);
/* Process the defs. */
if (CALL_P (insn))
{
- if (dflow->flags & DF_RI_LIFE)
+#ifdef REG_DEAD_DEBUGGING
+ if (dump_file)
{
- bool can_throw = can_throw_internal (insn);
- bool set_jump = (find_reg_note (insn, REG_SETJMP, NULL) != NULL);
- EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
- {
- REG_N_CALLS_CROSSED (regno)++;
- if (can_throw)
- REG_N_THROWING_CALLS_CROSSED (regno)++;
-
- /* We have a problem with any pseudoreg that lives
- across the setjmp. ANSI says that if a user
- variable does not change in value between the
- setjmp and the longjmp, then the longjmp
- preserves it. This includes longjmp from a place
- where the pseudo appears dead. (In principle,
- the value still exists if it is in scope.) If
- the pseudo goes in a hard reg, some other value
- may occupy that hard reg where this pseudo is
- dead, thus clobbering the pseudo. Conclusion:
- such a pseudo must not go in a hard reg. */
- if (set_jump && regno >= FIRST_PSEUDO_REGISTER)
- bitmap_set_bit (setjumps_crossed, regno);
- }
+ fprintf (dump_file, "processing call %d\n live =", INSN_UID (insn));
+ df_print_regset (dump_file, live);
}
-
+#endif
/* We only care about real sets for calls. Clobbers only
- may clobber and cannot be depended on. */
- for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
+ may clobber and cannot be depended on. */
+ mws_rec = DF_INSN_UID_MWS (uid);
+ while (*mws_rec)
{
+ struct df_mw_hardreg *mws = *mws_rec;
if ((mws->type == DF_REF_REG_DEF)
&& !df_ignore_stack_reg (REGNO (mws->mw_reg)))
- df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
- artificial_uses, dflow->flags);
+ old_unused_notes
+ = df_set_unused_notes_for_mw (insn, old_unused_notes,
+ mws, live, do_not_gen,
+ artificial_uses);
+ mws_rec++;
}
/* All of the defs except the return value are some sort of
clobber. This code is for the return. */
- for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
- if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
- df_create_unused_note (bb, insn, def, live, do_not_gen,
- artificial_uses, local_live,
- local_processed, dflow->flags, luid);
-
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if (!(DF_REF_FLAGS (def) & (DF_REF_MUST_CLOBBER | DF_REF_MAY_CLOBBER)))
+ old_unused_notes
+ = df_create_unused_note (insn, old_unused_notes,
+ def, live, do_not_gen,
+ artificial_uses);
+ }
}
else
{
/* Regular insn. */
- for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
+ mws_rec = DF_INSN_UID_MWS (uid);
+ while (*mws_rec)
{
+ struct df_mw_hardreg *mws = *mws_rec;
if (mws->type == DF_REF_REG_DEF)
- df_set_unused_notes_for_mw (insn, mws, live, do_not_gen,
- artificial_uses, dflow->flags);
+ old_unused_notes
+ = df_set_unused_notes_for_mw (insn, old_unused_notes,
+ mws, live, do_not_gen,
+ artificial_uses);
+ mws_rec++;
}
- for (def = DF_INSN_UID_DEFS (df, uid); def; def = def->next_ref)
- df_create_unused_note (bb, insn, def, live, do_not_gen,
- artificial_uses, local_live,
- local_processed, dflow->flags, luid);
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ old_unused_notes
+ = df_create_unused_note (insn, old_unused_notes,
+ def, live, do_not_gen,
+ artificial_uses);
+ }
}
/* Process the uses. */
- for (mws = DF_INSN_UID_MWS (df, uid); mws; mws = mws->next)
+ mws_rec = DF_INSN_UID_MWS (uid);
+ while (*mws_rec)
{
+ struct df_mw_hardreg *mws = *mws_rec;
if ((mws->type != DF_REF_REG_DEF)
&& !df_ignore_stack_reg (REGNO (mws->mw_reg)))
- df_set_dead_notes_for_mw (insn, mws, live, do_not_gen,
- artificial_uses, dflow->flags);
+ old_dead_notes
+ = df_set_dead_notes_for_mw (insn, old_dead_notes,
+ mws, live, do_not_gen,
+ artificial_uses);
+ mws_rec++;
}
- for (use = DF_INSN_UID_USES (df, uid); use; use = use->next_ref)
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
{
+ struct df_ref *use = *use_rec;
unsigned int uregno = DF_REF_REGNO (use);
- if ((dflow->flags & DF_RI_LIFE) && (uregno >= FIRST_PSEUDO_REGISTER))
+#ifdef REG_DEAD_DEBUGGING
+ if (dump_file)
{
- REG_FREQ (uregno) += REG_FREQ_FROM_BB (bb);
- if (REG_BASIC_BLOCK (uregno) == REG_BLOCK_UNKNOWN)
- REG_BASIC_BLOCK (uregno) = bb->index;
- else if (REG_BASIC_BLOCK (uregno) != bb->index)
- REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
+ fprintf (dump_file, " regular looking at use ");
+ df_ref_debug (use, dump_file);
}
-
-#ifdef REG_DEAD_DEBUGGING
- fprintf (stderr, " regular looking at use ");
- df_ref_debug (use, stderr);
#endif
if (!bitmap_bit_p (live, uregno))
{
@@ -3639,157 +4005,99 @@ df_ri_bb_compute (struct dataflow *dflow, unsigned int bb_index,
&& (!(DF_REF_FLAGS (use) & DF_REF_READ_WRITE))
&& (!df_ignore_stack_reg (uregno)))
{
- rtx reg = GET_CODE (*DF_REF_LOC (use)) == SUBREG ?
- SUBREG_REG (*DF_REF_LOC (use)) : *DF_REF_LOC (use);
- rtx note = alloc_EXPR_LIST (REG_DEAD, reg, REG_NOTES (insn));
- REG_NOTES (insn) = note;
- if (dflow->flags & DF_RI_LIFE)
- REG_N_DEATHS (uregno)++;
+ rtx reg = (DF_REF_LOC (use))
+ ? *DF_REF_REAL_LOC (use) : DF_REF_REG (use);
+ old_dead_notes = df_set_note (REG_DEAD, insn, old_dead_notes, reg);
#ifdef REG_DEAD_DEBUGGING
- print_note ("adding 4: ", insn, note);
+ df_print_note ("adding 4: ", insn, REG_NOTES (insn));
#endif
}
/* This register is now live. */
bitmap_set_bit (live, uregno);
-
- if (dflow->flags & DF_RI_LIFE)
- {
- /* If we have seen this regno, then it has already
- been processed correctly with the per insn
- increment. If we have not seen it we set the bit
- so that begins to get processed locally. Note
- that we don't even get here if the variable was
- live at the end of the block since just a ref
- inside the block does not effect the
- calculations. */
- REG_LIVE_LENGTH (uregno) ++;
- bitmap_set_bit (local_live, uregno);
- bitmap_set_bit (local_processed, uregno);
- }
}
}
- }
-
- if (dflow->flags & DF_RI_LIFE)
- {
- /* Add the length of the block to all of the registers that were
- not referenced, but still live in this block. */
- bitmap_iterator bi;
- unsigned int regno;
- bitmap_and_compl_into (live, local_processed);
- EXECUTE_IF_SET_IN_BITMAP (live, 0, regno, bi)
+
+ while (old_unused_notes)
+ {
+ rtx next = XEXP (old_unused_notes, 1);
+ free_EXPR_LIST_node (old_unused_notes);
+ old_unused_notes = next;
+ }
+ while (old_dead_notes)
{
- REG_LIVE_LENGTH (regno) += luid;
+ rtx next = XEXP (old_dead_notes, 1);
+ free_EXPR_LIST_node (old_dead_notes);
+ old_dead_notes = next;
}
- bitmap_clear (local_processed);
- bitmap_clear (local_live);
}
}
/* Compute register info: lifetime, bb, and number of defs and uses. */
static void
-df_ri_compute (struct dataflow *dflow, bitmap all_blocks ATTRIBUTE_UNUSED,
- bitmap blocks_to_scan)
+df_note_compute (bitmap all_blocks)
{
unsigned int bb_index;
bitmap_iterator bi;
- bitmap live = BITMAP_ALLOC (NULL);
- bitmap do_not_gen = BITMAP_ALLOC (NULL);
- bitmap artificial_uses = BITMAP_ALLOC (NULL);
- bitmap local_live = NULL;
- bitmap local_processed = NULL;
- bitmap setjumps_crossed = NULL;
-
- if (dflow->flags & DF_RI_LIFE)
- {
- local_live = BITMAP_ALLOC (NULL);
- local_processed = BITMAP_ALLOC (NULL);
- setjumps_crossed = BITMAP_ALLOC (NULL);
- }
-
+ bitmap live = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap do_not_gen = BITMAP_ALLOC (&df_bitmap_obstack);
+ bitmap artificial_uses = BITMAP_ALLOC (&df_bitmap_obstack);
#ifdef REG_DEAD_DEBUGGING
- df_lr_dump (dflow->df->problems_by_index [DF_LR], stderr);
- print_rtl_with_bb (stderr, get_insns());
+ if (dump_file)
+ print_rtl_with_bb (dump_file, get_insns());
#endif
- EXECUTE_IF_SET_IN_BITMAP (blocks_to_scan, 0, bb_index, bi)
+ EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- df_ri_bb_compute (dflow, bb_index, live, do_not_gen, artificial_uses,
- local_live, local_processed, setjumps_crossed);
+ df_note_bb_compute (bb_index, live, do_not_gen, artificial_uses);
}
BITMAP_FREE (live);
BITMAP_FREE (do_not_gen);
BITMAP_FREE (artificial_uses);
- if (dflow->flags & DF_RI_LIFE)
- {
- bitmap_iterator bi;
- unsigned int regno;
- /* See the setjump comment in df_ri_bb_compute. */
- EXECUTE_IF_SET_IN_BITMAP (setjumps_crossed, 0, regno, bi)
- {
- REG_BASIC_BLOCK (regno) = REG_BLOCK_UNKNOWN;
- REG_LIVE_LENGTH (regno) = -1;
- }
-
- BITMAP_FREE (local_live);
- BITMAP_FREE (local_processed);
- BITMAP_FREE (setjumps_crossed);
- }
}
/* Free all storage associated with the problem. */
static void
-df_ri_free (struct dataflow *dflow)
+df_note_free (void)
{
- free (dflow->problem_data);
- free (dflow);
+ free (df_note);
}
-/* Debugging info. */
-
-static void
-df_ri_dump (struct dataflow *dflow, FILE *file)
-{
- print_rtl_with_bb (file, get_insns ());
-
- if (dflow->flags & DF_RI_LIFE)
- {
- fprintf (file, "Register info:\n");
- dump_flow_info (file, -1);
- }
-}
-
/* All of the information associated every instance of the problem. */
-static struct df_problem problem_RI =
+static struct df_problem problem_NOTE =
{
- DF_RI, /* Problem id. */
+ DF_NOTE, /* Problem id. */
DF_NONE, /* Direction. */
- df_ri_alloc, /* Allocate the problem specific data. */
+ NULL, /* Allocate the problem specific data. */
NULL, /* Reset global information. */
NULL, /* Free basic block info. */
- df_ri_compute, /* Local compute function. */
+ df_note_compute, /* Local compute function. */
NULL, /* Init the solution specific data. */
NULL, /* Iterative solver. */
NULL, /* Confluence operator 0. */
NULL, /* Confluence operator n. */
NULL, /* Transfer function. */
NULL, /* Finalize function. */
- df_ri_free, /* Free all of the problem information. */
- df_ri_dump, /* Debugging. */
+ df_note_free, /* Free all of the problem information. */
+ df_note_free, /* Remove this problem from the stack of dataflow problems. */
+ NULL, /* Debugging. */
+ NULL, /* Debugging start block. */
+ NULL, /* Debugging end block. */
+ NULL, /* Incremental solution verify start. */
+ NULL, /* Incremental solution verify end. */
/* Technically this is only dependent on the live registers problem
but it will produce information if one of the uninitialized
register problems (UR, UREC) is also run. */
- df_lr_add_problem, /* Dependent problem. */
- 0 /* Changeable flags. */
+ &problem_LR, /* Dependent problem. */
+ TV_DF_NOTE /* Timing variable. */
};
@@ -3797,8 +4105,242 @@ static struct df_problem problem_RI =
of DF. The returned structure is what is used to get at the
solution. */
-struct dataflow *
-df_ri_add_problem (struct df *df, int flags)
+void
+df_note_add_problem (void)
{
- return df_add_problem (df, &problem_RI, flags);
+ df_add_problem (&problem_NOTE);
}
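(Editorial sketch, not part of the patch: a pass that wants the REG_DEAD and REG_UNUSED notes refreshed simply registers this problem and re-solves; the notes are rewritten as a side effect of df_note_compute.)

  df_note_add_problem ();
  df_analyze ();   /* REG_DEAD/REG_UNUSED notes are rewritten here.  */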
+
+
+
+
+/*----------------------------------------------------------------------------
+ Functions for simulating the effects of single insns.
+
+ You can either simulate in the forwards direction, starting from
+ the top of a block, or in the backwards direction, starting from
+ the end of the block. The main difference is that if you go
+ forwards, the uses are examined first and then the defs; if you go
+ backwards, the defs are examined first and then the uses.
+
+ If you start at the top of the block, use one of DF_LIVE_IN or
+ DF_LR_IN. If you start at the bottom of the block, use one of
+ DF_LIVE_OUT or DF_LR_OUT. BE SURE TO PASS A COPY OF THESE SETS,
+ THEY WILL BE DESTROYED.
+
+----------------------------------------------------------------------------*/
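(Editorial sketch, not part of the patch: a typical backwards walk over one block using the routines defined below.  scan_block_backwards is an invented name; DF_LR_OUT, FOR_BB_INSNS_REVERSE and the bitmap calls are the existing df/CFG interfaces.  Note the private copy of the out set, as the comment above requires.)

static void
scan_block_backwards (basic_block bb)
{
  bitmap live = BITMAP_ALLOC (NULL);
  rtx insn;

  /* Start from a copy; the simulation routines modify LIVE in place.  */
  bitmap_copy (live, DF_LR_OUT (bb));
  df_simulate_artificial_refs_at_end (bb, live);

  FOR_BB_INSNS_REVERSE (bb, insn)
    {
      /* Before the call LIVE holds the regs live just after INSN;
         afterwards it holds the regs live just before it.  */
      df_simulate_one_insn_backwards (bb, insn, live);
    }

  BITMAP_FREE (live);
}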
+
+
+/* Find the set of DEFs for INSN. */
+
+void
+df_simulate_find_defs (rtx insn, bitmap defs)
+{
+ struct df_ref **def_rec;
+ unsigned int uid = INSN_UID (insn);
+
+ if (CALL_P (insn))
+ {
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
+
+ if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
+ {
+ if (dregno >= FIRST_PSEUDO_REGISTER
+ || !(SIBLING_CALL_P (insn)
+ && bitmap_bit_p (df->exit_block_uses, dregno)
+ && !refers_to_regno_p (dregno, dregno+1,
+ current_function_return_rtx,
+ (rtx *)0)))
+ {
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_set_bit (defs, dregno);
+ }
+ }
+ else
+ /* This is the return value. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_set_bit (defs, dregno);
+ }
+ }
+ else
+ {
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
+ }
+ }
+}
+
+
+/* Simulate the effects of the defs of INSN on LIVE. */
+
+void
+df_simulate_defs (rtx insn, bitmap live)
+{
+ struct df_ref **def_rec;
+ unsigned int uid = INSN_UID (insn);
+
+ if (CALL_P (insn))
+ {
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
+
+ if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
+ {
+ if (dregno >= FIRST_PSEUDO_REGISTER
+ || !(SIBLING_CALL_P (insn)
+ && bitmap_bit_p (df->exit_block_uses, dregno)
+ && !refers_to_regno_p (dregno, dregno+1,
+ current_function_return_rtx,
+ (rtx *)0)))
+ {
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_bit (live, dregno);
+ }
+ }
+ else
+ /* This is the return value. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_bit (live, dregno);
+ }
+ }
+ else
+ {
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
+
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_bit (live, dregno);
+ }
+ }
+}
+
+
+/* Simulate the effects of the uses of INSN on LIVE. */
+
+void
+df_simulate_uses (rtx insn, bitmap live)
+{
+ struct df_ref **use_rec;
+ unsigned int uid = INSN_UID (insn);
+
+ for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
+ {
+ struct df_ref *use = *use_rec;
+ /* Mark the register used by this insn as live. */
+ bitmap_set_bit (live, DF_REF_REGNO (use));
+ }
+}
+
+
+/* Add back the always live regs in BB to LIVE. */
+
+static inline void
+df_simulate_fixup_sets (basic_block bb, bitmap live)
+{
+ /* These regs are considered always live so if they end up dying
+ because of some def, we need to bring them back again. */
+ if (df_has_eh_preds (bb))
+ bitmap_ior_into (live, df->eh_block_artificial_uses);
+ else
+ bitmap_ior_into (live, df->regular_block_artificial_uses);
+}
+
+
+/* Apply the artificial uses and defs at the top of BB in a forwards
+ direction. */
+
+void
+df_simulate_artificial_refs_at_top (basic_block bb, bitmap live)
+{
+ struct df_ref **def_rec;
+ struct df_ref **use_rec;
+ int bb_index = bb->index;
+
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ struct df_ref *use = *use_rec;
+ if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
+ bitmap_set_bit (live, DF_REF_REGNO (use));
+ }
+
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
+ }
+}
+
+
+/* Simulate the forwards effects of INSN on the bitmap LIVE. */
+
+void
+df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
+{
+ if (! INSN_P (insn))
+ return;
+
+ df_simulate_uses (insn, live);
+ df_simulate_defs (insn, live);
+ df_simulate_fixup_sets (bb, live);
+}
+
+
+/* Apply the artificial uses and defs at the end of BB in a backwards
+ direction. */
+
+void
+df_simulate_artificial_refs_at_end (basic_block bb, bitmap live)
+{
+ struct df_ref **def_rec;
+ struct df_ref **use_rec;
+ int bb_index = bb->index;
+
+ for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
+ {
+ struct df_ref *def = *def_rec;
+ if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
+ bitmap_clear_bit (live, DF_REF_REGNO (def));
+ }
+
+ for (use_rec = df_get_artificial_uses (bb_index); *use_rec; use_rec++)
+ {
+ struct df_ref *use = *use_rec;
+ if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == 0)
+ bitmap_set_bit (live, DF_REF_REGNO (use));
+ }
+}
+
+
+/* Simulate the backwards effects of INSN on the bitmap LIVE. */
+
+void
+df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live)
+{
+ if (! INSN_P (insn))
+ return;
+
+ df_simulate_defs (insn, live);
+ df_simulate_uses (insn, live);
+ df_simulate_fixup_sets (bb, live);
+}
+
+