path: root/gcc/df.c
author     matz <matz@138bc75d-0d04-0410-961f-82ee72b054a4>    2002-07-16 13:36:30 +0000
committer  matz <matz@138bc75d-0d04-0410-961f-82ee72b054a4>    2002-07-16 13:36:30 +0000
commit     1c3ed44d7e029ff0be1f8009734e0bce5959d6f6 (patch)
tree       1b0486e550160f809a3abbe6daf1be6b5650785f /gcc/df.c
parent     9ca7cbb6cc3be6a01350df2f0e19cbaecdefc404 (diff)
download   gcc-1c3ed44d7e029ff0be1f8009734e0bce5959d6f6.tar.gz
Add a new register allocator.
        * ra.c: New file.
        * ra.h: New file.
        * ra-build.c: New file.
        * ra-colorize.c: New file.
        * ra-debug.c: New file.
        * ra-rewrite.c: New file.
        * Makefile.in (ra.o, ra-build.o, ra-colorize.o, ra-debug.o,
        ra-rewrite.o): New .o files for libbackend.a.
        (GTFILES): Add basic-block.h.
        * toplev.c (flag_new_regalloc): New.
        (f_options): New option "new-ra".
        (rest_of_compilation): Call initialize_uninitialized_subregs()
        only for the old allocator.  If flag_new_regalloc is set, call
        new allocator, instead of local_alloc(), global_alloc() and
        friends.
        * doc/invoke.texi: Document -fnew-ra.
        * basic-block.h (FOR_ALL_BB): New.
        * config/rs6000/rs6000.c (print_operand): Write small constants
        as @l+80.
        * df.c (read_modify_subreg_p): Narrow down cases for a rmw subreg.
        (df_reg_table_realloc): Make size at least as large as max_reg_num().
        (df_insn_table_realloc): Size argument now is absolute, not
        relative.  Changed all callers.
        * gengtype.c (main): Add the pseudo-type "HARD_REG_SET".
        * regclass.c (reg_scan_mark_refs): Ignore NULL rtx's.

2002-06-20  Michael Matz  <matz@suse.de>

        * df.h (struct ref.id): Make unsigned.
        * df.c (df_bb_reg_def_chain_create): Remove unsigned cast.

2002-06-13  Michael Matz  <matz@suse.de>

        * df.h (DF_REF_MODE_CHANGE): New flag.
        * df.c (df_def_record_1, df_uses_record): Set this flag for refs
        involving subregs with invalid mode changes, when
        CLASS_CANNOT_CHANGE_MODE is defined.

2002-05-07  Michael Matz  <matz@suse.de>

        * reload1.c (fixup_abnormal_edges): Don't insert on NULL edge.

2002-05-03  Michael Matz  <matz@suse.de>

        * sbitmap.c (sbitmap_difference): Accept sbitmaps of different size.

Sat Feb 2 18:58:07 2002  Denis Chertykov  <denisc@overta.ru>

        * regclass.c (regclass): Work with all regs which have sets or
        refs.
        (reg_scan_mark_refs): Count regs inside (clobber ...).

2002-01-04  Michael Matz  <matzmich@cs.tu-berlin.de>

        * df.c (df_ref_record): Correctly calculate SUBREGs of hardregs.
        (df_bb_reg_def_chain_create, df_bb_reg_use_chain_create): Only
        add new refs.
        (df_bb_refs_update): Don't clear insns_modified here, ...
        (df_analyse): ... but here.
        * sbitmap.c (dump_sbitmap_file): New.
        (debug_sbitmap): Use it.
        * sbitmap.h (dump_sbitmap_file): Add prototype.

2001-08-07  Daniel Berlin  <dan@cgsoftware.com>

        * df.c (df_insn_modify): Grow the UID table if necessary, rather
        than assume all emits go through df_insns_modify.

2001-07-26  Daniel Berlin  <dan@cgsoftware.com>

        * regclass.c (reg_scan_mark_refs): When we increase REG_N_SETS,
        increase REG_N_REFS (like flow does), so that regclass doesn't
        think a reg is useless, and thus, not calculate a class, when it
        really should have.

2001-01-28  Daniel Berlin  <dberlin@redhat.com>

        * sbitmap.h (EXECUTE_IF_SET_IN_SBITMAP_REV): New macro, needed
        for dataflow analysis.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@55485 138bc75d-0d04-0410-961f-82ee72b054a4
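The df.c entry "(read_modify_subreg_p): Narrow down cases for a rmw subreg" boils down to the size comparison visible in the corresponding hunk below. The following is a minimal standalone sketch of that decision, not the GCC code itself: it assumes a 4-byte word and takes the inner and outer mode sizes as plain integers instead of reading them from an rtx, and rmw_subreg_p with its parameters are illustrative names only.

#include <stdbool.h>

#define UNITS_PER_WORD 4   /* assumption: 4-byte words, purely for illustration */

/* True if a write to a subreg whose outer mode is OSIZE bytes, of a reg
   whose inner mode is ISIZE bytes, is a read-modify-write access,
   following the decision the patch installs in read_modify_subreg_p.  */
static bool
rmw_subreg_p (unsigned int isize, unsigned int osize)
{
  if (isize <= osize)
    return true;     /* paradoxical (or same-size) subreg */
  if (isize <= UNITS_PER_WORD)
    return false;    /* the inner reg fits in one word: plain write */
  if (osize >= UNITS_PER_WORD)
    return false;    /* the write covers at least one full word */
  return true;       /* narrow write into a multi-word reg */
}

int
main (void)
{
  /* With 8-byte DImode and 4-byte SImode, (subreg:SI (reg:DI x) 0) is no
     longer treated as read-modify-write, while a 1-byte QImode subreg of
     the same DImode reg still is.  */
  return !(!rmw_subreg_p (8, 4) && rmw_subreg_p (8, 1));
}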
Diffstat (limited to 'gcc/df.c')
-rw-r--r--  gcc/df.c  88
1 file changed, 71 insertions, 17 deletions
diff --git a/gcc/df.c b/gcc/df.c
index d7131683c48..7b8b85e8fca 100644
--- a/gcc/df.c
+++ b/gcc/df.c
@@ -311,17 +311,20 @@ static inline bool read_modify_subreg_p PARAMS ((rtx));
/* Local memory allocation/deallocation routines. */
-/* Increase the insn info table by SIZE more elements. */
+/* Increase the insn info table to have space for at least SIZE + 1
+ elements. */
static void
df_insn_table_realloc (df, size)
struct df *df;
int size;
{
- /* Make table 25 percent larger by default. */
- if (! size)
- size = df->insn_size / 4;
+ size++;
+ if (size <= df->insn_size)
+ return;
- size += df->insn_size;
+ /* Make the table a little larger than requested, so we don't need
+ to enlarge it so often. */
+ size += df->insn_size / 4;
df->insns = (struct insn_info *)
xrealloc (df->insns, size * sizeof (struct insn_info));
@@ -350,6 +353,8 @@ df_reg_table_realloc (df, size)
size = df->reg_size / 4;
size += df->reg_size;
+ if (size < max_reg_num ())
+ size = max_reg_num ();
df->regs = (struct reg_info *)
xrealloc (df->regs, size * sizeof (struct reg_info));
@@ -892,7 +897,11 @@ df_ref_record (df, reg, loc, insn, ref_type, ref_flags)
are really referenced. E.g. a (subreg:SI (reg:DI 0) 0) does _not_
reference the whole reg 0 in DI mode (which would also include
reg 1, at least, if 0 and 1 are SImode registers). */
- endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (reg));
+ endregno = HARD_REGNO_NREGS (regno, GET_MODE (reg));
+ if (GET_CODE (reg) == SUBREG)
+ regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
+ SUBREG_BYTE (reg), GET_MODE (reg));
+ endregno += regno;
for (i = regno; i < endregno; i++)
df_ref_record_1 (df, regno_reg_rtx[i],
@@ -904,18 +913,23 @@ df_ref_record (df, reg, loc, insn, ref_type, ref_flags)
}
}
-/* Writes to SUBREG of inndermode wider than word and outermode shorter than
- word are read-modify-write. */
+/* Writes to paradoxical subregs, or subregs which are too narrow
+ are read-modify-write. */
static inline bool
read_modify_subreg_p (x)
rtx x;
{
+ unsigned int isize, osize;
if (GET_CODE (x) != SUBREG)
return false;
- if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) <= UNITS_PER_WORD)
+ isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
+ osize = GET_MODE_SIZE (GET_MODE (x));
+ if (isize <= osize)
+ return true;
+ if (isize <= UNITS_PER_WORD)
return false;
- if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
+ if (osize >= UNITS_PER_WORD)
return false;
return true;
}
@@ -943,6 +957,13 @@ df_def_record_1 (df, x, bb, insn)
return;
}
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (GET_CODE (dst) == SUBREG
+ && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ flags |= DF_REF_MODE_CHANGE;
+#endif
+
/* May be, we should flag the use of strict_low_part somehow. Might be
handy for the reg allocator. */
while (GET_CODE (dst) == STRICT_LOW_PART
@@ -957,6 +978,12 @@ df_def_record_1 (df, x, bb, insn)
loc = &XEXP (dst, 0);
dst = *loc;
}
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (GET_CODE (dst) == SUBREG
+ && CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ flags |= DF_REF_MODE_CHANGE;
+#endif
loc = &XEXP (dst, 0);
dst = *loc;
flags |= DF_REF_READ_WRITE;
@@ -1052,6 +1079,11 @@ df_uses_record (df, loc, ref_type, bb, insn, flags)
df_uses_record (df, loc, ref_type, bb, insn, flags);
return;
}
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (x),
+ GET_MODE (SUBREG_REG (x))))
+ flags |= DF_REF_MODE_CHANGE;
+#endif
/* ... Fall through ... */
@@ -1068,16 +1100,24 @@ df_uses_record (df, loc, ref_type, bb, insn, flags)
switch (GET_CODE (dst))
{
+ enum df_ref_flags use_flags;
case SUBREG:
if (read_modify_subreg_p (dst))
{
+ use_flags = DF_REF_READ_WRITE;
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ use_flags |= DF_REF_MODE_CHANGE;
+#endif
df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
- insn, DF_REF_READ_WRITE);
+ insn, use_flags);
break;
}
/* ... FALLTHRU ... */
case REG:
case PC:
+ case PARALLEL:
break;
case MEM:
df_uses_record (df, &XEXP (dst, 0),
@@ -1089,8 +1129,14 @@ df_uses_record (df, loc, ref_type, bb, insn, flags)
dst = XEXP (dst, 0);
if (GET_CODE (dst) != SUBREG)
abort ();
+ use_flags = DF_REF_READ_WRITE;
+#ifdef CLASS_CANNOT_CHANGE_MODE
+ if (CLASS_CANNOT_CHANGE_MODE_P (GET_MODE (dst),
+ GET_MODE (SUBREG_REG (dst))))
+ use_flags |= DF_REF_MODE_CHANGE;
+#endif
df_uses_record (df, &SUBREG_REG (dst), DF_REF_REG_USE, bb,
- insn, DF_REF_READ_WRITE);
+ insn, use_flags);
break;
case ZERO_EXTRACT:
case SIGN_EXTRACT:
@@ -1345,6 +1391,11 @@ df_bb_reg_def_chain_create (df, bb)
{
struct ref *def = link->ref;
unsigned int dregno = DF_REF_REGNO (def);
+ /* Don't add ref's to the chain two times. I.e. only add
+ new refs. XXX the same could be done by testing if the current
+ insn is a modified (or a new) one. This would be faster. */
+ if (DF_REF_ID (def) < df->def_id_save)
+ continue;
df->regs[dregno].defs
= df_link_create (def, df->regs[dregno].defs);
@@ -1394,6 +1445,11 @@ df_bb_reg_use_chain_create (df, bb)
{
struct ref *use = link->ref;
unsigned int uregno = DF_REF_REGNO (use);
+ /* Don't add ref's to the chain two times. I.e. only add
+ new refs. XXX the same could be done by testing if the current
+ insn is a modified (or a new) one. This would be faster. */
+ if (DF_REF_ID (use) < df->use_id_save)
+ continue;
df->regs[uregno].uses
= df_link_create (use, df->regs[uregno].uses);
@@ -2218,8 +2274,6 @@ df_bb_refs_update (df, bb)
/* Scan the insn for refs. */
df_insn_refs_record (df, bb, insn);
-
- bitmap_clear_bit (df->insns_modified, uid);
count++;
}
if (insn == bb->end)
@@ -2318,6 +2372,7 @@ df_analyse (df, blocks, flags)
df_analyse_1 (df, blocks, flags, 1);
bitmap_zero (df->bbs_modified);
+ bitmap_zero (df->insns_modified);
}
}
return update;
@@ -2445,9 +2500,8 @@ df_insn_modify (df, bb, insn)
unsigned int uid;
uid = INSN_UID (insn);
-
if (uid >= df->insn_size)
- df_insn_table_realloc (df, 0);
+ df_insn_table_realloc (df, uid);
bitmap_set_bit (df->bbs_modified, bb->index);
bitmap_set_bit (df->insns_modified, uid);
@@ -2734,7 +2788,7 @@ df_insns_modify (df, bb, first_insn, last_insn)
uid = INSN_UID (insn);
if (uid >= df->insn_size)
- df_insn_table_realloc (df, 0);
+ df_insn_table_realloc (df, uid);
df_insn_modify (df, bb, insn);
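The last two hunks show the callers adjusted for the new df_insn_table_realloc convention described in the ChangeLog: the size argument is now absolute (the uid that must fit in the table), not a relative increment. A rough standalone sketch of that growth pattern, under the assumption of a plain realloc-backed table (table_grow, struct uid_table and insn_info_stub are made-up names, not GCC's):

#include <stdlib.h>
#include <string.h>

struct insn_info_stub { int dummy; };      /* stand-in for struct insn_info */

struct uid_table
{
  struct insn_info_stub *slots;
  size_t size;                             /* number of allocated slots */
};

/* Grow T so that slot INDEX exists, i.e. to at least INDEX + 1 slots,
   with some slack so repeated calls do not realloc every time.  */
static void
table_grow (struct uid_table *t, size_t index)
{
  struct insn_info_stub *p;
  size_t need = index + 1;                 /* absolute size, as in the patch */

  if (need <= t->size)
    return;                                /* already large enough */
  need += t->size / 4;                     /* 25% slack, like df.c */

  p = realloc (t->slots, need * sizeof *p);
  if (!p)
    abort ();                              /* GCC's xrealloc reports the failure itself */
  memset (p + t->size, 0, (need - t->size) * sizeof *p);
  t->slots = p;
  t->size = need;
}

A caller in the style of df_insn_modify would then do table_grow (&t, uid) whenever uid >= t.size, instead of requesting an unspecified relative enlargement.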