summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorkenner <kenner@138bc75d-0d04-0410-961f-82ee72b054a4>2003-05-03 14:25:22 +0000
committerkenner <kenner@138bc75d-0d04-0410-961f-82ee72b054a4>2003-05-03 14:25:22 +0000
commitd5f9786fae594d6d9f449e98620f3009d9a6710d (patch)
tree7ad4bd29e44c9c37b87311753f154d5fa72e515b
parentfcf31ac6ce1b003bd96382d41e617bf1a803aff2 (diff)
downloadgcc-d5f9786fae594d6d9f449e98620f3009d9a6710d.tar.gz
* emit-rtl.c (last_call_insn, add_function_usage_to): New functions.
* rtl.h (last_call_insn, add_function_usage_to): New prototypes.
* builtins.c (expand_builtin_apply): Use the new emit-rtl functions.
* calls.c (emit_call_1): Likewise.
(expand_call): For calls initializing constant memory, replace
emission of standalone mem /u clobber with function usage entry.
* expr.c (emit_block_move_via_libcall): Likewise.
* cse.c (count_reg_usage, case EXPR_LIST): New case.
* flow.c (propagate_one_insn): Pass entire operand of
CALL_INSN_FUNCTION_USAGE to mark_used_regs.
* integrate.c (try_constants): For CALL_INSNs, substitute constants
within the FUNCTION_USAGE also.
* loop.c (prescan_loop): Note clobbers of const mem mentioned in
FUNCTION_USAGE lists.
* reload1.c (replace_pseudos_in): Renamed.
(reload): Use it for clobbers surviving until the end of the reload.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@66429 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--gcc/ChangeLog19
-rw-r--r--gcc/builtins.c27
-rw-r--r--gcc/calls.c50
-rw-r--r--gcc/cse.c11
-rw-r--r--gcc/emit-rtl.c41
-rw-r--r--gcc/expr.c66
-rw-r--r--gcc/flow.c9
-rw-r--r--gcc/integrate.c8
-rw-r--r--gcc/loop.c24
-rw-r--r--gcc/reload1.c26
-rw-r--r--gcc/rtl.h2
11 files changed, 188 insertions, 95 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 4ba9b2d0fad..58469126a53 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,22 @@
+2003-05-03 Olivier Hainque <hainque@act-europe.fr>
+
+ * emit-rtl.c (last_call_insn, add_function_usage_to): New functions.
+ * rtl.h (last_call_insn, add_function_usage_to): New prototypes.
+ * builtins.c (expand_builtin_apply): Use the new emit-rtl functions.
+ * calls.c (emit_call_1): Likewise.
+ (expand_call): For calls initializing constant memory, replace
+ emission of standalone mem /u clobber with function usage entry.
+ * expr.c (emit_block_move_via_libcall): Likewise.
+ * cse.c (count_reg_usage, case EXPR_LIST): New case.
+ * flow.c (propagate_one_insn): Pass entire operand of
+ CALL_INSN_FUNCTION_USAGE to mark_used_regs.
+ * integrate.c (try_constants): For CALL_INSNs, substitute constants
+ within the FUNCTION_USAGE also.
+ * loop.c (prescan_loop): Note clobbers of const mem mentioned in
+ FUNCTION_USAGE lists.
+ * reload1.c (replace_pseudos_in): Renamed.
+ (reload): Use it for clobbers surviving until the end of the reload.
+
2003-05-03 Richard Kenner <kenner@vlsi1.ultra.nyu.edu>
* stor-layout.c (place_field): When adjusting offset_align, use
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 46b14bcee1d..969162b8922 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -1323,29 +1323,10 @@ expand_builtin_apply (function, arguments, argsize)
#endif
abort ();
- /* Find the CALL insn we just emitted. */
- for (call_insn = get_last_insn ();
- call_insn && GET_CODE (call_insn) != CALL_INSN;
- call_insn = PREV_INSN (call_insn))
- ;
-
- if (! call_insn)
- abort ();
-
- /* Put the register usage information on the CALL. If there is already
- some usage information, put ours at the end. */
- if (CALL_INSN_FUNCTION_USAGE (call_insn))
- {
- rtx link;
-
- for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
- link = XEXP (link, 1))
- ;
-
- XEXP (link, 1) = call_fusage;
- }
- else
- CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+ /* Find the CALL insn we just emitted, and attach the register usage
+ information. */
+ call_insn = last_call_insn ();
+ add_function_usage_to (call_insn, call_fusage);
/* Restore the stack. */
#ifdef HAVE_save_stack_nonlocal
diff --git a/gcc/calls.c b/gcc/calls.c
index 8fb84e9b906..70883ee3835 100644
--- a/gcc/calls.c
+++ b/gcc/calls.c
@@ -536,14 +536,8 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
#endif
abort ();
- /* Find the CALL insn we just emitted. */
- for (call_insn = get_last_insn ();
- call_insn && GET_CODE (call_insn) != CALL_INSN;
- call_insn = PREV_INSN (call_insn))
- ;
-
- if (! call_insn)
- abort ();
+ /* Find the call we just emitted. */
+ call_insn = last_call_insn ();
/* Mark memory as used for "pure" function call. */
if (ecf_flags & ECF_PURE)
@@ -554,20 +548,8 @@ emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
call_fusage);
- /* Put the register usage information on the CALL. If there is already
- some usage information, put ours at the end. */
- if (CALL_INSN_FUNCTION_USAGE (call_insn))
- {
- rtx link;
-
- for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
- link = XEXP (link, 1))
- ;
-
- XEXP (link, 1) = call_fusage;
- }
- else
- CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+ /* Put the register usage information there. */
+ add_function_usage_to (call_insn, call_fusage);
/* If this is a const call, then set the insn's unchanging bit. */
if (ecf_flags & (ECF_CONST | ECF_PURE))
@@ -3166,14 +3148,6 @@ expand_call (exp, target, ignore)
if (flags & ECF_LONGJMP)
current_function_calls_longjmp = 1;
- /* If this function is returning into a memory location marked as
- readonly, it means it is initializing that location. But we normally
- treat functions as not clobbering such locations, so we need to
- specify that this one does. */
- if (target != 0 && GET_CODE (target) == MEM
- && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
- emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
-
/* If value type not void, return an rtx for the value. */
/* If there are cleanups to be called, don't use a hard reg as target.
@@ -3355,6 +3329,22 @@ expand_call (exp, target, ignore)
expand_end_target_temps ();
}
+ /* If this function is returning into a memory location marked as
+ readonly, it means it is initializing that location. We normally treat
+ functions as not clobbering such locations, so we need to specify that
+ this one does. We do this by adding the appropriate CLOBBER to the
+ CALL_INSN function usage list. This cannot be done by emitting a
+ standalone CLOBBER after the call because the latter would be ignored
+ by at least the delay slot scheduling pass. We do this now instead of
+ adding to call_fusage before the call to emit_call_1 because TARGET
+ may be modified in the meantime. */
+ if (structure_value_addr != 0 && target != 0
+ && GET_CODE (target) == MEM && RTX_UNCHANGING_P (target))
+ add_function_usage_to
+ (last_call_insn (),
+ gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_CLOBBER (VOIDmode, target),
+ NULL_RTX));
+
insns = get_insns ();
end_sequence ();
diff --git a/gcc/cse.c b/gcc/cse.c
index a90f0d8979e..01c583cae07 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -7515,6 +7515,17 @@ count_reg_usage (x, counts, dest, incr)
count_reg_usage (XEXP (note, 0), counts, NULL_RTX, incr);
return;
+ case EXPR_LIST:
+ if (REG_NOTE_KIND (x) == REG_EQUAL
+ || (REG_NOTE_KIND (x) != REG_NONNEG && GET_CODE (XEXP (x,0)) == USE)
+ /* FUNCTION_USAGE expression lists may include (CLOBBER (mem /u)),
+ involving registers in the address. */
+ || GET_CODE (XEXP (x, 0)) == CLOBBER)
+ count_reg_usage (XEXP (x, 0), counts, NULL_RTX, incr);
+
+ count_reg_usage (XEXP (x, 1), counts, NULL_RTX, incr);
+ return;
+
case INSN_LIST:
abort ();
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index 7048aee61df..a000551dc2e 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -3191,6 +3191,22 @@ prev_real_insn (insn)
return insn;
}
+/* Return the last CALL_INSN in the current list, or 0 if there is none.
+ This routine does not look inside SEQUENCEs. */
+
+rtx
+last_call_insn ()
+{
+ rtx insn;
+
+ for (insn = get_last_insn ();
+ insn && GET_CODE (insn) != CALL_INSN;
+ insn = PREV_INSN (insn))
+ ;
+
+ return insn;
+}
+
/* Find the next insn after INSN that really does something. This routine
does not look inside SEQUENCEs. Until reload has completed, this is the
same as next_real_insn. */
@@ -3850,6 +3866,31 @@ remove_insn (insn)
}
}
+/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
+
+void
+add_function_usage_to (call_insn, call_fusage)
+ rtx call_insn, call_fusage;
+{
+ if (! call_insn || GET_CODE (call_insn) != CALL_INSN)
+ abort ();
+
+ /* Put the register usage information on the CALL. If there is already
+ some usage information, put ours at the end. */
+ if (CALL_INSN_FUNCTION_USAGE (call_insn))
+ {
+ rtx link;
+
+ for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
+ link = XEXP (link, 1))
+ ;
+
+ XEXP (link, 1) = call_fusage;
+ }
+ else
+ CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
+}
+
/* Delete all insns made since FROM.
FROM becomes the new last instruction. */
diff --git a/gcc/expr.c b/gcc/expr.c
index bc316b32f56..b64319a08a7 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -1839,16 +1839,16 @@ emit_block_move_via_movstr (x, y, size, align)
rtx x, y, size;
unsigned int align;
{
- /* Try the most limited insn first, because there's no point
- including more than one in the machine description unless
- the more limited one has some advantage. */
-
rtx opalign = GEN_INT (align / BITS_PER_UNIT);
enum machine_mode mode;
/* Since this is a move insn, we don't care about volatility. */
volatile_ok = 1;
+ /* Try the most limited insn first, because there's no point
+ including more than one in the machine description unless
+ the more limited one has some advantage. */
+
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
mode = GET_MODE_WIDER_MODE (mode))
{
@@ -1908,38 +1908,48 @@ static rtx
emit_block_move_via_libcall (dst, src, size)
rtx dst, src, size;
{
+ rtx dst_addr, src_addr;
tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
enum machine_mode size_mode;
rtx retval;
/* DST, SRC, or SIZE may have been passed through protect_from_queue.
- It is unsafe to save the value generated by protect_from_queue
- and reuse it later. Consider what happens if emit_queue is
- called before the return value from protect_from_queue is used.
+ It is unsafe to save the value generated by protect_from_queue and reuse
+ it later. Consider what happens if emit_queue is called before the
+ return value from protect_from_queue is used.
- Expansion of the CALL_EXPR below will call emit_queue before
- we are finished emitting RTL for argument setup. So if we are
- not careful we could get the wrong value for an argument.
+ Expansion of the CALL_EXPR below will call emit_queue before we are
+ finished emitting RTL for argument setup. So if we are not careful we
+ could get the wrong value for an argument.
- To avoid this problem we go ahead and emit code to copy X, Y &
- SIZE into new pseudos. We can then place those new pseudos
- into an RTL_EXPR and use them later, even after a call to
+ To avoid this problem we go ahead and emit code to copy the addresses of
+ DST and SRC and SIZE into new pseudos. We can then place those new
+ pseudos into an RTL_EXPR and use them later, even after a call to
emit_queue.
- Note this is not strictly needed for library calls since they
- do not call emit_queue before loading their arguments. However,
- we may need to have library calls call emit_queue in the future
- since failing to do so could cause problems for targets which
- define SMALL_REGISTER_CLASSES and pass arguments in registers. */
+ Note this is not strictly needed for library calls since they do not call
+ emit_queue before loading their arguments. However, we may need to have
+ library calls call emit_queue in the future since failing to do so could
+ cause problems for targets which define SMALL_REGISTER_CLASSES and pass
+ arguments in registers. */
+
+ dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
+ src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+ dst_addr = convert_memory_address (ptr_mode, dst_addr);
+ src_addr = convert_memory_address (ptr_mode, src_addr);
+#endif
- dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
- src = copy_to_mode_reg (Pmode, XEXP (src, 0));
+ dst_tree = make_tree (ptr_type_node, dst_addr);
+ src_tree = make_tree (ptr_type_node, src_addr);
if (TARGET_MEM_FUNCTIONS)
size_mode = TYPE_MODE (sizetype);
else
size_mode = TYPE_MODE (unsigned_type_node);
+
size = convert_to_mode (size_mode, size, 1);
size = copy_to_mode_reg (size_mode, size);
@@ -1951,8 +1961,6 @@ emit_block_move_via_libcall (dst, src, size)
For convenience, we generate the call to bcopy this way as well. */
- dst_tree = make_tree (ptr_type_node, dst);
- src_tree = make_tree (ptr_type_node, src);
if (TARGET_MEM_FUNCTIONS)
size_tree = make_tree (sizetype, size);
else
@@ -1979,13 +1987,17 @@ emit_block_move_via_libcall (dst, src, size)
retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
- /* If we are initializing a readonly value, show the above call
- clobbered it. Otherwise, a load from it may erroneously be
- hoisted from a loop. */
+ /* If we are initializing a readonly value, show the above call clobbered
+ it. Otherwise, a load from it may erroneously be hoisted from a loop, or
+ the delay slot scheduler might overlook conflicts and take nasty
+ decisions. */
if (RTX_UNCHANGING_P (dst))
- emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
+ add_function_usage_to
+ (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
+ gen_rtx_CLOBBER (VOIDmode, dst),
+ NULL_RTX));
- return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
+ return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall. Create the tree node
diff --git a/gcc/flow.c b/gcc/flow.c
index fb60610a7a8..f2f43204c4b 100644
--- a/gcc/flow.c
+++ b/gcc/flow.c
@@ -1832,13 +1832,14 @@ propagate_one_insn (pbi, insn)
if (GET_CODE (PATTERN (insn)) == COND_EXEC)
cond = COND_EXEC_TEST (PATTERN (insn));
- /* Calls use their arguments. */
+ /* Calls use their arguments, and may clobber memory which
+ address involves some register. */
for (note = CALL_INSN_FUNCTION_USAGE (insn);
note;
note = XEXP (note, 1))
- if (GET_CODE (XEXP (note, 0)) == USE)
- mark_used_regs (pbi, XEXP (XEXP (note, 0), 0),
- cond, insn);
+ /* We find USE or CLOBBER entities in a FUNCTION_USAGE list: both
+ of which mark_used_regs knows how to handle. */
+ mark_used_regs (pbi, XEXP (XEXP (note, 0), 0), cond, insn);
/* The stack ptr is used (honorarily) by a CALL insn. */
SET_REGNO_REG_SET (pbi->reg_live, STACK_POINTER_REGNUM);
diff --git a/gcc/integrate.c b/gcc/integrate.c
index 3c0b42ae01e..0fd108f2b1d 100644
--- a/gcc/integrate.c
+++ b/gcc/integrate.c
@@ -2463,6 +2463,14 @@ try_constants (insn, map)
apply_change_group ();
subst_constants (&PATTERN (insn), insn, map, 0);
apply_change_group ();
+
+ /* Enforce consistency between the addresses in the regular insn flow
+ and the ones in CALL_INSN_FUNCTION_USAGE lists, if any. */
+ if (GET_CODE (insn) == CALL_INSN && CALL_INSN_FUNCTION_USAGE (insn))
+ {
+ subst_constants (&CALL_INSN_FUNCTION_USAGE (insn), insn, map, 1);
+ apply_change_group ();
+ }
/* Show we don't know the value of anything stored or clobbered. */
note_stores (PATTERN (insn), mark_stores, NULL);
diff --git a/gcc/loop.c b/gcc/loop.c
index 1bdc1bdca34..b74d6adc0d9 100644
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -2575,6 +2575,30 @@ prescan_loop (loop)
loop_info->has_call = 1;
if (can_throw_internal (insn))
loop_info->has_multiple_exit_targets = 1;
+
+ /* Calls initializing constant objects have CLOBBER of MEM /u in the
+ attached FUNCTION_USAGE expression list, not accounted for by the
+ code above. We should note these to avoid missing dependencies in
+ later references. */
+ {
+ rtx fusage_entry;
+
+ for (fusage_entry = CALL_INSN_FUNCTION_USAGE (insn);
+ fusage_entry; fusage_entry = XEXP (fusage_entry, 1))
+ {
+ rtx fusage = XEXP (fusage_entry, 0);
+
+ if (GET_CODE (fusage) == CLOBBER
+ && GET_CODE (XEXP (fusage, 0)) == MEM
+ && RTX_UNCHANGING_P (XEXP (fusage, 0)))
+ {
+ note_stores (fusage, note_addr_stored, loop_info);
+ if (! loop_info->first_loop_store_insn
+ && loop_info->store_mems)
+ loop_info->first_loop_store_insn = insn;
+ }
+ }
+ }
break;
case JUMP_INSN:
diff --git a/gcc/reload1.c b/gcc/reload1.c
index edfd5d5dff1..792dd467e89 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -369,9 +369,7 @@ static int (*offsets_at)[NUM_ELIMINABLE_REGS];
static int num_labels;
-static void replace_pseudos_in_call_usage PARAMS ((rtx *,
- enum machine_mode,
- rtx));
+static void replace_pseudos_in PARAMS ((rtx *, enum machine_mode, rtx));
static void maybe_fix_stack_asms PARAMS ((void));
static void copy_reloads PARAMS ((struct insn_chain *));
static void calculate_needs_all_insns PARAMS ((int));
@@ -583,7 +581,7 @@ compute_use_by_pseudos (to, from)
equivalences. */
static void
-replace_pseudos_in_call_usage (loc, mem_mode, usage)
+replace_pseudos_in (loc, mem_mode, usage)
rtx *loc;
enum machine_mode mem_mode;
rtx usage;
@@ -608,7 +606,7 @@ replace_pseudos_in_call_usage (loc, mem_mode, usage)
if (x != *loc)
{
*loc = x;
- replace_pseudos_in_call_usage (loc, mem_mode, usage);
+ replace_pseudos_in (loc, mem_mode, usage);
return;
}
@@ -628,7 +626,7 @@ replace_pseudos_in_call_usage (loc, mem_mode, usage)
}
else if (code == MEM)
{
- replace_pseudos_in_call_usage (& XEXP (x, 0), GET_MODE (x), usage);
+ replace_pseudos_in (& XEXP (x, 0), GET_MODE (x), usage);
return;
}
@@ -636,10 +634,10 @@ replace_pseudos_in_call_usage (loc, mem_mode, usage)
fmt = GET_RTX_FORMAT (code);
for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
if (*fmt == 'e')
- replace_pseudos_in_call_usage (&XEXP (x, i), mem_mode, usage);
+ replace_pseudos_in (&XEXP (x, i), mem_mode, usage);
else if (*fmt == 'E')
for (j = 0; j < XVECLEN (x, i); j++)
- replace_pseudos_in_call_usage (& XVECEXP (x, i, j), mem_mode, usage);
+ replace_pseudos_in (& XVECEXP (x, i, j), mem_mode, usage);
}
@@ -1192,9 +1190,8 @@ reload (first, global)
rtx *pnote;
if (GET_CODE (insn) == CALL_INSN)
- replace_pseudos_in_call_usage (& CALL_INSN_FUNCTION_USAGE (insn),
- VOIDmode,
- CALL_INSN_FUNCTION_USAGE (insn));
+ replace_pseudos_in (& CALL_INSN_FUNCTION_USAGE (insn),
+ VOIDmode, CALL_INSN_FUNCTION_USAGE (insn));
if ((GET_CODE (PATTERN (insn)) == USE
/* We mark with QImode USEs introduced by reload itself. */
@@ -1213,6 +1210,13 @@ reload (first, global)
continue;
}
+ /* Some CLOBBERs may survive until here and still reference unassigned
+ pseudos with const equivalent, which may in turn cause ICE in later
+ passes if the reference remains in place. */
+ if (GET_CODE (PATTERN (insn)) == CLOBBER)
+ replace_pseudos_in (& XEXP (PATTERN (insn), 0),
+ VOIDmode, PATTERN (insn));
+
pnote = &REG_NOTES (insn);
while (*pnote != 0)
{
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 7783258066a..bc6f3929298 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -1538,6 +1538,8 @@ extern rtx emit_line_note PARAMS ((const char *, int));
extern rtx emit_note PARAMS ((const char *, int));
extern rtx emit_line_note_force PARAMS ((const char *, int));
extern rtx make_insn_raw PARAMS ((rtx));
+extern void add_function_usage_to PARAMS ((rtx, rtx));
+extern rtx last_call_insn PARAMS ((void));
extern rtx previous_insn PARAMS ((rtx));
extern rtx next_insn PARAMS ((rtx));
extern rtx prev_nonnote_insn PARAMS ((rtx));