author     bonzini <bonzini@138bc75d-0d04-0410-961f-82ee72b054a4>  2004-07-14 07:30:23 +0000
committer  bonzini <bonzini@138bc75d-0d04-0410-961f-82ee72b054a4>  2004-07-14 07:30:23 +0000
commit     0a534ba7c09a3afa3ca24fc1981585a3a431bc70 (patch)
tree       24070b8bb3608402594a7c1884532c74eaa969a8 /gcc
parent     292718d4a58f7fb34f403c0b750bcbb7548fda2d (diff)
2004-07-14  Paolo Bonzini  <bonzini@gnu.org>

	* expr.c (enqueue_insn, finish_expr_for_function,
	protect_from_queue, queued_subexp_p, mark_queue,
	emit_insns_enqueued_after_mark, emit_queue,
	expand_increment): Remove.
	(store_constructor): Expand increment as an assignment.
	(expand_expr_real_1 <case PREINCREMENT_EXPR,
	case PREDECREMENT_EXPR, case POSTINCREMENT_EXPR,
	case POSTDECREMENT_EXPR>): Abort.
	* expr.h (QUEUED_VAR, QUEUED_INSN, QUEUED_COPY,
	QUEUED_BODY, QUEUED_NEXT, finish_expr_for_function,
	protect_from_queue, emit_queue, queued_subexp_p): Remove.
	* function.h (pending_chain, x_pending_chain): Remove.
	* rtl.def (QUEUED): Remove.

	* emit-rtl.c (copy_insn_1, copy_most_rtx,
	set_used_flags, verify_rtx_sharing): Remove references to QUEUED.
	* genattrtab.c (attr_copy_rtx, clear_struct_flag,
	encode_units_mask): Likewise.
	* local-alloc.c (equiv_init_varies_p): Likewise.
	* rtl.c (copy_rtx): Likewise.
	* rtlanal.c (rtx_unstable_p, rtx_varies_p): Likewise.
	* simplify-rtx.c (simplify_gen_subreg): Likewise.
	* config/mn10300/mn10300.c (legitimate_pic_operand_p): Likewise.

	* builtins.c (expand_builtin, expand_builtin_apply,
	expand_builtin_mathfn, expand_builtin_mathfn_2,
	expand_builtin_mathfn_3, expand_builtin_setjmp_setup):
	Remove calls to emit_queue and protect_from_queue.
	* calls.c (expand_call, precompute_arguments,
	precompute_register_parameters, rtx_for_function_call,
	store_one_arg): Likewise.
	* dojump.c (do_compare_and_jump, do_jump): Likewise.
	* explow.c (memory_address): Likewise.
	* expmed.c (clear_by_pieces_1, clear_storage,
	clear_storage_via_libcall, emit_group_load,
	emit_group_store, emit_store_flag,
	expand_expr_real_1, store_by_pieces,
	store_constructor, store_expr, try_casesi,
	try_tablejump): Likewise.
	* function.c (expand_pending_sizes): Likewise.
	* optabs.c (emit_cmp_and_jump_insns,
	emit_conditional_add, emit_conditional_move,
	expand_fix, expand_float, prepare_cmp_insn): Likewise.
	* stmt.c (emit_case_bit_tests,
	expand_asm_expr, expand_computed_goto,
	expand_decl_init, expand_end_case_type,
	expand_end_stmt_expr, expand_expr_stmt_value,
	expand_return, expand_start_case,
	optimize_tail_recursion): Likewise.
	* config/c4x/c4x.c (c4x_expand_builtin): Likewise.
	* config/s390/s390.c (s390_expand_cmpmem): Likewise.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@84675 138bc75d-0d04-0410-961f-82ee72b054a4
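The mechanical shape of the cleanup, as a hedged sketch (illustrative fragments, not code from this patch; expand_expr, protect_from_queue, emit_queue and emit_move_insn are the real interfaces involved, everything else here is assumed for the example). Before the change, any expander putting an operand into an insn had to assume it might be a QUEUED rtx standing for a pending postincrement:

    /* Before: unwrap possible QUEUED operands and flush the queue.  */
    rtx op0 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
    op0 = protect_from_queue (op0, 0);   /* QUEUED -> usable rtx */
    emit_queue ();                       /* emit pending increments */
    emit_move_insn (target, op0);

    /* After: increments are expanded eagerly as assignments, so
       expand_expr already returns RTL that can go straight into
       an insn.  */
    rtx op1 = expand_expr (arg, NULL_RTX, VOIDmode, 0);
    emit_move_insn (target, op1);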
Diffstat (limited to 'gcc')
-rw-r--r--   gcc/ChangeLog                  54
-rw-r--r--   gcc/builtins.c                 14
-rw-r--r--   gcc/calls.c                    31
-rw-r--r--   gcc/config/c4x/c4x.c            7
-rw-r--r--   gcc/config/mn10300/mn10300.c    3
-rw-r--r--   gcc/config/s390/s390.c          4
-rw-r--r--   gcc/dojump.c                    8
-rw-r--r--   gcc/emit-rtl.c                  6
-rw-r--r--   gcc/explow.c                   10
-rw-r--r--   gcc/expmed.c                   33
-rw-r--r--   gcc/expr.c                    566
-rw-r--r--   gcc/expr.h                     35
-rw-r--r--   gcc/function.c                  9
-rw-r--r--   gcc/function.h                  4
-rw-r--r--   gcc/genattrtab.c                3
-rw-r--r--   gcc/local-alloc.c               3
-rw-r--r--   gcc/optabs.c                  115
-rw-r--r--   gcc/rtl.c                       1
-rw-r--r--   gcc/rtl.def                    18
-rw-r--r--   gcc/rtlanal.c                   6
-rw-r--r--   gcc/simplify-rtx.c              3
-rw-r--r--   gcc/stmt.c                     27
22 files changed, 100 insertions, 860 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 303d83932c9..1d34c8919bd 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,57 @@
+2004-07-14 Paolo Bonzini <bonzini@gnu.org>
+
+ * expr.c (enqueue_insn, finish_expr_for_function,
+ protect_from_queue, queued_subexp_p, mark_queue,
+ emit_insns_enqueued_after_mark, emit_queue,
+ expand_increment): Remove.
+ (store_constructor): Expand increment as an assignment.
+ (expand_expr_real_1 <case PREINCREMENT_EXPR,
+ case PREDECREMENT_EXPR, case POSTINCREMENT_EXPR,
+ case POSTDECREMENT_EXPR>): Abort.
+ * expr.h (QUEUED_VAR, QUEUED_INSN, QUEUED_COPY,
+ QUEUED_BODY, QUEUED_NEXT, finish_expr_for_function,
+ protect_from_queue, emit_queue, queued_subexp_p): Remove.
+ * function.h (pending_chain, x_pending_chain): Remove.
+ * rtl.def (QUEUED): Remove.
+
+ * emit-rtl.c (copy_insn_1, copy_most_rtx,
+ set_used_flags, verify_rtx_sharing): Remove references to QUEUED.
+ * genattrtab.c (attr_copy_rtx, clear_struct_flag,
+ encode_units_mask): Likewise.
+ * local-alloc.c (equiv_init_varies_p): Likewise.
+ * rtl.c (copy_rtx): Likewise.
+ * rtlanal.c (rtx_unstable_p, rtx_varies_p): Likewise.
+ * simplify-rtx.c (simplify_gen_subreg): Likewise.
+ * config/mn10300/mn10300.c (legitimate_pic_operand_p): Likewise.
+
+ * builtins.c (expand_builtin, expand_builtin_apply,
+ expand_builtin_mathfn, expand_builtin_mathfn_2,
+ expand_builtin_mathfn_3, expand_builtin_setjmp_setup):
+ Remove calls to emit_queue and protect_from_queue.
+ * calls.c (expand_call, precompute_arguments,
+ precompute_register_parameters, rtx_for_function_call,
+ store_one_arg): Likewise.
+ * dojump.c (do_compare_and_jump, do_jump): Likewise.
+ * explow.c (memory_address): Likewise.
+ * expmed.c (clear_by_pieces_1, clear_storage,
+ clear_storage_via_libcall, emit_group_load,
+ emit_group_store, emit_store_flag,
+ expand_expr_real_1, store_by_pieces,
+ store_constructor, store_expr, try_casesi,
+ try_tablejump): Likewise.
+ * function.c (expand_pending_sizes): Likewise.
+ * optabs.c (emit_cmp_and_jump_insns,
+ emit_conditional_add, emit_conditional_move,
+ expand_fix, expand_float, prepare_cmp_insn): Likewise.
+ * stmt.c (emit_case_bit_tests,
+ expand_asm_expr, expand_computed_goto,
+ expand_decl_init, expand_end_case_type,
+ expand_end_stmt_expr, expand_expr_stmt_value,
+ expand_return, expand_start_case,
+ optimize_tail_recursion): Likewise.
+ * config/c4x/c4x.c (c4x_expand_builtin): Likewise.
+ * config/s390/s390.c (s390_expand_cmpmem): Likewise.
+
 2004-07-14  Ben Elliston  <bje@au.ibm.com>
* vec.h: Comment fix.
diff --git a/gcc/builtins.c b/gcc/builtins.c
index 292f50dbf58..58eafd403af 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -524,8 +524,6 @@ expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
- emit_queue ();
-
/* We store the frame pointer and the address of receiver_label in
the buffer and use the rest of it for the stack save area, which
is machine-dependent. */
@@ -961,9 +959,8 @@ expand_builtin_prefetch (tree arglist)
}
emit_insn (gen_prefetch (op0, op1, op2));
}
- else
#endif
- op0 = protect_from_queue (op0, 0);
+
/* Don't do anything with direct references to volatile memory, but
generate code to handle other side effects. */
if (!MEM_P (op0) && side_effects_p (op0))
@@ -1272,9 +1269,6 @@ expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
incoming_args, 0, OPTAB_LIB_WIDEN);
#endif
- /* Perform postincrements before actually calling the function. */
- emit_queue ();
-
/* Push a new argument block and copy the arguments. Do not allow
the (potential) memcpy call below to interfere with our stack
manipulations. */
@@ -1778,7 +1772,6 @@ expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
op0 = expand_expr (arg, subtarget, VOIDmode, 0);
- emit_queue ();
start_sequence ();
/* Compute into TARGET.
@@ -1933,7 +1926,6 @@ expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
op1 = expand_expr (arg1, 0, VOIDmode, 0);
- emit_queue ();
start_sequence ();
/* Compute into TARGET.
@@ -2038,7 +2030,6 @@ expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
op0 = expand_expr (arg, subtarget, VOIDmode, 0);
- emit_queue ();
start_sequence ();
/* Compute into TARGET.
@@ -5694,9 +5685,6 @@ expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
- /* Perform postincrements before expanding builtin functions. */
- emit_queue ();
-
if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
diff --git a/gcc/calls.c b/gcc/calls.c
index 88bac101ae8..d348321250b 100644
--- a/gcc/calls.c
+++ b/gcc/calls.c
@@ -163,8 +163,6 @@ rtx
prepare_call_address (rtx funexp, rtx static_chain_value,
rtx *call_fusage, int reg_parm_seen, int sibcallp)
{
- funexp = protect_from_queue (funexp, 0);
-
/* Make a valid memory address and copy constants through pseudo-regs,
but not for a constant address if -fno-function-cse. */
if (GET_CODE (funexp) != SYMBOL_REF)
@@ -663,10 +661,6 @@ precompute_register_parameters (int num_actuals, struct arg_data *args, int *reg
VOIDmode, 0);
preserve_temp_slots (args[i].value);
pop_temp_slots ();
-
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
}
/* If the value is a non-legitimate constant, force it into a
@@ -1256,15 +1250,8 @@ precompute_arguments (int flags, int num_actuals, struct arg_data *args)
if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
abort ();
- args[i].value
- = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
-
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
-
args[i].initial_value = args[i].value
- = protect_from_queue (args[i].value, 0);
+ = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
if (mode != args[i].mode)
@@ -1439,7 +1426,6 @@ rtx_for_function_call (tree fndecl, tree addr)
push_temp_slots ();
funexp = expand_expr (addr, NULL_RTX, VOIDmode, 0);
pop_temp_slots (); /* FUNEXP can't be BLKmode. */
- emit_queue ();
}
return funexp;
}
@@ -2365,10 +2351,6 @@ expand_call (tree exp, rtx target, int ignore)
if (pass == 0)
{
- /* Emit any queued insns now; otherwise they would end up in
- only one of the alternates. */
- emit_queue ();
-
/* State variables we need to save and restore between
iterations. */
save_pending_stack_adjust = pending_stack_adjust;
@@ -2790,9 +2772,6 @@ expand_call (tree exp, rtx target, int ignore)
load_register_parameters (args, num_actuals, &call_fusage, flags,
pass == 0, &sibcall_failure);
- /* Perform postincrements before actually calling the function. */
- emit_queue ();
-
/* Save a pointer to the last insn before the call, so that we can
later safely search backwards to find the CALL_INSN. */
before_call = get_last_insn ();
@@ -3548,9 +3527,6 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
|| (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
abort ();
- /* There's no need to call protect_from_queue, because
- either emit_move_insn or emit_push_insn will do that. */
-
/* Make sure it is a reasonable operand for a move or push insn. */
if (!REG_P (val) && !MEM_P (val)
&& ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
@@ -4056,7 +4032,6 @@ emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
for a value of mode OUTMODE,
with NARGS different arguments, passed as alternating rtx values
and machine_modes to convert them to.
- The rtx values should have been passed through protect_from_queue already.
FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
@@ -4428,10 +4403,6 @@ store_one_arg (struct arg_data *arg, rtx argblock, int flags,
be deferred during the rest of the arguments. */
NO_DEFER_POP;
- /* ANSI doesn't require a sequence point here,
- but PCC has one, so this will avoid some problems. */
- emit_queue ();
-
/* Free any temporary slots made in processing this argument. Show
that we might have taken the address of something and pushed that
as an operand. */
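The "ANSI doesn't require a sequence point here, but PCC has one" comments deleted above refer to flushing queued postincrements between the evaluation of successive call arguments. An illustrative C fragment (not from the patch) of the kind of code this affected:

    int i = 0;
    f (i++, i++);   /* Unsequenced modifications of i: undefined in
                       ISO C.  Old PCC behaved as if each argument
                       ended with a sequence point, and flushing the
                       increment queue per argument let GCC match that.
                       With increments expanded eagerly, in evaluation
                       order, no flush point is needed.  */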
diff --git a/gcc/config/c4x/c4x.c b/gcc/config/c4x/c4x.c
index bc384ee2e14..f1863cb68fb 100644
--- a/gcc/config/c4x/c4x.c
+++ b/gcc/config/c4x/c4x.c
@@ -4815,7 +4815,6 @@ c4x_expand_builtin (tree exp, rtx target,
case C4X_BUILTIN_FIX:
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_fixqfqi_clobber (target, r0));
@@ -4824,7 +4823,6 @@ c4x_expand_builtin (tree exp, rtx target,
case C4X_BUILTIN_FIX_ANSI:
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_fix_truncqfqi2 (target, r0));
@@ -4837,8 +4835,6 @@ c4x_expand_builtin (tree exp, rtx target,
arg1 = TREE_VALUE (TREE_CHAIN (arglist));
r0 = expand_expr (arg0, NULL_RTX, QImode, 0);
r1 = expand_expr (arg1, NULL_RTX, QImode, 0);
- r0 = protect_from_queue (r0, 0);
- r1 = protect_from_queue (r1, 0);
if (! target || ! register_operand (target, QImode))
target = gen_reg_rtx (QImode);
emit_insn (gen_mulqi3_24_clobber (target, r0, r1));
@@ -4849,7 +4845,6 @@ c4x_expand_builtin (tree exp, rtx target,
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QFmode))
target = gen_reg_rtx (QFmode);
emit_insn (gen_toieee (target, r0));
@@ -4860,7 +4855,6 @@ c4x_expand_builtin (tree exp, rtx target,
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (register_operand (r0, QFmode))
{
r1 = assign_stack_local (QFmode, GET_MODE_SIZE (QFmode), 0);
@@ -4877,7 +4871,6 @@ c4x_expand_builtin (tree exp, rtx target,
break;
arg0 = TREE_VALUE (arglist);
r0 = expand_expr (arg0, NULL_RTX, QFmode, 0);
- r0 = protect_from_queue (r0, 0);
if (! target || ! register_operand (target, QFmode))
target = gen_reg_rtx (QFmode);
emit_insn (gen_rcpfqf_clobber (target, r0));
diff --git a/gcc/config/mn10300/mn10300.c b/gcc/config/mn10300/mn10300.c
index a67a75d3225..7b805310e09 100644
--- a/gcc/config/mn10300/mn10300.c
+++ b/gcc/config/mn10300/mn10300.c
@@ -1826,9 +1826,6 @@ legitimate_pic_operand_p (rtx x)
|| XINT (x, 1) == UNSPEC_PLT))
return 1;
- if (GET_CODE (x) == QUEUED)
- return legitimate_pic_operand_p (QUEUED_VAR (x));
-
fmt = GET_RTX_FORMAT (GET_CODE (x));
for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
{
diff --git a/gcc/config/s390/s390.c b/gcc/config/s390/s390.c
index 4d4aef8c9c8..2e576a257a6 100644
--- a/gcc/config/s390/s390.c
+++ b/gcc/config/s390/s390.c
@@ -3200,10 +3200,6 @@ s390_expand_cmpmem (rtx target, rtx op0, rtx op1, rtx len)
rtx (*gen_result) (rtx) =
GET_MODE (target) == DImode ? gen_cmpint_di : gen_cmpint_si;
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
- len = protect_from_queue (len, 0);
-
if (GET_CODE (len) == CONST_INT && INTVAL (len) >= 0 && INTVAL (len) <= 256)
{
if (INTVAL (len) > 0)
diff --git a/gcc/dojump.c b/gcc/dojump.c
index 0ce27195b9c..96493f3de8f 100644
--- a/gcc/dojump.c
+++ b/gcc/dojump.c
@@ -167,8 +167,6 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label)
tree type;
enum machine_mode mode;
- emit_queue ();
-
switch (code)
{
case ERROR_MARK:
@@ -306,7 +304,6 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label)
preserve_temp_slots (NULL_RTX);
free_temp_slots ();
pop_temp_slots ();
- emit_queue ();
do_pending_stack_adjust ();
do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
break;
@@ -619,8 +616,6 @@ do_jump (tree exp, rtx if_false_label, rtx if_true_label)
temp = copy_to_reg (temp);
#endif
do_pending_stack_adjust ();
- /* Do any postincrements in the expression that was tested. */
- emit_queue ();
if (GET_CODE (temp) == CONST_INT
|| (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
@@ -1018,9 +1013,6 @@ do_compare_and_jump (tree exp, enum rtx_code signed_code,
}
#endif
- /* Do any postincrements in the expression that was tested. */
- emit_queue ();
-
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
((mode == BLKmode)
? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index abc16d5bec5..cca6f63de61 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -2227,7 +2227,6 @@ verify_rtx_sharing (rtx orig, rtx insn)
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
@@ -2408,7 +2407,6 @@ copy_most_rtx (rtx orig, rtx may_share)
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
@@ -2525,7 +2523,6 @@ repeat:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
@@ -2651,7 +2648,6 @@ repeat:
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
@@ -2721,7 +2717,6 @@ set_used_flags (rtx x)
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
@@ -5005,7 +5000,6 @@ copy_insn_1 (rtx orig)
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
diff --git a/gcc/explow.c b/gcc/explow.c
index cfaadf3afa9..54a863501ea 100644
--- a/gcc/explow.c
+++ b/gcc/explow.c
@@ -443,14 +443,6 @@ memory_address (enum machine_mode mode, rtx x)
if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
x = force_reg (Pmode, x);
- /* Accept a QUEUED that refers to a REG
- even though that isn't a valid address.
- On attempting to put this in an insn we will call protect_from_queue
- which will turn it into a REG, which is valid. */
- else if (GET_CODE (x) == QUEUED
- && REG_P (QUEUED_VAR (x)))
- ;
-
/* We get better cse by rejecting indirect addressing at this stage.
Let the combiner create indirect addresses where appropriate.
For now, generate the code so that the subexpressions useful to share
@@ -855,7 +847,6 @@ void
adjust_stack (rtx adjust)
{
rtx temp;
- adjust = protect_from_queue (adjust, 0);
if (adjust == const0_rtx)
return;
@@ -885,7 +876,6 @@ void
anti_adjust_stack (rtx adjust)
{
rtx temp;
- adjust = protect_from_queue (adjust, 0);
if (adjust == const0_rtx)
return;
diff --git a/gcc/expmed.c b/gcc/expmed.c
index c1f9873e978..08a8863d567 100644
--- a/gcc/expmed.c
+++ b/gcc/expmed.c
@@ -353,8 +353,6 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
op0 = SUBREG_REG (op0);
}
- value = protect_from_queue (value, 0);
-
/* Use vec_set patterns for inserting parts of vectors whenever
available. */
if (VECTOR_MODE_P (GET_MODE (op0))
@@ -602,8 +600,6 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
}
offset = 0;
}
- else
- op0 = protect_from_queue (op0, 1);
/* If VALUE is a floating-point mode, access it as an integer of the
corresponding size. This can occur on a machine with 64 bit registers
@@ -771,9 +767,7 @@ store_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
The field starts at position BITPOS within the byte.
(If OP0 is a register, it may be a full word or a narrower mode,
but BITPOS still counts within a full word,
- which is significant on bigendian machines.)
-
- Note that protect_from_queue has already been done on OP0 and VALUE. */
+ which is significant on bigendian machines.) */
static void
store_fixed_bit_field (rtx op0, unsigned HOST_WIDE_INT offset,
@@ -1369,8 +1363,6 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
}
offset = 0;
}
- else
- op0 = protect_from_queue (str_rtx, 1);
/* Now OFFSET is nonzero only for memory operands. */
@@ -1487,8 +1479,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
bitsize_rtx = GEN_INT (bitsize);
bitpos_rtx = GEN_INT (xbitpos);
- pat = gen_extzv (protect_from_queue (xtarget, 1),
- xop0, bitsize_rtx, bitpos_rtx);
+ pat = gen_extzv (xtarget, xop0, bitsize_rtx, bitpos_rtx);
if (pat)
{
emit_insn (pat);
@@ -1616,8 +1607,7 @@ extract_bit_field (rtx str_rtx, unsigned HOST_WIDE_INT bitsize,
bitsize_rtx = GEN_INT (bitsize);
bitpos_rtx = GEN_INT (xbitpos);
- pat = gen_extv (protect_from_queue (xtarget, 1),
- xop0, bitsize_rtx, bitpos_rtx);
+ pat = gen_extv (xtarget, xop0, bitsize_rtx, bitpos_rtx);
if (pat)
{
emit_insn (pat);
@@ -2523,10 +2513,6 @@ expand_mult_const (enum machine_mode mode, rtx op0, HOST_WIDE_INT val,
int opno;
enum machine_mode nmode;
- /* op0 must be register to make mult_cost match the precomputed
- shiftadd_cost array. */
- op0 = protect_from_queue (op0, 0);
-
/* Avoid referencing memory over and over.
For speed, but also for correctness when mem is volatile. */
if (MEM_P (op0))
@@ -4564,10 +4550,6 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
rtx last = get_last_insn ();
rtx pattern, comparison;
- /* ??? Ok to do this and then fail? */
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
-
if (unsignedp)
code = unsigned_condition (code);
@@ -4672,7 +4654,6 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
first. */
if (GET_MODE_SIZE (target_mode) > GET_MODE_SIZE (mode))
{
- op0 = protect_from_queue (op0, 0);
op0 = convert_modes (target_mode, mode, op0, 0);
mode = target_mode;
}
@@ -4704,13 +4685,8 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
insn_operand_predicate_fn pred;
/* We think we may be able to do this with a scc insn. Emit the
- comparison and then the scc insn.
-
- compare_from_rtx may call emit_queue, which would be deleted below
- if the scc insn fails. So call it ourselves before setting LAST.
- Likewise for do_pending_stack_adjust. */
+ comparison and then the scc insn. */
- emit_queue ();
do_pending_stack_adjust ();
last = get_last_insn ();
@@ -4947,7 +4923,6 @@ emit_store_flag (rtx target, enum rtx_code code, rtx op0, rtx op1,
tem = expand_unop (mode, ffs_optab, op0, subtarget, 1);
else if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
{
- op0 = protect_from_queue (op0, 0);
tem = convert_modes (word_mode, mode, op0, 1);
mode = word_mode;
}
diff --git a/gcc/expr.c b/gcc/expr.c
index f9c65485bd2..8a54e4b3d9e 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -119,7 +119,6 @@ struct store_by_pieces
int reverse;
};
-static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
@@ -151,7 +150,6 @@ static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
-static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
@@ -312,215 +310,6 @@ init_expr (void)
{
cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
-
-/* Small sanity check that the queue is empty at the end of a function. */
-
-void
-finish_expr_for_function (void)
-{
- if (pending_chain)
- abort ();
-}
-
-/* Manage the queue of increment instructions to be output
- for POSTINCREMENT_EXPR expressions, etc. */
-
-/* Queue up to increment (or change) VAR later. BODY says how:
- BODY should be the same thing you would pass to emit_insn
- to increment right away. It will go to emit_insn later on.
-
- The value is a QUEUED expression to be used in place of VAR
- where you want to guarantee the pre-incrementation value of VAR. */
-
-static rtx
-enqueue_insn (rtx var, rtx body)
-{
- pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
- body, pending_chain);
- return pending_chain;
-}
-
-/* Use protect_from_queue to convert a QUEUED expression
- into something that you can put immediately into an instruction.
- If the queued incrementation has not happened yet,
- protect_from_queue returns the variable itself.
- If the incrementation has happened, protect_from_queue returns a temp
- that contains a copy of the old value of the variable.
-
- Any time an rtx which might possibly be a QUEUED is to be put
- into an instruction, it must be passed through protect_from_queue first.
- QUEUED expressions are not meaningful in instructions.
-
- Do not pass a value through protect_from_queue and then hold
- on to it for a while before putting it in an instruction!
- If the queue is flushed in between, incorrect code will result. */
-
-rtx
-protect_from_queue (rtx x, int modify)
-{
- RTX_CODE code = GET_CODE (x);
-
-#if 0 /* A QUEUED can hang around after the queue is forced out. */
- /* Shortcut for most common case. */
- if (pending_chain == 0)
- return x;
-#endif
-
- if (code != QUEUED)
- {
- /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
- use of autoincrement. Make a copy of the contents of the memory
- location rather than a copy of the address, but not if the value is
- of mode BLKmode. Don't modify X in place since it might be
- shared. */
- if (code == MEM && GET_MODE (x) != BLKmode
- && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
- {
- rtx y = XEXP (x, 0);
- rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
-
- if (QUEUED_INSN (y))
- {
- rtx temp = gen_reg_rtx (GET_MODE (x));
-
- emit_insn_before (gen_move_insn (temp, new),
- QUEUED_INSN (y));
- return temp;
- }
-
- /* Copy the address into a pseudo, so that the returned value
- remains correct across calls to emit_queue. */
- return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
- }
-
- /* Otherwise, recursively protect the subexpressions of all
- the kinds of rtx's that can contain a QUEUED. */
- if (code == MEM)
- {
- rtx tem = protect_from_queue (XEXP (x, 0), 0);
- if (tem != XEXP (x, 0))
- {
- x = copy_rtx (x);
- XEXP (x, 0) = tem;
- }
- }
- else if (code == PLUS || code == MULT)
- {
- rtx new0 = protect_from_queue (XEXP (x, 0), 0);
- rtx new1 = protect_from_queue (XEXP (x, 1), 0);
- if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
- {
- x = copy_rtx (x);
- XEXP (x, 0) = new0;
- XEXP (x, 1) = new1;
- }
- }
- return x;
- }
- /* If the increment has not happened, use the variable itself. Copy it
- into a new pseudo so that the value remains correct across calls to
- emit_queue. */
- if (QUEUED_INSN (x) == 0)
- return copy_to_reg (QUEUED_VAR (x));
- /* If the increment has happened and a pre-increment copy exists,
- use that copy. */
- if (QUEUED_COPY (x) != 0)
- return QUEUED_COPY (x);
- /* The increment has happened but we haven't set up a pre-increment copy.
- Set one up now, and use it. */
- QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
- emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
- QUEUED_INSN (x));
- return QUEUED_COPY (x);
-}
-
-/* Return nonzero if X contains a QUEUED expression:
- if it contains anything that will be altered by a queued increment.
- We handle only combinations of MEM, PLUS, MINUS and MULT operators
- since memory addresses generally contain only those. */
-
-int
-queued_subexp_p (rtx x)
-{
- enum rtx_code code = GET_CODE (x);
- switch (code)
- {
- case QUEUED:
- return 1;
- case MEM:
- return queued_subexp_p (XEXP (x, 0));
- case MULT:
- case PLUS:
- case MINUS:
- return (queued_subexp_p (XEXP (x, 0))
- || queued_subexp_p (XEXP (x, 1)));
- default:
- return 0;
- }
-}
-
-/* Retrieve a mark on the queue. */
-
-static rtx
-mark_queue (void)
-{
- return pending_chain;
-}
-
-/* Perform all the pending incrementations that have been enqueued
- after MARK was retrieved. If MARK is null, perform all the
- pending incrementations. */
-
-static void
-emit_insns_enqueued_after_mark (rtx mark)
-{
- rtx p;
-
- /* The marked incrementation may have been emitted in the meantime
- through a call to emit_queue. In this case, the mark is not valid
- anymore so do nothing. */
- if (mark && ! QUEUED_BODY (mark))
- return;
-
- while ((p = pending_chain) != mark)
- {
- rtx body = QUEUED_BODY (p);
-
- switch (GET_CODE (body))
- {
- case INSN:
- case JUMP_INSN:
- case CALL_INSN:
- case CODE_LABEL:
- case BARRIER:
- case NOTE:
- QUEUED_INSN (p) = body;
- emit_insn (body);
- break;
-
-#ifdef ENABLE_CHECKING
- case SEQUENCE:
- abort ();
- break;
-#endif
-
- default:
- QUEUED_INSN (p) = emit_insn (body);
- break;
- }
-
- QUEUED_BODY (p) = 0;
- pending_chain = QUEUED_NEXT (p);
- }
-}
-
-/* Perform all the pending incrementations. */
-
-void
-emit_queue (void)
-{
- emit_insns_enqueued_after_mark (NULL_RTX);
-}
/* Copy data from FROM to TO, where the machine modes are not the same.
Both modes may be integer, or both may be floating.
@@ -541,8 +330,6 @@ convert_move (rtx to, rtx from, int unsignedp)
enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
: (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
if (to_real != from_real)
abort ();
@@ -899,10 +686,7 @@ convert_move (rtx to, rtx from, int unsignedp)
Both X and MODE may be floating, or both integer.
UNSIGNEDP is nonzero if X is an unsigned value.
This can be done by referring to a part of X in place
- or by copying to a new temporary with conversion.
-
- This function *must not* call protect_from_queue
- except when putting X into an insn (in which case convert_move does it). */
+ or by copying to a new temporary with conversion. */
rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
@@ -918,10 +702,7 @@ convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
This can be done by referring to a part of X in place
or by copying to a new temporary with conversion.
- You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
-
- This function *must not* call protect_from_queue
- except when putting X into an insn (in which case convert_move does it). */
+ You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
@@ -1040,8 +821,7 @@ can_move_by_pieces (unsigned HOST_WIDE_INT len,
}
/* Generate several move instructions to copy LEN bytes from block FROM to
- block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
- and TO through protect_from_queue before calling.
+ block TO. (These are MEM rtx's with BLKmode).
If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
used to push FROM to the stack.
@@ -1342,10 +1122,6 @@ emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
- x = protect_from_queue (x, 1);
- y = protect_from_queue (y, 0);
- size = protect_from_queue (size, 0);
-
if (!MEM_P (x))
abort ();
if (!MEM_P (y))
@@ -1513,24 +1289,9 @@ emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
enum machine_mode size_mode;
rtx retval;
- /* DST, SRC, or SIZE may have been passed through protect_from_queue.
-
- It is unsafe to save the value generated by protect_from_queue and reuse
- it later. Consider what happens if emit_queue is called before the
- return value from protect_from_queue is used.
-
- Expansion of the CALL_EXPR below will call emit_queue before we are
- finished emitting RTL for argument setup. So if we are not careful we
- could get the wrong value for an argument.
-
- To avoid this problem we go ahead and emit code to copy the addresses of
- DST and SRC and SIZE into new pseudos.
-
- Note this is not strictly needed for library calls since they do not call
- emit_queue before loading their arguments. However, we may need to have
- library calls call emit_queue in the future since failing to do so could
- cause problems for targets which define SMALL_REGISTER_CLASSES and pass
- arguments in registers. */
+ /* Emit code to copy the addresses of DST and SRC and SIZE into new
+ pseudos. We can then place those new pseudos into a VAR_DECL and
+ use them later. */
dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
@@ -1926,8 +1687,6 @@ emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
build_int_2 (shift, 0), tmps[i], 0);
}
- emit_queue ();
-
/* Copy the extracted pieces into the proper (probable) hard regs. */
for (i = start; i < XVECLEN (dst, 0); i++)
emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
@@ -1982,7 +1741,6 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
tmps[i] = gen_reg_rtx (GET_MODE (reg));
emit_move_insn (tmps[i], reg);
}
- emit_queue ();
/* If we won't be storing directly into memory, protect the real destination
from strange tricks we might play. */
@@ -2076,8 +1834,6 @@ emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
mode, tmps[i]);
}
- emit_queue ();
-
/* Copy from the pseudo into the (probable) hard reg. */
if (orig_dst != dst)
emit_move_insn (orig_dst, dst);
@@ -2322,7 +2078,6 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
if (! STORE_BY_PIECES_P (len, align))
abort ();
- to = protect_from_queue (to, 1);
data.constfun = constfun;
data.constfundata = constfundata;
data.len = len;
@@ -2360,8 +2115,7 @@ store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
}
/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
- rtx with BLKmode). The caller must pass TO through protect_from_queue
- before calling. ALIGN is maximum alignment we can assume. */
+ rtx with BLKmode). ALIGN is maximum alignment we can assume. */
static void
clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
@@ -2391,8 +2145,7 @@ clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
/* Subroutine of clear_by_pieces and store_by_pieces.
Generate several move instructions to store LEN bytes of block TO. (A MEM
- rtx with BLKmode). The caller must pass TO through protect_from_queue
- before calling. ALIGN is maximum alignment we can assume. */
+ rtx with BLKmode). ALIGN is maximum alignment we can assume. */
static void
store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
@@ -2532,9 +2285,6 @@ clear_storage (rtx object, rtx size)
emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
else
{
- object = protect_from_queue (object, 1);
- size = protect_from_queue (size, 0);
-
if (size == const0_rtx)
;
else if (GET_CODE (size) == CONST_INT
@@ -2615,24 +2365,8 @@ clear_storage_via_libcall (rtx object, rtx size)
enum machine_mode size_mode;
rtx retval;
- /* OBJECT or SIZE may have been passed through protect_from_queue.
-
- It is unsafe to save the value generated by protect_from_queue
- and reuse it later. Consider what happens if emit_queue is
- called before the return value from protect_from_queue is used.
-
- Expansion of the CALL_EXPR below will call emit_queue before
- we are finished emitting RTL for argument setup. So if we are
- not careful we could get the wrong value for an argument.
-
- To avoid this problem we go ahead and emit code to copy OBJECT
- and SIZE into new pseudos.
-
- Note this is not strictly needed for library calls since they
- do not call emit_queue before loading their arguments. However,
- we may need to have library calls call emit_queue in the future
- since failing to do so could cause problems for targets which
- define SMALL_REGISTER_CLASSES and pass arguments in registers. */
+ /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
+ place those new pseudos into a VAR_DECL and use them later. */
object = copy_to_mode_reg (Pmode, XEXP (object, 0));
@@ -2736,9 +2470,6 @@ emit_move_insn (rtx x, rtx y)
rtx y_cst = NULL_RTX;
rtx last_insn, set;
- x = protect_from_queue (x, 1);
- y = protect_from_queue (y, 0);
-
if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
abort ();
@@ -3403,7 +3134,7 @@ emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
if (where_pad != none)
where_pad = (where_pad == downward ? upward : downward);
- xinner = x = protect_from_queue (x, 0);
+ xinner = x;
if (mode == BLKmode)
{
@@ -3847,8 +3578,6 @@ expand_assignment (tree to, tree from, int want_value)
&& (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
break;
value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
- value = protect_from_queue (value, 0);
- to_rtx = protect_from_queue (to_rtx, 1);
binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
if (bitsize == 1
&& count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
@@ -4030,7 +3759,6 @@ store_expr (tree exp, rtx target, int want_value)
{
rtx temp;
rtx alt_rtl = NULL_RTX;
- rtx mark = mark_queue ();
int dont_return_target = 0;
int dont_store_target = 0;
@@ -4050,7 +3778,6 @@ store_expr (tree exp, rtx target, int want_value)
part. */
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
- emit_queue ();
return store_expr (TREE_OPERAND (exp, 1), target, want_value);
}
else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
@@ -4062,47 +3789,19 @@ store_expr (tree exp, rtx target, int want_value)
rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
- emit_queue ();
- target = protect_from_queue (target, 1);
-
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
- emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
- emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
return want_value & 1 ? target : NULL_RTX;
}
- else if (queued_subexp_p (target))
- /* If target contains a postincrement, let's not risk
- using it as the place to generate the rhs. */
- {
- if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
- {
- /* Expand EXP into a new pseudo. */
- temp = gen_reg_rtx (GET_MODE (target));
- temp = expand_expr (exp, temp, GET_MODE (target),
- (want_value & 2
- ? EXPAND_STACK_PARM : EXPAND_NORMAL));
- }
- else
- temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
- (want_value & 2
- ? EXPAND_STACK_PARM : EXPAND_NORMAL));
-
- /* If target is volatile, ANSI requires accessing the value
- *from* the target, if it is accessed. So make that happen.
- In no case return the target itself. */
- if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
- dont_return_target = 1;
- }
else if ((want_value & 1) != 0
&& MEM_P (target)
&& ! MEM_VOLATILE_P (target)
@@ -4273,9 +3972,6 @@ store_expr (tree exp, rtx target, int want_value)
bit-initialized. */
&& expr_size (exp) != const0_rtx)
{
- emit_insns_enqueued_after_mark (mark);
- target = protect_from_queue (target, 1);
- temp = protect_from_queue (temp, 0);
if (GET_MODE (temp) != GET_MODE (target)
&& GET_MODE (temp) != VOIDmode)
{
@@ -5031,7 +4727,6 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
/* Build the head of the loop. */
do_pending_stack_adjust ();
- emit_queue ();
emit_label (loop_start);
/* Assign value to element index. */
@@ -5060,9 +4755,10 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
/* Update the loop counter, and jump to the head of
the loop. */
- expand_increment (build (PREINCREMENT_EXPR,
- TREE_TYPE (index),
- index, integer_one_node), 0, 0);
+ expand_assignment (index,
+ build2 (PLUS_EXPR, TREE_TYPE (index),
+ index, integer_one_node), 0);
+
emit_jump (loop_start);
/* Build the end of the loop. */
@@ -8153,7 +7849,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case COMPOUND_EXPR:
expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
- emit_queue ();
return expand_expr_real (TREE_OPERAND (exp, 1),
(ignore ? const0_rtx : target),
VOIDmode, modifier, alt_rtl);
@@ -8492,7 +8187,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- emit_queue ();
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
@@ -8505,7 +8199,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
}
- emit_queue ();
emit_label (op1);
OK_DEFER_POP;
@@ -8580,15 +8273,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
expand_return (TREE_OPERAND (exp, 0));
return const0_rtx;
- case PREINCREMENT_EXPR:
- case PREDECREMENT_EXPR:
- return REDUCE_BIT_FIELD (expand_increment (exp, 0, ignore));
-
- case POSTINCREMENT_EXPR:
- case POSTDECREMENT_EXPR:
- /* Faster to treat as pre-increment if result is not used. */
- return REDUCE_BIT_FIELD (expand_increment (exp, ! ignore, ignore));
-
case ADDR_EXPR:
if (modifier == EXPAND_STACK_PARM)
target = 0;
@@ -8619,10 +8303,6 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
if (ignore)
return op0;
- /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
- clever and returns a REG when given a MEM. */
- op0 = protect_from_queue (op0, 1);
-
/* We would like the object in memory. If it is a constant, we can
have it be statically allocated into memory. For a non-constant,
we need to allocate some memory and store the value into it. */
@@ -8836,6 +8516,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
case FILTER_EXPR:
return get_exception_filter (cfun);
+ case PREINCREMENT_EXPR:
+ case PREDECREMENT_EXPR:
+ case POSTINCREMENT_EXPR:
+ case POSTDECREMENT_EXPR:
case FDESC_EXPR:
/* Function descriptors are not valid except for as
initialization constants, and should not be expanded. */
@@ -9071,209 +8755,6 @@ string_constant (tree arg, tree *ptr_offset)
return 0;
}
-/* Expand code for a post- or pre- increment or decrement
- and return the RTX for the result.
- POST is 1 for postinc/decrements and 0 for preinc/decrements. */
-
-static rtx
-expand_increment (tree exp, int post, int ignore)
-{
- rtx op0, op1;
- rtx temp, value;
- tree incremented = TREE_OPERAND (exp, 0);
- optab this_optab = add_optab;
- int icode;
- enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
- int op0_is_copy = 0;
- int single_insn = 0;
- /* 1 means we can't store into OP0 directly,
- because it is a subreg narrower than a word,
- and we don't dare clobber the rest of the word. */
- int bad_subreg = 0;
-
- /* Stabilize any component ref that might need to be
- evaluated more than once below. */
- if (!post
- || TREE_CODE (incremented) == BIT_FIELD_REF
- || (TREE_CODE (incremented) == COMPONENT_REF
- && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
- || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
- incremented = stabilize_reference (incremented);
- /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
- ones into save exprs so that they don't accidentally get evaluated
- more than once by the code below. */
- if (TREE_CODE (incremented) == PREINCREMENT_EXPR
- || TREE_CODE (incremented) == PREDECREMENT_EXPR)
- incremented = save_expr (incremented);
-
- /* Compute the operands as RTX.
- Note whether OP0 is the actual lvalue or a copy of it:
- I believe it is a copy iff it is a register or subreg
- and insns were generated in computing it. */
-
- temp = get_last_insn ();
- op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
-
- /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
- in place but instead must do sign- or zero-extension during assignment,
- so we copy it into a new register and let the code below use it as
- a copy.
-
- Note that we can safely modify this SUBREG since it is know not to be
- shared (it was made by the expand_expr call above). */
-
- if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
- {
- if (post)
- SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
- else
- bad_subreg = 1;
- }
- else if (GET_CODE (op0) == SUBREG
- && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
- {
- /* We cannot increment this SUBREG in place. If we are
- post-incrementing, get a copy of the old value. Otherwise,
- just mark that we cannot increment in place. */
- if (post)
- op0 = copy_to_reg (op0);
- else
- bad_subreg = 1;
- }
-
- op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
- && temp != get_last_insn ());
- op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
-
- /* Decide whether incrementing or decrementing. */
- if (TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- this_optab = sub_optab;
-
- /* Convert decrement by a constant into a negative increment. */
- if (this_optab == sub_optab
- && GET_CODE (op1) == CONST_INT)
- {
- op1 = GEN_INT (-INTVAL (op1));
- this_optab = add_optab;
- }
-
- if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
- this_optab = this_optab == add_optab ? addv_optab : subv_optab;
-
- /* For a preincrement, see if we can do this with a single instruction. */
- if (!post)
- {
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_data[icode].operand[0].predicate) (op0, mode)
- && (*insn_data[icode].operand[1].predicate) (op0, mode)
- && (*insn_data[icode].operand[2].predicate) (op1, mode))
- single_insn = 1;
- }
-
- /* If OP0 is not the actual lvalue, but rather a copy in a register,
- then we cannot just increment OP0. We must therefore contrive to
- increment the original value. Then, for postincrement, we can return
- OP0 since it is a copy of the old value. For preincrement, expand here
- unless we can do it with a single insn.
-
- Likewise if storing directly into OP0 would clobber high bits
- we need to preserve (bad_subreg). */
- if (op0_is_copy || (!post && !single_insn) || bad_subreg)
- {
- /* This is the easiest way to increment the value wherever it is.
- Problems with multiple evaluation of INCREMENTED are prevented
- because either (1) it is a component_ref or preincrement,
- in which case it was stabilized above, or (2) it is an array_ref
- with constant index in an array in a register, which is
- safe to reevaluate. */
- tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
- || TREE_CODE (exp) == PREDECREMENT_EXPR)
- ? MINUS_EXPR : PLUS_EXPR),
- TREE_TYPE (exp),
- incremented,
- TREE_OPERAND (exp, 1));
-
- while (TREE_CODE (incremented) == NOP_EXPR
- || TREE_CODE (incremented) == CONVERT_EXPR)
- {
- newexp = convert (TREE_TYPE (incremented), newexp);
- incremented = TREE_OPERAND (incremented, 0);
- }
-
- temp = expand_assignment (incremented, newexp, ! post && ! ignore);
- return post ? op0 : temp;
- }
-
- if (post)
- {
- /* We have a true reference to the value in OP0.
- If there is an insn to add or subtract in this mode, queue it.
- Queuing the increment insn avoids the register shuffling
- that often results if we must increment now and first save
- the old value for subsequent use. */
-
-#if 0 /* Turned off to avoid making extra insn for indexed memref. */
- op0 = stabilize (op0);
-#endif
-
- icode = (int) this_optab->handlers[(int) mode].insn_code;
- if (icode != (int) CODE_FOR_nothing
- /* Make sure that OP0 is valid for operands 0 and 1
- of the insn we want to queue. */
- && (*insn_data[icode].operand[0].predicate) (op0, mode)
- && (*insn_data[icode].operand[1].predicate) (op0, mode))
- {
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
- op1 = force_reg (mode, op1);
-
- return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
- }
- if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
- {
- rtx addr = (general_operand (XEXP (op0, 0), mode)
- ? force_reg (Pmode, XEXP (op0, 0))
- : copy_to_reg (XEXP (op0, 0)));
- rtx temp, result;
-
- op0 = replace_equiv_address (op0, addr);
- temp = force_reg (GET_MODE (op0), op0);
- if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
- op1 = force_reg (mode, op1);
-
- /* The increment queue is LIFO, thus we have to `queue'
- the instructions in reverse order. */
- enqueue_insn (op0, gen_move_insn (op0, temp));
- result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
- return result;
- }
- }
-
- /* Preincrement, or we can't increment with one simple insn. */
- if (post)
- /* Save a copy of the value before inc or dec, to return it later. */
- temp = value = copy_to_reg (op0);
- else
- /* Arrange to return the incremented value. */
- /* Copy the rtx because expand_binop will protect from the queue,
- and the results of that would be invalid for us to return
- if our caller does emit_queue before using our result. */
- temp = copy_rtx (value = op0);
-
- /* Increment however we can. */
- op1 = expand_binop (mode, this_optab, value, op1, op0,
- TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
-
- /* Make sure the value is stored into OP0. */
- if (op1 != op0)
- emit_move_insn (op0, op1);
-
- return temp;
-}
-
/* Generate code to calculate EXP using a store-flag instruction
and return an rtx for the result. EXP is either a comparison
or a TRUTH_NOT_EXPR whose operand is a comparison.
@@ -9481,9 +8962,7 @@ do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
because, if the emit_store_flag does anything it will succeed and
OP0 and OP1 will not be used subsequently. */
- result = emit_store_flag (target, code,
- queued_subexp_p (op0) ? copy_rtx (op0) : op0,
- queued_subexp_p (op1) ? copy_rtx (op1) : op1,
+ result = emit_store_flag (target, code, op0, op1,
operand_mode, unsignedp, 1);
if (result)
@@ -9588,8 +9067,7 @@ try_casesi (tree index_type, tree index_expr, tree minval, tree range,
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
}
- emit_queue ();
- index = protect_from_queue (index, 0);
+
do_pending_stack_adjust ();
op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
@@ -9715,8 +9193,6 @@ try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
convert (index_type, index_expr),
convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- emit_queue ();
- index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
do_tablejump (index, TYPE_MODE (index_type),
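With expand_increment gone, the one increment the expander still synthesizes itself, the loop counter in store_constructor, is built as an ordinary assignment tree and expanded on the spot, as the store_constructor hunk above shows (index is that function's loop-counter decl):

    /* `index = index + 1', expanded immediately instead of queued.  */
    expand_assignment (index,
                       build2 (PLUS_EXPR, TREE_TYPE (index),
                               index, integer_one_node),
                       0);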
diff --git a/gcc/expr.h b/gcc/expr.h
index 9a50410131e..ebd30c3b6f9 100644
--- a/gcc/expr.h
+++ b/gcc/expr.h
@@ -39,25 +39,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#define BRANCH_COST 1
#endif
-/* Macros to access the slots of a QUEUED rtx.
- Here rather than in rtl.h because only the expansion pass
- should ever encounter a QUEUED. */
-
-/* The variable for which an increment is queued. */
-#define QUEUED_VAR(P) XEXP (P, 0)
-/* If the increment has been emitted, this is the insn
- that does the increment. It is zero before the increment is emitted.
- If more than one insn is emitted, this is the first insn. */
-#define QUEUED_INSN(P) XEXP (P, 1)
-/* If a pre-increment copy has been generated, this is the copy
- (it is a temporary reg). Zero if no copy made yet. */
-#define QUEUED_COPY(P) XEXP (P, 2)
-/* This is the body to use for the insn to do the increment.
- It is used to emit the increment. */
-#define QUEUED_BODY(P) XEXP (P, 3)
-/* Next QUEUED in the queue. */
-#define QUEUED_NEXT(P) XEXP (P, 4)
-
/* This is the 4th arg to `expand_expr'.
EXPAND_STACK_PARM means we are possibly expanding a call param onto
the stack. Choosing a value of 2 isn't special; It just allows
@@ -304,8 +285,7 @@ extern void emit_libcall_block (rtx, rtx, rtx, rtx);
/* Create but don't emit one rtl instruction to perform certain operations.
Modes must match; operands must meet the operation's predicates.
- Likewise for subtraction and for just copying.
- These do not call protect_from_queue; caller must do so. */
+ Likewise for subtraction and for just copying. */
extern rtx gen_add2_insn (rtx, rtx);
extern rtx gen_add3_insn (rtx, rtx, rtx);
extern rtx gen_sub2_insn (rtx, rtx);
@@ -389,19 +369,6 @@ extern void init_expr_once (void);
/* This is run at the start of compiling a function. */
extern void init_expr (void);
-/* This is run at the end of compiling a function. */
-extern void finish_expr_for_function (void);
-
-/* Use protect_from_queue to convert a QUEUED expression
- into something that you can put immediately into an instruction. */
-extern rtx protect_from_queue (rtx, int);
-
-/* Perform all the pending incrementations. */
-extern void emit_queue (void);
-
-/* Tell if something has a queued subexpression. */
-extern int queued_subexp_p (rtx);
-
/* Emit some rtl insns to move data between rtx's, converting machine modes.
Both modes must be floating or both fixed. */
extern void convert_move (rtx, rtx, int);
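For reference, the slot layout of the deleted QUEUED rtx, condensed from the expr.h macros and comments removed above (the rtx code itself goes away in rtl.def):

    /* The five XEXP slots of a QUEUED rtx, per the removed macros:  */
    #define QUEUED_VAR(P)   XEXP (P, 0)  /* variable being incremented */
    #define QUEUED_INSN(P)  XEXP (P, 1)  /* increment insn, once emitted */
    #define QUEUED_COPY(P)  XEXP (P, 2)  /* temp with pre-increment value */
    #define QUEUED_BODY(P)  XEXP (P, 3)  /* insn body used for the emit */
    #define QUEUED_NEXT(P)  XEXP (P, 4)  /* next entry on pending_chain */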
diff --git a/gcc/function.c b/gcc/function.c
index 2d6a976e892..cddade0a751 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -4078,12 +4078,7 @@ expand_pending_sizes (tree pending_sizes)
/* Evaluate now the sizes of any types declared among the arguments. */
for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
- {
- expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
- /* Flush the queue in case this parameter declaration has
- side-effects. */
- emit_queue ();
- }
+ expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
}
/* Start the RTL for a new function, and set variables used for
@@ -4343,8 +4338,6 @@ expand_function_end (void)
{
rtx clobber_after;
- finish_expr_for_function ();
-
/* If arg_pointer_save_area was referenced only from a nested
function, we will not have initialized it yet. Do that now. */
if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
diff --git a/gcc/function.h b/gcc/function.h
index 5ac7731daca..8b25793498a 100644
--- a/gcc/function.h
+++ b/gcc/function.h
@@ -147,9 +147,6 @@ struct expr_status GTY(())
/* List of labels that must never be deleted. */
rtx x_forced_labels;
-
- /* Postincrements that still need to be expanded. */
- rtx x_pending_chain;
};
#define pending_stack_adjust (cfun->expr->x_pending_stack_adjust)
@@ -157,7 +154,6 @@ struct expr_status GTY(())
#define saveregs_value (cfun->expr->x_saveregs_value)
#define apply_args_value (cfun->expr->x_apply_args_value)
#define forced_labels (cfun->expr->x_forced_labels)
-#define pending_chain (cfun->expr->x_pending_chain)
#define stack_pointer_delta (cfun->expr->x_stack_pointer_delta)
/* This structure can save all the important global and static variables
diff --git a/gcc/genattrtab.c b/gcc/genattrtab.c
index 4a9ad8c43af..c701488951e 100644
--- a/gcc/genattrtab.c
+++ b/gcc/genattrtab.c
@@ -838,7 +838,6 @@ attr_copy_rtx (rtx orig)
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
@@ -2218,7 +2217,6 @@ encode_units_mask (rtx x)
return attr_rtx (CONST_STRING, attr_printf (MAX_DIGITS, "%d", j));
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
@@ -4174,7 +4172,6 @@ clear_struct_flag (rtx x)
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
diff --git a/gcc/local-alloc.c b/gcc/local-alloc.c
index a9cf8e7b524..67133a035f9 100644
--- a/gcc/local-alloc.c
+++ b/gcc/local-alloc.c
@@ -520,9 +520,6 @@ equiv_init_varies_p (rtx x)
case MEM:
return ! RTX_UNCHANGING_P (x) || equiv_init_varies_p (XEXP (x, 0));
- case QUEUED:
- return 1;
-
case CONST:
case CONST_INT:
case CONST_DOUBLE:
diff --git a/gcc/optabs.c b/gcc/optabs.c
index 041ec928f15..2440a86dc94 100644
--- a/gcc/optabs.c
+++ b/gcc/optabs.c
@@ -675,11 +675,6 @@ expand_binop (enum machine_mode mode, optab binoptab, rtx op0, rtx op1,
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
- if (target)
- target = protect_from_queue (target, 1);
-
if (flag_force_mem)
{
/* Load duplicate non-volatile operands once. */
@@ -2168,20 +2163,12 @@ expand_twoval_unop (optab unoptab, rtx op0, rtx targ0, rtx targ1,
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
-
if (flag_force_mem)
- {
- op0 = force_not_mem (op0);
- }
+ op0 = force_not_mem (op0);
- if (targ0)
- targ0 = protect_from_queue (targ0, 1);
- else
+ if (!targ0)
targ0 = gen_reg_rtx (mode);
- if (targ1)
- targ1 = protect_from_queue (targ1, 1);
- else
+ if (!targ1)
targ1 = gen_reg_rtx (mode);
/* Record where to go back to if we fail. */
@@ -2272,9 +2259,6 @@ expand_twoval_binop (optab binoptab, rtx op0, rtx op1, rtx targ0, rtx targ1,
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
- op1 = protect_from_queue (op1, 0);
-
if (flag_force_mem)
{
op0 = force_not_mem (op0);
@@ -2291,13 +2275,9 @@ expand_twoval_binop (optab binoptab, rtx op0, rtx op1, rtx targ0, rtx targ1,
&& rtx_cost (op1, binoptab->code) > COSTS_N_INSNS (1))
op1 = force_reg (mode, op1);
- if (targ0)
- targ0 = protect_from_queue (targ0, 1);
- else
+ if (!targ0)
targ0 = gen_reg_rtx (mode);
- if (targ1)
- targ1 = protect_from_queue (targ1, 1);
- else
+ if (!targ1)
targ1 = gen_reg_rtx (mode);
/* Record where to go back to if we fail. */
@@ -2500,15 +2480,8 @@ expand_unop (enum machine_mode mode, optab unoptab, rtx op0, rtx target,
class = GET_MODE_CLASS (mode);
- op0 = protect_from_queue (op0, 0);
-
if (flag_force_mem)
- {
- op0 = force_not_mem (op0);
- }
-
- if (target)
- target = protect_from_queue (target, 1);
+ op0 = force_not_mem (op0);
if (unoptab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
{
@@ -3037,18 +3010,11 @@ expand_complex_abs (enum machine_mode mode, rtx op0, rtx target,
if (submode == BLKmode)
abort ();
- op0 = protect_from_queue (op0, 0);
-
if (flag_force_mem)
- {
- op0 = force_not_mem (op0);
- }
+ op0 = force_not_mem (op0);
last = get_last_insn ();
- if (target)
- target = protect_from_queue (target, 1);
-
this_abs_optab = ! unsignedp && flag_trapv
&& (GET_MODE_CLASS(mode) == MODE_INT)
? absv_optab : abs_optab;
@@ -3223,9 +3189,7 @@ emit_unop_insn (int icode, rtx target, rtx op0, enum rtx_code code)
enum machine_mode mode0 = insn_data[icode].operand[1].mode;
rtx pat;
- temp = target = protect_from_queue (target, 1);
-
- op0 = protect_from_queue (op0, 0);
+ temp = target;
/* Sign and zero extension from memory is often done specially on
RISC machines, so forcing into a register here can pessimize
@@ -3707,11 +3671,6 @@ prepare_cmp_insn (rtx *px, rtx *py, enum rtx_code *pcomparison, rtx size,
if (size == 0)
abort ();
- emit_queue ();
- x = protect_from_queue (x, 0);
- y = protect_from_queue (y, 0);
- size = protect_from_queue (size, 0);
-
/* Try to use a memory block compare insn - either cmpstr
or cmpmem will do. */
for (cmp_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
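The loop whose header appears above is GCC's standard mode-widening walk: start at the narrowest integer mode and widen until the target supplies a usable pattern. A sketch of that walk, with the lookup written as a hypothetical helper standing in for whichever cmpstr/cmpmem insn-code query prepare_cmp_insn really performs:

    enum machine_mode cmp_mode;

    for (cmp_mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
         cmp_mode != VOIDmode;
         cmp_mode = GET_MODE_WIDER_MODE (cmp_mode))
      {
        /* have_block_compare_p is a hypothetical stand-in for the real
           insn-code lookup; stop at the first mode the target handles.  */
        if (have_block_compare_p (cmp_mode))
          break;
      }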
@@ -3816,8 +3775,6 @@ rtx
prepare_operand (int icode, rtx x, int opnum, enum machine_mode mode,
enum machine_mode wider_mode, int unsignedp)
{
- x = protect_from_queue (x, 0);
-
if (mode != wider_mode)
x = convert_modes (wider_mode, mode, x, unsignedp);
@@ -3943,7 +3900,6 @@ emit_cmp_and_jump_insns (rtx x, rtx y, enum rtx_code comparison, rtx size,
op0 = force_reg (mode, op0);
#endif
- emit_queue ();
if (unsignedp)
comparison = unsigned_condition (comparison);
@@ -3970,8 +3926,8 @@ prepare_float_lib_cmp (rtx *px, rtx *py, enum rtx_code *pcomparison,
{
enum rtx_code comparison = *pcomparison;
enum rtx_code swapped = swap_condition (comparison);
- rtx x = protect_from_queue (*px, 0);
- rtx y = protect_from_queue (*py, 0);
+ rtx x = *px;
+ rtx y = *py;
enum machine_mode orig_mode = GET_MODE (x);
enum machine_mode mode;
rtx value, target, insns, equiv;
@@ -4162,18 +4118,11 @@ emit_conditional_move (rtx target, enum rtx_code code, rtx op0, rtx op1,
op3 = force_not_mem (op3);
}
- if (target)
- target = protect_from_queue (target, 1);
- else
+ if (!target)
target = gen_reg_rtx (mode);
subtarget = target;
- emit_queue ();
-
- op2 = protect_from_queue (op2, 0);
- op3 = protect_from_queue (op3, 0);
-
/* If the insn doesn't accept these operands, put them in pseudos. */
if (! (*insn_data[icode].operand[0].predicate)
@@ -4303,23 +4252,16 @@ emit_conditional_add (rtx target, enum rtx_code code, rtx op0, rtx op1,
op3 = force_not_mem (op3);
}
- if (target)
- target = protect_from_queue (target, 1);
- else
+ if (!target)
target = gen_reg_rtx (mode);
- subtarget = target;
-
- emit_queue ();
-
- op2 = protect_from_queue (op2, 0);
- op3 = protect_from_queue (op3, 0);
-
/* If the insn doesn't accept these operands, put them in pseudos. */
if (! (*insn_data[icode].operand[0].predicate)
- (subtarget, insn_data[icode].operand[0].mode))
+ (target, insn_data[icode].operand[0].mode))
subtarget = gen_reg_rtx (insn_data[icode].operand[0].mode);
+ else
+ subtarget = target;
if (! (*insn_data[icode].operand[2].predicate)
(op2, insn_data[icode].operand[2].mode))
@@ -4358,11 +4300,7 @@ emit_conditional_add (rtx target, enum rtx_code code, rtx op0, rtx op1,
/* These functions attempt to generate an insn body, rather than
emitting the insn, but if the gen function already emits them, we
- make no attempt to turn them back into naked patterns.
-
- They do not protect from queued increments,
- because they may be used 1) in protect_from_queue itself
- and 2) in other passes where there is no queue. */
+ make no attempt to turn them back into naked patterns. */
/* Generate and return an insn body to add Y to X. */
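The comment above marks the split this file is organized around: gen_* helpers build and return an insn body, while the expand_* entry points emit instructions themselves. A short usage sketch (the operands are illustrative; adjusting the stack pointer is the classic client of gen_add2_insn):

    /* Build the pattern for x += y without emitting it, then emit it
       explicitly, so the caller controls where it lands in the stream.  */
    rtx pat = gen_add2_insn (stack_pointer_rtx, GEN_INT (4));
    emit_insn (pat);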
@@ -4619,9 +4557,6 @@ expand_float (rtx to, rtx from, int unsignedp)
if (icode != CODE_FOR_nothing)
{
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (imode != GET_MODE (from))
from = convert_to_mode (imode, from, unsignedp);
@@ -4645,11 +4580,6 @@ expand_float (rtx to, rtx from, int unsignedp)
rtx temp;
REAL_VALUE_TYPE offset;
- emit_queue ();
-
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (flag_force_mem)
from = force_not_mem (from);
@@ -4757,9 +4687,6 @@ expand_float (rtx to, rtx from, int unsignedp)
rtx value;
convert_optab tab = unsignedp ? ufloat_optab : sfloat_optab;
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (GET_MODE_SIZE (GET_MODE (from)) < GET_MODE_SIZE (SImode))
from = convert_to_mode (SImode, from, unsignedp);
@@ -4825,9 +4752,6 @@ expand_fix (rtx to, rtx from, int unsignedp)
if (icode != CODE_FOR_nothing)
{
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (fmode != GET_MODE (from))
from = convert_to_mode (fmode, from, 0);
@@ -4887,10 +4811,6 @@ expand_fix (rtx to, rtx from, int unsignedp)
lab1 = gen_label_rtx ();
lab2 = gen_label_rtx ();
- emit_queue ();
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (flag_force_mem)
from = force_not_mem (from);
@@ -4961,9 +4881,6 @@ expand_fix (rtx to, rtx from, int unsignedp)
if (!libfunc)
abort ();
- to = protect_from_queue (to, 1);
- from = protect_from_queue (from, 0);
-
if (flag_force_mem)
from = force_not_mem (from);
diff --git a/gcc/rtl.c b/gcc/rtl.c
index f1aa85b64dc..05b69a48668 100644
--- a/gcc/rtl.c
+++ b/gcc/rtl.c
@@ -214,7 +214,6 @@ copy_rtx (rtx orig)
switch (code)
{
case REG:
- case QUEUED:
case CONST_INT:
case CONST_DOUBLE:
case CONST_VECTOR:
diff --git a/gcc/rtl.def b/gcc/rtl.def
index 25b5ee90610..50d947d80dd 100644
--- a/gcc/rtl.def
+++ b/gcc/rtl.def
@@ -906,24 +906,6 @@ DEF_RTL_EXPR(SYMBOL_REF, "symbol_ref", "s00", RTX_CONST_OBJ)
pretend to be looking at the entire value and comparing it. */
DEF_RTL_EXPR(CC0, "cc0", "", RTX_OBJ)
-/* =====================================================================
- A QUEUED expression really points to a member of the queue of instructions
- to be output later for postincrement/postdecrement.
- QUEUED expressions never become part of instructions.
- When a QUEUED expression would be put into an instruction,
- instead either the incremented variable or a copy of its previous
- value is used.
-
- Operands are:
- 0. the variable to be incremented (a REG rtx).
- 1. the incrementing instruction, or 0 if it hasn't been output yet.
- 2. A REG rtx for a copy of the old value of the variable, or 0 if none yet.
- 3. the body to use for the incrementing instruction
- 4. the next QUEUED expression in the queue.
- ====================================================================== */
-
-DEF_RTL_EXPR(QUEUED, "queued", "eeeee", RTX_EXTRA)
-
/* ----------------------------------------------------------------------
Expressions for operators in an rtl pattern
---------------------------------------------------------------------- */
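The five operands documented in the deleted block map naturally onto field accessors in GCC's usual XEXP style. A hypothetical sketch of what such accessors would look like, following the operand numbering above (the exact historical spellings are not shown in this hunk):

    /* Presumed accessors for the five QUEUED operands: 0 = variable,
       1 = increment insn, 2 = copy of the old value, 3 = insn body,
       4 = next queue entry.  */
    #define QUEUED_VAR(P)   XEXP (P, 0)
    #define QUEUED_INSN(P)  XEXP (P, 1)
    #define QUEUED_COPY(P)  XEXP (P, 2)
    #define QUEUED_BODY(P)  XEXP (P, 3)
    #define QUEUED_NEXT(P)  XEXP (P, 4)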
diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c
index 81d4f4024f5..f3ce004bb2e 100644
--- a/gcc/rtlanal.c
+++ b/gcc/rtlanal.c
@@ -83,9 +83,6 @@ rtx_unstable_p (rtx x)
case MEM:
return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
- case QUEUED:
- return 1;
-
case CONST:
case CONST_INT:
case CONST_DOUBLE:
@@ -161,9 +158,6 @@ rtx_varies_p (rtx x, int for_alias)
case MEM:
return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
- case QUEUED:
- return 1;
-
case CONST:
case CONST_INT:
case CONST_DOUBLE:
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index f40e6959e38..2297f697818 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -3802,9 +3802,6 @@ simplify_gen_subreg (enum machine_mode outermode, rtx op,
|| byte >= GET_MODE_SIZE (innermode))
abort ();
- if (GET_CODE (op) == QUEUED)
- return NULL_RTX;
-
new = simplify_subreg (outermode, op, innermode, byte);
if (new)
return new;
diff --git a/gcc/stmt.c b/gcc/stmt.c
index a26d8e6097b..e9c6e5e2296 100644
--- a/gcc/stmt.c
+++ b/gcc/stmt.c
@@ -430,7 +430,6 @@ expand_computed_goto (tree exp)
x = convert_memory_address (Pmode, x);
- emit_queue ();
do_pending_stack_adjust ();
emit_indirect_jump (x);
}
@@ -1060,7 +1059,7 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
if ((! allows_mem && MEM_P (op))
|| GET_CODE (op) == CONCAT)
{
- real_output_rtx[i] = protect_from_queue (op, 1);
+ real_output_rtx[i] = op;
op = gen_reg_rtx (GET_MODE (op));
if (is_inout)
emit_move_insn (op, real_output_rtx[i]);
@@ -1185,13 +1184,6 @@ expand_asm_operands (tree string, tree outputs, tree inputs,
generating_concat_p = 0;
- for (i = 0; i < ninputs - ninout; i++)
- ASM_OPERANDS_INPUT (body, i)
- = protect_from_queue (ASM_OPERANDS_INPUT (body, i), 0);
-
- for (i = 0; i < noutputs; i++)
- output_rtx[i] = protect_from_queue (output_rtx[i], 1);
-
/* For in-out operands, copy output rtx to input rtx. */
for (i = 0; i < ninout; i++)
{
@@ -1362,9 +1354,6 @@ expand_asm_expr (tree exp)
TREE_VALUE (tail) = o[i];
}
}
-
- /* Those MODIFY_EXPRs could do autoincrements. */
- emit_queue ();
}
/* A subroutine of expand_asm_operands. Check that all operands have
@@ -1626,8 +1615,6 @@ expand_expr_stmt (tree exp)
/* Free any temporaries used to evaluate this expression. */
free_temp_slots ();
-
- emit_queue ();
}
/* Warn if EXP contains any computations whose results are not used.
@@ -2014,7 +2001,6 @@ expand_return (tree retval)
if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
{
expand_expr (retval, NULL_RTX, VOIDmode, 0);
- emit_queue ();
expand_null_return ();
return;
}
@@ -2144,7 +2130,6 @@ expand_return (tree retval)
result_reg_mode = tmpmode;
result_reg = gen_reg_rtx (result_reg_mode);
- emit_queue ();
for (i = 0; i < n_regs; i++)
emit_move_insn (operand_subword (result_reg, i, 0, result_reg_mode),
result_pseudos[i]);
@@ -2167,7 +2152,6 @@ expand_return (tree retval)
val = assign_temp (nt, 0, 0, 1);
val = expand_expr (retval_rhs, val, GET_MODE (val), 0);
val = force_not_mem (val);
- emit_queue ();
/* Return the calculated value. */
expand_value_return (shift_return_value (val));
}
@@ -2175,7 +2159,6 @@ expand_return (tree retval)
{
/* No hard reg used; calculate value into hard return reg. */
expand_expr (retval, const0_rtx, VOIDmode, 0);
- emit_queue ();
expand_value_return (result_rtl);
}
}
@@ -2682,13 +2665,11 @@ expand_decl_init (tree decl)
|| code == POINTER_TYPE || code == REFERENCE_TYPE)
expand_assignment (decl, convert (TREE_TYPE (decl), integer_zero_node),
0);
- emit_queue ();
}
else if (DECL_INITIAL (decl) && TREE_CODE (DECL_INITIAL (decl)) != TREE_LIST)
{
emit_line_note (DECL_SOURCE_LOCATION (decl));
expand_assignment (decl, DECL_INITIAL (decl), 0);
- emit_queue ();
}
/* Don't let the initialization count as "using" the variable. */
@@ -2801,7 +2782,6 @@ expand_start_case (int exit_flag, tree expr, tree type,
nesting_stack = thiscase;
do_pending_stack_adjust ();
- emit_queue ();
/* Make sure case_stmt.start points to something that won't
need any transformation before expand_end_case. */
@@ -3281,8 +3261,6 @@ emit_case_bit_tests (tree index_type, tree index_expr, tree minval,
convert (index_type, index_expr),
convert (index_type, minval)));
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- emit_queue ();
- index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
mode = TYPE_MODE (index_type);
@@ -3434,7 +3412,6 @@ expand_end_case_type (tree orig_index, tree orig_type)
if (count == 0)
{
expand_expr (index_expr, const0_rtx, VOIDmode, 0);
- emit_queue ();
emit_jump (default_label);
}
@@ -3503,10 +3480,8 @@ expand_end_case_type (tree orig_index, tree orig_type)
}
}
- emit_queue ();
do_pending_stack_adjust ();
- index = protect_from_queue (index, 0);
if (MEM_P (index))
index = copy_to_reg (index);
if (GET_CODE (index) == CONST_INT