author     tbsaunde <tbsaunde@138bc75d-0d04-0410-961f-82ee72b054a4>  2015-07-09 02:49:51 +0000
committer  tbsaunde <tbsaunde@138bc75d-0d04-0410-961f-82ee72b054a4>  2015-07-09 02:49:51 +0000
commit     5fe18e785a87cd6fa3bef8df8c83adf50274f42d (patch)
tree       e4dc3a562a1e0900ee3ab1384aba87e00500bcef /gcc/postreload.c
parent     bddd01c17102927f2cab4b73704474ad913278c2 (diff)
download   gcc-5fe18e785a87cd6fa3bef8df8c83adf50274f42d.tar.gz
reduce conditional compilation for LOAD_EXTEND_OP
Provide a default in files where that is possible, so that everything else there can be unconditionally compiled. However, rtlanal.c and reload.c do tricky things that break providing a global default, so we can't do that yet.

gcc/ChangeLog:

2015-07-08  Trevor Saunders  <tbsaunde+gcc@tbsaunde.org>

	* combine.c (try_combine): Don't check if LOAD_EXTEND_OP is defined.
	(simplify_set): Likewise.
	* cse.c (cse_insn): Likewise.
	* fold-const.c (fold_single_bit_test): Likewise.
	(fold_unary_loc): Likewise.
	* postreload.c (reload_cse_simplify_set): Likewise.
	(reload_cse_simplify_operands): Likewise.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@225591 138bc75d-0d04-0410-961f-82ee72b054a4
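The idiom the patch applies is the usual "default macro" pattern: supply a fallback definition of LOAD_EXTEND_OP at the top of the file that expands to UNKNOWN, so every later use compiles on all targets and the dead branch folds away. A minimal standalone sketch of that pattern follows; the enum, the machine_mode typedef and the load_extends_p helper are simplified, made-up stand-ins for illustration, not GCC's real declarations:

    /* Simplified stand-ins for GCC's rtx_code and machine_mode.  */
    enum rtx_code { UNKNOWN, SIGN_EXTEND, ZERO_EXTEND };
    typedef int machine_mode;

    /* Fallback: a target that does not define LOAD_EXTEND_OP gets a value
       meaning "loads do not implicitly extend", so the code below always
       compiles without #ifdef guards.  */
    #ifndef LOAD_EXTEND_OP
    #define LOAD_EXTEND_OP(MODE) UNKNOWN
    #endif

    static int
    load_extends_p (machine_mode mode)
    {
      /* On a target without LOAD_EXTEND_OP this reduces to
         UNKNOWN != UNKNOWN, which the compiler folds to 0 and
         dead-code-eliminates, giving the same object code as the
         old #ifdef form.  */
      enum rtx_code extend_op = LOAD_EXTEND_OP (mode);
      return extend_op != UNKNOWN;
    }

The patch itself does exactly the header half of this (the #ifndef block added at the top of postreload.c) and then removes the #ifdef LOAD_EXTEND_OP / #endif pairs around the existing uses, as the hunks below show.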
Diffstat (limited to 'gcc/postreload.c')
-rw-r--r--   gcc/postreload.c   18
1 file changed, 6 insertions, 12 deletions
diff --git a/gcc/postreload.c b/gcc/postreload.c
index 3db2c07224a..03babc87695 100644
--- a/gcc/postreload.c
+++ b/gcc/postreload.c
@@ -54,6 +54,10 @@ along with GCC; see the file COPYING3. If not see
#include "tree-pass.h"
#include "dbgcnt.h"
+#ifndef LOAD_EXTEND_OP
+#define LOAD_EXTEND_OP(M) UNKNOWN
+#endif
+
static int reload_cse_noop_set_p (rtx);
static bool reload_cse_simplify (rtx_insn *, rtx);
static void reload_cse_regs_1 (void);
@@ -254,9 +258,7 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
int old_cost;
cselib_val *val;
struct elt_loc_list *l;
-#ifdef LOAD_EXTEND_OP
enum rtx_code extend_op = UNKNOWN;
-#endif
bool speed = optimize_bb_for_speed_p (BLOCK_FOR_INSN (insn));
dreg = true_regnum (SET_DEST (set));
@@ -269,7 +271,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
dclass = REGNO_REG_CLASS (dreg);
-#ifdef LOAD_EXTEND_OP
/* When replacing a memory with a register, we need to honor assumptions
that combine made wrt the contents of sign bits. We'll do this by
generating an extend instruction instead of a reg->reg copy. Thus
@@ -279,7 +280,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
&& (extend_op = LOAD_EXTEND_OP (GET_MODE (src))) != UNKNOWN
&& !REG_P (SET_DEST (set)))
return 0;
-#endif
val = cselib_lookup (src, GET_MODE (SET_DEST (set)), 0, VOIDmode);
if (! val)
@@ -301,7 +301,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
if (CONSTANT_P (this_rtx) && ! references_value_p (this_rtx, 0))
{
-#ifdef LOAD_EXTEND_OP
if (extend_op != UNKNOWN)
{
wide_int result;
@@ -326,19 +325,17 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
}
this_rtx = immed_wide_int_const (result, word_mode);
}
-#endif
+
this_cost = set_src_cost (this_rtx, GET_MODE (SET_DEST (set)), speed);
}
else if (REG_P (this_rtx))
{
-#ifdef LOAD_EXTEND_OP
if (extend_op != UNKNOWN)
{
this_rtx = gen_rtx_fmt_e (extend_op, word_mode, this_rtx);
this_cost = set_src_cost (this_rtx, word_mode, speed);
}
else
-#endif
this_cost = register_move_cost (GET_MODE (this_rtx),
REGNO_REG_CLASS (REGNO (this_rtx)),
dclass);
@@ -353,7 +350,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
&& REG_P (this_rtx)
&& !REG_P (SET_SRC (set))))
{
-#ifdef LOAD_EXTEND_OP
if (GET_MODE_BITSIZE (GET_MODE (SET_DEST (set))) < BITS_PER_WORD
&& extend_op != UNKNOWN
#ifdef CANNOT_CHANGE_MODE_CLASS
@@ -367,7 +363,6 @@ reload_cse_simplify_set (rtx set, rtx_insn *insn)
ORIGINAL_REGNO (wide_dest) = ORIGINAL_REGNO (SET_DEST (set));
validate_change (insn, &SET_DEST (set), wide_dest, 1);
}
-#endif
validate_unshare_change (insn, &SET_SRC (set), this_rtx, 1);
old_cost = this_cost, did_change = 1;
@@ -439,7 +434,6 @@ reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
continue;
op = recog_data.operand[i];
-#ifdef LOAD_EXTEND_OP
if (MEM_P (op)
&& GET_MODE_BITSIZE (GET_MODE (op)) < BITS_PER_WORD
&& LOAD_EXTEND_OP (GET_MODE (op)) != UNKNOWN)
@@ -490,7 +484,7 @@ reload_cse_simplify_operands (rtx_insn *insn, rtx testreg)
safe to optimize, but is it worth the trouble? */
continue;
}
-#endif /* LOAD_EXTEND_OP */
+
if (side_effects_p (op))
continue;
v = cselib_lookup (op, recog_data.operand_mode[i], 0, VOIDmode);
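For the constant case in reload_cse_simplify_set (the wide_int block in the second hunk above), the now-unconditional extend_op handling widens the constant itself rather than emitting an extension insn: on a LOAD_EXTEND_OP target a narrow constant is sign- or zero-extended to word_mode (via immed_wide_int_const) before its cost is compared. A rough, self-contained illustration of that widening in plain C, using a 64-bit integer in place of GCC's wide_int and a hypothetical helper name:

    #include <stdint.h>

    enum rtx_code { UNKNOWN, SIGN_EXTEND, ZERO_EXTEND };

    /* extend_narrow_const is a made-up name; it mimics what the wide_int
       code in reload_cse_simplify_set does when extend_op != UNKNOWN:
       widen an nbits-wide constant to the full word before costing it.  */
    static uint64_t
    extend_narrow_const (uint64_t value, unsigned int nbits,
                         enum rtx_code extend_op)
    {
      uint64_t mask = (nbits >= 64) ? ~UINT64_C (0)
                                    : (UINT64_C (1) << nbits) - 1;
      value &= mask;
      if (extend_op == SIGN_EXTEND && nbits < 64
          && ((value >> (nbits - 1)) & 1))
        value |= ~mask;   /* replicate the sign bit into the upper bits  */
      /* For ZERO_EXTEND the upper bits simply stay zero; UNKNOWN never
         reaches this path in the real code.  */
      return value;
    }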