author     uros <uros@138bc75d-0d04-0410-961f-82ee72b054a4>    2007-01-23 07:14:26 +0000
committer  uros <uros@138bc75d-0d04-0410-961f-82ee72b054a4>    2007-01-23 07:14:26 +0000
commit     24deb0afcacf40bdbbe30a12d88c43f15b5b26cc (patch)
tree       3c45285417cab5be4b197aa1e2a6272e6d14348f /gcc/config
parent     a7761c5721cf67c942d5b8b4899911627478730c (diff)
download   gcc-24deb0afcacf40bdbbe30a12d88c43f15b5b26cc.tar.gz
* config/i386/i386.md: Use REG_P, MEM_P, CONST_INT_P, LABEL_P,
JUMP_P and CALL_P predicates where applicable.
* config/i386/i386.c: Ditto.
* config/i386/i386.md: Ditto.
* config/i386/mmx.md: Ditto.
* config/i386/predicates.md: Ditto.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@121079 138bc75d-0d04-0410-961f-82ee72b054a4
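Background for the mechanical substitutions below: the predicate macros named in the ChangeLog are thin wrappers around GET_CODE, so every hunk in this patch is a behavior-preserving one-line rewrite. A rough sketch of the definitions, paraphrased from gcc/rtl.h of this era (an assumption for illustration; consult the header in the tree for the exact forms):

    /* Paraphrased from gcc/rtl.h; approximate, not copied from the tree.
       Each predicate simply tests the rtx code of X.  */
    #define REG_P(X)       (GET_CODE (X) == REG)
    #define MEM_P(X)       (GET_CODE (X) == MEM)
    #define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)
    #define LABEL_P(X)     (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)      (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)      (GET_CODE (X) == CALL_INSN)

    /* Typical rewrite applied throughout the patch:
       before:  if (GET_CODE (operands[1]) == MEM && GET_CODE (operands[2]) == MEM)
       after:   if (MEM_P (operands[1]) && MEM_P (operands[2]))  */

Note that several insn conditions in i386.md are also flipped from (GET_CODE (a) != MEM || GET_CODE (b) != MEM) to the equivalent !(MEM_P (a) && MEM_P (b)), i.e. the patch applies De Morgan's law rather than a literal token swap in those spots.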
Diffstat (limited to 'gcc/config')
-rw-r--r--  gcc/config/i386/i386.c         | 276
-rw-r--r--  gcc/config/i386/i386.md        | 286
-rw-r--r--  gcc/config/i386/mmx.md         |   8
-rw-r--r--  gcc/config/i386/predicates.md  |  18
4 files changed, 295 insertions(+), 293 deletions(-)
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 88fb88d9b01..9191376dd43 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -4708,7 +4708,7 @@ ix86_check_movabs (rtx insn, int opnum)
mem = XEXP (set, opnum);
while (GET_CODE (mem) == SUBREG)
mem = SUBREG_REG (mem);
- gcc_assert (GET_CODE (mem) == MEM);
+ gcc_assert (MEM_P (mem));
return (volatile_ok || !MEM_VOLATILE_P (mem));
}
@@ -5959,7 +5959,7 @@ ix86_decompose_address (rtx addr, struct ix86_address *out)
int retval = 1;
enum ix86_address_seg seg = SEG_DEFAULT;
- if (GET_CODE (addr) == REG || GET_CODE (addr) == SUBREG)
+ if (REG_P (addr) || GET_CODE (addr) == SUBREG)
base = addr;
else if (GET_CODE (addr) == PLUS)
{
@@ -6036,7 +6036,7 @@ ix86_decompose_address (rtx addr, struct ix86_address *out)
/* We're called for lea too, which implements ashift on occasion. */
index = XEXP (addr, 0);
tmp = XEXP (addr, 1);
- if (GET_CODE (tmp) != CONST_INT)
+ if (!CONST_INT_P (tmp))
return 0;
scale = INTVAL (tmp);
if ((unsigned HOST_WIDE_INT) scale > 3)
@@ -6050,7 +6050,7 @@ ix86_decompose_address (rtx addr, struct ix86_address *out)
/* Extract the integral value of scale. */
if (scale_rtx)
{
- if (GET_CODE (scale_rtx) != CONST_INT)
+ if (!CONST_INT_P (scale_rtx))
return 0;
scale = INTVAL (scale_rtx);
}
@@ -6179,7 +6179,7 @@ ix86_find_base_term (rtx x)
return x;
term = XEXP (x, 0);
if (GET_CODE (term) == PLUS
- && (GET_CODE (XEXP (term, 1)) == CONST_INT
+ && (CONST_INT_P (XEXP (term, 1))
|| GET_CODE (XEXP (term, 1)) == CONST_DOUBLE))
term = XEXP (term, 0);
if (GET_CODE (term) != UNSPEC
@@ -6239,7 +6239,7 @@ legitimate_constant_p (rtx x)
if (GET_CODE (x) == PLUS)
{
- if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ if (!CONST_INT_P (XEXP (x, 1)))
return false;
x = XEXP (x, 0);
}
@@ -6342,7 +6342,7 @@ legitimate_pic_operand_p (rtx x)
case CONST:
inner = XEXP (x, 0);
if (GET_CODE (inner) == PLUS
- && GET_CODE (XEXP (inner, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (inner, 1)))
inner = XEXP (inner, 0);
/* Only some unspecs are valid as "constants". */
@@ -6393,7 +6393,7 @@ legitimate_pic_address_disp_p (rtx disp)
break;
op0 = XEXP (XEXP (disp, 0), 0);
op1 = XEXP (XEXP (disp, 0), 1);
- if (GET_CODE (op1) != CONST_INT
+ if (!CONST_INT_P (op1)
|| INTVAL (op1) >= 16*1024*1024
|| INTVAL (op1) < -16*1024*1024)
break;
@@ -6437,7 +6437,7 @@ legitimate_pic_address_disp_p (rtx disp)
saw_plus = false;
if (GET_CODE (disp) == PLUS)
{
- if (GET_CODE (XEXP (disp, 1)) != CONST_INT)
+ if (!CONST_INT_P (XEXP (disp, 1)))
return 0;
disp = XEXP (disp, 0);
saw_plus = true;
@@ -6665,7 +6665,7 @@ legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
if (GET_CODE (disp) != CONST
|| GET_CODE (XEXP (disp, 0)) != PLUS
|| GET_CODE (XEXP (XEXP (disp, 0), 0)) != UNSPEC
- || GET_CODE (XEXP (XEXP (disp, 0), 1)) != CONST_INT
+ || !CONST_INT_P (XEXP (XEXP (disp, 0), 1))
|| (XINT (XEXP (XEXP (disp, 0), 0), 1) != UNSPEC_DTPOFF
&& XINT (XEXP (XEXP (disp, 0), 0), 1) != UNSPEC_NTPOFF))
{
@@ -6702,7 +6702,7 @@ legitimate_address_p (enum machine_mode mode, rtx addr, int strict)
correct fix for crash to disable this test. */
}
else if (GET_CODE (disp) != LABEL_REF
- && GET_CODE (disp) != CONST_INT
+ && !CONST_INT_P (disp)
&& (GET_CODE (disp) != CONST
|| !legitimate_constant_p (disp))
&& (GET_CODE (disp) != SYMBOL_REF
@@ -6878,7 +6878,7 @@ legitimize_pic_address (rtx orig, rtx reg)
}
else
{
- if (GET_CODE (addr) == CONST_INT
+ if (CONST_INT_P (addr)
&& !x86_64_immediate_operand (addr, VOIDmode))
{
if (reg)
@@ -6909,7 +6909,7 @@ legitimize_pic_address (rtx orig, rtx reg)
/* Check first to see if this is a constant offset from a @GOTOFF
symbol reference. */
if (local_symbolic_operand (op0, Pmode)
- && GET_CODE (op1) == CONST_INT)
+ && CONST_INT_P (op1))
{
if (!TARGET_64BIT)
{
@@ -6944,7 +6944,7 @@ legitimize_pic_address (rtx orig, rtx reg)
new = legitimize_pic_address (XEXP (addr, 1),
base == reg ? NULL_RTX : reg);
- if (GET_CODE (new) == CONST_INT)
+ if (CONST_INT_P (new))
new = plus_constant (base, INTVAL (new));
else
{
@@ -7186,7 +7186,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
/* Canonicalize shifts by 0, 1, 2, 3 into multiply */
if (GET_CODE (x) == ASHIFT
- && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (x, 1))
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (x, 1)) < 4)
{
changed = 1;
@@ -7200,7 +7200,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
/* Canonicalize shifts by 0, 1, 2, 3 into multiply. */
if (GET_CODE (XEXP (x, 0)) == ASHIFT
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1))
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (x, 0), 1)) < 4)
{
changed = 1;
@@ -7211,7 +7211,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
}
if (GET_CODE (XEXP (x, 1)) == ASHIFT
- && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (x, 1), 1))
&& (unsigned HOST_WIDE_INT) INTVAL (XEXP (XEXP (x, 1), 1)) < 4)
{
changed = 1;
@@ -7254,12 +7254,12 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
rtx constant;
rtx other = NULL_RTX;
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
constant = XEXP (x, 1);
other = XEXP (XEXP (XEXP (x, 0), 1), 1);
}
- else if (GET_CODE (XEXP (XEXP (XEXP (x, 0), 1), 1)) == CONST_INT)
+ else if (CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 1), 1)))
{
constant = XEXP (XEXP (XEXP (x, 0), 1), 1);
other = XEXP (x, 1);
@@ -7293,8 +7293,8 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
}
if (changed
- && GET_CODE (XEXP (x, 1)) == REG
- && GET_CODE (XEXP (x, 0)) == REG)
+ && REG_P (XEXP (x, 1))
+ && REG_P (XEXP (x, 0)))
return x;
if (flag_pic && SYMBOLIC_CONST (XEXP (x, 1)))
@@ -7306,7 +7306,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
if (changed && legitimate_address_p (mode, x, FALSE))
return x;
- if (GET_CODE (XEXP (x, 0)) == REG)
+ if (REG_P (XEXP (x, 0)))
{
rtx temp = gen_reg_rtx (Pmode);
rtx val = force_operand (XEXP (x, 1), temp);
@@ -7317,7 +7317,7 @@ legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, enum machine_mode mode)
return x;
}
- else if (GET_CODE (XEXP (x, 1)) == REG)
+ else if (REG_P (XEXP (x, 1)))
{
rtx temp = gen_reg_rtx (Pmode);
rtx val = force_operand (XEXP (x, 0), temp);
@@ -7392,7 +7392,7 @@ output_pic_addr_const (FILE *file, rtx x, int code)
case PLUS:
/* Some assemblers need integer constants to appear first. */
- if (GET_CODE (XEXP (x, 0)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 0)))
{
output_pic_addr_const (file, XEXP (x, 0), code);
putc ('+', file);
@@ -7400,7 +7400,7 @@ output_pic_addr_const (FILE *file, rtx x, int code)
}
else
{
- gcc_assert (GET_CODE (XEXP (x, 1)) == CONST_INT);
+ gcc_assert (CONST_INT_P (XEXP (x, 1)));
output_pic_addr_const (file, XEXP (x, 1), code);
putc ('+', file);
output_pic_addr_const (file, XEXP (x, 0), code);
@@ -7509,7 +7509,7 @@ ix86_delegitimize_address (rtx orig_x)
/* This is the result, or NULL. */
rtx result = NULL_RTX;
- if (GET_CODE (x) == MEM)
+ if (MEM_P (x))
x = XEXP (x, 0);
if (TARGET_64BIT)
@@ -7517,7 +7517,7 @@ ix86_delegitimize_address (rtx orig_x)
if (GET_CODE (x) != CONST
|| GET_CODE (XEXP (x, 0)) != UNSPEC
|| XINT (XEXP (x, 0), 1) != UNSPEC_GOTPCREL
- || GET_CODE (orig_x) != MEM)
+ || !MEM_P (orig_x))
return orig_x;
return XVECEXP (XEXP (x, 0), 0, 0);
}
@@ -7526,7 +7526,7 @@ ix86_delegitimize_address (rtx orig_x)
|| GET_CODE (XEXP (x, 1)) != CONST)
return orig_x;
- if (GET_CODE (XEXP (x, 0)) == REG
+ if (REG_P (XEXP (x, 0))
&& REGNO (XEXP (x, 0)) == PIC_OFFSET_TABLE_REGNUM)
/* %ebx + GOT/GOTOFF */
;
@@ -7534,15 +7534,15 @@ ix86_delegitimize_address (rtx orig_x)
{
/* %ebx + %reg * scale + GOT/GOTOFF */
reg_addend = XEXP (x, 0);
- if (GET_CODE (XEXP (reg_addend, 0)) == REG
+ if (REG_P (XEXP (reg_addend, 0))
&& REGNO (XEXP (reg_addend, 0)) == PIC_OFFSET_TABLE_REGNUM)
reg_addend = XEXP (reg_addend, 1);
- else if (GET_CODE (XEXP (reg_addend, 1)) == REG
+ else if (REG_P (XEXP (reg_addend, 1))
&& REGNO (XEXP (reg_addend, 1)) == PIC_OFFSET_TABLE_REGNUM)
reg_addend = XEXP (reg_addend, 0);
else
return orig_x;
- if (GET_CODE (reg_addend) != REG
+ if (!REG_P (reg_addend)
&& GET_CODE (reg_addend) != MULT
&& GET_CODE (reg_addend) != ASHIFT)
return orig_x;
@@ -7552,19 +7552,19 @@ ix86_delegitimize_address (rtx orig_x)
x = XEXP (XEXP (x, 1), 0);
if (GET_CODE (x) == PLUS
- && GET_CODE (XEXP (x, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (x, 1)))
{
const_addend = XEXP (x, 1);
x = XEXP (x, 0);
}
if (GET_CODE (x) == UNSPEC
- && ((XINT (x, 1) == UNSPEC_GOT && GET_CODE (orig_x) == MEM)
- || (XINT (x, 1) == UNSPEC_GOTOFF && GET_CODE (orig_x) != MEM)))
+ && ((XINT (x, 1) == UNSPEC_GOT && MEM_P (orig_x))
+ || (XINT (x, 1) == UNSPEC_GOTOFF && !MEM_P (orig_x))))
result = XVECEXP (x, 0, 0);
if (TARGET_MACHO && darwin_local_data_pic (x)
- && GET_CODE (orig_x) != MEM)
+ && !MEM_P (orig_x))
result = XEXP (x, 0);
if (! result)
@@ -7862,7 +7862,7 @@ print_operand (FILE *file, rtx x, int code)
case ASM_INTEL:
/* Intel syntax. For absolute addresses, registers should not
be surrounded by braces. */
- if (GET_CODE (x) != REG)
+ if (!REG_P (x))
{
putc ('[', file);
PRINT_OPERAND (file, x, 0);
@@ -7976,7 +7976,7 @@ print_operand (FILE *file, rtx x, int code)
break;
case 's':
- if (GET_CODE (x) == CONST_INT || ! SHIFT_DOUBLE_OMITS_COUNT)
+ if (CONST_INT_P (x) || ! SHIFT_DOUBLE_OMITS_COUNT)
{
PRINT_OPERAND (file, x, 0);
putc (',', file);
@@ -8114,10 +8114,10 @@ print_operand (FILE *file, rtx x, int code)
}
}
- if (GET_CODE (x) == REG)
+ if (REG_P (x))
print_reg (x, code, file);
- else if (GET_CODE (x) == MEM)
+ else if (MEM_P (x))
{
/* No `byte ptr' prefix for call instructions. */
if (ASSEMBLER_DIALECT == ASM_INTEL && code != 'X' && code != 'P')
@@ -8150,7 +8150,7 @@ print_operand (FILE *file, rtx x, int code)
x = XEXP (x, 0);
/* Avoid (%rip) for call operands. */
if (CONSTANT_ADDRESS_P (x) && code == 'P'
- && GET_CODE (x) != CONST_INT)
+ && !CONST_INT_P (x))
output_addr_const (file, x);
else if (this_is_asm_operands && ! address_operand (x, VOIDmode))
output_operand_lossage ("invalid constraints for operand");
@@ -8202,7 +8202,7 @@ print_operand (FILE *file, rtx x, int code)
if (code != 'P')
{
- if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
+ if (CONST_INT_P (x) || GET_CODE (x) == CONST_DOUBLE)
{
if (ASSEMBLER_DIALECT == ASM_ATT)
putc ('$', file);
@@ -8216,7 +8216,7 @@ print_operand (FILE *file, rtx x, int code)
fputs ("OFFSET FLAT:", file);
}
}
- if (GET_CODE (x) == CONST_INT)
+ if (CONST_INT_P (x))
fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
else if (flag_pic)
output_pic_addr_const (file, x, code);
@@ -8260,7 +8260,7 @@ print_operand_address (FILE *file, rtx addr)
{
/* Displacement only requires special attention. */
- if (GET_CODE (disp) == CONST_INT)
+ if (CONST_INT_P (disp))
{
if (ASSEMBLER_DIALECT == ASM_INTEL && parts.seg == SEG_DEFAULT)
{
@@ -8280,7 +8280,7 @@ print_operand_address (FILE *file, rtx addr)
{
if (GET_CODE (disp) == CONST
&& GET_CODE (XEXP (disp, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (XEXP (disp, 0), 1)))
disp = XEXP (XEXP (disp, 0), 0);
if (GET_CODE (disp) == LABEL_REF
|| (GET_CODE (disp) == SYMBOL_REF
@@ -8323,7 +8323,7 @@ print_operand_address (FILE *file, rtx addr)
/* Pull out the offset of a symbol; print any symbol itself. */
if (GET_CODE (disp) == CONST
&& GET_CODE (XEXP (disp, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (disp, 0), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (XEXP (disp, 0), 1)))
{
offset = XEXP (XEXP (disp, 0), 1);
disp = gen_rtx_CONST (VOIDmode,
@@ -8334,7 +8334,7 @@ print_operand_address (FILE *file, rtx addr)
output_pic_addr_const (file, disp, 0);
else if (GET_CODE (disp) == LABEL_REF)
output_asm_label (disp);
- else if (GET_CODE (disp) == CONST_INT)
+ else if (CONST_INT_P (disp))
offset = disp;
else
output_addr_const (file, disp);
@@ -8433,7 +8433,7 @@ split_di (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
/* simplify_subreg refuse to split volatile memory addresses,
but we still have to handle it. */
- if (GET_CODE (op) == MEM)
+ if (MEM_P (op))
{
lo_half[num] = adjust_address (op, SImode, 0);
hi_half[num] = adjust_address (op, SImode, 4);
@@ -8464,7 +8464,7 @@ split_ti (rtx operands[], int num, rtx lo_half[], rtx hi_half[])
/* simplify_subreg refuse to split volatile memory addresses, but we
still have to handle it. */
- if (GET_CODE (op) == MEM)
+ if (MEM_P (op))
{
lo_half[num] = adjust_address (op, DImode, 0);
hi_half[num] = adjust_address (op, DImode, 8);
@@ -8508,10 +8508,10 @@ output_387_binary_op (rtx insn, rtx *operands)
if (STACK_REG_P (operands[0])
&& ((REG_P (operands[1])
&& REGNO (operands[0]) == REGNO (operands[1])
- && (STACK_REG_P (operands[2]) || GET_CODE (operands[2]) == MEM))
+ && (STACK_REG_P (operands[2]) || MEM_P (operands[2])))
|| (REG_P (operands[2])
&& REGNO (operands[0]) == REGNO (operands[2])
- && (STACK_REG_P (operands[1]) || GET_CODE (operands[1]) == MEM)))
+ && (STACK_REG_P (operands[1]) || MEM_P (operands[1]))))
&& (STACK_TOP_P (operands[1]) || STACK_TOP_P (operands[2])))
; /* ok */
else
@@ -8584,7 +8584,7 @@ output_387_binary_op (rtx insn, rtx *operands)
/* know operands[0] == operands[1]. */
- if (GET_CODE (operands[2]) == MEM)
+ if (MEM_P (operands[2]))
{
p = "%z2\t%2";
break;
@@ -8614,13 +8614,13 @@ output_387_binary_op (rtx insn, rtx *operands)
case MINUS:
case DIV:
- if (GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[1]))
{
p = "r%z1\t%1";
break;
}
- if (GET_CODE (operands[2]) == MEM)
+ if (MEM_P (operands[2]))
{
p = "%z2\t%2";
break;
@@ -8859,7 +8859,7 @@ output_fix_trunc (rtx insn, rtx *operands, int fisttp)
output_asm_insn ("fld\t%y1", operands);
gcc_assert (STACK_TOP_P (operands[1]));
- gcc_assert (GET_CODE (operands[0]) == MEM);
+ gcc_assert (MEM_P (operands[0]));
if (fisttp)
output_asm_insn ("fisttp%z0\t%0", operands);
@@ -9147,7 +9147,7 @@ ix86_expand_move (enum machine_mode mode, rtx operands[])
if (MACHOPIC_PURE)
{
rtx temp = ((reload_in_progress
- || ((op0 && GET_CODE (op0) == REG)
+ || ((op0 && REG_P (op0))
&& mode == Pmode))
? op0 : gen_reg_rtx (Pmode));
op1 = machopic_indirect_data_reference (op1, temp);
@@ -9162,7 +9162,7 @@ ix86_expand_move (enum machine_mode mode, rtx operands[])
}
else
{
- if (GET_CODE (op0) == MEM)
+ if (MEM_P (op0))
op1 = force_reg (Pmode, op1);
else
op1 = legitimize_address (op1, op1, Pmode);
@@ -9170,10 +9170,10 @@ ix86_expand_move (enum machine_mode mode, rtx operands[])
}
else
{
- if (GET_CODE (op0) == MEM
+ if (MEM_P (op0)
&& (PUSH_ROUNDING (GET_MODE_SIZE (mode)) != GET_MODE_SIZE (mode)
|| !push_operand (op0, mode))
- && GET_CODE (op1) == MEM)
+ && MEM_P (op1))
op1 = force_reg (mode, op1);
if (push_operand (op0, mode)
@@ -9408,7 +9408,7 @@ ix86_fixup_binary_operands (enum rtx_code code, enum machine_mode mode,
/* If the destination is memory, and we do not have matching source
operands, do things in registers. */
matching_memory = 0;
- if (GET_CODE (dst) == MEM)
+ if (MEM_P (dst))
{
if (rtx_equal_p (dst, src1))
matching_memory = 1;
@@ -9420,7 +9420,7 @@ ix86_fixup_binary_operands (enum rtx_code code, enum machine_mode mode,
}
/* Both source operands cannot be in memory. */
- if (GET_CODE (src1) == MEM && GET_CODE (src2) == MEM)
+ if (MEM_P (src1) && MEM_P (src2))
{
if (matching_memory != 2)
src2 = force_reg (mode, src2);
@@ -9431,7 +9431,7 @@ ix86_fixup_binary_operands (enum rtx_code code, enum machine_mode mode,
/* If the operation is not commutable, source 1 cannot be a constant
or non-matching memory. */
if ((CONSTANT_P (src1)
- || (!matching_memory && GET_CODE (src1) == MEM))
+ || (!matching_memory && MEM_P (src1)))
&& GET_RTX_CLASS (code) != RTX_COMM_ARITH)
src1 = force_reg (mode, src1);
@@ -9495,20 +9495,20 @@ ix86_binary_operator_ok (enum rtx_code code,
rtx operands[3])
{
/* Both source operands cannot be in memory. */
- if (GET_CODE (operands[1]) == MEM && GET_CODE (operands[2]) == MEM)
+ if (MEM_P (operands[1]) && MEM_P (operands[2]))
return 0;
/* If the operation is not commutable, source 1 cannot be a constant. */
if (CONSTANT_P (operands[1]) && GET_RTX_CLASS (code) != RTX_COMM_ARITH)
return 0;
/* If the destination is memory, we must have a matching source operand. */
- if (GET_CODE (operands[0]) == MEM
+ if (MEM_P (operands[0])
&& ! (rtx_equal_p (operands[0], operands[1])
|| (GET_RTX_CLASS (code) == RTX_COMM_ARITH
&& rtx_equal_p (operands[0], operands[2]))))
return 0;
/* If the operation is not commutable and the source 1 is memory, we must
have a matching destination. */
- if (GET_CODE (operands[1]) == MEM
+ if (MEM_P (operands[1])
&& GET_RTX_CLASS (code) != RTX_COMM_ARITH
&& ! rtx_equal_p (operands[0], operands[1]))
return 0;
@@ -9574,8 +9574,8 @@ ix86_unary_operator_ok (enum rtx_code code ATTRIBUTE_UNUSED,
rtx operands[2] ATTRIBUTE_UNUSED)
{
/* If one of operands is memory, source and destination must match. */
- if ((GET_CODE (operands[0]) == MEM
- || GET_CODE (operands[1]) == MEM)
+ if ((MEM_P (operands[0])
+ || MEM_P (operands[1]))
&& ! rtx_equal_p (operands[0], operands[1]))
return FALSE;
return TRUE;
@@ -10085,16 +10085,16 @@ ix86_prepare_fp_compare_args (enum rtx_code code, rtx *pop0, rtx *pop1)
into a register. */
if (standard_80387_constant_p (op0) == 0
- || (GET_CODE (op0) == MEM
+ || (MEM_P (op0)
&& ! (standard_80387_constant_p (op1) == 0
- || GET_CODE (op1) == MEM)))
+ || MEM_P (op1))))
{
rtx tmp;
tmp = op0, op0 = op1, op1 = tmp;
code = swap_condition (code);
}
- if (GET_CODE (op0) != REG)
+ if (!REG_P (op0))
op0 = force_reg (op_mode, op0);
if (CONSTANT_P (op1))
@@ -10115,12 +10115,12 @@ ix86_prepare_fp_compare_args (enum rtx_code code, rtx *pop0, rtx *pop1)
/* Try to rearrange the comparison to make it cheaper. */
if (ix86_fp_comparison_cost (code)
> ix86_fp_comparison_cost (swap_condition (code))
- && (GET_CODE (op1) == REG || !no_new_pseudos))
+ && (REG_P (op1) || !no_new_pseudos))
{
rtx tmp;
tmp = op0, op0 = op1, op1 = tmp;
code = swap_condition (code);
- if (GET_CODE (op0) != REG)
+ if (!REG_P (op0))
op0 = force_reg (op_mode, op0);
}
@@ -10674,7 +10674,7 @@ ix86_expand_branch (enum rtx_code code, rtx label)
op1 is a constant and the low word is zero, then we can just
examine the high word. */
- if (GET_CODE (hi[1]) == CONST_INT && lo[1] == const0_rtx)
+ if (CONST_INT_P (hi[1]) && lo[1] == const0_rtx)
switch (code)
{
case LT: case LTU: case GE: case GEU:
@@ -10948,7 +10948,7 @@ ix86_expand_carry_flag_compare (enum rtx_code code, rtx op0, rtx op1, rtx *pop)
/* Convert a>b into b<a or a>=b-1. */
case GTU:
case LEU:
- if (GET_CODE (op1) == CONST_INT)
+ if (CONST_INT_P (op1))
{
op1 = gen_int_mode (INTVAL (op1) + 1, GET_MODE (op0));
/* Bail out on overflow. We still can swap operands but that
@@ -11025,8 +11025,8 @@ ix86_expand_int_movcc (rtx operands[])
if ((mode != HImode || TARGET_FAST_PREFIX)
&& (mode != (TARGET_64BIT ? TImode : DImode))
- && GET_CODE (operands[2]) == CONST_INT
- && GET_CODE (operands[3]) == CONST_INT)
+ && CONST_INT_P (operands[2])
+ && CONST_INT_P (operands[3]))
{
rtx out = operands[0];
HOST_WIDE_INT ct = INTVAL (operands[2]);
@@ -11201,7 +11201,7 @@ ix86_expand_int_movcc (rtx operands[])
compare_code = UNKNOWN;
if (GET_MODE_CLASS (GET_MODE (ix86_compare_op0)) == MODE_INT
- && GET_CODE (ix86_compare_op1) == CONST_INT)
+ && CONST_INT_P (ix86_compare_op1))
{
if (ix86_compare_op1 == const0_rtx
&& (code == LT || code == GE))
@@ -11413,7 +11413,7 @@ ix86_expand_int_movcc (rtx operands[])
/* If one of the two operands is an interesting constant, load a
constant with the above and mask it in with a logical operation. */
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
var = operands[3];
if (INTVAL (operands[2]) == 0 && operands[3] != constm1_rtx)
@@ -11423,7 +11423,7 @@ ix86_expand_int_movcc (rtx operands[])
else
return 0; /* FAIL */
}
- else if (GET_CODE (operands[3]) == CONST_INT)
+ else if (CONST_INT_P (operands[3]))
{
var = operands[2];
if (INTVAL (operands[3]) == 0 && operands[2] != constm1_rtx)
@@ -12055,19 +12055,19 @@ ix86_split_to_parts (rtx operand, rtx *parts, enum machine_mode mode)
else
size = (GET_MODE_SIZE (mode) + 4) / 8;
- gcc_assert (GET_CODE (operand) != REG || !MMX_REGNO_P (REGNO (operand)));
+ gcc_assert (!REG_P (operand) || !MMX_REGNO_P (REGNO (operand)));
gcc_assert (size >= 2 && size <= 3);
/* Optimize constant pool reference to immediates. This is used by fp
moves, that force all constants to memory to allow combining. */
- if (GET_CODE (operand) == MEM && MEM_READONLY_P (operand))
+ if (MEM_P (operand) && MEM_READONLY_P (operand))
{
rtx tmp = maybe_get_pool_constant (operand);
if (tmp)
operand = tmp;
}
- if (GET_CODE (operand) == MEM && !offsettable_memref_p (operand))
+ if (MEM_P (operand) && !offsettable_memref_p (operand))
{
/* The only non-offsetable memories we handle are pushes. */
int ok = push_operand (operand, VOIDmode);
@@ -12216,7 +12216,7 @@ ix86_split_long_move (rtx operands[])
/* Optimize constant pool reference to immediates. This is used by
fp moves, that force all constants to memory to allow combining. */
- if (GET_CODE (operands[1]) == MEM
+ if (MEM_P (operands[1])
&& GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (operands[1], 0)))
operands[1] = get_pool_constant (XEXP (operands[1], 0));
@@ -12236,14 +12236,14 @@ ix86_split_long_move (rtx operands[])
if (push_operand (operands[0], VOIDmode))
push = 1;
else
- gcc_assert (GET_CODE (operands[0]) != MEM
+ gcc_assert (!MEM_P (operands[0])
|| offsettable_memref_p (operands[0]));
nparts = ix86_split_to_parts (operands[1], part[1], GET_MODE (operands[0]));
ix86_split_to_parts (operands[0], part[0], GET_MODE (operands[0]));
/* When emitting push, take care for source operands on the stack. */
- if (push && GET_CODE (operands[1]) == MEM
+ if (push && MEM_P (operands[1])
&& reg_overlap_mentioned_p (stack_pointer_rtx, operands[1]))
{
if (nparts == 3)
@@ -12255,7 +12255,7 @@ ix86_split_long_move (rtx operands[])
/* We need to do copy in the right order in case an address register
of the source overlaps the destination. */
- if (REG_P (part[0][0]) && GET_CODE (part[1][0]) == MEM)
+ if (REG_P (part[0][0]) && MEM_P (part[1][0]))
{
if (reg_overlap_mentioned_p (part[0][0], XEXP (part[1][0], 0)))
collisions++;
@@ -12390,25 +12390,25 @@ ix86_split_long_move (rtx operands[])
/* If optimizing for size, attempt to locally unCSE nonzero constants. */
if (optimize_size)
{
- if (GET_CODE (operands[5]) == CONST_INT
+ if (CONST_INT_P (operands[5])
&& operands[5] != const0_rtx
&& REG_P (operands[2]))
{
- if (GET_CODE (operands[6]) == CONST_INT
+ if (CONST_INT_P (operands[6])
&& INTVAL (operands[6]) == INTVAL (operands[5]))
operands[6] = operands[2];
if (nparts == 3
- && GET_CODE (operands[7]) == CONST_INT
+ && CONST_INT_P (operands[7])
&& INTVAL (operands[7]) == INTVAL (operands[5]))
operands[7] = operands[2];
}
if (nparts == 3
- && GET_CODE (operands[6]) == CONST_INT
+ && CONST_INT_P (operands[6])
&& operands[6] != const0_rtx
&& REG_P (operands[3])
- && GET_CODE (operands[7]) == CONST_INT
+ && CONST_INT_P (operands[7])
&& INTVAL (operands[7]) == INTVAL (operands[6]))
operands[7] = operands[3];
}
@@ -12458,7 +12458,7 @@ ix86_split_ashl (rtx *operands, rtx scratch, enum machine_mode mode)
int count;
const int single_width = mode == DImode ? 32 : 64;
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
(mode == DImode ? split_di : split_ti) (operands, 2, low, high);
count = INTVAL (operands[2]) & (single_width * 2 - 1);
@@ -12585,7 +12585,7 @@ ix86_split_ashr (rtx *operands, rtx scratch, enum machine_mode mode)
int count;
const int single_width = mode == DImode ? 32 : 64;
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
(mode == DImode ? split_di : split_ti) (operands, 2, low, high);
count = INTVAL (operands[2]) & (single_width * 2 - 1);
@@ -12664,7 +12664,7 @@ ix86_split_lshr (rtx *operands, rtx scratch, enum machine_mode mode)
int count;
const int single_width = mode == DImode ? 32 : 64;
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
(mode == DImode ? split_di : split_ti) (operands, 2, low, high);
count = INTVAL (operands[2]) & (single_width * 2 - 1);
@@ -12725,7 +12725,7 @@ static void
predict_jump (int prob)
{
rtx insn = get_last_insn ();
- gcc_assert (GET_CODE (insn) == JUMP_INSN);
+ gcc_assert (JUMP_P (insn));
REG_NOTES (insn)
= gen_rtx_EXPR_LIST (REG_BR_PROB,
GEN_INT (prob),
@@ -12785,7 +12785,7 @@ scale_counter (rtx countreg, int scale)
if (scale == 1)
return countreg;
- if (GET_CODE (countreg) == CONST_INT)
+ if (CONST_INT_P (countreg))
return GEN_INT (INTVAL (countreg) / scale);
gcc_assert (REG_P (countreg));
@@ -12948,7 +12948,7 @@ expand_movmem_via_rep_mov (rtx destmem, rtx srcmem,
rtx countreg;
/* If the size is known, it is shorter to use rep movs. */
- if (mode == QImode && GET_CODE (count) == CONST_INT
+ if (mode == QImode && CONST_INT_P (count)
&& !(INTVAL (count) & 3))
mode = SImode;
@@ -13015,7 +13015,7 @@ expand_movmem_epilogue (rtx destmem, rtx srcmem,
rtx destptr, rtx srcptr, rtx count, int max_size)
{
rtx src, dest;
- if (GET_CODE (count) == CONST_INT)
+ if (CONST_INT_P (count))
{
HOST_WIDE_INT countval = INTVAL (count);
int offset = 0;
@@ -13168,7 +13168,7 @@ expand_setmem_epilogue (rtx destmem, rtx destptr, rtx value, rtx count, int max_
{
rtx dest;
- if (GET_CODE (count) == CONST_INT)
+ if (CONST_INT_P (count))
{
HOST_WIDE_INT countval = INTVAL (count);
int offset = 0;
@@ -13551,15 +13551,15 @@ ix86_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp,
enum stringop_alg alg;
int dynamic_check;
- if (GET_CODE (align_exp) == CONST_INT)
+ if (CONST_INT_P (align_exp))
align = INTVAL (align_exp);
/* i386 can do misaligned access on reasonably increased cost. */
- if (GET_CODE (expected_align_exp) == CONST_INT
+ if (CONST_INT_P (expected_align_exp)
&& INTVAL (expected_align_exp) > align)
align = INTVAL (expected_align_exp);
- if (GET_CODE (count_exp) == CONST_INT)
+ if (CONST_INT_P (count_exp))
count = expected_size = INTVAL (count_exp);
- if (GET_CODE (expected_size_exp) == CONST_INT && count == 0)
+ if (CONST_INT_P (expected_size_exp) && count == 0)
expected_size = INTVAL (expected_size_exp);
/* Step 0: Decide on preferred algorithm, desired alignment and
@@ -13606,7 +13606,7 @@ ix86_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp,
/* Step 1: Prologue guard. */
/* Alignment code needs count to be in register. */
- if (GET_CODE (count_exp) == CONST_INT && desired_align > align)
+ if (CONST_INT_P (count_exp) && desired_align > align)
{
enum machine_mode mode = SImode;
if (TARGET_64BIT && (count & ~0xffffffff))
@@ -13704,7 +13704,7 @@ ix86_expand_movmem (rtx dst, rtx src, rtx count_exp, rtx align_exp,
break;
}
/* Adjust properly the offset of src and dest memory for aliasing. */
- if (GET_CODE (count_exp) == CONST_INT)
+ if (CONST_INT_P (count_exp))
{
src = adjust_automodify_address_nv (src, BLKmode, srcreg,
(count / size_needed) * size_needed);
@@ -13762,7 +13762,7 @@ promote_duplicated_reg (enum machine_mode mode, rtx val)
gcc_assert (mode == SImode || mode == DImode);
if (val == const0_rtx)
return copy_to_mode_reg (mode, const0_rtx);
- if (GET_CODE (val) == CONST_INT)
+ if (CONST_INT_P (val))
{
HOST_WIDE_INT v = INTVAL (val) & 255;
@@ -13861,15 +13861,15 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
bool force_loopy_epilogue = false;
int dynamic_check;
- if (GET_CODE (align_exp) == CONST_INT)
+ if (CONST_INT_P (align_exp))
align = INTVAL (align_exp);
/* i386 can do misaligned access on reasonably increased cost. */
- if (GET_CODE (expected_align_exp) == CONST_INT
+ if (CONST_INT_P (expected_align_exp)
&& INTVAL (expected_align_exp) > align)
align = INTVAL (expected_align_exp);
- if (GET_CODE (count_exp) == CONST_INT)
+ if (CONST_INT_P (count_exp))
count = expected_size = INTVAL (count_exp);
- if (GET_CODE (expected_size_exp) == CONST_INT && count == 0)
+ if (CONST_INT_P (expected_size_exp) && count == 0)
expected_size = INTVAL (expected_size_exp);
/* Step 0: Decide on preferred algorithm, desired alignment and
@@ -13914,7 +13914,7 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
/* Step 1: Prologue guard. */
/* Alignment code needs count to be in register. */
- if (GET_CODE (count_exp) == CONST_INT && desired_align > align)
+ if (CONST_INT_P (count_exp) && desired_align > align)
{
enum machine_mode mode = SImode;
if (TARGET_64BIT && (count & ~0xffffffff))
@@ -13924,7 +13924,7 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
/* Do the cheap promotion to allow better CSE across the
main loop and epilogue (ie one load of the big constant in the
front of all code. */
- if (GET_CODE (val_exp) == CONST_INT)
+ if (CONST_INT_P (val_exp))
promoted_val = promote_duplicated_reg_to_size (val_exp, size_needed,
desired_align, align);
/* Ensure that alignment prologue won't copy past end of block. */
@@ -14022,7 +14022,7 @@ ix86_expand_setmem (rtx dst, rtx count_exp, rtx val_exp, rtx align_exp,
break;
}
/* Adjust properly the offset of src and dest memory for aliasing. */
- if (GET_CODE (count_exp) == CONST_INT)
+ if (CONST_INT_P (count_exp))
dst = adjust_automodify_address_nv (dst, BLKmode, destreg,
(count / size_needed) * size_needed);
else
@@ -14076,7 +14076,7 @@ ix86_expand_strlen (rtx out, rtx src, rtx eoschar, rtx align)
if (TARGET_UNROLL_STRLEN && eoschar == const0_rtx && optimize > 1
&& !TARGET_INLINE_ALL_STRINGOPS
&& !optimize_size
- && (GET_CODE (align) != CONST_INT || INTVAL (align) < 4))
+ && (!CONST_INT_P (align) || INTVAL (align) < 4))
return 0;
addr = force_reg (Pmode, XEXP (src, 0));
@@ -14161,7 +14161,7 @@ ix86_expand_strlensi_unroll_1 (rtx out, rtx src, rtx align_rtx)
rtx cmp;
align = 0;
- if (GET_CODE (align_rtx) == CONST_INT)
+ if (CONST_INT_P (align_rtx))
align = INTVAL (align_rtx);
/* Loop to check 1..3 bytes for null to get an aligned pointer. */
@@ -14606,7 +14606,7 @@ ix86_attr_length_address_default (rtx insn)
extract_insn_cached (insn);
for (i = recog_data.n_operands - 1; i >= 0; --i)
- if (GET_CODE (recog_data.operand[i]) == MEM)
+ if (MEM_P (recog_data.operand[i]))
{
return memory_address_length (XEXP (recog_data.operand[i], 0));
break;
@@ -14673,7 +14673,7 @@ ix86_flags_dependent (rtx insn, rtx dep_insn, enum attr_type insn_type)
else
return 0;
- if (GET_CODE (set) != REG || REGNO (set) != FLAGS_REG)
+ if (!REG_P (set) || REGNO (set) != FLAGS_REG)
return 0;
/* This test is true if the dependent insn reads the flags but
@@ -14712,7 +14712,7 @@ ix86_agi_dependent (rtx insn, rtx dep_insn, enum attr_type insn_type)
int i;
extract_insn_cached (insn);
for (i = recog_data.n_operands - 1; i >= 0; --i)
- if (GET_CODE (recog_data.operand[i]) == MEM)
+ if (MEM_P (recog_data.operand[i]))
{
addr = XEXP (recog_data.operand[i], 0);
goto found;
@@ -14775,7 +14775,7 @@ ix86_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost)
&& (set = single_set (dep_insn)) != NULL_RTX
&& (set2 = single_set (insn)) != NULL_RTX
&& rtx_equal_p (SET_DEST (set), SET_SRC (set2))
- && GET_CODE (SET_DEST (set2)) == MEM)
+ && MEM_P (SET_DEST (set2)))
cost += 1;
/* Show ability of reorder buffer to hide latency of load by executing
@@ -18250,7 +18250,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
return false;
case ASHIFT:
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
&& (GET_MODE (XEXP (x, 0)) != DImode || TARGET_64BIT))
{
HOST_WIDE_INT value = INTVAL (XEXP (x, 1));
@@ -18274,7 +18274,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
case ROTATERT:
if (!TARGET_64BIT && GET_MODE (XEXP (x, 0)) == DImode)
{
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
if (INTVAL (XEXP (x, 1)) > 32)
*total = ix86_cost->shift_const + COSTS_N_INSNS (2);
@@ -18291,7 +18291,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
}
else
{
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
*total = ix86_cost->shift_const;
else
*total = ix86_cost->shift_var;
@@ -18309,7 +18309,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
rtx op0 = XEXP (x, 0);
rtx op1 = XEXP (x, 1);
int nbits;
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
unsigned HOST_WIDE_INT value = INTVAL (XEXP (x, 1));
for (nbits = 0; value != 0; value &= value - 1)
@@ -18329,7 +18329,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
if (GET_CODE (op0) == GET_CODE (op1))
is_mulwiden = 1, op1 = XEXP (op1, 0);
- else if (GET_CODE (op1) == CONST_INT)
+ else if (CONST_INT_P (op1))
{
if (GET_CODE (op0) == SIGN_EXTEND)
is_mulwiden = trunc_int_for_mode (INTVAL (op1), inner_mode)
@@ -18367,7 +18367,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
{
if (GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
- && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (XEXP (x, 0), 0), 1))
&& CONSTANT_P (XEXP (x, 1)))
{
HOST_WIDE_INT val = INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1));
@@ -18382,7 +18382,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
}
}
else if (GET_CODE (XEXP (x, 0)) == MULT
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
{
HOST_WIDE_INT val = INTVAL (XEXP (XEXP (x, 0), 1));
if (val == 2 || val == 4 || val == 8)
@@ -18444,7 +18444,7 @@ ix86_rtx_costs (rtx x, int code, int outer_code, int *total)
case COMPARE:
if (GET_CODE (XEXP (x, 0)) == ZERO_EXTRACT
&& XEXP (XEXP (x, 0), 1) == const1_rtx
- && GET_CODE (XEXP (XEXP (x, 0), 2)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (x, 0), 2))
&& XEXP (x, 1) == const0_rtx)
{
/* This kind of construct is implemented using test[bwl].
@@ -18938,14 +18938,14 @@ min_insn_size (rtx insn)
if (GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE
&& XINT (PATTERN (insn), 1) == UNSPECV_ALIGN)
return 0;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
return 0;
/* Important case - calls are always 5 bytes.
It is common to have many calls in the row. */
- if (GET_CODE (insn) == CALL_INSN
+ if (CALL_P (insn)
&& symbolic_reference_mentioned_p (PATTERN (insn))
&& !SIBLING_CALL_P (insn))
return 5;
@@ -18955,7 +18955,7 @@ min_insn_size (rtx insn)
/* For normal instructions we may rely on the sizes of addresses
and the presence of symbol to require 4 bytes of encoding.
This is not the case for jumps where references are PC relative. */
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
{
l = get_attr_length_address (insn);
if (l < 4 && symbolic_reference_mentioned_p (PATTERN (insn)))
@@ -18994,10 +18994,10 @@ ix86_avoid_jump_misspredicts (void)
if (dump_file)
fprintf(dump_file, "Insn %i estimated to %i bytes\n",
INSN_UID (insn), min_insn_size (insn));
- if ((GET_CODE (insn) == JUMP_INSN
+ if ((JUMP_P (insn)
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC)
- || GET_CODE (insn) == CALL_INSN)
+ || CALL_P (insn))
njumps++;
else
continue;
@@ -19005,10 +19005,10 @@ ix86_avoid_jump_misspredicts (void)
while (njumps > 3)
{
start = NEXT_INSN (start);
- if ((GET_CODE (start) == JUMP_INSN
+ if ((JUMP_P (start)
&& GET_CODE (PATTERN (start)) != ADDR_VEC
&& GET_CODE (PATTERN (start)) != ADDR_DIFF_VEC)
- || GET_CODE (start) == CALL_INSN)
+ || CALL_P (start))
njumps--, isjump = 1;
else
isjump = 0;
@@ -19048,13 +19048,13 @@ ix86_pad_returns (void)
rtx prev;
bool replace = false;
- if (GET_CODE (ret) != JUMP_INSN || GET_CODE (PATTERN (ret)) != RETURN
+ if (!JUMP_P (ret) || GET_CODE (PATTERN (ret)) != RETURN
|| !maybe_hot_bb_p (bb))
continue;
for (prev = PREV_INSN (ret); prev; prev = PREV_INSN (prev))
- if (active_insn_p (prev) || GET_CODE (prev) == CODE_LABEL)
+ if (active_insn_p (prev) || LABEL_P (prev))
break;
- if (prev && GET_CODE (prev) == CODE_LABEL)
+ if (prev && LABEL_P (prev))
{
edge e;
edge_iterator ei;
@@ -19068,8 +19068,8 @@ ix86_pad_returns (void)
{
prev = prev_active_insn (ret);
if (prev
- && ((GET_CODE (prev) == JUMP_INSN && any_condjump_p (prev))
- || GET_CODE (prev) == CALL_INSN))
+ && ((JUMP_P (prev) && any_condjump_p (prev))
+ || CALL_P (prev)))
replace = true;
/* Empty functions get branch mispredict even when the jump destination
is not visible to us. */
diff --git a/gcc/config/i386/i386.md b/gcc/config/i386/i386.md
index fb3a41bbe4f..5ca723b4501 100644
--- a/gcc/config/i386/i386.md
+++ b/gcc/config/i386/i386.md
@@ -507,7 +507,7 @@
(match_operand:TI 1 "x86_64_general_operand" "")))]
"TARGET_64BIT"
{
- if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[0]) && MEM_P (operands[1]))
operands[0] = force_reg (TImode, operands[0]);
ix86_compare_op0 = operands[0];
ix86_compare_op1 = operands[1];
@@ -520,7 +520,7 @@
(match_operand:DI 1 "x86_64_general_operand" "")))]
""
{
- if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[0]) && MEM_P (operands[1]))
operands[0] = force_reg (DImode, operands[0]);
ix86_compare_op0 = operands[0];
ix86_compare_op1 = operands[1];
@@ -533,7 +533,7 @@
(match_operand:SI 1 "general_operand" "")))]
""
{
- if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[0]) && MEM_P (operands[1]))
operands[0] = force_reg (SImode, operands[0]);
ix86_compare_op0 = operands[0];
ix86_compare_op1 = operands[1];
@@ -546,7 +546,7 @@
(match_operand:HI 1 "general_operand" "")))]
""
{
- if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[0]) && MEM_P (operands[1]))
operands[0] = force_reg (HImode, operands[0]);
ix86_compare_op0 = operands[0];
ix86_compare_op1 = operands[1];
@@ -559,7 +559,7 @@
(match_operand:QI 1 "general_operand" "")))]
"TARGET_QIMODE_MATH"
{
- if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[0]) && MEM_P (operands[1]))
operands[0] = force_reg (QImode, operands[0]);
ix86_compare_op0 = operands[0];
ix86_compare_op1 = operands[1];
@@ -638,7 +638,7 @@
[(set (reg FLAGS_REG)
(compare (match_operand:SI 0 "nonimmediate_operand" "rm,r")
(match_operand:SI 1 "general_operand" "ri,mr")))]
- "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ix86_match_ccmode (insn, CCmode)"
"cmp{l}\t{%1, %0|%0, %1}"
[(set_attr "type" "icmp")
@@ -670,7 +670,7 @@
[(set (reg FLAGS_REG)
(compare (match_operand:HI 0 "nonimmediate_operand" "rm,r")
(match_operand:HI 1 "general_operand" "ri,mr")))]
- "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ix86_match_ccmode (insn, CCmode)"
"cmp{w}\t{%1, %0|%0, %1}"
[(set_attr "type" "icmp")
@@ -692,7 +692,7 @@
[(set (reg FLAGS_REG)
(compare (match_operand:QI 0 "nonimmediate_operand" "qm,q")
(match_operand:QI 1 "general_operand" "qi,mq")))]
- "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ix86_match_ccmode (insn, CCmode)"
"cmp{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "icmp")
@@ -1310,7 +1310,7 @@
(define_insn "*movhi_1"
[(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m")
(match_operand:HI 1 "general_operand" "r,rn,rm,rn"))]
- "GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM"
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (get_attr_type (insn))
{
@@ -1419,7 +1419,7 @@
"! TARGET_PARTIAL_REG_STALL || optimize_size"
{
/* Don't generate memory->memory moves, go through a register */
- if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[0]) && MEM_P (operands[1]))
operands[1] = force_reg (HImode, operands[1]);
})
@@ -1427,7 +1427,7 @@
[(set (strict_low_part (match_operand:HI 0 "nonimmediate_operand" "+rm,r"))
(match_operand:HI 1 "general_operand" "rn,m"))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"mov{w}\t{%1, %0|%0, %1}"
[(set_attr "type" "imov")
(set_attr "mode" "HI")])
@@ -1483,12 +1483,12 @@
(define_insn "*movqi_1"
[(set (match_operand:QI 0 "nonimmediate_operand" "=q,q ,q ,r,r ,?r,m")
(match_operand:QI 1 "general_operand" " q,qn,qm,q,rn,qm,qn"))]
- "GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM"
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (get_attr_type (insn))
{
case TYPE_IMOVX:
- gcc_assert (ANY_QI_REG_P (operands[1]) || GET_CODE (operands[1]) == MEM);
+ gcc_assert (ANY_QI_REG_P (operands[1]) || MEM_P (operands[1]));
return "movz{bl|x}\t{%1, %k0|%k0, %1}";
default:
if (get_attr_mode (insn) == MODE_SI)
@@ -1593,7 +1593,7 @@
"! TARGET_PARTIAL_REG_STALL || optimize_size"
{
/* Don't generate memory->memory moves, go through a register. */
- if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[0]) && MEM_P (operands[1]))
operands[1] = force_reg (QImode, operands[1]);
})
@@ -1601,7 +1601,7 @@
[(set (strict_low_part (match_operand:QI 0 "nonimmediate_operand" "+qm,q"))
(match_operand:QI 1 "general_operand" "*qn,m"))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"mov{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "imov")
(set_attr "mode" "QI")])
@@ -2156,7 +2156,7 @@
[(set (match_operand:TI 0 "nonimmediate_operand" "=x,x,m")
(match_operand:TI 1 "vector_move_operand" "C,xm,x"))]
"TARGET_SSE && !TARGET_64BIT
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (which_alternative)
{
@@ -2190,7 +2190,7 @@
[(set (match_operand:TI 0 "nonimmediate_operand" "=r,o,x,x,xm")
(match_operand:TI 1 "general_operand" "riFo,riF,C,xm,x"))]
"TARGET_64BIT
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (which_alternative)
{
@@ -2274,7 +2274,7 @@
[(set (match_operand:SF 0 "push_operand" "")
(match_operand:SF 1 "memory_operand" ""))]
"reload_completed
- && GET_CODE (operands[1]) == MEM
+ && MEM_P (operands[1])
&& constant_pool_reference_p (operands[1])"
[(set (match_dup 0)
(match_dup 1))]
@@ -2470,7 +2470,7 @@
"=f,m,f,*r ,o ,Y*x,Y*x,Y*x ,m ")
(match_operand:DF 1 "general_operand"
"fm,f,G,*roF,F*r,C ,Y*x,mY*x,Y*x"))]
- "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ((optimize_size || !TARGET_INTEGER_DFMODE_MOVES) && !TARGET_64BIT)
&& (reload_in_progress || reload_completed
|| (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
@@ -2592,7 +2592,7 @@
"=f,m,f,r ,o ,Y*x,Y*x,Y*x,m ")
(match_operand:DF 1 "general_operand"
"fm,f,G,roF,Fr,C ,Y*x,m ,Y*x"))]
- "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ((!optimize_size && TARGET_INTEGER_DFMODE_MOVES) || TARGET_64BIT)
&& (reload_in_progress || reload_completed
|| (ix86_cmodel == CM_MEDIUM || ix86_cmodel == CM_LARGE)
@@ -2714,7 +2714,7 @@
[(set (match_operand:DF 0 "nonimmediate_operand" "")
(match_operand:DF 1 "general_operand" ""))]
"reload_completed
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ! (ANY_FP_REG_P (operands[0]) ||
(GET_CODE (operands[0]) == SUBREG
&& ANY_FP_REG_P (SUBREG_REG (operands[0]))))
@@ -2807,7 +2807,7 @@
[(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,*r,o")
(match_operand:XF 1 "general_operand" "fm,f,G,*roF,F*r"))]
"optimize_size
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))
&& (reload_in_progress || reload_completed
|| (optimize_size && standard_80387_constant_p (operands[1]))
|| GET_CODE (operands[1]) != CONST_DOUBLE
@@ -2842,7 +2842,7 @@
[(set (match_operand:XF 0 "nonimmediate_operand" "=f,m,f,r,o")
(match_operand:XF 1 "general_operand" "fm,f,G,roF,Fr"))]
"!optimize_size
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))
&& (reload_in_progress || reload_completed
|| (optimize_size && standard_80387_constant_p (operands[1]))
|| GET_CODE (operands[1]) != CONST_DOUBLE
@@ -2878,7 +2878,7 @@
[(set (match_operand 0 "nonimmediate_operand" "")
(match_operand 1 "general_operand" ""))]
"reload_completed
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))
&& GET_MODE (operands[0]) == XFmode
&& ! (ANY_FP_REG_P (operands[0]) ||
(GET_CODE (operands[0]) == SUBREG
@@ -2893,9 +2893,10 @@
[(set (match_operand 0 "register_operand" "")
(match_operand 1 "memory_operand" ""))]
"reload_completed
- && GET_CODE (operands[1]) == MEM
+ && MEM_P (operands[1])
&& (GET_MODE (operands[0]) == XFmode
- || GET_MODE (operands[0]) == SFmode || GET_MODE (operands[0]) == DFmode)
+ || GET_MODE (operands[0]) == SFmode
+ || GET_MODE (operands[0]) == DFmode)
&& constant_pool_reference_p (operands[1])"
[(set (match_dup 0) (match_dup 1))]
{
@@ -2925,9 +2926,10 @@
[(set (match_operand 0 "register_operand" "")
(float_extend (match_operand 1 "memory_operand" "")))]
"reload_completed
- && GET_CODE (operands[1]) == MEM
+ && MEM_P (operands[1])
&& (GET_MODE (operands[0]) == XFmode
- || GET_MODE (operands[0]) == SFmode || GET_MODE (operands[0]) == DFmode)
+ || GET_MODE (operands[0]) == SFmode
+ || GET_MODE (operands[0]) == DFmode)
&& constant_pool_reference_p (operands[1])"
[(set (match_dup 0) (match_dup 1))]
{
@@ -3001,7 +3003,7 @@
[(set (match_operand:TF 0 "nonimmediate_operand" "=r,o,x,x,xm")
(match_operand:TF 1 "general_operand" "riFo,riF,C,xm,x"))]
"TARGET_64BIT
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (which_alternative)
{
@@ -3214,7 +3216,7 @@
(clobber (reg:CC FLAGS_REG))]
"reload_completed
&& ANY_QI_REG_P (operands[0])
- && (ANY_QI_REG_P (operands[1]) || GET_CODE (operands[1]) == MEM)
+ && (ANY_QI_REG_P (operands[1]) || MEM_P (operands[1]))
&& (TARGET_ZERO_EXTEND_WITH_AND && !optimize_size)
&& !reg_overlap_mentioned_p (operands[0], operands[1])"
[(set (match_dup 0) (const_int 0))
@@ -5191,7 +5193,7 @@
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
/* Avoid overflows. */
&& ((INTVAL (operands[2]) & ((((unsigned int) 1) << 31) - 1)))
&& (INTVAL (operands[2]) == 128
@@ -5262,7 +5264,7 @@
- do we need new constraint? */
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
/* Avoid overflows. */
&& ((INTVAL (operands[2]) & ((((unsigned int) 1) << 31) - 1)))
&& (INTVAL (operands[2]) == 128
@@ -5288,7 +5290,7 @@
(clobber (match_scratch:DI 0 "=r"))]
"TARGET_64BIT
&& ix86_match_ccmode (insn, CCZmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))
/* Current assemblers are broken and do not allow @GOTOFF in
ought but a memory context. */
&& ! pic_symbolic_operand (operands[2], VOIDmode)"
@@ -5311,7 +5313,7 @@
- do we need new constraint? */
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
/* Avoid overflows. */
&& ((INTVAL (operands[2]) & ((((unsigned int) 1) << 31) - 1)))
&& (INTVAL (operands[2]) == 128
@@ -5386,7 +5388,7 @@
(clobber (match_scratch:DI 0 "=r"))]
"TARGET_64BIT
&& ix86_match_ccmode (insn, CCGOCmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))
/* Current assemblers are broken and do not allow @GOTOFF in
ought but a memory context. */
&& ! pic_symbolic_operand (operands[2], VOIDmode)"
@@ -5407,7 +5409,7 @@
gcc_assert (rtx_equal_p (operands[0], operands[1]));
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
/* Avoid overflows. */
&& ((INTVAL (operands[2]) & ((((unsigned int) 1) << 31) - 1)))
&& (INTVAL (operands[2]) == 128
@@ -5455,7 +5457,7 @@
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5536,7 +5538,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5606,7 +5608,7 @@
gcc_assert (rtx_equal_p (operands[0], operands[1]));
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5652,7 +5654,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5675,7 +5677,7 @@
(match_operand:SI 1 "nonimmediate_operand" "%0")))
(clobber (match_scratch:SI 0 "=r"))]
"ix86_match_ccmode (insn, CCZmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))
/* Current assemblers are broken and do not allow @GOTOFF in
ought but a memory context. */
&& ! pic_symbolic_operand (operands[2], VOIDmode)"
@@ -5696,7 +5698,7 @@
gcc_assert (rtx_equal_p (operands[0], operands[1]));
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5740,7 +5742,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5810,7 +5812,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"ix86_match_ccmode (insn, CCGOCmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))
/* Current assemblers are broken and do not allow @GOTOFF in
ought but a memory context. */
&& ! pic_symbolic_operand (operands[2], VOIDmode)"
@@ -5831,7 +5833,7 @@
gcc_assert (rtx_equal_p (operands[0], operands[1]));
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5884,7 +5886,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5925,7 +5927,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5967,7 +5969,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -5990,7 +5992,7 @@
(match_operand:HI 1 "nonimmediate_operand" "%0")))
(clobber (match_scratch:HI 0 "=r"))]
"ix86_match_ccmode (insn, CCZmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
{
switch (get_attr_type (insn))
{
@@ -6006,7 +6008,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -6070,7 +6072,7 @@
(const_int 0)))
(clobber (match_scratch:HI 0 "=r"))]
"ix86_match_ccmode (insn, CCGOCmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
{
switch (get_attr_type (insn))
{
@@ -6086,7 +6088,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -6137,7 +6139,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -6185,7 +6187,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'.
Exceptions: -128 encodes smaller than 128, so swap sign and op. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -6214,7 +6216,7 @@
(match_operand:QI 1 "general_operand" "qn,qnm")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
{
switch (get_attr_type (insn))
{
@@ -6229,7 +6231,7 @@
default:
/* Make things pretty and `subl $4,%eax' rather than `addl $-4, %eax'. */
- if (GET_CODE (operands[1]) == CONST_INT
+ if (CONST_INT_P (operands[1])
&& INTVAL (operands[1]) < 0)
{
operands[1] = GEN_INT (-INTVAL (operands[1]));
@@ -6267,14 +6269,14 @@
else
{
gcc_assert (operands[2] == constm1_rtx
- || (GET_CODE (operands[2]) == CONST_INT
+ || (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) == 255));
return "dec{b}\t%0";
}
default:
/* Make things pretty and `subb $4,%al' rather than `addb $-4, %al'. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) < 0)
{
operands[2] = GEN_INT (-INTVAL (operands[2]));
@@ -6295,7 +6297,7 @@
(match_operand:QI 1 "nonimmediate_operand" "%0")))
(clobber (match_scratch:QI 0 "=q"))]
"ix86_match_ccmode (insn, CCZmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
{
switch (get_attr_type (insn))
{
@@ -6305,14 +6307,14 @@
else
{
gcc_assert (operands[2] == constm1_rtx
- || (GET_CODE (operands[2]) == CONST_INT
+ || (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) == 255));
return "dec{b}\t%0";
}
default:
/* Make things pretty and `subb $4,%al' rather than `addb $-4, %al'. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) < 0)
{
operands[2] = GEN_INT (-INTVAL (operands[2]));
@@ -6340,7 +6342,7 @@
{
case TYPE_INCDEC:
if (operands[2] == constm1_rtx
- || (GET_CODE (operands[2]) == CONST_INT
+ || (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) == 255))
return "inc{b}\t%0";
else
@@ -6374,7 +6376,7 @@
(const_int 0)))
(clobber (match_scratch:QI 0 "=q"))]
"ix86_match_ccmode (insn, CCGOCmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
{
switch (get_attr_type (insn))
{
@@ -6384,14 +6386,14 @@
else
{
gcc_assert (operands[2] == constm1_rtx
- || (GET_CODE (operands[2]) == CONST_INT
+ || (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) == 255));
return "dec{b}\t%0";
}
default:
/* Make things pretty and `subb $4,%al' rather than `addb $-4, %al'. */
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) < 0)
{
operands[2] = GEN_INT (-INTVAL (operands[2]));
@@ -6428,7 +6430,7 @@
else
{
gcc_assert (operands[2] == constm1_rtx
- || (GET_CODE (operands[2]) == CONST_INT
+ || (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) == 255));
return "dec{b}\t%h0";
}
@@ -6464,7 +6466,7 @@
else
{
gcc_assert (operands[2] == constm1_rtx
- || (GET_CODE (operands[2]) == CONST_INT
+ || (CONST_INT_P (operands[2])
&& INTVAL (operands[2]) == 255));
return "dec{b}\t%h0";
}
@@ -6843,7 +6845,7 @@
(match_operand:QI 1 "general_operand" "qn,qmn")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"sub{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "alu1")
(set_attr "mode" "QI")])
@@ -6913,7 +6915,7 @@
(match_operand:DI 2 "x86_64_general_operand" "K,e,mr")))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"@
imul{q}\t{%2, %1, %0|%0, %1, %2}
imul{q}\t{%2, %1, %0|%0, %1, %2}
@@ -6944,7 +6946,7 @@
(mult:SI (match_operand:SI 1 "nonimmediate_operand" "%rm,rm,0")
(match_operand:SI 2 "general_operand" "K,i,mr")))
(clobber (reg:CC FLAGS_REG))]
- "GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM"
+ "!(MEM_P (operands[1]) && MEM_P (operands[2]))"
"@
imul{l}\t{%2, %1, %0|%0, %1, %2}
imul{l}\t{%2, %1, %0|%0, %1, %2}
@@ -6969,7 +6971,7 @@
(match_operand:SI 2 "general_operand" "K,i,mr"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"@
imul{l}\t{%2, %1, %k0|%k0, %1, %2}
imul{l}\t{%2, %1, %k0|%k0, %1, %2}
@@ -7000,7 +7002,7 @@
(mult:HI (match_operand:HI 1 "nonimmediate_operand" "%rm,rm,0")
(match_operand:HI 2 "general_operand" "K,i,mr")))
(clobber (reg:CC FLAGS_REG))]
- "GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM"
+ "!(MEM_P (operands[1]) && MEM_P (operands[2]))"
"@
imul{w}\t{%2, %1, %0|%0, %1, %2}
imul{w}\t{%2, %1, %0|%0, %1, %2}
@@ -7029,7 +7031,7 @@
(match_operand:QI 2 "nonimmediate_operand" "qm")))
(clobber (reg:CC FLAGS_REG))]
"TARGET_QIMODE_MATH
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"mul{b}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7055,7 +7057,7 @@
(zero_extend:HI (match_operand:QI 2 "nonimmediate_operand" "qm"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_QIMODE_MATH
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"mul{b}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7079,7 +7081,7 @@
(sign_extend:HI (match_operand:QI 2 "nonimmediate_operand" "qm"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_QIMODE_MATH
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"imul{b}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7105,7 +7107,7 @@
(zero_extend:TI (match_operand:DI 2 "nonimmediate_operand" "rm"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"mul{q}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7132,7 +7134,7 @@
(zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "rm"))))
(clobber (reg:CC FLAGS_REG))]
"!TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"mul{l}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7158,7 +7160,7 @@
(sign_extend:TI (match_operand:DI 2 "nonimmediate_operand" "rm"))))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"imul{q}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7184,7 +7186,7 @@
(sign_extend:DI (match_operand:SI 2 "nonimmediate_operand" "rm"))))
(clobber (reg:CC FLAGS_REG))]
"!TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"imul{l}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7220,7 +7222,7 @@
(clobber (match_scratch:DI 3 "=1"))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"mul{q}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7255,7 +7257,7 @@
(const_int 32))))
(clobber (match_scratch:SI 3 "=1"))
(clobber (reg:CC FLAGS_REG))]
- "GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM"
+ "!(MEM_P (operands[1]) && MEM_P (operands[2]))"
"mul{l}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7277,7 +7279,7 @@
(clobber (match_scratch:SI 3 "=1"))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"mul{l}\t%2"
[(set_attr "type" "imul")
(set_attr "length_immediate" "0")
@@ -7313,7 +7315,7 @@
(clobber (match_scratch:DI 3 "=1"))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"imul{q}\t%2"
[(set_attr "type" "imul")
(set (attr "athlon_decode")
@@ -7347,7 +7349,7 @@
(const_int 32))))
(clobber (match_scratch:SI 3 "=1"))
(clobber (reg:CC FLAGS_REG))]
- "GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM"
+ "!(MEM_P (operands[1]) && MEM_P (operands[2]))"
"imul{l}\t%2"
[(set_attr "type" "imul")
(set (attr "athlon_decode")
@@ -7368,7 +7370,7 @@
(clobber (match_scratch:SI 3 "=1"))
(clobber (reg:CC FLAGS_REG))]
"TARGET_64BIT
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"imul{l}\t%2"
[(set_attr "type" "imul")
(set (attr "athlon_decode")
@@ -7767,7 +7769,7 @@
(match_operand:DI 1 "x86_64_szext_general_operand" "Z,Z,e,e,re"))
(const_int 0)))]
"TARGET_64BIT && ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
test{l}\t{%k1, %k0|%k0, %k1}
test{l}\t{%k1, %k0|%k0, %k1}
@@ -7786,7 +7788,7 @@
(match_operand:SI 1 "general_operand" "in,in,rin"))
(const_int 0)))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"test{l}\t{%1, %0|%0, %1}"
[(set_attr "type" "test")
(set_attr "modrm" "0,1,1")
@@ -7808,7 +7810,7 @@
(match_operand:HI 1 "general_operand" "n,n,rn"))
(const_int 0)))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"test{w}\t{%1, %0|%0, %1}"
[(set_attr "type" "test")
(set_attr "modrm" "0,1,1")
@@ -7830,14 +7832,14 @@
(match_operand:QI 0 "nonimmediate_operand" "%!*a,q,qm,r")
(match_operand:QI 1 "general_operand" "n,n,qn,n"))
(const_int 0)))]
- "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ix86_match_ccmode (insn,
- GET_CODE (operands[1]) == CONST_INT
+ CONST_INT_P (operands[1])
&& INTVAL (operands[1]) >= 0 ? CCNOmode : CCZmode)"
{
if (which_alternative == 3)
{
- if (GET_CODE (operands[1]) == CONST_INT && INTVAL (operands[1]) < 0)
+ if (CONST_INT_P (operands[1]) && INTVAL (operands[1]) < 0)
operands[1] = GEN_INT (INTVAL (operands[1]) & 0xff);
return "test{l}\t{%1, %k0|%k0, %1}";
}
@@ -7855,7 +7857,7 @@
(match_operand:QI 0 "nonimmediate_operand" "%!*a,q,qm")
(match_operand:QI 1 "general_operand" "n,n,qn"))
(const_int 0)))]
- "(GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)
+ "!(MEM_P (operands[0]) && MEM_P (operands[1]))
&& ix86_match_ccmode (insn, CCNOmode)"
"test{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "test")
@@ -7905,7 +7907,7 @@
(match_operand:QI 1 "general_operand" "Qm")))
(const_int 0)))]
"!TARGET_64BIT && ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"test{b}\t{%1, %h0|%h0, %1}"
[(set_attr "type" "test")
(set_attr "mode" "QI")])
@@ -8001,7 +8003,7 @@
enum machine_mode mode, submode;
mode = GET_MODE (val);
- if (GET_CODE (val) == MEM)
+ if (MEM_P (val))
{
/* ??? Combine likes to put non-volatile mem extractions in QImode
no matter the size of the test. So find a mode that works. */
@@ -8107,7 +8109,7 @@
{
enum machine_mode mode;
- gcc_assert (GET_CODE (operands[2]) == CONST_INT);
+ gcc_assert (CONST_INT_P (operands[2]));
if (INTVAL (operands[2]) == 0xff)
mode = QImode;
else
@@ -8172,7 +8174,7 @@
{
enum machine_mode mode;
- gcc_assert (GET_CODE (operands[2]) == CONST_INT);
+ gcc_assert (CONST_INT_P (operands[2]));
if (INTVAL (operands[2]) == 0xff)
mode = QImode;
else
@@ -8291,7 +8293,7 @@
switch (get_attr_type (insn))
{
case TYPE_IMOVX:
- gcc_assert (GET_CODE (operands[2]) == CONST_INT);
+ gcc_assert (CONST_INT_P (operands[2]));
gcc_assert (INTVAL (operands[2]) == 0xff);
return "movz{bl|x}\t{%b1, %k0|%k0, %b1}";
@@ -8346,7 +8348,7 @@
(match_operand:QI 1 "general_operand" "qi,qmi")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"and{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "alu1")
(set_attr "mode" "QI")])
@@ -8361,12 +8363,12 @@
(and:QI (match_dup 1) (match_dup 2)))]
"ix86_binary_operator_ok (AND, QImode, operands)
&& ix86_match_ccmode (insn,
- GET_CODE (operands[2]) == CONST_INT
+ CONST_INT_P (operands[2])
&& INTVAL (operands[2]) >= 0 ? CCNOmode : CCZmode)"
{
if (which_alternative == 2)
{
- if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) < 0)
+ if (CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0)
operands[2] = GEN_INT (INTVAL (operands[2]) & 0xff);
return "and{l}\t{%2, %k0|%k0, %2}";
}
@@ -8399,7 +8401,7 @@
(and:QI (match_dup 0) (match_dup 1)))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
&& ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"and{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "alu1")
(set_attr "mode" "QI")])
@@ -8694,7 +8696,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"or{l}\t{%2, %0|%0, %2}"
[(set_attr "type" "alu")
(set_attr "mode" "SI")])
@@ -8737,7 +8739,7 @@
(const_int 0)))
(clobber (match_scratch:HI 0 "=r"))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"or{w}\t{%2, %0|%0, %2}"
[(set_attr "type" "alu")
(set_attr "mode" "HI")])
@@ -8770,7 +8772,7 @@
(match_operand:QI 1 "general_operand" "qmi,qi")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"or{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "alu1")
(set_attr "mode" "QI")])
@@ -8797,7 +8799,7 @@
(ior:QI (match_dup 0) (match_dup 1)))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
&& ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"or{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "alu1")
(set_attr "mode" "QI")])
@@ -8809,7 +8811,7 @@
(const_int 0)))
(clobber (match_scratch:QI 0 "=q"))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"or{b}\t{%2, %0|%0, %2}"
[(set_attr "type" "alu")
(set_attr "mode" "QI")])
@@ -9071,7 +9073,7 @@
(const_int 0)))
(clobber (match_scratch:SI 0 "=r"))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"xor{l}\t{%2, %0|%0, %2}"
[(set_attr "type" "alu")
(set_attr "mode" "SI")])
@@ -9114,7 +9116,7 @@
(const_int 0)))
(clobber (match_scratch:HI 0 "=r"))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"xor{w}\t{%2, %0|%0, %2}"
[(set_attr "type" "alu")
(set_attr "mode" "HI")])
@@ -9147,7 +9149,7 @@
(match_operand:QI 1 "general_operand" "qi,qmi")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"xor{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "alu1")
(set_attr "mode" "QI")])
@@ -9248,7 +9250,7 @@
(xor:QI (match_dup 0) (match_dup 1)))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
&& ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"xor{b}\t{%1, %0|%0, %1}"
[(set_attr "type" "alu1")
(set_attr "mode" "QI")])
@@ -9261,7 +9263,7 @@
(const_int 0)))
(clobber (match_scratch:QI 0 "=q"))]
"ix86_match_ccmode (insn, CCNOmode)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"xor{b}\t{%2, %0|%0, %2}"
[(set_attr "type" "alu")
(set_attr "mode" "QI")])
@@ -10396,7 +10398,7 @@
return "add{q}\t{%0, %0|%0, %0}";
case TYPE_LEA:
- gcc_assert (GET_CODE (operands[2]) == CONST_INT);
+ gcc_assert (CONST_INT_P (operands[2]));
gcc_assert ((unsigned HOST_WIDE_INT) INTVAL (operands[2]) <= 3);
operands[1] = gen_rtx_MULT (DImode, operands[1],
GEN_INT (1 << INTVAL (operands[2])));
@@ -11877,7 +11879,7 @@
(match_operand:QI 1 "nonmemory_operand" "I,c")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
sar{b}\t{%1, %0|%0, %1}
sar{b}\t{%b1, %0|%0, %b1}"
@@ -12462,7 +12464,7 @@
(match_operand:QI 1 "nonmemory_operand" "I,c")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
shr{b}\t{%1, %0|%0, %1}
shr{b}\t{%b1, %0|%0, %b1}"
@@ -12745,7 +12747,7 @@
(match_operand:QI 1 "nonmemory_operand" "I,c")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
rol{b}\t{%1, %0|%0, %1}
rol{b}\t{%b1, %0|%0, %b1}"
@@ -12984,7 +12986,7 @@
(match_operand:QI 1 "nonmemory_operand" "I,c")))
(clobber (reg:CC FLAGS_REG))]
"(! TARGET_PARTIAL_REG_STALL || optimize_size)
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
ror{b}\t{%1, %0|%0, %1}
ror{b}\t{%b1, %0|%0, %b1}"
@@ -15044,7 +15046,7 @@
(match_operand:SF 2 "nonimmediate_operand" "fm,xm")]))]
"TARGET_MIX_SSE_I387
&& COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(if_then_else (eq_attr "alternative" "1")
@@ -15063,7 +15065,7 @@
(match_operand:SF 2 "nonimmediate_operand" "xm")]))]
"TARGET_SSE_MATH
&& COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(if_then_else (match_operand:SF 3 "mult_operator" "")
@@ -15078,7 +15080,7 @@
(match_operand:SF 2 "nonimmediate_operand" "fm")]))]
"TARGET_80387
&& COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(if_then_else (match_operand:SF 3 "mult_operator" "")
@@ -15093,7 +15095,7 @@
(match_operand:SF 2 "nonimmediate_operand" "fm,0,xm")]))]
"TARGET_MIX_SSE_I387
&& !COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(cond [(and (eq_attr "alternative" "2")
@@ -15137,7 +15139,7 @@
(match_operand:SF 2 "nonimmediate_operand" "fm,0")]))]
"TARGET_80387 && !TARGET_SSE_MATH
&& !COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(cond [(match_operand:SF 3 "mult_operator" "")
@@ -15190,7 +15192,7 @@
(match_operand:DF 2 "nonimmediate_operand" "fm,Ym")]))]
"TARGET_SSE2 && TARGET_MIX_SSE_I387
&& COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(if_then_else (eq_attr "alternative" "1")
@@ -15209,7 +15211,7 @@
(match_operand:DF 2 "nonimmediate_operand" "Ym")]))]
"TARGET_SSE2 && TARGET_SSE_MATH
&& COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(if_then_else (match_operand:DF 3 "mult_operator" "")
@@ -15224,7 +15226,7 @@
(match_operand:DF 2 "nonimmediate_operand" "fm")]))]
"TARGET_80387
&& COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(if_then_else (match_operand:DF 3 "mult_operator" "")
@@ -15239,7 +15241,7 @@
(match_operand:DF 2 "nonimmediate_operand" "fm,0,Ym")]))]
"TARGET_SSE2 && TARGET_SSE_MATH && TARGET_MIX_SSE_I387
&& !COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(cond [(and (eq_attr "alternative" "2")
@@ -15283,7 +15285,7 @@
(match_operand:DF 2 "nonimmediate_operand" "fm,0")]))]
"TARGET_80387 && !(TARGET_SSE2 && TARGET_SSE_MATH)
&& !COMMUTATIVE_ARITH_P (operands[3])
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(cond [(match_operand:DF 3 "mult_operator" "")
@@ -15337,7 +15339,7 @@
[(float_extend:DF (match_operand:SF 1 "nonimmediate_operand" "fm,0"))
(match_operand:DF 2 "register_operand" "0,f")]))]
"TARGET_80387 && !(TARGET_SSE2 && TARGET_SSE_MATH)
- && (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)"
+ && !(MEM_P (operands[1]) && MEM_P (operands[2]))"
"* return output_387_binary_op (insn, operands);"
[(set (attr "type")
(cond [(match_operand:DF 3 "mult_operator" "")
@@ -18120,7 +18122,7 @@
FAIL;
out = operands[0];
- if (GET_CODE (out) != REG)
+ if (!REG_P (out))
out = gen_reg_rtx (SImode);
addr1 = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
@@ -18138,7 +18140,7 @@
once cc0 is dead. */
align = operands[4];
- if (GET_CODE (count) == CONST_INT)
+ if (CONST_INT_P (count))
{
if (INTVAL (count) == 0)
{
@@ -18457,7 +18459,7 @@
(match_operand:DI 2 "nonimmediate_operand" "rm,0")
(match_operand:DI 3 "nonimmediate_operand" "0,rm")))]
"TARGET_64BIT && TARGET_CMOVE
- && (GET_CODE (operands[2]) != MEM || GET_CODE (operands[3]) != MEM)"
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
"@
cmov%O2%C1\t{%2, %0|%0, %2}
cmov%O2%c1\t{%3, %0|%0, %3}"
@@ -18500,7 +18502,7 @@
(match_operand:SI 2 "nonimmediate_operand" "rm,0")
(match_operand:SI 3 "nonimmediate_operand" "0,rm")))]
"TARGET_CMOVE
- && (GET_CODE (operands[2]) != MEM || GET_CODE (operands[3]) != MEM)"
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
"@
cmov%O2%C1\t{%2, %0|%0, %2}
cmov%O2%c1\t{%3, %0|%0, %3}"
@@ -18522,7 +18524,7 @@
(match_operand:HI 2 "nonimmediate_operand" "rm,0")
(match_operand:HI 3 "nonimmediate_operand" "0,rm")))]
"TARGET_CMOVE
- && (GET_CODE (operands[2]) != MEM || GET_CODE (operands[3]) != MEM)"
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
"@
cmov%O2%C1\t{%2, %0|%0, %2}
cmov%O2%c1\t{%3, %0|%0, %3}"
@@ -18572,7 +18574,7 @@
(match_operand:SF 2 "nonimmediate_operand" "f,0,rm,0")
(match_operand:SF 3 "nonimmediate_operand" "0,f,0,rm")))]
"TARGET_80387 && TARGET_CMOVE
- && (GET_CODE (operands[2]) != MEM || GET_CODE (operands[3]) != MEM)"
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
"@
fcmov%F1\t{%2, %0|%0, %2}
fcmov%f1\t{%3, %0|%0, %3}
@@ -18596,7 +18598,7 @@
(match_operand:DF 2 "nonimmediate_operand" "f,0,rm,0")
(match_operand:DF 3 "nonimmediate_operand" "0,f,0,rm")))]
"!TARGET_64BIT && TARGET_80387 && TARGET_CMOVE
- && (GET_CODE (operands[2]) != MEM || GET_CODE (operands[3]) != MEM)"
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
"@
fcmov%F1\t{%2, %0|%0, %2}
fcmov%f1\t{%3, %0|%0, %3}
@@ -18612,7 +18614,7 @@
(match_operand:DF 2 "nonimmediate_operand" "f,0,rm,0")
(match_operand:DF 3 "nonimmediate_operand" "0,f,0,rm")))]
"TARGET_64BIT && TARGET_80387 && TARGET_CMOVE
- && (GET_CODE (operands[2]) != MEM || GET_CODE (operands[3]) != MEM)"
+ && !(MEM_P (operands[2]) && MEM_P (operands[3]))"
"@
fcmov%F1\t{%2, %0|%0, %2}
fcmov%f1\t{%3, %0|%0, %3}
@@ -18834,7 +18836,7 @@
return "mov{l}\t{%1, %0|%0, %1}";
case TYPE_ALU:
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& (INTVAL (operands[2]) == 128
|| (INTVAL (operands[2]) < 0
&& INTVAL (operands[2]) != -128)))
@@ -18875,7 +18877,7 @@
return "mov{q}\t{%1, %0|%0, %1}";
case TYPE_ALU:
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
/* Avoid overflows. */
&& ((INTVAL (operands[2]) & ((((unsigned int) 1) << 31) - 1)))
&& (INTVAL (operands[2]) == 128
@@ -19001,7 +19003,7 @@
"TARGET_STACK_PROBE"
{
#ifdef CHECK_STACK_LIMIT
- if (GET_CODE (operands[1]) == CONST_INT
+ if (CONST_INT_P (operands[1])
&& INTVAL (operands[1]) < CHECK_STACK_LIMIT)
emit_insn (gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx,
operands[1]));
@@ -19048,7 +19050,7 @@
&& ((GET_MODE (operands[0]) == HImode
&& ((!optimize_size && !TARGET_FAST_PREFIX)
/* ??? next two lines just !satisfies_constraint_K (...) */
- || GET_CODE (operands[2]) != CONST_INT
+ || !CONST_INT_P (operands[2])
|| satisfies_constraint_K (operands[2])))
|| (GET_MODE (operands[0]) == QImode
&& (TARGET_PROMOTE_QImode || optimize_size)))"
@@ -19320,7 +19322,7 @@
"!optimize_size
&& peep2_regno_dead_p (0, FLAGS_REG)
&& ((TARGET_PENTIUM
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| !memory_displacement_operand (operands[0], SImode)))
|| (TARGET_K6 && long_memory_operand (operands[0], SImode)))"
[(parallel [(set (match_dup 0)
@@ -19334,7 +19336,7 @@
"!optimize_size
&& peep2_regno_dead_p (0, FLAGS_REG)
&& ((TARGET_PENTIUM
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| !memory_displacement_operand (operands[0], HImode)))
|| (TARGET_K6 && long_memory_operand (operands[0], HImode)))"
[(parallel [(set (match_dup 0)
@@ -19348,7 +19350,7 @@
"!optimize_size
&& peep2_regno_dead_p (0, FLAGS_REG)
&& ((TARGET_PENTIUM
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| !memory_displacement_operand (operands[0], QImode)))
|| (TARGET_K6 && long_memory_operand (operands[0], QImode)))"
[(parallel [(set (match_dup 0)
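
The i386.md hunks above all follow the same two idioms: explicit GET_CODE comparisons are replaced by the rtx predicate macros, and the frequent "not both operands in memory" insn condition is rewritten through De Morgan's law. A minimal sketch of the macros involved, roughly as they are defined in GCC's rtl.h (the exact definitions in this tree are not quoted here):

    /* Sketch only: rtx predicate macros, approximately as defined in rtl.h.  */
    #define REG_P(X)       (GET_CODE (X) == REG)
    #define MEM_P(X)       (GET_CODE (X) == MEM)
    #define CONST_INT_P(X) (GET_CODE (X) == CONST_INT)
    #define LABEL_P(X)     (GET_CODE (X) == CODE_LABEL)
    #define JUMP_P(X)      (GET_CODE (X) == JUMP_INSN)
    #define CALL_P(X)      (GET_CODE (X) == CALL_INSN)

    /* The recurring insn-condition rewrite is a pure equivalence:
         GET_CODE (a) != MEM || GET_CODE (b) != MEM
       is the same predicate as
         !(MEM_P (a) && MEM_P (b))
       i.e. "at most one operand may be a memory reference".  */

The predicate form is shorter and keeps the representation check in one place, which is the whole point of the cleanup.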
diff --git a/gcc/config/i386/mmx.md b/gcc/config/i386/mmx.md
index 4f0ab2ca3ee..4c5ebbca4d1 100644
--- a/gcc/config/i386/mmx.md
+++ b/gcc/config/i386/mmx.md
@@ -68,7 +68,7 @@
(match_operand:MMXMODEI 1 "vector_move_operand"
"Cr ,m,C ,*ym,*y,Y ,*y,C,xm,x,x,r"))]
"TARGET_64BIT && TARGET_MMX
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
movq\t{%1, %0|%0, %1}
movq\t{%1, %0|%0, %1}
@@ -92,7 +92,7 @@
(match_operand:MMXMODEI 1 "vector_move_operand"
"C ,*ym,*y,*Y,*y,C ,*Ym,*Y,C ,*x,m ,*x,irm,r"))]
"TARGET_MMX
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
pxor\t%0, %0
movq\t{%1, %0|%0, %1}
@@ -127,7 +127,7 @@
(match_operand:V2SF 1 "vector_move_operand"
"Cr ,m ,C ,*ym,*y,Y ,*y,C,x,m,x,x,r"))]
"TARGET_64BIT && TARGET_MMX
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
movq\t{%1, %0|%0, %1}
movq\t{%1, %0|%0, %1}
@@ -152,7 +152,7 @@
(match_operand:V2SF 1 "vector_move_operand"
"C ,*ym,*y,*Y,*y,C ,*x,m ,*x,irm,r"))]
"TARGET_MMX
- && (GET_CODE (operands[0]) != MEM || GET_CODE (operands[1]) != MEM)"
+ && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
"@
pxor\t%0, %0
movq\t{%1, %0|%0, %1}
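
The mmx.md changes touch only the insn conditions of the vector move patterns; the guard exists because no x86 move instruction takes two memory operands, so one side must stay in a register. A hypothetical helper (names illustrative, not part of this patch; assumes GCC's rtl.h) expressing the same check in C:

    /* Hypothetical helper; the pattern conditions above encode exactly this.  */
    static bool
    mov_operands_ok (rtx dest, rtx src)
    {
      /* Reject the one form the hardware cannot encode: mem-to-mem moves.  */
      return !(MEM_P (dest) && MEM_P (src));
    }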
diff --git a/gcc/config/i386/predicates.md b/gcc/config/i386/predicates.md
index 52011918100..3f48234bdaf 100644
--- a/gcc/config/i386/predicates.md
+++ b/gcc/config/i386/predicates.md
@@ -146,7 +146,7 @@
if (ix86_cmodel == CM_LARGE)
return 0;
- if (GET_CODE (op2) != CONST_INT)
+ if (!CONST_INT_P (op2))
return 0;
offset = trunc_int_for_mode (INTVAL (op2), DImode);
switch (GET_CODE (op1))
@@ -266,7 +266,7 @@
if ((ix86_cmodel == CM_SMALL
|| (ix86_cmodel == CM_MEDIUM
&& !SYMBOL_REF_FAR_ADDR_P (op1)))
- && GET_CODE (op2) == CONST_INT
+ && CONST_INT_P (op2)
&& trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
&& trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
return 1;
@@ -280,7 +280,7 @@
/* These conditions are similar to SYMBOL_REF ones, just the
constraints for code models differ. */
if ((ix86_cmodel == CM_SMALL || ix86_cmodel == CM_MEDIUM)
- && GET_CODE (op2) == CONST_INT
+ && CONST_INT_P (op2)
&& trunc_int_for_mode (INTVAL (op2), DImode) > -0x10000
&& trunc_int_for_mode (INTVAL (op2), SImode) == INTVAL (op2))
return 1;
@@ -340,7 +340,7 @@
if (TARGET_64BIT && GET_CODE (op) == CONST)
{
op = XEXP (op, 0);
- if (GET_CODE (op) == PLUS && GET_CODE (XEXP (op, 1)) == CONST_INT)
+ if (GET_CODE (op) == PLUS && CONST_INT_P (XEXP (op, 1)))
op = XEXP (op, 0);
if (GET_CODE (op) == UNSPEC
&& (XINT (op, 1) == UNSPEC_GOTOFF
@@ -380,7 +380,7 @@
|| XINT (op, 1) == UNSPEC_GOTPCREL)))
return 1;
if (GET_CODE (op) != PLUS
- || GET_CODE (XEXP (op, 1)) != CONST_INT)
+ || !CONST_INT_P (XEXP (op, 1)))
return 0;
op = XEXP (op, 0);
@@ -423,7 +423,7 @@
if (GET_CODE (op) == UNSPEC)
return 1;
if (GET_CODE (op) != PLUS
- || GET_CODE (XEXP (op, 1)) != CONST_INT)
+ || !CONST_INT_P (XEXP (op, 1)))
return 0;
op = XEXP (op, 0);
if (GET_CODE (op) == UNSPEC)
@@ -438,7 +438,7 @@
{
if (GET_CODE (op) == CONST
&& GET_CODE (XEXP (op, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (XEXP (op, 0), 1)))
op = XEXP (XEXP (op, 0), 0);
if (GET_CODE (op) == LABEL_REF)
@@ -784,7 +784,7 @@
}
if (parts.disp)
{
- if (GET_CODE (parts.disp) != CONST_INT
+ if (!CONST_INT_P (parts.disp)
|| (INTVAL (parts.disp) & 3) != 0)
return 0;
}
@@ -911,7 +911,7 @@
enum machine_mode inmode = GET_MODE (XEXP (op, 0));
enum rtx_code code = GET_CODE (op);
- if (GET_CODE (XEXP (op, 0)) != REG
+ if (!REG_P (XEXP (op, 0))
|| REGNO (XEXP (op, 0)) != FLAGS_REG
|| XEXP (op, 1) != const0_rtx)
return 0;