Diffstat (limited to 'gcc/config/sh')
-rw-r--r--  gcc/config/sh/sh-protos.h        |  2
-rw-r--r--  gcc/config/sh/sh.c               |  4
-rw-r--r--  gcc/config/sh/sh.md              |  3
-rw-r--r--  gcc/config/sh/sh_treg_combine.cc | 12
4 files changed, 11 insertions, 10 deletions
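
These hunks are part of GCC's broader conversion from plain rtx to the stronger rtx_insn * type for values that are known to be instructions: sh_loop_align, the sh.md split's insn2 temporary, and sh_treg_combine's make_inv_ccreg_insn all deal with emitted insns, so they can carry the more precise pointer type. The sketch below is a minimal, self-contained illustration of why such a narrowing is usually painless; rtx_def_mock and rtx_insn_mock are hypothetical stand-ins, not GCC's real rtl.h definitions.

// Stand-in types for illustration only -- not GCC's real rtx_def / rtx_insn.
#include <iostream>

struct rtx_def_mock { int code; };          // plays the role of rtx_def
typedef rtx_def_mock *rtx;                  // rtx is a pointer to the base type
struct rtx_insn_mock : rtx_def_mock { };    // plays the role of rtx_insn

// Old-style signature: any rtx is accepted, including non-insn expressions.
int loop_align_old (rtx label) { return label ? 4 : 0; }

// New-style signature: only instruction pointers are accepted.
int loop_align_new (rtx_insn_mock *label) { return label ? 4 : 0; }

int main ()
{
  rtx_insn_mock insn;
  std::cout << loop_align_old (&insn) << '\n';  // derived-to-base: still compiles
  std::cout << loop_align_new (&insn) << '\n';  // exact match: compiles
  // rtx_def_mock expr;
  // loop_align_new (&expr);   // would no longer compile -- the intended effect
  return 0;
}

Callers that already hold an instruction pointer keep working unchanged, while code that passed an arbitrary expression is flagged at compile time rather than failing later.
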
diff --git a/gcc/config/sh/sh-protos.h b/gcc/config/sh/sh-protos.h
index 1d34651ec11..de2ff90b3b8 100644
--- a/gcc/config/sh/sh-protos.h
+++ b/gcc/config/sh/sh-protos.h
@@ -104,7 +104,7 @@ extern const char *output_far_jump (rtx_insn *, rtx);
 extern rtx sfunc_uses_reg (rtx);
 extern int barrier_align (rtx_insn *);
-extern int sh_loop_align (rtx);
+extern int sh_loop_align (rtx_insn *);
 extern bool fp_zero_operand (rtx);
 extern bool fp_one_operand (rtx);
 extern rtx get_fpscr_rtx (void);
diff --git a/gcc/config/sh/sh.c b/gcc/config/sh/sh.c
index 3b4acb97daf..dba107ecf35 100644
--- a/gcc/config/sh/sh.c
+++ b/gcc/config/sh/sh.c
@@ -5965,9 +5965,9 @@ barrier_align (rtx_insn *barrier_or_label)
    Applying loop alignment to small constant or switch tables is a waste
    of space, so we suppress this too. */
 int
-sh_loop_align (rtx label)
+sh_loop_align (rtx_insn *label)
 {
-  rtx next = label;
+  rtx_insn *next = label;
 
   if (! optimize || optimize_size)
     return 0;
diff --git a/gcc/config/sh/sh.md b/gcc/config/sh/sh.md
index a7155308377..265592623ba 100644
--- a/gcc/config/sh/sh.md
+++ b/gcc/config/sh/sh.md
@@ -1566,7 +1566,8 @@
   [(set (match_dup 0) (match_dup 3))
    (set (match_dup 4) (match_dup 5))]
 {
-  rtx set1, set2, insn2;
+  rtx set1, set2;
+  rtx_insn *insn2;
   rtx replacements[4];
 
   /* We want to replace occurrences of operands[0] with operands[1] and
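
In this define_split's preparation statements insn2 is now declared separately as rtx_insn *, while set1 and set2 stay plain rtx. The remainder of the C++ block is not shown on this page, so the exact use is an assumption, but the usual reason is that insn2 receives the result of an emit call, and emit_insn in current GCC returns an rtx_insn *. A rough, hedged sketch of that pattern with mock stand-ins (emit_insn_mock and the *_mock types are hypothetical):

// Mock types for illustration only.
struct rtx_def_mock { };
typedef rtx_def_mock *rtx;
struct rtx_insn_mock : rtx_def_mock { };

// Stand-in for an emit-style helper that, like GCC's emit_insn, hands back
// the derived instruction pointer rather than a plain rtx.
static rtx_insn_mock *emit_insn_mock (rtx) { static rtx_insn_mock i; return &i; }

int main ()
{
  rtx set1 = nullptr, set2 = nullptr;            // plain expression rtxes
  rtx_insn_mock *insn2 = emit_insn_mock (set1);  // keeps the precise static type;
                                                 // storing it in a plain rtx would
                                                 // silently discard that information
  (void) set2;
  return insn2 != nullptr ? 0 : 1;
}
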
diff --git a/gcc/config/sh/sh_treg_combine.cc b/gcc/config/sh/sh_treg_combine.cc
index 1c0778978fb..5ad7013a429 100644
--- a/gcc/config/sh/sh_treg_combine.cc
+++ b/gcc/config/sh/sh_treg_combine.cc
@@ -563,7 +563,7 @@ private:
   rtx make_not_reg_insn (rtx dst_reg, rtx src_reg) const;
 
   // Create an insn rtx that inverts the ccreg.
-  rtx make_inv_ccreg_insn (void) const;
+  rtx_insn *make_inv_ccreg_insn (void) const;
 
   // Adds the specified insn to the set of modified or newly added insns that
   // might need splitting at the end of the pass.
@@ -899,13 +899,13 @@ sh_treg_combine::make_not_reg_insn (rtx dst_reg, rtx src_reg) const
   return i;
 }
 
-rtx
+rtx_insn *
 sh_treg_combine::make_inv_ccreg_insn (void) const
 {
   start_sequence ();
-  rtx i = emit_insn (gen_rtx_SET (VOIDmode, m_ccreg,
-                                  gen_rtx_fmt_ee (XOR, GET_MODE (m_ccreg),
-                                                  m_ccreg, const1_rtx)));
+  rtx_insn *i = emit_insn (gen_rtx_SET (VOIDmode, m_ccreg,
+                                        gen_rtx_fmt_ee (XOR, GET_MODE (m_ccreg),
+                                                        m_ccreg, const1_rtx)));
   end_sequence ();
   return i;
 }
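
make_inv_ccreg_insn builds the ccreg-inverting insn inside a start_sequence ()/end_sequence () pair; since emit_insn returns an rtx_insn *, the helper can now hand that type straight back, and the caller in try_eliminate_cstores (next hunk) stores it in an rtx_insn * before passing it to recog_memoized. The following is only a rough mock of that emit-into-a-sequence idiom, with hypothetical *_mock names standing in for the real emit-rtl routines:

#include <vector>

// Hypothetical stand-ins -- the real start_sequence/end_sequence/emit_insn
// live in GCC's emit-rtl.c and are only imitated here.
struct rtx_def_mock { };
typedef rtx_def_mock *rtx;
struct rtx_insn_mock : rtx_def_mock { };

static std::vector<rtx_insn_mock *> g_sequence;      // pretend insn sequence

static void start_sequence_mock () { g_sequence.clear (); }
static rtx_insn_mock *emit_insn_mock (rtx)           // returns the derived type
{
  static rtx_insn_mock insn;
  g_sequence.push_back (&insn);
  return &insn;
}
static void end_sequence_mock () { /* would pop back to the previous sequence */ }

// Shape of the updated helper: emit into a temporary sequence and return the
// emitted insn with its precise type, so callers need no cast before passing
// it to insn-only interfaces.
static rtx_insn_mock *make_inv_ccreg_insn_mock (rtx ccreg)
{
  start_sequence_mock ();
  rtx_insn_mock *i = emit_insn_mock (ccreg);
  end_sequence_mock ();
  return i;
}

int main ()
{
  rtx_def_mock ccreg;
  return make_inv_ccreg_insn_mock (&ccreg) != nullptr ? 0 : 1;
}
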
@@ -1222,7 +1222,7 @@ sh_treg_combine::try_eliminate_cstores (cbranch_trace& trace,
   // invert the ccreg as a replacement for one of them.
   if (cstore_count != 0 && inv_cstore_count != 0)
     {
-      rtx i = make_inv_ccreg_insn ();
+      rtx_insn *i = make_inv_ccreg_insn ();
       if (recog_memoized (i) < 0)
         {
           log_msg ("failed to match ccreg inversion insn:\n");