author     bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>  2008-09-02 21:27:57 +0000
committer  bstarynk <bstarynk@138bc75d-0d04-0410-961f-82ee72b054a4>  2008-09-02 21:27:57 +0000
commit     d93d495ba54e9746659d91d69119157f038a815f (patch)
tree       bfc67c88ff54c4880beb663c32eedf5f95f15123 /gcc/cse.c
parent     e827aa43033edbfc6bac3fa2ff04421737b78421 (diff)
2008-09-02 Basile Starynkevitch <basile@starynkevitch.net>
MELT branch merged with trunk r139912 after the graphite merge into trunk;
graphite uses PPL & CLOOG...

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/melt-branch@139915 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/cse.c')
-rw-r--r--  gcc/cse.c | 96
1 file changed, 64 insertions(+), 32 deletions(-)
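
The diff below generalizes cse.c's hash_rtx into hash_rtx_cb, which takes an optional
hash_rtx_callback_function.  When the callback is non-NULL and returns true, hashing
continues with the replacement rtx and mode it supplied; hash_rtx itself becomes a thin
wrapper that passes a NULL callback, and the writes to *do_not_record_p are now guarded
so callers may pass NULL for that flag as well.  A minimal sketch of the same pattern on
a toy expression type follows; every name in it (struct expr, expr_hash_cb, hash_expr_cb,
hash_expr, canon_volatile) is hypothetical and stands in for GCC's rtx machinery rather
than reproducing it.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

enum expr_kind { EXPR_CONST, EXPR_VOLATILE_REG, EXPR_PLUS };

struct expr
{
  enum expr_kind kind;
  int value;                    /* EXPR_CONST / EXPR_VOLATILE_REG */
  const struct expr *op0, *op1; /* EXPR_PLUS */
};

/* Returns true when it stored a replacement node in *NEWX,
   mirroring hash_rtx_callback_function.  */
typedef bool (*expr_hash_cb) (const struct expr *x,
                              const struct expr **newx);

static unsigned
hash_expr_cb (const struct expr *x, int *do_not_record_p, expr_hash_cb cb)
{
  const struct expr *newx;
  unsigned hash = 0;

  if (x == NULL)
    return hash;

  /* Invoke the callback first; continue with the substituted node.  */
  if (cb != NULL && cb (x, &newx))
    return hash_expr_cb (newx, do_not_record_p, cb);

  switch (x->kind)
    {
    case EXPR_CONST:
      return (unsigned) EXPR_CONST + (unsigned) x->value;

    case EXPR_VOLATILE_REG:
      /* Only honour the "do not record" protocol if the caller asked
         for it; otherwise fold the node into the hash anyway.  */
      if (do_not_record_p)
        {
          *do_not_record_p = 1;
          return 0;
        }
      return (unsigned) EXPR_VOLATILE_REG + (unsigned) x->value;

    case EXPR_PLUS:
      hash += (unsigned) EXPR_PLUS;
      hash += hash_expr_cb (x->op0, do_not_record_p, cb);
      hash += hash_expr_cb (x->op1, do_not_record_p, cb);
      return hash;
    }
  return hash;
}

/* The old entry point becomes a thin wrapper passing a NULL callback,
   just as hash_rtx now forwards to hash_rtx_cb.  */
static unsigned
hash_expr (const struct expr *x, int *do_not_record_p)
{
  return hash_expr_cb (x, do_not_record_p, NULL);
}

/* Example callback: hash every volatile register as the constant 0.  */
static bool
canon_volatile (const struct expr *x, const struct expr **newx)
{
  static const struct expr zero = { EXPR_CONST, 0, NULL, NULL };
  if (x->kind != EXPR_VOLATILE_REG)
    return false;
  *newx = &zero;
  return true;
}

int
main (void)
{
  struct expr reg = { EXPR_VOLATILE_REG, 7, NULL, NULL };
  struct expr cst = { EXPR_CONST, 3, NULL, NULL };
  struct expr sum = { EXPR_PLUS, 0, &reg, &cst };
  int do_not_record = 0;

  printf ("plain:    %u (do_not_record = %d)\n",
          hash_expr (&sum, &do_not_record), do_not_record);
  printf ("callback: %u\n", hash_expr_cb (&sum, NULL, canon_volatile));
  return 0;
}

In the sketch, canon_volatile lets two syntactically different nodes hash identically
without the generic walker knowing why, which mirrors how a client of hash_rtx_cb can
substitute an equivalent rtx before it is hashed, while hash_expr keeps the old behaviour
by forwarding a NULL callback.  The actual change follows.
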
diff --git a/gcc/cse.c b/gcc/cse.c
index d586c6c26df..c1effee8f03 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -574,7 +574,7 @@ static rtx use_related_value (rtx, struct table_elt *);
static inline unsigned canon_hash (rtx, enum machine_mode);
static inline unsigned safe_hash (rtx, enum machine_mode);
-static unsigned hash_rtx_string (const char *);
+static inline unsigned hash_rtx_string (const char *);
static rtx canon_reg (rtx, rtx);
static enum rtx_code find_comparison_args (enum rtx_code, rtx *, rtx *,
@@ -2044,6 +2044,7 @@ use_related_value (rtx x, struct table_elt *elt)
return plus_constant (q->exp, offset);
}
+
/* Hash a string. Just add its bytes up. */
static inline unsigned
hash_rtx_string (const char *ps)
@@ -2058,27 +2059,20 @@ hash_rtx_string (const char *ps)
return hash;
}
-/* Hash an rtx. We are careful to make sure the value is never negative.
- Equivalent registers hash identically.
- MODE is used in hashing for CONST_INTs only;
- otherwise the mode of X is used.
-
- Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
-
- If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
- a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
-
- Note that cse_insn knows that the hash code of a MEM expression
- is just (int) MEM plus the hash code of the address. */
+/* Same as hash_rtx, but call CB on each rtx if it is not NULL.
+ When the callback returns true, we continue with the new rtx. */
unsigned
-hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
- int *hash_arg_in_memory_p, bool have_reg_qty)
+hash_rtx_cb (const_rtx x, enum machine_mode mode,
+ int *do_not_record_p, int *hash_arg_in_memory_p,
+ bool have_reg_qty, hash_rtx_callback_function cb)
{
int i, j;
unsigned hash = 0;
enum rtx_code code;
const char *fmt;
+ enum machine_mode newmode;
+ rtx newx;
/* Used to turn recursion into iteration. We can't rely on GCC's
tail-recursion elimination since we need to keep accumulating values
@@ -2087,6 +2081,15 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
if (x == 0)
return hash;
+ /* Invoke the callback first. */
+ if (cb != NULL
+ && ((*cb) (x, mode, &newx, &newmode)))
+ {
+ hash += hash_rtx_cb (newx, newmode, do_not_record_p,
+ hash_arg_in_memory_p, have_reg_qty, cb);
+ return hash;
+ }
+
code = GET_CODE (x);
switch (code)
{
@@ -2094,7 +2097,7 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
{
unsigned int regno = REGNO (x);
- if (!reload_completed)
+ if (do_not_record_p && !reload_completed)
{
/* On some machines, we can't record any non-fixed hard register,
because extending its life will cause reload problems. We
@@ -2188,8 +2191,9 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
for (i = 0; i < units; ++i)
{
elt = CONST_VECTOR_ELT (x, i);
- hash += hash_rtx (elt, GET_MODE (elt), do_not_record_p,
- hash_arg_in_memory_p, have_reg_qty);
+ hash += hash_rtx_cb (elt, GET_MODE (elt),
+ do_not_record_p, hash_arg_in_memory_p,
+ have_reg_qty, cb);
}
return hash;
@@ -2223,7 +2227,7 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
case MEM:
/* We don't record if marked volatile or if BLKmode since we don't
know the size of the move. */
- if (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode)
+ if (do_not_record_p && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
{
*do_not_record_p = 1;
return 0;
@@ -2270,11 +2274,16 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
case CC0:
case CALL:
case UNSPEC_VOLATILE:
- *do_not_record_p = 1;
- return 0;
+ if (do_not_record_p) {
+ *do_not_record_p = 1;
+ return 0;
+ }
+ else
+ return hash;
+ break;
case ASM_OPERANDS:
- if (MEM_VOLATILE_P (x))
+ if (do_not_record_p && MEM_VOLATILE_P (x))
{
*do_not_record_p = 1;
return 0;
@@ -2291,12 +2300,12 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
{
for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
{
- hash += (hash_rtx (ASM_OPERANDS_INPUT (x, i),
- GET_MODE (ASM_OPERANDS_INPUT (x, i)),
- do_not_record_p, hash_arg_in_memory_p,
- have_reg_qty)
+ hash += (hash_rtx_cb (ASM_OPERANDS_INPUT (x, i),
+ GET_MODE (ASM_OPERANDS_INPUT (x, i)),
+ do_not_record_p, hash_arg_in_memory_p,
+ have_reg_qty, cb)
+ hash_rtx_string
- (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
+ (ASM_OPERANDS_INPUT_CONSTRAINT (x, i)));
}
hash += hash_rtx_string (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
@@ -2329,15 +2338,17 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
x = XEXP (x, i);
goto repeat;
}
-
- hash += hash_rtx (XEXP (x, i), 0, do_not_record_p,
- hash_arg_in_memory_p, have_reg_qty);
+
+ hash += hash_rtx_cb (XEXP (x, i), 0, do_not_record_p,
+ hash_arg_in_memory_p,
+ have_reg_qty, cb);
break;
case 'E':
for (j = 0; j < XVECLEN (x, i); j++)
- hash += hash_rtx (XVECEXP (x, i, j), 0, do_not_record_p,
- hash_arg_in_memory_p, have_reg_qty);
+ hash += hash_rtx_cb (XVECEXP (x, i, j), 0, do_not_record_p,
+ hash_arg_in_memory_p,
+ have_reg_qty, cb);
break;
case 's':
@@ -2360,6 +2371,27 @@ hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
return hash;
}
+/* Hash an rtx. We are careful to make sure the value is never negative.
+ Equivalent registers hash identically.
+ MODE is used in hashing for CONST_INTs only;
+ otherwise the mode of X is used.
+
+ Store 1 in DO_NOT_RECORD_P if any subexpression is volatile.
+
+ If HASH_ARG_IN_MEMORY_P is not NULL, store 1 in it if X contains
+ a MEM rtx which does not have the RTX_UNCHANGING_P bit set.
+
+ Note that cse_insn knows that the hash code of a MEM expression
+ is just (int) MEM plus the hash code of the address. */
+
+unsigned
+hash_rtx (const_rtx x, enum machine_mode mode, int *do_not_record_p,
+ int *hash_arg_in_memory_p, bool have_reg_qty)
+{
+ return hash_rtx_cb (x, mode, do_not_record_p,
+ hash_arg_in_memory_p, have_reg_qty, NULL);
+}
+
/* Hash an rtx X for cse via hash_rtx.
Stores 1 in do_not_record if any subexpression is volatile.
Stores 1 in hash_arg_in_memory if X contains a mem rtx which