author     mmitchel <mmitchel@138bc75d-0d04-0410-961f-82ee72b054a4>  2001-05-11 15:54:19 +0000
committer  mmitchel <mmitchel@138bc75d-0d04-0410-961f-82ee72b054a4>  2001-05-11 15:54:19 +0000
commit     85bd95435556b512eaeb6ec4abd483a938a46b6a (patch)
tree       7e8f2fa2de2821974241456f1c9e7c9a00401659 /gcc
parent     c198dd7476f15878680535871c0543d50dff04b7 (diff)
download   gcc-85bd95435556b512eaeb6ec4abd483a938a46b6a.tar.gz
* loop.h (struct movables): Remove `num'.
* loop.c (scan_loop): Don't set it.  Use count_insns_in_loop.
Adjust call to strength_reduce.
(num_unmoved_movables): New function.
(move_movables): Don't set movables->num.
(strength_reduce): Compute the number of instructions in the loop
here, rather than in the caller.
(check_dbra_loop): Always clear reversible_mem_store if there
are any memory sets.
(loop_regs_scan): Don't count instructions here.
(count_insn_in_loop): New function.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@41966 138bc75d-0d04-0410-961f-82ee72b054a4
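The core of the interface change is that the insn count is no longer threaded through loop_regs_scan as an out-parameter; instead, callers ask a dedicated counting helper for it wherever it is actually needed (scan_loop and strength_reduce in the patch below). A minimal, self-contained sketch of that pattern follows; the struct layouts and names here are illustrative stand-ins, not the real GCC internals:

#include <stddef.h>
#include <stdio.h>

/* Illustrative stand-ins for GCC's insn chain and loop descriptor.  */
struct insn { int is_real; struct insn *next; };
struct loop { struct insn *start; struct insn *end; };

/* Analogue of the new count_insns_in_loop: walk the loop body and
   count only the real insns, instead of having the register-scanning
   pass report the count through an out-parameter.  */
static int
count_insns (const struct loop *loop)
{
  int count = 0;
  const struct insn *insn;

  for (insn = loop->start; insn != loop->end; insn = insn->next)
    if (insn->is_real)
      ++count;

  return count;
}

int
main (void)
{
  struct insn note = { 0, NULL };       /* not a real insn */
  struct insn add  = { 1, &note };
  struct insn load = { 1, &add };
  struct loop loop = { &load, NULL };

  printf ("insns in loop: %d\n", count_insns (&loop));  /* prints 2 */
  return 0;
}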
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog  14
-rw-r--r--  gcc/loop.c     82
-rw-r--r--  gcc/loop.h      2
3 files changed, 66 insertions(+), 32 deletions(-)
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index af101ea6897..9387b95bc73 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,19 @@
2001-05-11 Mark Mitchell <mark@codesourcery.com>
+ * loop.h (struct movables): Remove `num'.
+ * loop.c (scan_loop): Don't set it. Use count_insns_in_loop.
+ Adjust call to strength_reduce.
+ (num_unmoved_movables): New function.
+ (move_movables): Don't set movables->num.
+ (strength_reduce): Compute the number of instructions in the loop
+ here, rather than in the caller.
+ (check_dbra_loop): Always clear reversible_mem_store if there
+ are any memory sets.
+ (loop_regs_scan): Don't count instructions here.
+ (count_insn_in_loop): New function.
+
+2001-05-11 Mark Mitchell <mark@codesourcery.com>
+
* optabs.c (emit_libcall_block): Don't mark calls as CONST_CALL_P.
2001-05-11 Neil Booth <neil@daikokuya.demon.co.uk>
diff --git a/gcc/loop.c b/gcc/loop.c
index fb045123356..4a41c2d32d1 100644
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -165,6 +165,7 @@ static void ignore_some_movables PARAMS ((struct loop_movables *));
static void force_movables PARAMS ((struct loop_movables *));
static void combine_movables PARAMS ((struct loop_movables *,
struct loop_regs *));
+static int num_unmoved_movables PARAMS ((const struct loop *));
static int regs_match_p PARAMS ((rtx, rtx, struct loop_movables *));
static int rtx_equal_for_loop_p PARAMS ((rtx, rtx, struct loop_movables *,
struct loop_regs *));
@@ -189,7 +190,7 @@ static void loop_givs_reduce PARAMS((struct loop *, struct iv_class *));
static void loop_givs_rescan PARAMS((struct loop *, struct iv_class *,
rtx *));
static void loop_ivs_free PARAMS((struct loop *));
-static void strength_reduce PARAMS ((struct loop *, int, int));
+static void strength_reduce PARAMS ((struct loop *, int));
static void find_single_use_in_loop PARAMS ((struct loop_regs *, rtx, rtx));
static int valid_initial_value_p PARAMS ((rtx, rtx, int, rtx));
static void find_mem_givs PARAMS ((const struct loop *, rtx, rtx, int, int));
@@ -232,7 +233,8 @@ static int last_use_this_basic_block PARAMS ((rtx, rtx));
static void record_initial PARAMS ((rtx, rtx, void *));
static void update_reg_last_use PARAMS ((rtx, rtx));
static rtx next_insn_in_loop PARAMS ((const struct loop *, rtx));
-static void loop_regs_scan PARAMS ((const struct loop*, int, int *));
+static void loop_regs_scan PARAMS ((const struct loop *, int));
+static int count_insns_in_loop PARAMS ((const struct loop *));
static void load_mems PARAMS ((const struct loop *));
static int insert_loop_mem PARAMS ((rtx *, void *));
static int replace_loop_mem PARAMS ((rtx *, void *));
@@ -550,7 +552,6 @@ scan_loop (loop, flags)
movables->head = 0;
movables->last = 0;
- movables->num = 0;
/* Determine whether this loop starts with a jump down to a test at
the end. This will occur for a small number of loops with a test
@@ -637,7 +638,8 @@ scan_loop (loop, flags)
/* Allocate extra space for REGs that might be created by load_mems.
We allocate a little extra slop as well, in the hopes that we
won't have to reallocate the regs array. */
- loop_regs_scan (loop, loop_info->mems_idx + 16, &insn_count);
+ loop_regs_scan (loop, loop_info->mems_idx + 16);
+ insn_count = count_insns_in_loop (loop);
if (loop_dump_stream)
{
@@ -1006,7 +1008,7 @@ scan_loop (loop, flags)
/* Recalculate regs->array if load_mems has created new registers. */
if (max_reg_num () > regs->num)
- loop_regs_scan (loop, 0, &insn_count);
+ loop_regs_scan (loop, 0);
for (update_start = loop_start;
PREV_INSN (update_start)
@@ -1024,7 +1026,7 @@ scan_loop (loop, flags)
/* Ensure our label doesn't go away. */
LABEL_NUSES (update_end)++;
- strength_reduce (loop, insn_count, flags);
+ strength_reduce (loop, flags);
reg_scan_update (update_start, update_end, loop_max_reg);
loop_max_reg = max_reg_num ();
@@ -1421,6 +1423,24 @@ combine_movables (movables, regs)
/* Clean up. */
free (matched_regs);
}
+
+/* Returns the number of movable instructions in LOOP that were not
+ moved outside the loop. */
+
+static int
+num_unmoved_movables (loop)
+ const struct loop *loop;
+{
+ int num = 0;
+ struct movable *m;
+
+ for (m = LOOP_MOVABLES (loop)->head; m; m = m->next)
+ if (!m->done)
+ ++num;
+
+ return num;
+}
+
/* Return 1 if regs X and Y will become the same if moved. */
@@ -1634,8 +1654,6 @@ move_movables (loop, movables, threshold, insn_count)
rtx *reg_map = (rtx *) xcalloc (nregs, sizeof (rtx));
char *already_moved = (char *) xcalloc (nregs, sizeof (char));
- movables->num = 0;
-
for (m = movables->head; m; m = m->next)
{
/* Describe this movable insn. */
@@ -1664,9 +1682,6 @@ move_movables (loop, movables, threshold, insn_count)
INSN_UID (m->forces->insn));
}
- /* Count movables. Value used in heuristics in strength_reduce. */
- movables->num++;
-
/* Ignore the insn if it's already done (it matched something else).
Otherwise, see if it is now safe to move. */
@@ -4200,9 +4215,8 @@ loop_ivs_free (loop)
must check regnos to make sure they are in bounds. */
static void
-strength_reduce (loop, insn_count, flags)
+strength_reduce (loop, flags)
struct loop *loop;
- int insn_count;
int flags;
{
struct loop_info *loop_info = LOOP_INFO (loop);
@@ -4222,6 +4236,7 @@ strength_reduce (loop, insn_count, flags)
int reg_map_size;
int unrolled_insn_copies = 0;
rtx test_reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
+ int insn_count = count_insns_in_loop (loop);
addr_placeholder = gen_reg_rtx (Pmode);
@@ -7325,12 +7340,11 @@ check_dbra_loop (loop, insn_count)
{
struct induction *v;
- reversible_mem_store
- = (! loop_info->unknown_address_altered
- && ! loop_info->unknown_constant_address_altered
- && ! loop_invariant_p (loop,
- XEXP (XEXP (loop_info->store_mems, 0),
- 0)));
+ /* If we could prove that each of the memory locations
+ written to was different, then we could reverse the
+ store -- but we don't presently have any way of
+ knowing that. */
+ reversible_mem_store = 0;
/* If the store depends on a register that is set after the
store, it depends on the initial value, and is thus not
@@ -7362,7 +7376,7 @@ check_dbra_loop (loop, insn_count)
&& ! loop_info->has_volatile
&& reversible_mem_store
&& (bl->giv_count + bl->biv_count + loop_info->num_mem_sets
- + LOOP_MOVABLES (loop)->num + compare_and_branch == insn_count)
+ + num_unmoved_movables (loop) + compare_and_branch == insn_count)
&& (bl == ivs->list && bl->next == 0))
|| no_use_except_counting)
{
@@ -8702,16 +8716,12 @@ insert_loop_mem (mem, data)
parameter may be zero, in which case this processing is not done.
Set REGS->ARRAY[I].MAY_NOT_OPTIMIZE nonzero if we should not
- optimize register I.
-
- Store in *COUNT_PTR the number of actual instructions
- in the loop. We use this to decide what is worth moving out. */
+ optimize register I. */
static void
-loop_regs_scan (loop, extra_size, count_ptr)
+loop_regs_scan (loop, extra_size)
const struct loop *loop;
int extra_size;
- int *count_ptr;
{
struct loop_regs *regs = LOOP_REGS (loop);
int old_nregs;
@@ -8719,7 +8729,6 @@ loop_regs_scan (loop, extra_size, count_ptr)
basic block. In that case, it is the insn that last set reg n. */
rtx *last_set;
rtx insn;
- int count = 0;
int i;
old_nregs = regs->num;
@@ -8754,8 +8763,6 @@ loop_regs_scan (loop, extra_size, count_ptr)
{
if (INSN_P (insn))
{
- ++count;
-
/* Record registers that have exactly one use. */
find_single_use_in_loop (regs, insn, PATTERN (insn));
@@ -8798,9 +8805,24 @@ loop_regs_scan (loop, extra_size, count_ptr)
regs->array[i].n_times_set = regs->array[i].set_in_loop;
free (last_set);
- *count_ptr = count;
}
+/* Returns the number of real INSNs in the LOOP. */
+
+static int
+count_insns_in_loop (loop)
+ const struct loop *loop;
+{
+ int count = 0;
+ rtx insn;
+
+ for (insn = loop->top ? loop->top : loop->start; insn != loop->end;
+ insn = NEXT_INSN (insn))
+ if (INSN_P (insn))
+ ++count;
+
+ return count;
+}
/* Move MEMs into registers for the duration of the loop. */
diff --git a/gcc/loop.h b/gcc/loop.h
index cfec9934b20..1794730307d 100644
--- a/gcc/loop.h
+++ b/gcc/loop.h
@@ -289,8 +289,6 @@ struct loop_movables
struct movable *head;
/* Last movable in chain. */
struct movable *last;
- /* Number of movables in the loop. */
- int num;
};
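A note on the check_dbra_loop part of the change (always clearing reversible_mem_store when the loop contains memory sets): reversing a loop is only safe with respect to its stores if every stored location is provably distinct, which the pass has no way to establish. The following small, stand-alone illustration of the hazard (ordinary C, unrelated to GCC's own sources) shows that when each iteration stores to the same location, the final memory contents depend on the iteration order:

#include <stdio.h>

int
main (void)
{
  int a[4] = { 10, 20, 30, 40 };
  int cell;            /* one memory location, written on every iteration */
  int *p = &cell;
  int i;

  /* Original loop: counts up, so the last value stored is a[3].  */
  for (i = 0; i < 4; i++)
    *p = a[i];
  printf ("forward:  cell = %d\n", *p);   /* prints 40 */

  /* Reversed loop, as a decrement-and-branch transformation would
     produce: the last value stored is a[0], so the result differs.  */
  for (i = 3; i >= 0; i--)
    *p = a[i];
  printf ("reversed: cell = %d\n", *p);   /* prints 10 */

  return 0;
}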