summaryrefslogtreecommitdiff
path: root/gcc/predict.c
diff options
context:
space:
mode:
author Jan Hubicka <jh@suse.cz> 2001-12-13 12:34:11 +0100
committer Jan Hubicka <hubicka@gcc.gnu.org> 2001-12-13 11:34:11 +0000
commit0dd0e980b5c9b23a3749647c69603f8a29eea4c3 (patch)
tree370e6c425d4395a411fdf96a2629203ee94049f1 /gcc/predict.c
parent85230e5255cd8dc23a0d0440992ac24a119b32a5 (diff)
downloadgcc-0dd0e980b5c9b23a3749647c69603f8a29eea4c3.tar.gz
predict.c (estimate_probability): Reorganize opcode heuristics.
* predict.c (estimate_probability): Reorganize opcode heuristics. * predict.def (PRED_OPCODE_POSITIVE, PRED_OPCODE_NONEQUAL, PRED_FPOPCODE): New. * i386.c (override_options): Recognize various CPU variants and set SSE/MMX/3dNOW flags accordingly. * i386.h (MASK_MMX_SET, MASK_SSE_SET, MASK_SSE2_SET, MASK_3DNOW_SET, MASK_3DNOW_A_SET): New. (MASK_ACCUMULATE_OUTGOING_ARGS_SET): New. (MASK_NO_ACCUMULATE_OUTGOING_ARGS): Delete. (MASK_*): Renumber. (TARGET_FLAGS): Use new masks. (CPP_CPU_SPECS): Recognize new CPU variants. * invoke.texi (-mcpu): Update documentation. * flags.h (flag_prefetch_loop_arrays): Declare. * loop.h (LOOP_PREFETCH): Define new constant. * loop.c (strength_reduce): Call emit_prefetch_instructions. (MAX_PREFETCHES, PREFETCH_BLOCKS_BEFORE_LOOP_MAX, PREFETCH_BLOCKS_BEFORE_LOOP_MIN, PREFETCH_BLOCKS_IN_LOOP_MIN): New constants. (check_store_data): New structure. (check_store, emit_prefetch_instructions, rtx_equal_for_prefetch_p): New functions. * toplev.c: Include insn-flags.h. (flag_prefetch_loop_arrays): New global variable. (lang_independent_option): Add -fprefetch-loop-arrays. (rest_of_compilation) Pass LOOP_PREFETCH when flag_prefetch_loop_arrays is set. * Makefile.in (toplev.c): Depend on insn-flags.h. * invoke.texi (-fprefetch-loop-arrays): Document. * predict.c (estimate_probability): Distribute the loop exit probability according to number of exit edges. * cfgcleanup.c (insns_match_p): Break out from ...; (flow_find_cross_jump): ... here; (outgoing_edges_match): Add parameter MODE; attempt to match everything except for tablejumps. (try_crossjump_to_edge): Accept complex edges. (try_crossjump_bb): Likewise. From-SVN: r47969
Diffstat (limited to 'gcc/predict.c')
-rw-r--r--gcc/predict.c155
1 files changed, 86 insertions, 69 deletions
diff --git a/gcc/predict.c b/gcc/predict.c
index 516cfbcc1c5..92b956b6cc8 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -329,12 +329,17 @@ estimate_probability (loops_info)
for (i = 0; i < loops_info->num; i++)
{
int j;
+ int exits;
+ struct loop *loop = &loops_info->array[i];
- for (j = loops_info->array[i].first->index;
- j <= loops_info->array[i].last->index;
+ flow_loop_scan (loops_info, loop, LOOP_EXIT_EDGES);
+ exits = loop->num_exits;
+
+ for (j = loop->first->index;
+ j <= loop->last->index;
++j)
{
- if (TEST_BIT (loops_info->array[i].nodes, j))
+ if (TEST_BIT (loop->nodes, j))
{
int header_found = 0;
edge e;
@@ -342,8 +347,8 @@ estimate_probability (loops_info)
/* Loop branch heuristics - predict as taken an edge back to
a loop's head. */
for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
- if (e->dest == loops_info->array[i].header
- && e->src == loops_info->array[i].latch)
+ if (e->dest == loop->header
+ && e->src == loop->latch)
{
header_found = 1;
predict_edge_def (e, PRED_LOOP_BRANCH, TAKEN);
@@ -354,8 +359,11 @@ estimate_probability (loops_info)
if (!header_found)
for (e = BASIC_BLOCK(j)->succ; e; e = e->succ_next)
if (e->dest->index <= 0
- || !TEST_BIT (loops_info->array[i].nodes, e->dest->index))
- predict_edge_def (e, PRED_LOOP_EXIT, NOT_TAKEN);
+ || !TEST_BIT (loop->nodes, e->dest->index))
+ predict_edge (e, PRED_LOOP_EXIT,
+ (REG_BR_PROB_BASE
+ - predictor_info [(int)PRED_LOOP_EXIT].hitrate)
+ / exits);
}
}
}
@@ -435,74 +443,83 @@ estimate_probability (loops_info)
/* Try "pointer heuristic."
A comparison ptr == 0 is predicted as false.
Similarly, a comparison ptr1 == ptr2 is predicted as false. */
- switch (GET_CODE (cond))
- {
- case EQ:
- if (GET_CODE (XEXP (cond, 0)) == REG
- && REG_POINTER (XEXP (cond, 0))
- && (XEXP (cond, 1) == const0_rtx
- || (GET_CODE (XEXP (cond, 1)) == REG
- && REG_POINTER (XEXP (cond, 1)))))
-
+ if (GET_RTX_CLASS (GET_CODE (cond)) == '<'
+ && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
+ || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
+ switch (GET_CODE (cond))
+ {
+ case EQ:
predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
- break;
- case NE:
- if (GET_CODE (XEXP (cond, 0)) == REG
- && REG_POINTER (XEXP (cond, 0))
- && (XEXP (cond, 1) == const0_rtx
- || (GET_CODE (XEXP (cond, 1)) == REG
- && REG_POINTER (XEXP (cond, 1)))))
+ break;
+ case NE:
predict_insn_def (last_insn, PRED_POINTER, TAKEN);
- break;
-
- default:
- break;
- }
-
+ break;
+ default:
+ break;
+ }
+ else
/* Try "opcode heuristic."
EQ tests are usually false and NE tests are usually true. Also,
most quantities are positive, so we can make the appropriate guesses
about signed comparisons against zero. */
- switch (GET_CODE (cond))
- {
- case CONST_INT:
- /* Unconditional branch. */
- predict_insn_def (last_insn, PRED_UNCONDITIONAL,
- cond == const0_rtx ? NOT_TAKEN : TAKEN);
- break;
-
- case EQ:
- case UNEQ:
- predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
- break;
- case NE:
- case LTGT:
- predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
- break;
- case ORDERED:
- predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
- break;
- case UNORDERED:
- predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
- break;
- case LE:
- case LT:
- if (XEXP (cond, 1) == const0_rtx
- || (GET_CODE (XEXP (cond, 1)) == CONST_INT
- && INTVAL (XEXP (cond, 1)) == -1))
- predict_insn_def (last_insn, PRED_OPCODE, NOT_TAKEN);
- break;
- case GE:
- case GT:
- if (XEXP (cond, 1) == const0_rtx
- || (GET_CODE (XEXP (cond, 1)) == CONST_INT
- && INTVAL (XEXP (cond, 1)) == -1))
- predict_insn_def (last_insn, PRED_OPCODE, TAKEN);
- break;
-
- default:
- break;
- }
+ switch (GET_CODE (cond))
+ {
+ case CONST_INT:
+ /* Unconditional branch. */
+ predict_insn_def (last_insn, PRED_UNCONDITIONAL,
+ cond == const0_rtx ? NOT_TAKEN : TAKEN);
+ break;
+
+ case EQ:
+ case UNEQ:
+ /* Floating point comparisons appear to behave in a very
+ unpredictable way because of the special role of = tests in
+ FP code. */
+ if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
+ ;
+ /* Comparisons with 0 are often used for booleans and there is
+ nothing useful to predict about them. */
+ else if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 0) == const0_rtx)
+ ;
+ else
+ predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
+ break;
+ case NE:
+ case LTGT:
+ /* Floating point comparisons appear to behave in a very
+ unpredictable way because of the special role of = tests in
+ FP code. */
+ if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
+ ;
+ /* Comparisons with 0 are often used for booleans and there is
+ nothing useful to predict about them. */
+ else if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 0) == const0_rtx)
+ ;
+ else
+ predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
+ break;
+ case ORDERED:
+ predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
+ break;
+ case UNORDERED:
+ predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
+ break;
+ case LE:
+ case LT:
+ if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
+ || XEXP (cond, 1) == constm1_rtx)
+ predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
+ break;
+ case GE:
+ case GT:
+ if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
+ || XEXP (cond, 1) == constm1_rtx)
+ predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
+ break;
+
+ default:
+ break;
+ }
}
/* Attach the combined probability to each conditional jump. */