author     tejohnson <tejohnson@138bc75d-0d04-0410-961f-82ee72b054a4>  2013-04-29 13:22:46 +0000
committer  tejohnson <tejohnson@138bc75d-0d04-0410-961f-82ee72b054a4>  2013-04-29 13:22:46 +0000
commit     e2bc4ec8d5c6cfcf1caacbfab229a22728a11d75 (patch)
tree       9daf54908866e15052bfcfc011bc4753b4b363fb /gcc
parent     c48f77d534fed8f4995de260aaabc84f0790012d (diff)
download   gcc-e2bc4ec8d5c6cfcf1caacbfab229a22728a11d75.tar.gz
This patch fixes PR bootstrap/57077.  Certain new uses of apply_probability
are actually scaling the counts up, and the scale factor should not be
treated as a probability as the value may exceed REG_BR_PROB_BASE.  One
example (from the PR) is when scaling counts up in LTO when merging
profiles.  Another example I found when preparing the patch to use the
rounding divide in more places is when inlining COMDAT functions.

Add new helper function apply_scale that does the scaling without the
probability range check.  I audited the new uses of apply_probability and
changed the calls as appropriate.

2013-04-29  Teresa Johnson  <tejohnson@google.com>

	PR bootstrap/57077
	* basic-block.h (apply_scale): New function.
	(apply_probability): Use apply_scale.
	* gimple-streamer-in.c (input_bb): Ditto.
	* lto-streamer-in.c (input_cfg): Ditto.
	* lto-cgraph.c (merge_profile_summaries): Ditto.
	* tree-optimize.c (execute_fixup_cfg): Ditto.
	* tree-inline.c (copy_bb): Update comment to use apply_scale.
	(copy_edges_for_bb): Ditto.
	(copy_cfg_body): Ditto.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@198416 138bc75d-0d04-0410-961f-82ee72b054a4
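For context (not part of the patch): below is a minimal standalone C sketch of the
failure mode.  The macro definitions mirror the ones in basic-block.h, but the run
counts and profile values are made up, and the assert stands in for
check_probability.  When the merged summary has more runs than an individual
module, GCOV_COMPUTE_SCALE produces a factor larger than REG_BR_PROB_BASE; feeding
that factor to apply_probability trips the range check, while apply_scale performs
the same rounding multiply without it.

    /* Standalone illustration only -- not GCC source.  */
    #include <assert.h>
    #include <stdio.h>

    typedef long long gcov_type;

    #define REG_BR_PROB_BASE  10000
    #define RDIV(X,Y) (((X) + (Y) / 2) / (Y))            /* rounding divide */
    #define GCOV_COMPUTE_SCALE(num,den) \
      ((den) ? RDIV ((num) * REG_BR_PROB_BASE, (den)) : REG_BR_PROB_BASE)

    /* apply_probability insists the factor is a real probability ...  */
    static gcov_type
    apply_probability (gcov_type freq, int prob)
    {
      assert (prob >= 0 && prob <= REG_BR_PROB_BASE);    /* check_probability */
      return RDIV (freq * prob, REG_BR_PROB_BASE);
    }

    /* ... apply_scale does the same rounding multiply without the check.  */
    static gcov_type
    apply_scale (gcov_type freq, int scale)
    {
      return RDIV (freq * scale, REG_BR_PROB_BASE);
    }

    int
    main (void)
    {
      /* Hypothetical LTO merge: this module ran 2 times, the largest module
         ran 10 times, so this module's counts must be scaled *up* by 5x.  */
      int scale = GCOV_COMPUTE_SCALE ((gcov_type) 10, (gcov_type) 2);
      printf ("scale = %d\n", scale);                       /* 50000 > 10000 */
      printf ("scaled count = %lld\n", apply_scale (1000, scale));   /* 5000 */

      /* Scaling down is still a probability and passes the check.  */
      printf ("halved count = %lld\n", apply_probability (1000, 5000)); /* 500 */
      /* apply_probability (1000, scale) would fail the assertion here.  */
      return 0;
    }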
Diffstat (limited to 'gcc')
-rw-r--r--  gcc/ChangeLog             14
-rw-r--r--  gcc/basic-block.h         14
-rw-r--r--  gcc/gimple-streamer-in.c   4
-rw-r--r--  gcc/lto-cgraph.c          16
-rw-r--r--  gcc/lto-streamer-in.c      4
-rw-r--r--  gcc/tree-inline.c          6
-rw-r--r--  gcc/tree-optimize.c       10
7 files changed, 46 insertions, 22 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 34dcd586384..1529dd37f14 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,17 @@
+2013-04-29 Teresa Johnson <tejohnson@google.com>
+
+ PR bootstrap/57077
+ * basic-block.h (apply_scale): New function.
+ (apply_probability): Use apply_scale.
+ * gimple-streamer-in.c (input_bb): Ditto.
+ * lto-streamer-in.c (input_cfg): Ditto.
+ * lto-cgraph.c (merge_profile_summaries): Ditto.
+ * tree-optimize.c (execute_fixup_cfg): Ditto.
+ * tree-inline.c (copy_bb): Update comment to use
+ apply_scale.
+ (copy_edges_for_bb): Ditto.
+ (copy_cfg_body): Ditto.
+
2013-04-29 Tom de Vries <tom@codesourcery.com>
* tree-ssa-tail-merge.c (find_same_succ_bb): Skip loop latch bbs.
diff --git a/gcc/basic-block.h b/gcc/basic-block.h
index 9b5192eb877..eed320c9ea0 100644
--- a/gcc/basic-block.h
+++ b/gcc/basic-block.h
@@ -500,7 +500,7 @@ struct edge_list
REG_BR_PROB_BASE)
/* Compute a scale factor (or probability) suitable for scaling of
- gcov_type values via apply_probability(). */
+ gcov_type values via apply_probability() and apply_scale(). */
#define GCOV_COMPUTE_SCALE(num,den) \
((den) ? RDIV ((num) * REG_BR_PROB_BASE, (den)) : REG_BR_PROB_BASE)
@@ -952,13 +952,23 @@ combine_probabilities (int prob1, int prob2)
return RDIV (prob1 * prob2, REG_BR_PROB_BASE);
}
+/* Apply scale factor SCALE on frequency or count FREQ. Use this
+ interface when potentially scaling up, so that SCALE is not
+ constrained to be < REG_BR_PROB_BASE. */
+
+static inline gcov_type
+apply_scale (gcov_type freq, int scale)
+{
+ return RDIV (freq * scale, REG_BR_PROB_BASE);
+}
+
/* Apply probability PROB on frequency or count FREQ. */
static inline gcov_type
apply_probability (gcov_type freq, int prob)
{
check_probability (prob);
- return RDIV (freq * prob, REG_BR_PROB_BASE);
+ return apply_scale (freq, prob);
}
/* Return inverse probability for PROB. */
diff --git a/gcc/gimple-streamer-in.c b/gcc/gimple-streamer-in.c
index a27f0d6d62f..03fbe91bbe2 100644
--- a/gcc/gimple-streamer-in.c
+++ b/gcc/gimple-streamer-in.c
@@ -329,8 +329,8 @@ input_bb (struct lto_input_block *ib, enum LTO_tags tag,
index = streamer_read_uhwi (ib);
bb = BASIC_BLOCK_FOR_FUNCTION (fn, index);
- bb->count = apply_probability (streamer_read_gcov_count (ib),
- count_materialization_scale);
+ bb->count = apply_scale (streamer_read_gcov_count (ib),
+ count_materialization_scale);
bb->frequency = streamer_read_hwi (ib);
bb->flags = streamer_read_hwi (ib);
diff --git a/gcc/lto-cgraph.c b/gcc/lto-cgraph.c
index 69f5e3a659f..cead76b0cd2 100644
--- a/gcc/lto-cgraph.c
+++ b/gcc/lto-cgraph.c
@@ -1347,10 +1347,10 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
file_data->profile_info.runs);
lto_gcov_summary.sum_max
= MAX (lto_gcov_summary.sum_max,
- apply_probability (file_data->profile_info.sum_max, scale));
+ apply_scale (file_data->profile_info.sum_max, scale));
lto_gcov_summary.sum_all
= MAX (lto_gcov_summary.sum_all,
- apply_probability (file_data->profile_info.sum_all, scale));
+ apply_scale (file_data->profile_info.sum_all, scale));
/* Save a pointer to the profile_info with the largest
scaled sum_all and the scale for use in merging the
histogram. */
@@ -1372,8 +1372,8 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
/* Scale up the min value as we did the corresponding sum_all
above. Use that to find the new histogram index. */
gcov_type scaled_min
- = apply_probability (saved_profile_info->histogram[h_ix].min_value,
- saved_scale);
+ = apply_scale (saved_profile_info->histogram[h_ix].min_value,
+ saved_scale);
/* The new index may be shared with another scaled histogram entry,
so we need to account for a non-zero histogram entry at new_ix. */
unsigned new_ix = gcov_histo_index (scaled_min);
@@ -1386,8 +1386,8 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
here and place the scaled cumulative counter value in the bucket
corresponding to the scaled minimum counter value. */
lto_gcov_summary.histogram[new_ix].cum_value
- += apply_probability (saved_profile_info->histogram[h_ix].cum_value,
- saved_scale);
+ += apply_scale (saved_profile_info->histogram[h_ix].cum_value,
+ saved_scale);
lto_gcov_summary.histogram[new_ix].num_counters
+= saved_profile_info->histogram[h_ix].num_counters;
}
@@ -1419,8 +1419,8 @@ merge_profile_summaries (struct lto_file_decl_data **file_data_vec)
if (scale == REG_BR_PROB_BASE)
continue;
for (edge = node->callees; edge; edge = edge->next_callee)
- edge->count = apply_probability (edge->count, scale);
- node->count = apply_probability (node->count, scale);
+ edge->count = apply_scale (edge->count, scale);
+ node->count = apply_scale (node->count, scale);
}
}
diff --git a/gcc/lto-streamer-in.c b/gcc/lto-streamer-in.c
index f5789c01277..87158bf0b33 100644
--- a/gcc/lto-streamer-in.c
+++ b/gcc/lto-streamer-in.c
@@ -635,8 +635,8 @@ input_cfg (struct lto_input_block *ib, struct function *fn,
dest_index = streamer_read_uhwi (ib);
probability = (int) streamer_read_hwi (ib);
- count = apply_probability ((gcov_type) streamer_read_gcov_count (ib),
- count_materialization_scale);
+ count = apply_scale ((gcov_type) streamer_read_gcov_count (ib),
+ count_materialization_scale);
edge_flags = streamer_read_uhwi (ib);
dest = BASIC_BLOCK_FOR_FUNCTION (fn, dest_index);
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index c358cbef2a7..69455cebf79 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -1519,7 +1519,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
basic_block_info automatically. */
copy_basic_block = create_basic_block (NULL, (void *) 0,
(basic_block) prev->aux);
- /* Update to use apply_probability(). */
+ /* Update to use apply_scale(). */
copy_basic_block->count = bb->count * count_scale / REG_BR_PROB_BASE;
/* We are going to rebuild frequencies from scratch. These values
@@ -1891,7 +1891,7 @@ copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
&& old_edge->dest->aux != EXIT_BLOCK_PTR)
flags |= EDGE_FALLTHRU;
new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
- /* Update to use apply_probability(). */
+ /* Update to use apply_scale(). */
new_edge->count = old_edge->count * count_scale / REG_BR_PROB_BASE;
new_edge->probability = old_edge->probability;
}
@@ -2278,7 +2278,7 @@ copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
incoming_frequency += EDGE_FREQUENCY (e);
incoming_count += e->count;
}
- /* Update to use apply_probability(). */
+ /* Update to use apply_scale(). */
incoming_count = incoming_count * count_scale / REG_BR_PROB_BASE;
/* Update to use EDGE_FREQUENCY. */
incoming_frequency
diff --git a/gcc/tree-optimize.c b/gcc/tree-optimize.c
index d399d8a98c6..0948d2e6977 100644
--- a/gcc/tree-optimize.c
+++ b/gcc/tree-optimize.c
@@ -131,15 +131,15 @@ execute_fixup_cfg (void)
ENTRY_BLOCK_PTR->count);
ENTRY_BLOCK_PTR->count = cgraph_get_node (current_function_decl)->count;
- EXIT_BLOCK_PTR->count = apply_probability (EXIT_BLOCK_PTR->count,
- count_scale);
+ EXIT_BLOCK_PTR->count = apply_scale (EXIT_BLOCK_PTR->count,
+ count_scale);
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
- e->count = apply_probability (e->count, count_scale);
+ e->count = apply_scale (e->count, count_scale);
FOR_EACH_BB (bb)
{
- bb->count = apply_probability (bb->count, count_scale);
+ bb->count = apply_scale (bb->count, count_scale);
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
@@ -172,7 +172,7 @@ execute_fixup_cfg (void)
}
FOR_EACH_EDGE (e, ei, bb->succs)
- e->count = apply_probability (e->count, count_scale);
+ e->count = apply_scale (e->count, count_scale);
/* If we have a basic block with no successors that does not
end with a control statement or a noreturn call end it with