Diffstat (limited to 'gcc/tree-vrp.c')
-rw-r--r-- | gcc/tree-vrp.c | 63
1 file changed, 52 insertions(+), 11 deletions(-)
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 49231d1dd87..731a6debff6 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -3682,6 +3682,47 @@ extract_range_basic (value_range_t *vr, gimple stmt)
 	  break;
 	}
     }
+  else if (is_gimple_call (stmt)
+	   && gimple_call_internal_p (stmt))
+    {
+      enum tree_code subcode = ERROR_MARK;
+      switch (gimple_call_internal_fn (stmt))
+	{
+	case IFN_UBSAN_CHECK_ADD:
+	  subcode = PLUS_EXPR;
+	  break;
+	case IFN_UBSAN_CHECK_SUB:
+	  subcode = MINUS_EXPR;
+	  break;
+	case IFN_UBSAN_CHECK_MUL:
+	  subcode = MULT_EXPR;
+	  break;
+	default:
+	  break;
+	}
+      if (subcode != ERROR_MARK)
+	{
+	  bool saved_flag_wrapv = flag_wrapv;
+	  /* Pretend the arithmetics is wrapping.  If there is
+	     any overflow, we'll complain, but will actually do
+	     wrapping operation.  */
+	  flag_wrapv = 1;
+	  extract_range_from_binary_expr (vr, subcode, type,
+					  gimple_call_arg (stmt, 0),
+					  gimple_call_arg (stmt, 1));
+	  flag_wrapv = saved_flag_wrapv;
+
+	  /* If for both arguments vrp_valueize returned non-NULL,
+	     this should have been already folded and if not, it
+	     wasn't folded because of overflow.  Avoid removing the
+	     UBSAN_CHECK_* calls in that case.  */
+	  if (vr->type == VR_RANGE
+	      && (vr->min == vr->max
+		  || operand_equal_p (vr->min, vr->max, 0)))
+	    set_value_range_to_varying (vr);
+	  return;
+	}
+    }
   if (INTEGRAL_TYPE_P (type)
       && gimple_stmt_nonnegative_warnv_p (stmt, &sop))
     set_value_range_to_nonnegative (vr, type,
@@ -4423,7 +4464,7 @@ infer_value_range (gimple stmt, tree op, enum tree_code *comp_code_p, tree *val_
   if (stmt_ends_bb_p (stmt) && EDGE_COUNT (gimple_bb (stmt)->succs) == 0)
     return false;
 
-  if (infer_nonnull_range (stmt, op))
+  if (infer_nonnull_range (stmt, op, true, true))
     {
       *val_p = build_int_cst (TREE_TYPE (op), 0);
       *comp_code_p = NE_EXPR;
@@ -5818,13 +5859,13 @@ find_assert_locations_1 (basic_block bb, sbitmap live)
 static bool
 find_assert_locations (void)
 {
-  int *rpo = XNEWVEC (int, last_basic_block);
-  int *bb_rpo = XNEWVEC (int, last_basic_block);
-  int *last_rpo = XCNEWVEC (int, last_basic_block);
+  int *rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  int *bb_rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
+  int *last_rpo = XCNEWVEC (int, last_basic_block_for_fn (cfun));
   int rpo_cnt, i;
   bool need_asserts;
 
-  live = XCNEWVEC (sbitmap, last_basic_block);
+  live = XCNEWVEC (sbitmap, last_basic_block_for_fn (cfun));
   rpo_cnt = pre_and_rev_post_order_compute (NULL, rpo, false);
   for (i = 0; i < rpo_cnt; ++i)
     bb_rpo[rpo[i]] = i;
@@ -5859,7 +5900,7 @@ find_assert_locations (void)
   need_asserts = false;
   for (i = rpo_cnt - 1; i >= 0; --i)
     {
-      basic_block bb = BASIC_BLOCK (rpo[i]);
+      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
       edge e;
       edge_iterator ei;
 
@@ -5918,7 +5959,7 @@ find_assert_locations (void)
   XDELETEVEC (rpo);
   XDELETEVEC (bb_rpo);
   XDELETEVEC (last_rpo);
-  for (i = 0; i < last_basic_block; ++i)
+  for (i = 0; i < last_basic_block_for_fn (cfun); ++i)
     if (live[i])
      sbitmap_free (live[i]);
   XDELETEVEC (live);
@@ -6315,7 +6356,7 @@ check_all_array_refs (void)
   basic_block bb;
   gimple_stmt_iterator si;
 
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
     {
       edge_iterator ei;
       edge e;
@@ -6476,7 +6517,7 @@ remove_range_assertions (void)
      /* Note that the BSI iterator bump happens at the bottom of
	 the loop and no bump is necessary if we're removing the
	 statement referenced by the current BSI.  */
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
    for (si = gsi_after_labels (bb), is_unreachable = -1; !gsi_end_p (si);)
      {
	gimple stmt = gsi_stmt (si);
@@ -6591,7 +6632,7 @@ vrp_initialize (void)
   vr_value = XCNEWVEC (value_range_t *, num_vr_values);
   vr_phi_edge_counts = XCNEWVEC (int, num_ssa_names);
 
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
     {
       gimple_stmt_iterator si;
 
@@ -9443,7 +9484,7 @@ identify_jump_threads (void)
      I doubt it's worth the effort for the classes of jump
      threading opportunities we are trying to identify at this
      point in compilation.  */
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple last;
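
Note on the extract_range_basic hunk: it teaches VRP about the internal functions UBSAN_CHECK_ADD/SUB/MUL that -fsanitize=signed-integer-overflow emits in place of plain arithmetic. A minimal user-level sketch of the effect follows; the function name and ranges are illustrative, not taken from the patch.

/* Illustrative only: with -fsanitize=signed-integer-overflow, GCC
   represents "a + b" as _5 = UBSAN_CHECK_ADD (a_1, b_2) in GIMPLE.
   The hunk above lets VRP compute a range for _5 by evaluating the
   addition as if it wrapped (flag_wrapv = 1).  Here the operands are
   provably in [0, 100], so the result range is [0, 200] and the
   addition cannot overflow a 32-bit int; the check is redundant.  */

int
clamped_add (int a, int b)
{
  if (a < 0 || a > 100 || b < 0 || b > 100)
    return -1;
  return a + b;  /* Expanded to UBSAN_CHECK_ADD; range [0, 200].  */
}

The singleton-range guard at the end of the hunk covers the opposite case: if the wrapping evaluation collapses to a single value, both operands were constants that did not fold earlier, which means the operation really overflows, so the range is reset to varying to keep the UBSAN_CHECK_* call alive.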
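
The bulk of the diff, from find_assert_locations onward, is a mechanical conversion from the implicit-cfun CFG accessors (FOR_EACH_BB, BASIC_BLOCK, last_basic_block) to the variants that name the function explicitly. A hedged sketch of the post-conversion idiom, against GCC-internal APIs of this era (the helper name is made up for illustration, and the usual GCC-internal includes such as config.h, system.h, coretypes.h, basic-block.h, and function.h are assumed):

/* Sketch only: a pass helper written in the post-conversion style,
   taking the function explicitly instead of reading the global cfun.  */

static unsigned
count_multi_succ_blocks (struct function *fn)
{
  basic_block bb;
  unsigned count = 0;

  FOR_EACH_BB_FN (bb, fn)          /* was: FOR_EACH_BB (bb) */
    if (EDGE_COUNT (bb->succs) > 1)
      count++;
  return count;
}

Passing the function explicitly is groundwork for passes that inspect CFGs of functions other than the one currently being compiled; within this patch the argument is always cfun, so the behavior is unchanged.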