summaryrefslogtreecommitdiff
path: root/gcc/internal-fn.c
diff options
context:
space:
mode:
authormpolacek <mpolacek@138bc75d-0d04-0410-961f-82ee72b054a4>2013-12-04 22:47:11 +0000
committermpolacek <mpolacek@138bc75d-0d04-0410-961f-82ee72b054a4>2013-12-04 22:47:11 +0000
commit137559b2325c900743803d115c4d8495f9da5d67 (patch)
tree20fca81276f1e6d6224300c11a4e10fa89519d41 /gcc/internal-fn.c
parent85f5275438752595289934e2bbf725d1481f77cc (diff)
downloadgcc-137559b2325c900743803d115c4d8495f9da5d67.tar.gz
Implement -fsanitize=signed-integer-overflow.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@205684 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/internal-fn.c')
-rw-r--r--gcc/internal-fn.c302
1 files changed, 302 insertions, 0 deletions
diff --git a/gcc/internal-fn.c b/gcc/internal-fn.c
index 9ae917ba0ac..527b5ffaf7f 100644
--- a/gcc/internal-fn.c
+++ b/gcc/internal-fn.c
@@ -31,6 +31,9 @@ along with GCC; see the file COPYING3. If not see
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
+#include "ubsan.h"
+#include "target.h"
+#include "predict.h"
/* The names of each internal function, indexed by function number. */
const char *const internal_fn_name_array[] = {
@@ -153,6 +156,305 @@ expand_UBSAN_NULL (gimple stmt ATTRIBUTE_UNUSED)
gcc_unreachable ();
}
+/* Add sub/add overflow checking to the statement STMT.
+   CODE says whether the operation is PLUS_EXPR or MINUS_EXPR.
+   STMT is a UBSAN_CHECK_ADD/SUB internal call; if it has a lhs, the
+   (wrapped) result of the operation is stored there as well.  */
+
+void
+ubsan_expand_si_overflow_addsub_check (tree_code code, gimple stmt)
+{
+  rtx res, op0, op1;
+  tree lhs, fn, arg0, arg1;
+  rtx done_label, do_error, target = NULL_RTX;
+
+  lhs = gimple_call_lhs (stmt);
+  arg0 = gimple_call_arg (stmt, 0);
+  arg1 = gimple_call_arg (stmt, 1);
+  done_label = gen_label_rtx ();
+  do_error = gen_label_rtx ();
+  /* Build the ubsan runtime reporting call now; it is only expanded
+     on the do_error path below.  */
+  fn = ubsan_build_overflow_builtin (code, gimple_location (stmt),
+                                     TREE_TYPE (arg0), arg0, arg1);
+  do_pending_stack_adjust ();
+  op0 = expand_normal (arg0);
+  op1 = expand_normal (arg1);
+
+  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
+  if (lhs)
+    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+
+  /* Prefer the target's add/sub-with-overflow-trap pattern if it has
+     one; its fourth operand is the label taken on overflow.  */
+  enum insn_code icode
+    = optab_handler (code == PLUS_EXPR ? addv4_optab : subv4_optab, mode);
+  if (icode != CODE_FOR_nothing)
+    {
+      struct expand_operand ops[4];
+      rtx last = get_last_insn ();
+
+      res = gen_reg_rtx (mode);
+      create_output_operand (&ops[0], res, mode);
+      create_input_operand (&ops[1], op0, mode);
+      create_input_operand (&ops[2], op1, mode);
+      create_fixed_operand (&ops[3], do_error);
+      if (maybe_expand_insn (icode, 4, ops))
+        {
+          last = get_last_insn ();
+          /* Mark the branch to do_error as very unlikely taken.  */
+          if (profile_status != PROFILE_ABSENT
+              && JUMP_P (last)
+              && any_condjump_p (last)
+              && !find_reg_note (last, REG_BR_PROB, 0))
+            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
+          emit_jump (done_label);
+        }
+      else
+        {
+          delete_insns_since (last);
+          icode = CODE_FOR_nothing;
+        }
+    }
+
+  if (icode == CODE_FOR_nothing)
+    {
+      rtx sub_check = gen_label_rtx ();
+
+      /* Compute the operation.  On RTL level, the operation is always
+         unsigned.  Use sub_optab for MINUS_EXPR so that RES really is
+         op0 - op1; the comparisons below and the value stored into the
+         lhs are both written in terms of the actual result of CODE.  */
+      res = expand_binop (mode, code == PLUS_EXPR ? add_optab : sub_optab,
+                          op0, op1, NULL_RTX, false, OPTAB_LIB_WIDEN);
+
+      /* If the op1 is negative, we have to use a different check.  */
+      emit_cmp_and_jump_insns (op1, const0_rtx, LT, NULL_RTX, mode,
+                               false, sub_check, PROB_EVEN);
+
+      /* Compare the result of the operation with one of the operands.
+         For op1 >= 0: a + b overflows iff a + b < a, while a - b
+         overflows iff a - b > a.  */
+      emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? GE : LE,
+                               NULL_RTX, mode, false, done_label,
+                               PROB_VERY_LIKELY);
+      /* If we get here, we have to print the error.  */
+      emit_jump (do_error);
+
+      emit_label (sub_check);
+      /* For op1 < 0 the conditions flip: a + b must be <= a and
+         a - b must be >= a for the operation not to overflow.  */
+      emit_cmp_and_jump_insns (res, op0, code == PLUS_EXPR ? LE : GE,
+                               NULL_RTX, mode, false, done_label,
+                               PROB_VERY_LIKELY);
+    }
+
+  emit_label (do_error);
+  /* Expand the ubsan builtin call.  */
+  expand_normal (fn);
+  do_pending_stack_adjust ();
+
+  /* We're done.  */
+  emit_label (done_label);
+
+  if (lhs)
+    emit_move_insn (target, res);
+}
+
+/* Add negate overflow checking to the statement STMT.
+   STMT is a UBSAN_CHECK_SUB internal call whose first argument is
+   literal zero, i.e. a signed negation -ARG1; overflow happens only
+   for the most negative value of the type.  */
+
+void
+ubsan_expand_si_overflow_neg_check (gimple stmt)
+{
+  rtx res, op1;
+  tree lhs, fn, arg1;
+  rtx done_label, do_error, target = NULL_RTX;
+
+  lhs = gimple_call_lhs (stmt);
+  /* Argument 0 is the zero constant; only the negated operand is used.  */
+  arg1 = gimple_call_arg (stmt, 1);
+  done_label = gen_label_rtx ();
+  do_error = gen_label_rtx ();
+  /* Build the ubsan runtime reporting call now; it is only expanded
+     on the do_error path below.  */
+  fn = ubsan_build_overflow_builtin (NEGATE_EXPR, gimple_location (stmt),
+                                     TREE_TYPE (arg1), arg1, NULL_TREE);
+
+  do_pending_stack_adjust ();
+  op1 = expand_normal (arg1);
+
+  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg1));
+  if (lhs)
+    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+
+  /* Prefer the target's negate-with-overflow-trap pattern if it has
+     one; its third operand is the label taken on overflow.  */
+  enum insn_code icode = optab_handler (negv3_optab, mode);
+  if (icode != CODE_FOR_nothing)
+    {
+      struct expand_operand ops[3];
+      rtx last = get_last_insn ();
+
+      res = gen_reg_rtx (mode);
+      create_output_operand (&ops[0], res, mode);
+      create_input_operand (&ops[1], op1, mode);
+      create_fixed_operand (&ops[2], do_error);
+      if (maybe_expand_insn (icode, 3, ops))
+        {
+          last = get_last_insn ();
+          /* Mark the branch to do_error as very unlikely taken.  */
+          if (profile_status != PROFILE_ABSENT
+              && JUMP_P (last)
+              && any_condjump_p (last)
+              && !find_reg_note (last, REG_BR_PROB, 0))
+            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
+          emit_jump (done_label);
+        }
+      else
+        {
+          /* Pattern failed to expand; fall back to the generic check.  */
+          delete_insns_since (last);
+          icode = CODE_FOR_nothing;
+        }
+    }
+
+  if (icode == CODE_FOR_nothing)
+    {
+      /* Compute the operation.  On RTL level, the addition is always
+         unsigned.  */
+      res = expand_unop (mode, neg_optab, op1, NULL_RTX, false);
+
+      /* Compare the operand with the most negative value.  Negation
+         overflows only for TYPE_MIN; if op1 differs we are done,
+         otherwise control falls through into the do_error label.  */
+      rtx minv = expand_normal (TYPE_MIN_VALUE (TREE_TYPE (arg1)));
+      emit_cmp_and_jump_insns (op1, minv, NE, NULL_RTX, mode, false,
+                               done_label, PROB_VERY_LIKELY);
+    }
+
+  emit_label (do_error);
+  /* Expand the ubsan builtin call.  */
+  expand_normal (fn);
+  do_pending_stack_adjust ();
+
+  /* We're done.  */
+  emit_label (done_label);
+
+  if (lhs)
+    emit_move_insn (target, res);
+}
+
+/* Add mul overflow checking to the statement STMT.
+   STMT is a UBSAN_CHECK_MUL internal call; if it has a lhs, the
+   (wrapped) product is stored there as well.  */
+
+void
+ubsan_expand_si_overflow_mul_check (gimple stmt)
+{
+  rtx res, op0, op1;
+  tree lhs, fn, arg0, arg1;
+  rtx done_label, do_error, target = NULL_RTX;
+
+  lhs = gimple_call_lhs (stmt);
+  arg0 = gimple_call_arg (stmt, 0);
+  arg1 = gimple_call_arg (stmt, 1);
+  done_label = gen_label_rtx ();
+  do_error = gen_label_rtx ();
+  /* Build the ubsan runtime reporting call now; it is only expanded
+     on the do_error path below.  */
+  fn = ubsan_build_overflow_builtin (MULT_EXPR, gimple_location (stmt),
+                                     TREE_TYPE (arg0), arg0, arg1);
+
+  do_pending_stack_adjust ();
+  op0 = expand_normal (arg0);
+  op1 = expand_normal (arg1);
+
+  enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
+  if (lhs)
+    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
+
+  /* Prefer the target's multiply-with-overflow-trap pattern if it has
+     one; its fourth operand is the label taken on overflow.  */
+  enum insn_code icode = optab_handler (mulv4_optab, mode);
+  if (icode != CODE_FOR_nothing)
+    {
+      struct expand_operand ops[4];
+      rtx last = get_last_insn ();
+
+      res = gen_reg_rtx (mode);
+      create_output_operand (&ops[0], res, mode);
+      create_input_operand (&ops[1], op0, mode);
+      create_input_operand (&ops[2], op1, mode);
+      create_fixed_operand (&ops[3], do_error);
+      if (maybe_expand_insn (icode, 4, ops))
+        {
+          last = get_last_insn ();
+          /* Mark the branch to do_error as very unlikely taken.  */
+          if (profile_status != PROFILE_ABSENT
+              && JUMP_P (last)
+              && any_condjump_p (last)
+              && !find_reg_note (last, REG_BR_PROB, 0))
+            add_int_reg_note (last, REG_BR_PROB, PROB_VERY_UNLIKELY);
+          emit_jump (done_label);
+        }
+      else
+        {
+          /* Pattern failed to expand; fall back to the generic check.  */
+          delete_insns_since (last);
+          icode = CODE_FOR_nothing;
+        }
+    }
+
+  if (icode == CODE_FOR_nothing)
+    {
+      struct separate_ops ops;
+      ops.op0 = arg0;
+      ops.op1 = arg1;
+      ops.op2 = NULL_TREE;
+      ops.location = gimple_location (stmt);
+      if (GET_MODE_2XWIDER_MODE (mode) != VOIDmode
+          && targetm.scalar_mode_supported_p (GET_MODE_2XWIDER_MODE (mode)))
+        {
+          /* Do the multiplication in a mode twice as wide and check
+             that the high half is just the sign-extension of the low
+             half.  The wider type is unsigned (last arg 0 is
+             unsignedp)... NOTE(review): presumably signedness does not
+             matter for WIDEN_MULT_EXPR expansion here — confirm.  */
+          enum machine_mode wmode = GET_MODE_2XWIDER_MODE (mode);
+          ops.code = WIDEN_MULT_EXPR;
+          ops.type
+            = build_nonstandard_integer_type (GET_MODE_PRECISION (wmode), 0);
+
+          res = expand_expr_real_2 (&ops, NULL_RTX, wmode, EXPAND_NORMAL);
+          rtx hipart = expand_shift (RSHIFT_EXPR, wmode, res,
+                                     GET_MODE_PRECISION (mode), NULL_RTX, 0);
+          hipart = gen_lowpart (mode, hipart);
+          res = gen_lowpart (mode, res);
+          /* Arithmetic-shift the narrow result down to all-sign-bits:
+             0 if RES >= 0, -1 if RES < 0.  */
+          rtx signbit = expand_shift (RSHIFT_EXPR, mode, res,
+                                      GET_MODE_PRECISION (mode) - 1,
+                                      NULL_RTX, 0);
+          /* RES is low half of the double width result, HIPART
+             the high half.  There was overflow if
+             HIPART is different from RES < 0 ? -1 : 0.  */
+          emit_cmp_and_jump_insns (signbit, hipart, EQ, NULL_RTX, mode,
+                                   false, done_label, PROB_VERY_LIKELY);
+        }
+      else
+        {
+          /* For now we don't instrument this.  See __mulvDI3 in libgcc2.c
+             for what could be done.  The product is still computed and
+             stored, but overflow goes undetected here.  */
+          ops.code = MULT_EXPR;
+          ops.type = TREE_TYPE (arg0);
+          res = expand_expr_real_2 (&ops, NULL_RTX, mode, EXPAND_NORMAL);
+          emit_jump (done_label);
+        }
+    }
+
+  emit_label (do_error);
+  /* Expand the ubsan builtin call.  */
+  expand_normal (fn);
+  do_pending_stack_adjust ();
+
+  /* We're done.  */
+  emit_label (done_label);
+
+  if (lhs)
+    emit_move_insn (target, res);
+}
+
+/* Expand UBSAN_CHECK_ADD call STMT.
+   Signed '+' shares one expander with '-'; PLUS_EXPR selects the
+   addition checks.  */
+
+static void
+expand_UBSAN_CHECK_ADD (gimple stmt)
+{
+  ubsan_expand_si_overflow_addsub_check (PLUS_EXPR, stmt);
+}
+
+/* Expand UBSAN_CHECK_SUB call STMT.
+   A subtraction whose first operand is literal zero is really a
+   negation, which has its own cheaper check; everything else goes
+   through the generic add/sub expander with MINUS_EXPR.  */
+
+static void
+expand_UBSAN_CHECK_SUB (gimple stmt)
+{
+  if (!integer_zerop (gimple_call_arg (stmt, 0)))
+    ubsan_expand_si_overflow_addsub_check (MINUS_EXPR, stmt);
+  else
+    ubsan_expand_si_overflow_neg_check (stmt);
+}
+
+/* Expand UBSAN_CHECK_MUL call STMT.
+   Thin dispatcher kept for symmetry with the other UBSAN_CHECK_*
+   expanders; all work happens in the mul expander.  */
+
+static void
+expand_UBSAN_CHECK_MUL (gimple stmt)
+{
+  ubsan_expand_si_overflow_mul_check (stmt);
+}
+
/* Routines to expand each internal function, indexed by function number.
Each routine has the prototype: