From 2ba5b7632104872ea0283683c8dba28be2458a7b Mon Sep 17 00:00:00 2001 From: ebotcazou Date: Sun, 28 Jun 2009 18:17:47 +0000 Subject: * fold-const.c (contains_label_1): Fix comments. (contains_label_p): Do not walk trees multiple time. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@149037 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 433ec6085ab..39548e2792e 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -13225,35 +13225,36 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } /* switch (code) */ } -/* Callback for walk_tree, looking for LABEL_EXPR. - Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE. - Do not check the sub-tree of GOTO_EXPR. */ +/* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is + a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees + of GOTO_EXPR. */ static tree -contains_label_1 (tree *tp, - int *walk_subtrees, - void *data ATTRIBUTE_UNUSED) +contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) { switch (TREE_CODE (*tp)) { case LABEL_EXPR: return *tp; + case GOTO_EXPR: *walk_subtrees = 0; - /* no break */ + + /* ... fall through ... */ + default: return NULL_TREE; } } -/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is - accessible from outside the sub-tree. Returns NULL_TREE if no - addressable label is found. */ +/* Return whether the sub-tree ST contains a label which is accessible from + outside the sub-tree. 
*/ static bool contains_label_p (tree st) { - return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE); + return + (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE); } /* Fold a ternary expression of code CODE and type TYPE with operands -- cgit v1.2.1 From 5fb6a9126a7c9e63e0330e2e089f5a6c664335a9 Mon Sep 17 00:00:00 2001 From: manu Date: Tue, 7 Jul 2009 02:10:19 +0000 Subject: =?UTF-8?q?2009-07-07=20=20Manuel=20L=C3=B3pez-Ib=C3=A1=C3=B1ez=20?= =?UTF-8?q?=20?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * c-lex.c: Replace %H by an explicit location. Update all calls. * c-common.c: Likewise. * c-decl.c: Likewise. * c-typeck.c: Likewise. * fold-const.c: Likewise. * gimplify.c: Likewise. * stmt.c: Likewise. * tree-cfg.c: Likewise. * tree-ssa-loop-niter.c: Likewise. * tree-vrp.c: Likewise. * value-prof.c: Likewise. java/ * jcf-parse.c: Replace %H by an explicit location. Update all calls. objc/ * objc-act.c: Replace %H by an explicit location. Update all calls. testsuite/ * gcc.dg/plugin/selfassign.c: Replace %H by an explicit location. Update all calls. * g++.dg/plugin/selfassign.c: Likewise. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@149310 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 39548e2792e..670fa45e497 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -988,7 +988,7 @@ fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code) locus = input_location; else locus = gimple_location (stmt); - warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg); + warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg); } /* Stop deferring overflow warnings, ignoring any deferred -- cgit v1.2.1 From 507d706c72c45b372c5ec487f1045e6099a0290e Mon Sep 17 00:00:00 2001 From: jakub Date: Thu, 9 Jul 2009 11:19:22 +0000 Subject: PR middle-end/40692 * fold-const.c (fold_cond_expr_with_comparison): Don't replace arg1 with arg01 if arg1 is already INTEGER_CST. * gcc.c-torture/compile/pr40692.c: New test. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@149418 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2 ++ 1 file changed, 2 insertions(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 670fa45e497..f3e0614a4a6 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -5303,6 +5303,8 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) switch (comp_code) { case EQ_EXPR: + if (TREE_CODE (arg1) == INTEGER_CST) + break; /* We can replace A with C1 in this case. */ arg1 = fold_convert (type, arg01); return fold_build3 (COND_EXPR, type, arg0, arg1, arg2); -- cgit v1.2.1 From 0962300ca386d8daaca283470a24bf651e9165ea Mon Sep 17 00:00:00 2001 From: jakub Date: Wed, 15 Jul 2009 10:17:54 +0000 Subject: PR middle-end/40747 * fold-const.c (fold_cond_expr_with_comparison): When folding < and <= to MIN, make sure the MIN uses the same type as the comparison's operands. * gcc.c-torture/execute/pr40747.c: New test. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@149675 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 44 ++++++++++++++++++++++++-------------------- 1 file changed, 24 insertions(+), 20 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index f3e0614a4a6..eba869085cc 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -5310,31 +5310,35 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) return fold_build3 (COND_EXPR, type, arg0, arg1, arg2); case LT_EXPR: - /* If C1 is C2 + 1, this is min(A, C2). */ + /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for + MIN_EXPR, to preserve the signedness of the comparison. */ if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (PLUS_EXPR, arg2, build_int_cst (type, 1), 0), OEP_ONLY_CONST)) - return pedantic_non_lvalue (fold_build2 (MIN_EXPR, - type, - fold_convert (type, arg1), - arg2)); + { + tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00, + fold_convert (TREE_TYPE (arg00), arg2)); + return pedantic_non_lvalue (fold_convert (type, tem)); + } break; case LE_EXPR: - /* If C1 is C2 - 1, this is min(A, C2). */ + /* If C1 is C2 - 1, this is min(A, C2), with the same care + as above. */ if (! 
operand_equal_p (arg2, TYPE_MIN_VALUE (type), OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (MINUS_EXPR, arg2, build_int_cst (type, 1), 0), OEP_ONLY_CONST)) - return pedantic_non_lvalue (fold_build2 (MIN_EXPR, - type, - fold_convert (type, arg1), - arg2)); + { + tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00, + fold_convert (TREE_TYPE (arg00), arg2)); + return pedantic_non_lvalue (fold_convert (type, tem)); + } break; case GT_EXPR: @@ -5346,11 +5350,11 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) const_binop (MINUS_EXPR, arg2, build_int_cst (type, 1), 0), OEP_ONLY_CONST)) - return pedantic_non_lvalue (fold_convert (type, - fold_build2 (MAX_EXPR, TREE_TYPE (arg00), - arg00, - fold_convert (TREE_TYPE (arg00), - arg2)))); + { + tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00, + fold_convert (TREE_TYPE (arg00), arg2)); + return pedantic_non_lvalue (fold_convert (type, tem)); + } break; case GE_EXPR: @@ -5361,11 +5365,11 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) const_binop (PLUS_EXPR, arg2, build_int_cst (type, 1), 0), OEP_ONLY_CONST)) - return pedantic_non_lvalue (fold_convert (type, - fold_build2 (MAX_EXPR, TREE_TYPE (arg00), - arg00, - fold_convert (TREE_TYPE (arg00), - arg2)))); + { + tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00, + fold_convert (TREE_TYPE (arg00), arg2)); + return pedantic_non_lvalue (fold_convert (type, tem)); + } break; case NE_EXPR: break; -- cgit v1.2.1 From 389dd41bd043170e7dc7660304f14a5f16af3562 Mon Sep 17 00:00:00 2001 From: manu Date: Thu, 16 Jul 2009 22:29:52 +0000 Subject: =?UTF-8?q?2009-07-17=20=20Aldy=20Hernandez=20=20=20=09=20=20=20=20Manuel=20L=C3=B3pez-Ib=C3=A1=C3=B1ez=20=20?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit PR 40435 * tree-complex.c, tree-loop-distribution.c, tree.c, tree.h, builtins.c, fold-const.c, omp-low.c, cgraphunit.c, tree-ssa-ccp.c, tree-ssa-dom.c, gimple-low.c, 
expr.c, tree-ssa-ifcombine.c, c-decl.c, stor-layout.c, tree-if-conv.c, c-typeck.c, gimplify.c, calls.c, tree-sra.c, tree-mudflap.c, tree-ssa-copy.c, tree-ssa-forwprop.c, c-convert.c, c-omp.c, varasm.c, tree-inline.c, c-common.c, c-common.h, gimple.c, tree-switch-conversion.c, gimple.h, tree-cfg.c, c-parser.c, convert.c: Add location argument to fold_{unary,binary,ternary}, fold_build[123], build_call_expr, build_size_arg, build_fold_addr_expr, build_call_array, non_lvalue, size_diffop, fold_build1_initializer, fold_build2_initializer, fold_build3_initializer, fold_build_call_array, fold_build_call_array_initializer, fold_single_bit_test, omit_one_operand, omit_two_operands, invert_truthvalue, fold_truth_not_expr, build_fold_indirect_ref, fold_indirect_ref, combine_comparisons, fold_builtin_*, fold_call_expr, build_range_check, maybe_fold_offset_to_address, round_up, round_down. objc/ * objc-act.c: Add location argument to all calls to build_fold_addr_expr. testsuite/ * gcc.dg/pr36902.c: Add column info. * g++.dg/gcov/gcov-2.C: Change count for definition. cp/ * typeck.c, init.c, class.c, method.c, rtti.c, except.c, error.c, tree.c, cp-gimplify.c, cxx-pretty-print.c, pt.c, semantics.c, call.c, cvt.c, mangle.c: Add location argument to fold_{unary,binary,ternary}, fold_build[123], build_call_expr, build_size_arg, build_fold_addr_expr, build_call_array, non_lvalue, size_diffop, fold_build1_initializer, fold_build2_initializer, fold_build3_initializer, fold_build_call_array, fold_build_call_array_initializer, fold_single_bit_test, omit_one_operand, omit_two_operands, invert_truthvalue, fold_truth_not_expr, build_fold_indirect_ref, fold_indirect_ref, combine_comparisons, fold_builtin_*, fold_call_expr, build_range_check, maybe_fold_offset_to_address, round_up, round_down. 
fortran/ * trans-expr.c, trans-array.c, trans-openmp.c, trans-stmt.c, trans.c, trans-io.c, trans-decl.c, trans-intrinsic.c: Add location argument to fold_{unary,binary,ternary}, fold_build[123], build_call_expr, build_size_arg, build_fold_addr_expr, build_call_array, non_lvalue, size_diffop, fold_build1_initializer, fold_build2_initializer, fold_build3_initializer, fold_build_call_array, fold_build_call_array_initializer, fold_single_bit_test, omit_one_operand, omit_two_operands, invert_truthvalue, fold_truth_not_expr, build_fold_indirect_ref, fold_indirect_ref, combine_comparisons, fold_builtin_*, fold_call_expr, build_range_check, maybe_fold_offset_to_address, round_up, round_down. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@149722 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2931 ++++++++++++++++++++++++++++++------------------------ 1 file changed, 1648 insertions(+), 1283 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index eba869085cc..803c7a549af 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -99,18 +99,21 @@ static bool negate_mathfn_p (enum built_in_function); static bool negate_expr_p (tree); static tree negate_expr (tree); static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int); -static tree associate_trees (tree, tree, enum tree_code, tree); +static tree associate_trees (location_t, tree, tree, enum tree_code, tree); static tree const_binop (enum tree_code, tree, tree, int); static enum comparison_code comparison_to_compcode (enum tree_code); static enum tree_code compcode_to_comparison (enum comparison_code); static int operand_equal_for_comparison_p (tree, tree, tree); static int twoval_comparison_p (tree, tree *, tree *, int *); -static tree eval_subst (tree, tree, tree, tree, tree); -static tree pedantic_omit_one_operand (tree, tree, tree); -static tree distribute_bit_expr (enum tree_code, tree, tree, tree); -static tree make_bit_field_ref (tree, tree, 
HOST_WIDE_INT, HOST_WIDE_INT, int); -static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree); -static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *, +static tree eval_subst (location_t, tree, tree, tree, tree, tree); +static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree); +static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree); +static tree make_bit_field_ref (location_t, tree, tree, + HOST_WIDE_INT, HOST_WIDE_INT, int); +static tree optimize_bit_field_compare (location_t, enum tree_code, + tree, tree, tree); +static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *, + HOST_WIDE_INT *, enum machine_mode *, int *, int *, tree *, tree *); static int all_ones_mask_p (const_tree, int); @@ -120,23 +123,25 @@ static tree range_binop (enum tree_code, tree, tree, int, tree, int); static tree range_predecessor (tree); static tree range_successor (tree); extern tree make_range (tree, int *, tree *, tree *, bool *); -extern tree build_range_check (tree, tree, int, tree, tree); extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree, tree); -static tree fold_range_test (enum tree_code, tree, tree, tree); -static tree fold_cond_expr_with_comparison (tree, tree, tree, tree); +static tree fold_range_test (location_t, enum tree_code, tree, tree, tree); +static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree); static tree unextend (tree, int, int, tree); -static tree fold_truthop (enum tree_code, tree, tree, tree); -static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree); +static tree fold_truthop (location_t, enum tree_code, tree, tree, tree); +static tree optimize_minmax_comparison (location_t, enum tree_code, + tree, tree, tree); static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *); static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *); -static tree fold_binary_op_with_conditional_arg 
(enum tree_code, tree, +static tree fold_binary_op_with_conditional_arg (location_t, + enum tree_code, tree, tree, tree, tree, tree, int); -static tree fold_mathfn_compare (enum built_in_function, enum tree_code, +static tree fold_mathfn_compare (location_t, + enum built_in_function, enum tree_code, tree, tree, tree); -static tree fold_inf_compare (enum tree_code, tree, tree, tree); -static tree fold_div_compare (enum tree_code, tree, tree, tree); +static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree); +static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree); static bool reorder_operands_p (const_tree, const_tree); static tree fold_negate_const (tree, tree); static tree fold_not_const (tree, tree); @@ -1232,7 +1237,7 @@ negate_expr_p (tree t) returned. */ static tree -fold_negate_expr (tree t) +fold_negate_expr (location_t loc, tree t) { tree type = TREE_TYPE (t); tree tem; @@ -1242,7 +1247,7 @@ fold_negate_expr (tree t) /* Convert - (~A) to A + 1. 
*/ case BIT_NOT_EXPR: if (INTEGRAL_TYPE_P (type)) - return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0), + return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0), build_int_cst (type, 1)); break; @@ -1279,15 +1284,15 @@ fold_negate_expr (tree t) case COMPLEX_EXPR: if (negate_expr_p (t)) - return fold_build2 (COMPLEX_EXPR, type, - fold_negate_expr (TREE_OPERAND (t, 0)), - fold_negate_expr (TREE_OPERAND (t, 1))); + return fold_build2_loc (loc, COMPLEX_EXPR, type, + fold_negate_expr (loc, TREE_OPERAND (t, 0)), + fold_negate_expr (loc, TREE_OPERAND (t, 1))); break; case CONJ_EXPR: if (negate_expr_p (t)) - return fold_build1 (CONJ_EXPR, type, - fold_negate_expr (TREE_OPERAND (t, 0))); + return fold_build1_loc (loc, CONJ_EXPR, type, + fold_negate_expr (loc, TREE_OPERAND (t, 0))); break; case NEGATE_EXPR: @@ -1303,7 +1308,7 @@ fold_negate_expr (tree t) TREE_OPERAND (t, 1))) { tem = negate_expr (TREE_OPERAND (t, 1)); - return fold_build2 (MINUS_EXPR, type, + return fold_build2_loc (loc, MINUS_EXPR, type, tem, TREE_OPERAND (t, 0)); } @@ -1311,7 +1316,7 @@ fold_negate_expr (tree t) if (negate_expr_p (TREE_OPERAND (t, 0))) { tem = negate_expr (TREE_OPERAND (t, 0)); - return fold_build2 (MINUS_EXPR, type, + return fold_build2_loc (loc, MINUS_EXPR, type, tem, TREE_OPERAND (t, 1)); } } @@ -1322,7 +1327,7 @@ fold_negate_expr (tree t) if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)) && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)) && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1))) - return fold_build2 (MINUS_EXPR, type, + return fold_build2_loc (loc, MINUS_EXPR, type, TREE_OPERAND (t, 1), TREE_OPERAND (t, 0)); break; @@ -1337,11 +1342,11 @@ fold_negate_expr (tree t) { tem = TREE_OPERAND (t, 1); if (negate_expr_p (tem)) - return fold_build2 (TREE_CODE (t), type, + return fold_build2_loc (loc, TREE_CODE (t), type, TREE_OPERAND (t, 0), negate_expr (tem)); tem = TREE_OPERAND (t, 0); if (negate_expr_p (tem)) - return fold_build2 (TREE_CODE (t), type, + 
return fold_build2_loc (loc, TREE_CODE (t), type, negate_expr (tem), TREE_OPERAND (t, 1)); } break; @@ -1367,7 +1372,7 @@ fold_negate_expr (tree t) && (TREE_CODE (tem) != INTEGER_CST || integer_onep (tem))) fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); - return fold_build2 (TREE_CODE (t), type, + return fold_build2_loc (loc, TREE_CODE (t), type, TREE_OPERAND (t, 0), negate_expr (tem)); } tem = TREE_OPERAND (t, 0); @@ -1377,7 +1382,7 @@ fold_negate_expr (tree t) && (TREE_CODE (tem) != INTEGER_CST || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type)))) fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC); - return fold_build2 (TREE_CODE (t), type, + return fold_build2_loc (loc, TREE_CODE (t), type, negate_expr (tem), TREE_OPERAND (t, 1)); } } @@ -1389,7 +1394,7 @@ fold_negate_expr (tree t) { tem = strip_float_extensions (t); if (tem != t && negate_expr_p (tem)) - return fold_convert (type, negate_expr (tem)); + return fold_convert_loc (loc, type, negate_expr (tem)); } break; @@ -1402,7 +1407,7 @@ fold_negate_expr (tree t) fndecl = get_callee_fndecl (t); arg = negate_expr (CALL_EXPR_ARG (t, 0)); - return build_call_expr (fndecl, 1, arg); + return build_call_expr_loc (loc, fndecl, 1, arg); } break; @@ -1418,9 +1423,9 @@ fold_negate_expr (tree t) tree ntype = TYPE_UNSIGNED (type) ? 
signed_type_for (type) : unsigned_type_for (type); - tree temp = fold_convert (ntype, TREE_OPERAND (t, 0)); - temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1); - return fold_convert (type, temp); + tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0)); + temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1); + return fold_convert_loc (loc, type, temp); } } break; @@ -1440,17 +1445,22 @@ static tree negate_expr (tree t) { tree type, tem; + location_t loc; if (t == NULL_TREE) return NULL_TREE; + loc = EXPR_LOCATION (t); type = TREE_TYPE (t); STRIP_SIGN_NOPS (t); - tem = fold_negate_expr (t); + tem = fold_negate_expr (loc, t); if (!tem) - tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t); - return fold_convert (type, tem); + { + tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t); + SET_EXPR_LOCATION (tem, loc); + } + return fold_convert_loc (loc, type, tem); } /* Split a tree IN into a constant, literal and variable parts that could be @@ -1552,13 +1562,16 @@ split_tree (tree in, enum tree_code code, tree *conp, tree *litp, return var; } -/* Re-associate trees split by the above function. T1 and T2 are either - expressions to associate or null. Return the new expression, if any. If +/* Re-associate trees split by the above function. T1 and T2 are + either expressions to associate or null. Return the new + expression, if any. LOC is the location of the new expression. If we build an operation, do it in TYPE and with CODE. 
*/ static tree -associate_trees (tree t1, tree t2, enum tree_code code, tree type) +associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type) { + tree tem; + if (t1 == 0) return t2; else if (t2 == 0) @@ -1573,26 +1586,30 @@ associate_trees (tree t1, tree t2, enum tree_code code, tree type) if (code == PLUS_EXPR) { if (TREE_CODE (t1) == NEGATE_EXPR) - return build2 (MINUS_EXPR, type, fold_convert (type, t2), - fold_convert (type, TREE_OPERAND (t1, 0))); + tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2), + fold_convert_loc (loc, type, TREE_OPERAND (t1, 0))); else if (TREE_CODE (t2) == NEGATE_EXPR) - return build2 (MINUS_EXPR, type, fold_convert (type, t1), - fold_convert (type, TREE_OPERAND (t2, 0))); + tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1), + fold_convert_loc (loc, type, TREE_OPERAND (t2, 0))); else if (integer_zerop (t2)) - return fold_convert (type, t1); + return fold_convert_loc (loc, type, t1); } else if (code == MINUS_EXPR) { if (integer_zerop (t2)) - return fold_convert (type, t1); + return fold_convert_loc (loc, type, t1); } - return build2 (code, type, fold_convert (type, t1), - fold_convert (type, t2)); + tem = build2 (code, type, fold_convert_loc (loc, type, t1), + fold_convert_loc (loc, type, t2)); + goto associate_trees_exit; } - return fold_build2 (code, type, fold_convert (type, t1), - fold_convert (type, t2)); + return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1), + fold_convert_loc (loc, type, t2)); + associate_trees_exit: + protected_set_expr_location (tem, loc); + return tem; } /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable @@ -2061,7 +2078,7 @@ size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind) If the operands are constant, so is the result. 
*/ tree -size_binop (enum tree_code code, tree arg0, tree arg1) +size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1) { tree type = TREE_TYPE (arg0); @@ -2097,7 +2114,7 @@ size_binop (enum tree_code code, tree arg0, tree arg1) return int_const_binop (code, arg0, arg1, 0); } - return fold_build2 (code, type, arg0, arg1); + return fold_build2_loc (loc, code, type, arg0, arg1); } /* Given two values, either both of sizetype or both of bitsizetype, @@ -2105,7 +2122,7 @@ size_binop (enum tree_code code, tree arg0, tree arg1) in signed type corresponding to the type of the operands. */ tree -size_diffop (tree arg0, tree arg1) +size_diffop_loc (location_t loc, tree arg0, tree arg1) { tree type = TREE_TYPE (arg0); tree ctype; @@ -2115,7 +2132,7 @@ size_diffop (tree arg0, tree arg1) /* If the type is already signed, just do the simple thing. */ if (!TYPE_UNSIGNED (type)) - return size_binop (MINUS_EXPR, arg0, arg1); + return size_binop_loc (loc, MINUS_EXPR, arg0, arg1); if (type == sizetype) ctype = ssizetype; @@ -2128,8 +2145,9 @@ size_diffop (tree arg0, tree arg1) type and subtract. The hardware will do the right thing with any overflow in the subtraction. */ if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST) - return size_binop (MINUS_EXPR, fold_convert (ctype, arg0), - fold_convert (ctype, arg1)); + return size_binop_loc (loc, MINUS_EXPR, + fold_convert_loc (loc, ctype, arg0), + fold_convert_loc (loc, ctype, arg1)); /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE. 
Otherwise, subtract the other way, convert to CTYPE (we know that can't @@ -2138,11 +2156,14 @@ size_diffop (tree arg0, tree arg1) if (tree_int_cst_equal (arg0, arg1)) return build_int_cst (ctype, 0); else if (tree_int_cst_lt (arg1, arg0)) - return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1)); + return fold_convert_loc (loc, ctype, + size_binop_loc (loc, MINUS_EXPR, arg0, arg1)); else - return size_binop (MINUS_EXPR, build_int_cst (ctype, 0), - fold_convert (ctype, size_binop (MINUS_EXPR, - arg1, arg0))); + return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0), + fold_convert_loc (loc, ctype, + size_binop_loc (loc, + MINUS_EXPR, + arg1, arg0))); } /* A subroutine of fold_convert_const handling conversions of an @@ -2529,7 +2550,7 @@ fold_convertible_p (const_tree type, const_tree arg) simple conversions in preference to calling the front-end's convert. */ tree -fold_convert (tree type, tree arg) +fold_convert_loc (location_t loc, tree type, tree arg) { tree orig = TREE_TYPE (arg); tree tem; @@ -2543,7 +2564,7 @@ fold_convert (tree type, tree arg) return error_mark_node; if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)) - return fold_build1 (NOP_EXPR, type, arg); + return fold_build1_loc (loc, NOP_EXPR, type, arg); switch (TREE_CODE (type)) { @@ -2558,15 +2579,14 @@ fold_convert (tree type, tree arg) } if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) || TREE_CODE (orig) == OFFSET_TYPE) - return fold_build1 (NOP_EXPR, type, arg); + return fold_build1_loc (loc, NOP_EXPR, type, arg); if (TREE_CODE (orig) == COMPLEX_TYPE) - { - tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg); - return fold_convert (type, tem); - } + return fold_convert_loc (loc, type, + fold_build1_loc (loc, REALPART_EXPR, + TREE_TYPE (orig), arg)); gcc_assert (TREE_CODE (orig) == VECTOR_TYPE && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); - return fold_build1 (NOP_EXPR, type, arg); + return fold_build1_loc (loc, NOP_EXPR, type, arg); case 
REAL_TYPE: if (TREE_CODE (arg) == INTEGER_CST) @@ -2593,17 +2613,17 @@ fold_convert (tree type, tree arg) case INTEGER_TYPE: case BOOLEAN_TYPE: case ENUMERAL_TYPE: case POINTER_TYPE: case REFERENCE_TYPE: - return fold_build1 (FLOAT_EXPR, type, arg); + return fold_build1_loc (loc, FLOAT_EXPR, type, arg); case REAL_TYPE: - return fold_build1 (NOP_EXPR, type, arg); + return fold_build1_loc (loc, NOP_EXPR, type, arg); case FIXED_POINT_TYPE: - return fold_build1 (FIXED_CONVERT_EXPR, type, arg); + return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); case COMPLEX_TYPE: - tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg); - return fold_convert (type, tem); + tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); + return fold_convert_loc (loc, type, tem); default: gcc_unreachable (); @@ -2615,7 +2635,7 @@ fold_convert (tree type, tree arg) { tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg); if (tem != NULL_TREE) - return tem; + goto fold_convert_exit; } switch (TREE_CODE (orig)) @@ -2625,11 +2645,11 @@ fold_convert (tree type, tree arg) case ENUMERAL_TYPE: case BOOLEAN_TYPE: case REAL_TYPE: - return fold_build1 (FIXED_CONVERT_EXPR, type, arg); + return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg); case COMPLEX_TYPE: - tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg); - return fold_convert (type, tem); + tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); + return fold_convert_loc (loc, type, tem); default: gcc_unreachable (); @@ -2643,9 +2663,9 @@ fold_convert (tree type, tree arg) case POINTER_TYPE: case REFERENCE_TYPE: case REAL_TYPE: case FIXED_POINT_TYPE: - return fold_build2 (COMPLEX_EXPR, type, - fold_convert (TREE_TYPE (type), arg), - fold_convert (TREE_TYPE (type), + return fold_build2_loc (loc, COMPLEX_EXPR, type, + fold_convert_loc (loc, TREE_TYPE (type), arg), + fold_convert_loc (loc, TREE_TYPE (type), integer_zero_node)); case COMPLEX_TYPE: { @@ -2653,17 +2673,19 @@ fold_convert (tree type, tree 
arg) if (TREE_CODE (arg) == COMPLEX_EXPR) { - rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0)); - ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1)); - return fold_build2 (COMPLEX_EXPR, type, rpart, ipart); + rpart = fold_convert_loc (loc, TREE_TYPE (type), + TREE_OPERAND (arg, 0)); + ipart = fold_convert_loc (loc, TREE_TYPE (type), + TREE_OPERAND (arg, 1)); + return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); } arg = save_expr (arg); - rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg); - ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg); - rpart = fold_convert (TREE_TYPE (type), rpart); - ipart = fold_convert (TREE_TYPE (type), ipart); - return fold_build2 (COMPLEX_EXPR, type, rpart, ipart); + rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg); + ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg); + rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart); + ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart); + return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart); } default: @@ -2676,17 +2698,20 @@ fold_convert (tree type, tree arg) gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig))); gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig) || TREE_CODE (orig) == VECTOR_TYPE); - return fold_build1 (VIEW_CONVERT_EXPR, type, arg); + return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg); case VOID_TYPE: tem = fold_ignored_result (arg); if (TREE_CODE (tem) == MODIFY_EXPR) - return tem; - return fold_build1 (NOP_EXPR, type, tem); + goto fold_convert_exit; + return fold_build1_loc (loc, NOP_EXPR, type, tem); default: gcc_unreachable (); } + fold_convert_exit: + protected_set_expr_location (tem, loc); + return tem; } /* Return false if expr can be assumed not to be an lvalue, true @@ -2743,7 +2768,7 @@ maybe_lvalue_p (const_tree x) /* Return an expr equal to X but certainly not valid as an lvalue. 
*/ tree -non_lvalue (tree x) +non_lvalue_loc (location_t loc, tree x) { /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us. */ @@ -2752,7 +2777,9 @@ non_lvalue (tree x) if (! maybe_lvalue_p (x)) return x; - return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x); + x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x); + SET_EXPR_LOCATION (x, loc); + return x; } /* Nonzero means lvalues are limited to those valid in pedantic ANSI C. @@ -2764,12 +2791,12 @@ int pedantic_lvalues; pedantic lvalue. Otherwise, return X. */ static tree -pedantic_non_lvalue (tree x) +pedantic_non_lvalue_loc (location_t loc, tree x) { if (pedantic_lvalues) - return non_lvalue (x); - else - return x; + return non_lvalue_loc (loc, x); + protected_set_expr_location (x, loc); + return x; } /* Given a tree comparison code, return the code that is the logical inverse @@ -2946,7 +2973,8 @@ compcode_to_comparison (enum comparison_code code) if this makes the transformation invalid. */ tree -combine_comparisons (enum tree_code code, enum tree_code lcode, +combine_comparisons (location_t loc, + enum tree_code code, enum tree_code lcode, enum tree_code rcode, tree truth_type, tree ll_arg, tree lr_arg) { @@ -3023,7 +3051,7 @@ combine_comparisons (enum tree_code code, enum tree_code lcode, enum tree_code tcode; tcode = compcode_to_comparison ((enum comparison_code) compcode); - return fold_build2 (tcode, truth_type, ll_arg, lr_arg); + return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg); } } @@ -3513,7 +3541,8 @@ twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p) NEW1 and OLD1. 
*/ static tree -eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1) +eval_subst (location_t loc, tree arg, tree old0, tree new0, + tree old1, tree new1) { tree type = TREE_TYPE (arg); enum tree_code code = TREE_CODE (arg); @@ -3529,33 +3558,35 @@ eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1) switch (tclass) { case tcc_unary: - return fold_build1 (code, type, - eval_subst (TREE_OPERAND (arg, 0), + return fold_build1_loc (loc, code, type, + eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, old1, new1)); case tcc_binary: - return fold_build2 (code, type, - eval_subst (TREE_OPERAND (arg, 0), + return fold_build2_loc (loc, code, type, + eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, old1, new1), - eval_subst (TREE_OPERAND (arg, 1), + eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0, old1, new1)); case tcc_expression: switch (code) { case SAVE_EXPR: - return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1); + return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, + old1, new1); case COMPOUND_EXPR: - return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1); + return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0, + old1, new1); case COND_EXPR: - return fold_build3 (code, type, - eval_subst (TREE_OPERAND (arg, 0), + return fold_build3_loc (loc, code, type, + eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0, old1, new1), - eval_subst (TREE_OPERAND (arg, 1), + eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0, old1, new1), - eval_subst (TREE_OPERAND (arg, 2), + eval_subst (loc, TREE_OPERAND (arg, 2), old0, new0, old1, new1)); default: break; @@ -3581,7 +3612,7 @@ eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1) else if (arg1 == old1 || operand_equal_p (arg1, old1, 0)) arg1 = new1; - return fold_build2 (code, type, arg0, arg1); + return fold_build2_loc (loc, code, type, arg0, arg1); } default: @@ -3597,37 +3628,58 @@ eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1) the 
conversion of RESULT to TYPE. */ tree -omit_one_operand (tree type, tree result, tree omitted) +omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted) { - tree t = fold_convert (type, result); + tree t = fold_convert_loc (loc, type, result); /* If the resulting operand is an empty statement, just return the omitted statement casted to void. */ if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) - return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted)); + { + t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted)); + goto omit_one_operand_exit; + } if (TREE_SIDE_EFFECTS (omitted)) - return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); + { + t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); + goto omit_one_operand_exit; + } + + return non_lvalue_loc (loc, t); - return non_lvalue (t); + omit_one_operand_exit: + protected_set_expr_location (t, loc); + return t; } /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */ static tree -pedantic_omit_one_operand (tree type, tree result, tree omitted) +pedantic_omit_one_operand_loc (location_t loc, tree type, tree result, + tree omitted) { - tree t = fold_convert (type, result); + tree t = fold_convert_loc (loc, type, result); /* If the resulting operand is an empty statement, just return the omitted statement casted to void. 
*/ if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted)) - return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted)); + { + t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted)); + goto pedantic_omit_one_operand_exit; + } if (TREE_SIDE_EFFECTS (omitted)) - return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); + { + t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t); + goto pedantic_omit_one_operand_exit; + } - return pedantic_non_lvalue (t); + return pedantic_non_lvalue_loc (loc, t); + + pedantic_omit_one_operand_exit: + protected_set_expr_location (t, loc); + return t; } /* Return a tree for the case when the result of an expression is RESULT @@ -3640,16 +3692,23 @@ pedantic_omit_one_operand (tree type, tree result, tree omitted) just do the conversion of RESULT to TYPE. */ tree -omit_two_operands (tree type, tree result, tree omitted1, tree omitted2) +omit_two_operands_loc (location_t loc, tree type, tree result, + tree omitted1, tree omitted2) { - tree t = fold_convert (type, result); + tree t = fold_convert_loc (loc, type, result); if (TREE_SIDE_EFFECTS (omitted2)) - t = build2 (COMPOUND_EXPR, type, omitted2, t); + { + t = build2 (COMPOUND_EXPR, type, omitted2, t); + SET_EXPR_LOCATION (t, loc); + } if (TREE_SIDE_EFFECTS (omitted1)) - t = build2 (COMPOUND_EXPR, type, omitted1, t); + { + t = build2 (COMPOUND_EXPR, type, omitted1, t); + SET_EXPR_LOCATION (t, loc); + } - return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t; + return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t; } @@ -3661,10 +3720,11 @@ omit_two_operands (tree type, tree result, tree omitted1, tree omitted2) problems with the dominator optimizer. 
*/ tree -fold_truth_not_expr (tree arg) +fold_truth_not_expr (location_t loc, tree arg) { tree t, type = TREE_TYPE (arg); enum tree_code code = TREE_CODE (arg); + location_t loc1, loc2; /* If this is a comparison, we can simply invert it, except for floating-point non-equality comparisons, in which case we just @@ -3684,8 +3744,7 @@ fold_truth_not_expr (tree arg) return NULL_TREE; t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1)); - if (EXPR_HAS_LOCATION (arg)) - SET_EXPR_LOCATION (t, EXPR_LOCATION (arg)); + SET_EXPR_LOCATION (t, loc); return t; } @@ -3695,15 +3754,27 @@ fold_truth_not_expr (tree arg) return constant_boolean_node (integer_zerop (arg), type); case TRUTH_AND_EXPR: + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); + loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; + if (loc2 == UNKNOWN_LOCATION) + loc2 = loc; t = build2 (TRUTH_OR_EXPR, type, - invert_truthvalue (TREE_OPERAND (arg, 0)), - invert_truthvalue (TREE_OPERAND (arg, 1))); + invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), + invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); break; case TRUTH_OR_EXPR: + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); + loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; + if (loc2 == UNKNOWN_LOCATION) + loc2 = loc; t = build2 (TRUTH_AND_EXPR, type, - invert_truthvalue (TREE_OPERAND (arg, 0)), - invert_truthvalue (TREE_OPERAND (arg, 1))); + invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), + invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); break; case TRUTH_XOR_EXPR: @@ -3717,20 +3788,32 @@ fold_truth_not_expr (tree arg) TREE_OPERAND (TREE_OPERAND (arg, 1), 0)); else t = build2 (TRUTH_XOR_EXPR, type, - invert_truthvalue (TREE_OPERAND (arg, 0)), + invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)), TREE_OPERAND (arg, 1)); break; case TRUTH_ANDIF_EXPR: + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); + loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); + if 
(loc1 == UNKNOWN_LOCATION) + loc1 = loc; + if (loc2 == UNKNOWN_LOCATION) + loc2 = loc; t = build2 (TRUTH_ORIF_EXPR, type, - invert_truthvalue (TREE_OPERAND (arg, 0)), - invert_truthvalue (TREE_OPERAND (arg, 1))); + invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), + invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); break; case TRUTH_ORIF_EXPR: + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); + loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; + if (loc2 == UNKNOWN_LOCATION) + loc2 = loc; t = build2 (TRUTH_ANDIF_EXPR, type, - invert_truthvalue (TREE_OPERAND (arg, 0)), - invert_truthvalue (TREE_OPERAND (arg, 1))); + invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)), + invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1))); break; case TRUTH_NOT_EXPR: @@ -3740,24 +3823,39 @@ fold_truth_not_expr (tree arg) { tree arg1 = TREE_OPERAND (arg, 1); tree arg2 = TREE_OPERAND (arg, 2); + + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); + loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; + if (loc2 == UNKNOWN_LOCATION) + loc2 = loc; + /* A COND_EXPR may have a throw as one operand, which then has void type. Just leave void operands as they are. */ t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0), VOID_TYPE_P (TREE_TYPE (arg1)) - ? arg1 : invert_truthvalue (arg1), + ? arg1 : invert_truthvalue_loc (loc1, arg1), VOID_TYPE_P (TREE_TYPE (arg2)) - ? arg2 : invert_truthvalue (arg2)); + ? 
arg2 : invert_truthvalue_loc (loc2, arg2)); break; } case COMPOUND_EXPR: - t = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0), - invert_truthvalue (TREE_OPERAND (arg, 1))); + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; + t = build2 (COMPOUND_EXPR, type, + TREE_OPERAND (arg, 0), + invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1))); break; case NON_LVALUE_EXPR: - return invert_truthvalue (TREE_OPERAND (arg, 0)); + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; + return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)); CASE_CONVERT: if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE) @@ -3769,8 +3867,11 @@ fold_truth_not_expr (tree arg) /* ... fall through ... */ case FLOAT_EXPR: + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; t = build1 (TREE_CODE (arg), type, - invert_truthvalue (TREE_OPERAND (arg, 0))); + invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); break; case BIT_AND_EXPR: @@ -3784,8 +3885,11 @@ fold_truth_not_expr (tree arg) break; case CLEANUP_POINT_EXPR: + loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0)); + if (loc1 == UNKNOWN_LOCATION) + loc1 = loc; t = build1 (CLEANUP_POINT_EXPR, type, - invert_truthvalue (TREE_OPERAND (arg, 0))); + invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0))); break; default: @@ -3793,8 +3897,8 @@ fold_truth_not_expr (tree arg) break; } - if (t && EXPR_HAS_LOCATION (arg)) - SET_EXPR_LOCATION (t, EXPR_LOCATION (arg)); + if (t) + SET_EXPR_LOCATION (t, loc); return t; } @@ -3807,16 +3911,19 @@ fold_truth_not_expr (tree arg) problems with the dominator optimizer. 
*/ tree -invert_truthvalue (tree arg) +invert_truthvalue_loc (location_t loc, tree arg) { tree tem; if (TREE_CODE (arg) == ERROR_MARK) return arg; - tem = fold_truth_not_expr (arg); + tem = fold_truth_not_expr (loc, arg); if (!tem) - tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); + { + tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg); + SET_EXPR_LOCATION (tem, loc); + } return tem; } @@ -3831,7 +3938,8 @@ invert_truthvalue (tree arg) If this optimization cannot be done, 0 will be returned. */ static tree -distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1) +distribute_bit_expr (location_t loc, enum tree_code code, tree type, + tree arg0, tree arg1) { tree common; tree left, right; @@ -3869,17 +3977,18 @@ distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1) else return 0; - common = fold_convert (type, common); - left = fold_convert (type, left); - right = fold_convert (type, right); - return fold_build2 (TREE_CODE (arg0), type, common, - fold_build2 (code, type, left, right)); + common = fold_convert_loc (loc, type, common); + left = fold_convert_loc (loc, type, left); + right = fold_convert_loc (loc, type, right); + return fold_build2_loc (loc, TREE_CODE (arg0), type, common, + fold_build2_loc (loc, code, type, left, right)); } /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation with code CODE. This optimization is unsafe. */ static tree -distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1) +distribute_real_division (location_t loc, enum tree_code code, tree type, + tree arg0, tree arg1) { bool mul0 = TREE_CODE (arg0) == MULT_EXPR; bool mul1 = TREE_CODE (arg1) == MULT_EXPR; @@ -3888,8 +3997,8 @@ distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1) if (mul0 == mul1 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0)) - return fold_build2 (mul0 ? 
MULT_EXPR : RDIV_EXPR, type, - fold_build2 (code, type, + return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type, + fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0)), TREE_OPERAND (arg0, 1)); @@ -3908,7 +4017,7 @@ distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1) if (!mul1) real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1); real_arithmetic (&r0, code, &r0, &r1); - return fold_build2 (MULT_EXPR, type, + return fold_build2_loc (loc, MULT_EXPR, type, TREE_OPERAND (arg0, 0), build_real (type, r0)); } @@ -3920,8 +4029,8 @@ distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1) starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */ static tree -make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize, - HOST_WIDE_INT bitpos, int unsignedp) +make_bit_field_ref (location_t loc, tree inner, tree type, + HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp) { tree result, bftype; @@ -3932,7 +4041,7 @@ make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize, || POINTER_TYPE_P (TREE_TYPE (inner))) && host_integerp (size, 0) && tree_low_cst (size, 0) == bitsize) - return fold_convert (type, inner); + return fold_convert_loc (loc, type, inner); } bftype = type; @@ -3942,9 +4051,10 @@ make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize, result = build3 (BIT_FIELD_REF, bftype, inner, size_int (bitsize), bitsize_int (bitpos)); + SET_EXPR_LOCATION (result, loc); if (bftype != type) - result = fold_convert (type, result); + result = fold_convert_loc (loc, type, result); return result; } @@ -3970,8 +4080,8 @@ make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize, tree. Otherwise we return zero. 
*/ static tree -optimize_bit_field_compare (enum tree_code code, tree compare_type, - tree lhs, tree rhs) +optimize_bit_field_compare (location_t loc, enum tree_code code, + tree compare_type, tree lhs, tree rhs) { HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize; tree type = TREE_TYPE (lhs); @@ -4044,15 +4154,15 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, if (! const_p) /* If not comparing with constant, just rework the comparison and return. */ - return fold_build2 (code, compare_type, - fold_build2 (BIT_AND_EXPR, unsigned_type, - make_bit_field_ref (linner, + return fold_build2_loc (loc, code, compare_type, + fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, + make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1), mask), - fold_build2 (BIT_AND_EXPR, unsigned_type, - make_bit_field_ref (rinner, + fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, + make_bit_field_ref (loc, rinner, unsigned_type, nbitsize, nbitpos, 1), @@ -4070,7 +4180,8 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, if (lunsignedp) { if (! integer_zerop (const_binop (RSHIFT_EXPR, - fold_convert (unsigned_type, rhs), + fold_convert_loc (loc, + unsigned_type, rhs), size_int (lbitsize), 0))) { warning (0, "comparison is always %d due to width of bit-field", @@ -4080,7 +4191,8 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, } else { - tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs), + tree tem = const_binop (RSHIFT_EXPR, + fold_convert_loc (loc, signed_type, rhs), size_int (lbitsize - 1), 0); if (! integer_zerop (tem) && ! integer_all_onesp (tem)) { @@ -4100,7 +4212,7 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, /* Make a new bitfield reference, shift the constant over the appropriate number of bits and mask it with the computed mask (in case this was a signed field). If we changed it, make a new one. 
*/ - lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1); + lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1); if (lvolatilep) { TREE_SIDE_EFFECTS (lhs) = 1; @@ -4109,13 +4221,15 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, rhs = const_binop (BIT_AND_EXPR, const_binop (LSHIFT_EXPR, - fold_convert (unsigned_type, rhs), + fold_convert_loc (loc, unsigned_type, rhs), size_int (lbitpos), 0), mask, 0); - return build2 (code, compare_type, - build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), - rhs); + lhs = build2 (code, compare_type, + build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), + rhs); + SET_EXPR_LOCATION (lhs, loc); + return lhs; } /* Subroutine for fold_truthop: decode a field reference. @@ -4142,7 +4256,7 @@ optimize_bit_field_compare (enum tree_code code, tree compare_type, do anything with. */ static tree -decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize, +decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, HOST_WIDE_INT *pbitpos, enum machine_mode *pmode, int *punsignedp, int *pvolatilep, tree *pmask, tree *pand_mask) @@ -4200,8 +4314,8 @@ decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize, /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */ if (and_mask != 0) - mask = fold_build2 (BIT_AND_EXPR, unsigned_type, - fold_convert (unsigned_type, and_mask), mask); + mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, + fold_convert_loc (loc, unsigned_type, and_mask), mask); *pmask = mask; *pand_mask = and_mask; @@ -4424,6 +4538,7 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh, tree exp_type = NULL_TREE, arg0_type = NULL_TREE; int in_p, n_in_p; tree low, high, n_low, n_high; + location_t loc = EXPR_LOCATION (exp); /* Start with simply saying "EXP != 0" and then look at the code of EXP and see if we can refine the range. 
Some of the cases below may not @@ -4542,6 +4657,7 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh, /* ~ X -> -X - 1 */ exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0), build_int_cst (exp_type, 1)); + SET_EXPR_LOCATION (exp, loc); continue; case PLUS_EXPR: case MINUS_EXPR: @@ -4606,10 +4722,10 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh, n_low = low, n_high = high; if (n_low != 0) - n_low = fold_convert (arg0_type, n_low); + n_low = fold_convert_loc (loc, arg0_type, n_low); if (n_high != 0) - n_high = fold_convert (arg0_type, n_high); + n_high = fold_convert_loc (loc, arg0_type, n_high); /* If we're converting arg0 from an unsigned type, to exp, @@ -4643,9 +4759,9 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh, : TYPE_MAX_VALUE (arg0_type); if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type)) - high_positive = fold_build2 (RSHIFT_EXPR, arg0_type, - fold_convert (arg0_type, - high_positive), + high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type, + fold_convert_loc (loc, arg0_type, + high_positive), build_int_cst (arg0_type, 1)); /* If the low bound is specified, "and" the range with the @@ -4655,8 +4771,8 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh, { if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high, 1, - fold_convert (arg0_type, - integer_zero_node), + fold_convert_loc (loc, arg0_type, + integer_zero_node), high_positive)) break; @@ -4668,8 +4784,8 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh, that will be interpreted as negative. */ if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high, 1, - fold_convert (arg0_type, - integer_zero_node), + fold_convert_loc (loc, arg0_type, + integer_zero_node), high_positive)) break; @@ -4708,7 +4824,8 @@ make_range (tree exp, int *pin_p, tree *plow, tree *phigh, on IN_P) the range. Return 0 if the test couldn't be created. 
*/ tree -build_range_check (tree type, tree exp, int in_p, tree low, tree high) +build_range_check (location_t loc, tree type, tree exp, int in_p, + tree low, tree high) { tree etype = TREE_TYPE (exp), value; @@ -4723,9 +4840,9 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) if (! in_p) { - value = build_range_check (type, exp, 1, low, high); + value = build_range_check (loc, type, exp, 1, low, high); if (value != 0) - return invert_truthvalue (value); + return invert_truthvalue_loc (loc, value); return 0; } @@ -4734,26 +4851,26 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) return build_int_cst (type, 1); if (low == 0) - return fold_build2 (LE_EXPR, type, exp, - fold_convert (etype, high)); + return fold_build2_loc (loc, LE_EXPR, type, exp, + fold_convert_loc (loc, etype, high)); if (high == 0) - return fold_build2 (GE_EXPR, type, exp, - fold_convert (etype, low)); + return fold_build2_loc (loc, GE_EXPR, type, exp, + fold_convert_loc (loc, etype, low)); if (operand_equal_p (low, high, 0)) - return fold_build2 (EQ_EXPR, type, exp, - fold_convert (etype, low)); + return fold_build2_loc (loc, EQ_EXPR, type, exp, + fold_convert_loc (loc, etype, low)); if (integer_zerop (low)) { if (! TYPE_UNSIGNED (etype)) { etype = unsigned_type_for (etype); - high = fold_convert (etype, high); - exp = fold_convert (etype, exp); + high = fold_convert_loc (loc, etype, high); + exp = fold_convert_loc (loc, etype, exp); } - return build_range_check (type, exp, 1, 0, high); + return build_range_check (loc, type, exp, 1, 0, high); } /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. 
*/ @@ -4785,9 +4902,9 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0); else etype = signed_etype; - exp = fold_convert (etype, exp); + exp = fold_convert_loc (loc, etype, exp); } - return fold_build2 (GT_EXPR, type, exp, + return fold_build2_loc (loc, GT_EXPR, type, exp, build_int_cst (etype, 0)); } } @@ -4807,10 +4924,10 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN for the type in question, as we rely on this here. */ utype = unsigned_type_for (etype); - maxv = fold_convert (utype, TYPE_MAX_VALUE (etype)); + maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype)); maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1, integer_one_node, 1); - minv = fold_convert (utype, TYPE_MIN_VALUE (etype)); + minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype)); if (integer_zerop (range_binop (NE_EXPR, integer_type_node, minv, 1, maxv, 1))) @@ -4819,9 +4936,9 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) return 0; } - high = fold_convert (etype, high); - low = fold_convert (etype, low); - exp = fold_convert (etype, exp); + high = fold_convert_loc (loc, etype, high); + low = fold_convert_loc (loc, etype, low); + exp = fold_convert_loc (loc, etype, exp); value = const_binop (MINUS_EXPR, high, low, 0); @@ -4830,18 +4947,19 @@ build_range_check (tree type, tree exp, int in_p, tree low, tree high) { if (value != 0 && !TREE_OVERFLOW (value)) { - low = fold_convert (sizetype, low); - low = fold_build1 (NEGATE_EXPR, sizetype, low); - return build_range_check (type, - fold_build2 (POINTER_PLUS_EXPR, etype, exp, low), + low = fold_convert_loc (loc, sizetype, low); + low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low); + return build_range_check (loc, type, + fold_build2_loc (loc, POINTER_PLUS_EXPR, + etype, exp, low), 1, build_int_cst (etype, 0), value); } return 0; } if (value 
!= 0 && !TREE_OVERFLOW (value)) - return build_range_check (type, - fold_build2 (MINUS_EXPR, etype, exp, low), + return build_range_check (loc, type, + fold_build2_loc (loc, MINUS_EXPR, etype, exp, low), 1, build_int_cst (etype, 0), value); return 0; @@ -5096,7 +5214,8 @@ merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0, anymore, or NULL_TREE if no folding opportunity is found. */ static tree -fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) +fold_cond_expr_with_comparison (location_t loc, tree type, + tree arg0, tree arg1, tree arg2) { enum tree_code comp_code = TREE_CODE (arg0); tree arg00 = TREE_OPERAND (arg0, 0); @@ -5144,11 +5263,13 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) { case EQ_EXPR: case UNEQ_EXPR: - tem = fold_convert (arg1_type, arg1); - return pedantic_non_lvalue (fold_convert (type, negate_expr (tem))); + tem = fold_convert_loc (loc, arg1_type, arg1); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, + negate_expr (tem))); case NE_EXPR: case LTGT_EXPR: - return pedantic_non_lvalue (fold_convert (type, arg1)); + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); case UNGE_EXPR: case UNGT_EXPR: if (flag_trapping_math) @@ -5157,10 +5278,10 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) case GE_EXPR: case GT_EXPR: if (TYPE_UNSIGNED (TREE_TYPE (arg1))) - arg1 = fold_convert (signed_type_for + arg1 = fold_convert_loc (loc, signed_type_for (TREE_TYPE (arg1)), arg1); - tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1); - return pedantic_non_lvalue (fold_convert (type, tem)); + tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); case UNLE_EXPR: case UNLT_EXPR: if (flag_trapping_math) @@ -5168,10 +5289,10 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) case LE_EXPR: case LT_EXPR: if 
(TYPE_UNSIGNED (TREE_TYPE (arg1))) - arg1 = fold_convert (signed_type_for + arg1 = fold_convert_loc (loc, signed_type_for (TREE_TYPE (arg1)), arg1); - tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1); - return negate_expr (fold_convert (type, tem)); + tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1); + return negate_expr (fold_convert_loc (loc, type, tem)); default: gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); break; @@ -5186,7 +5307,7 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) && integer_zerop (arg01) && integer_zerop (arg2)) { if (comp_code == NE_EXPR) - return pedantic_non_lvalue (fold_convert (type, arg1)); + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); else if (comp_code == EQ_EXPR) return build_int_cst (type, 0); } @@ -5242,9 +5363,9 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) switch (comp_code) { case EQ_EXPR: - return pedantic_non_lvalue (fold_convert (type, arg2)); + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2)); case NE_EXPR: - return pedantic_non_lvalue (fold_convert (type, arg1)); + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); case LE_EXPR: case LT_EXPR: case UNLE_EXPR: @@ -5255,12 +5376,14 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) corresponding COND_EXPR. */ if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) { - comp_op0 = fold_convert (comp_type, comp_op0); - comp_op1 = fold_convert (comp_type, comp_op1); + comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); + comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR) - ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1) - : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0); - return pedantic_non_lvalue (fold_convert (type, tem)); + ? 
fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1) + : fold_build2_loc (loc, MIN_EXPR, comp_type, + comp_op1, comp_op0); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, tem)); } break; case GE_EXPR: @@ -5269,21 +5392,25 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) case UNGT_EXPR: if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) { - comp_op0 = fold_convert (comp_type, comp_op0); - comp_op1 = fold_convert (comp_type, comp_op1); + comp_op0 = fold_convert_loc (loc, comp_type, comp_op0); + comp_op1 = fold_convert_loc (loc, comp_type, comp_op1); tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR) - ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1) - : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0); - return pedantic_non_lvalue (fold_convert (type, tem)); + ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1) + : fold_build2_loc (loc, MAX_EXPR, comp_type, + comp_op1, comp_op0); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, tem)); } break; case UNEQ_EXPR: if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) - return pedantic_non_lvalue (fold_convert (type, arg2)); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, arg2)); break; case LTGT_EXPR: if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))) - return pedantic_non_lvalue (fold_convert (type, arg1)); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, arg1)); break; default: gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison); @@ -5306,8 +5433,8 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) if (TREE_CODE (arg1) == INTEGER_CST) break; /* We can replace A with C1 in this case. 
*/ - arg1 = fold_convert (type, arg01); - return fold_build3 (COND_EXPR, type, arg0, arg1, arg2); + arg1 = fold_convert_loc (loc, type, arg01); + return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2); case LT_EXPR: /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for @@ -5319,9 +5446,11 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) build_int_cst (type, 1), 0), OEP_ONLY_CONST)) { - tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00, - fold_convert (TREE_TYPE (arg00), arg2)); - return pedantic_non_lvalue (fold_convert (type, tem)); + tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, + fold_convert_loc (loc, TREE_TYPE (arg00), + arg2)); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, tem)); } break; @@ -5335,9 +5464,11 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) build_int_cst (type, 1), 0), OEP_ONLY_CONST)) { - tem = fold_build2 (MIN_EXPR, TREE_TYPE (arg00), arg00, - fold_convert (TREE_TYPE (arg00), arg2)); - return pedantic_non_lvalue (fold_convert (type, tem)); + tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, + fold_convert_loc (loc, TREE_TYPE (arg00), + arg2)); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, tem)); } break; @@ -5351,9 +5482,10 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) build_int_cst (type, 1), 0), OEP_ONLY_CONST)) { - tem = fold_build2 (MAX_EXPR, TREE_TYPE (arg00), arg00, - fold_convert (TREE_TYPE (arg00), arg2)); - return pedantic_non_lvalue (fold_convert (type, tem)); + tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, + fold_convert_loc (loc, TREE_TYPE (arg00), + arg2)); + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); } break; @@ -5366,9 +5498,10 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) build_int_cst (type, 1), 0), OEP_ONLY_CONST)) { - tem = fold_build2 (MAX_EXPR, 
TREE_TYPE (arg00), arg00, - fold_convert (TREE_TYPE (arg00), arg2)); - return pedantic_non_lvalue (fold_convert (type, tem)); + tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, + fold_convert_loc (loc, TREE_TYPE (arg00), + arg2)); + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); } break; case NE_EXPR: @@ -5392,7 +5525,8 @@ fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2) merge it into some range test. Return the new tree if so. */ static tree -fold_range_test (enum tree_code code, tree type, tree op0, tree op1) +fold_range_test (location_t loc, enum tree_code code, tree type, + tree op0, tree op1) { int or_op = (code == TRUTH_ORIF_EXPR || code == TRUTH_OR_EXPR); @@ -5417,14 +5551,14 @@ fold_range_test (enum tree_code code, tree type, tree op0, tree op1) if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0)) && merge_ranges (&in_p, &low, &high, in0_p, low0, high0, in1_p, low1, high1) - && 0 != (tem = (build_range_check (type, + && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type, lhs != 0 ? lhs : rhs != 0 ? rhs : integer_zero_node, in_p, low, high)))) { if (strict_overflow_p) fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); - return or_op ? invert_truthvalue (tem) : tem; + return or_op ? invert_truthvalue_loc (loc, tem) : tem; } /* On machines where the branch cost is expensive, if this is a @@ -5440,28 +5574,34 @@ fold_range_test (enum tree_code code, tree type, tree op0, tree op1) unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in which cases we can't do this. */ if (simple_operand_p (lhs)) - return build2 (code == TRUTH_ANDIF_EXPR - ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, - type, op0, op1); + { + tem = build2 (code == TRUTH_ANDIF_EXPR + ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, + type, op0, op1); + SET_EXPR_LOCATION (tem, loc); + return tem; + } else if (lang_hooks.decls.global_bindings_p () == 0 && ! 
CONTAINS_PLACEHOLDER_P (lhs)) { tree common = save_expr (lhs); - if (0 != (lhs = build_range_check (type, common, + if (0 != (lhs = build_range_check (loc, type, common, or_op ? ! in0_p : in0_p, low0, high0)) - && (0 != (rhs = build_range_check (type, common, + && (0 != (rhs = build_range_check (loc, type, common, or_op ? ! in1_p : in1_p, low1, high1)))) { if (strict_overflow_p) fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); - return build2 (code == TRUTH_ANDIF_EXPR - ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, - type, lhs, rhs); + tem = build2 (code == TRUTH_ANDIF_EXPR + ? TRUTH_AND_EXPR : TRUTH_OR_EXPR, + type, lhs, rhs); + SET_EXPR_LOCATION (tem, loc); + return tem; } } } @@ -5503,12 +5643,14 @@ unextend (tree c, int p, int unsignedp, tree mask) temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0); if (mask != 0) temp = const_binop (BIT_AND_EXPR, temp, - fold_convert (TREE_TYPE (c), mask), 0); + fold_convert (TREE_TYPE (c), mask), + 0); /* If necessary, convert the type back to match the type of C. */ if (TYPE_UNSIGNED (type)) temp = fold_convert (type, temp); - return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0)); + return fold_convert (type, + const_binop (BIT_XOR_EXPR, c, temp, 0)); } /* Find ways of folding logical expressions of LHS and RHS: @@ -5536,7 +5678,8 @@ unextend (tree c, int p, int unsignedp, tree mask) We return the simplified tree or 0 if no optimization is possible. */ static tree -fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) +fold_truthop (location_t loc, enum tree_code code, tree truth_type, + tree lhs, tree rhs) { /* If this is the "or" of two comparisons, we can do something if the comparisons are NE_EXPR. 
If this is the "and", we can do something @@ -5608,7 +5751,7 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) if (operand_equal_p (ll_arg, rl_arg, 0) && operand_equal_p (lr_arg, rr_arg, 0)) { - result = combine_comparisons (code, lcode, rcode, + result = combine_comparisons (loc, code, lcode, rcode, truth_type, ll_arg, lr_arg); if (result) return result; @@ -5616,7 +5759,7 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) else if (operand_equal_p (ll_arg, rr_arg, 0) && operand_equal_p (lr_arg, rl_arg, 0)) { - result = combine_comparisons (code, lcode, + result = combine_comparisons (loc, code, lcode, swap_tree_comparison (rcode), truth_type, ll_arg, lr_arg); if (result) @@ -5645,10 +5788,13 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) && rcode == NE_EXPR && integer_zerop (rr_arg) && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) - return build2 (NE_EXPR, truth_type, - build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), - ll_arg, rl_arg), - build_int_cst (TREE_TYPE (ll_arg), 0)); + { + result = build2 (NE_EXPR, truth_type, + build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), + ll_arg, rl_arg), + build_int_cst (TREE_TYPE (ll_arg), 0)); + goto fold_truthop_exit; + } /* Convert (a == 0) && (b == 0) into (a | b) == 0. 
*/ if (code == TRUTH_AND_EXPR @@ -5656,15 +5802,21 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) && rcode == EQ_EXPR && integer_zerop (rr_arg) && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg) && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg))) - return build2 (EQ_EXPR, truth_type, - build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), - ll_arg, rl_arg), - build_int_cst (TREE_TYPE (ll_arg), 0)); + { + result = build2 (EQ_EXPR, truth_type, + build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg), + ll_arg, rl_arg), + build_int_cst (TREE_TYPE (ll_arg), 0)); + goto fold_truthop_exit; + } if (LOGICAL_OP_NON_SHORT_CIRCUIT) { if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs) - return build2 (code, truth_type, lhs, rhs); + { + result = build2 (code, truth_type, lhs, rhs); + goto fold_truthop_exit; + } return NULL_TREE; } } @@ -5677,19 +5829,19 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) return 0; volatilep = 0; - ll_inner = decode_field_reference (ll_arg, + ll_inner = decode_field_reference (loc, ll_arg, &ll_bitsize, &ll_bitpos, &ll_mode, &ll_unsignedp, &volatilep, &ll_mask, &ll_and_mask); - lr_inner = decode_field_reference (lr_arg, + lr_inner = decode_field_reference (loc, lr_arg, &lr_bitsize, &lr_bitpos, &lr_mode, &lr_unsignedp, &volatilep, &lr_mask, &lr_and_mask); - rl_inner = decode_field_reference (rl_arg, + rl_inner = decode_field_reference (loc, rl_arg, &rl_bitsize, &rl_bitpos, &rl_mode, &rl_unsignedp, &volatilep, &rl_mask, &rl_and_mask); - rr_inner = decode_field_reference (rr_arg, + rr_inner = decode_field_reference (loc, rr_arg, &rr_bitsize, &rr_bitpos, &rr_mode, &rr_unsignedp, &volatilep, &rr_mask, &rr_and_mask); @@ -5764,18 +5916,18 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize; } - ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask), + ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask), size_int (xll_bitpos), 0); 
- rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask), + rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask), size_int (xrl_bitpos), 0); if (l_const) { - l_const = fold_convert (lntype, l_const); + l_const = fold_convert_loc (loc, lntype, l_const); l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask); l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0); if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const, - fold_build1 (BIT_NOT_EXPR, + fold_build1_loc (loc, BIT_NOT_EXPR, lntype, ll_mask), 0))) { @@ -5786,11 +5938,11 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) } if (r_const) { - r_const = fold_convert (lntype, r_const); + r_const = fold_convert_loc (loc, lntype, r_const); r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask); r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0); if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const, - fold_build1 (BIT_NOT_EXPR, + fold_build1_loc (loc, BIT_NOT_EXPR, lntype, rl_mask), 0))) { @@ -5831,9 +5983,11 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize; } - lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask), + lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, + rntype, lr_mask), size_int (xlr_bitpos), 0); - rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask), + rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, + rntype, rr_mask), size_int (xrr_bitpos), 0); /* Make a mask that corresponds to both fields being compared. 
@@ -5845,17 +5999,18 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0); if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) { - lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos, + lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, ll_unsignedp || rl_unsignedp); if (! all_ones_mask_p (ll_mask, lnbitsize)) lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask); - rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos, + rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos, lr_unsignedp || rr_unsignedp); if (! all_ones_mask_p (lr_mask, rnbitsize)) rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask); - return build2 (wanted_code, truth_type, lhs, rhs); + result = build2 (wanted_code, truth_type, lhs, rhs); + goto fold_truthop_exit; } /* There is still another way we can do something: If both pairs of @@ -5872,9 +6027,11 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) { tree type; - lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize, + lhs = make_bit_field_ref (loc, ll_inner, lntype, + ll_bitsize + rl_bitsize, MIN (ll_bitpos, rl_bitpos), ll_unsignedp); - rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize, + rhs = make_bit_field_ref (loc, lr_inner, rntype, + lr_bitsize + rr_bitsize, MIN (lr_bitpos, rr_bitpos), lr_unsignedp); ll_mask = const_binop (RSHIFT_EXPR, ll_mask, @@ -5888,14 +6045,14 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) { if (lnbitsize > rnbitsize) { - lhs = fold_convert (rntype, lhs); - ll_mask = fold_convert (rntype, ll_mask); + lhs = fold_convert_loc (loc, rntype, lhs); + ll_mask = fold_convert_loc (loc, rntype, ll_mask); type = rntype; } else if (lnbitsize < rnbitsize) { - rhs = fold_convert (lntype, rhs); - lr_mask = fold_convert (lntype, lr_mask); + rhs = fold_convert_loc (loc, lntype, rhs); + lr_mask = fold_convert_loc (loc, 
lntype, lr_mask); type = lntype; } } @@ -5906,7 +6063,8 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize)) rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask); - return build2 (wanted_code, truth_type, lhs, rhs); + result = build2 (wanted_code, truth_type, lhs, rhs); + goto fold_truthop_exit; } return 0; @@ -5937,22 +6095,30 @@ fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs) reference we will make. Unless the mask is all ones the width of that field, perform the mask operation. Then compare with the merged constant. */ - result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos, + result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, ll_unsignedp || rl_unsignedp); ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0); if (! all_ones_mask_p (ll_mask, lnbitsize)) - result = build2 (BIT_AND_EXPR, lntype, result, ll_mask); + { + result = build2 (BIT_AND_EXPR, lntype, result, ll_mask); + SET_EXPR_LOCATION (result, loc); + } - return build2 (wanted_code, truth_type, result, - const_binop (BIT_IOR_EXPR, l_const, r_const, 0)); + result = build2 (wanted_code, truth_type, result, + const_binop (BIT_IOR_EXPR, l_const, r_const, 0)); + + fold_truthop_exit: + SET_EXPR_LOCATION (result, loc); + return result; } /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant. 
*/ static tree -optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1) +optimize_minmax_comparison (location_t loc, enum tree_code code, tree type, + tree op0, tree op1) { tree arg0 = op0; enum tree_code op_code; @@ -5965,7 +6131,7 @@ optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1) op_code = TREE_CODE (arg0); minmax_const = TREE_OPERAND (arg0, 1); - comp_const = fold_convert (TREE_TYPE (arg0), op1); + comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1); consts_equal = tree_int_cst_equal (minmax_const, comp_const); consts_lt = tree_int_cst_lt (minmax_const, comp_const); inner = TREE_OPERAND (arg0, 0); @@ -5985,64 +6151,66 @@ optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1) { case NE_EXPR: case LT_EXPR: case LE_EXPR: { - tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false), - type, op0, op1); + tree tem + = optimize_minmax_comparison (loc, + invert_tree_comparison (code, false), + type, op0, op1); if (tem) - return invert_truthvalue (tem); + return invert_truthvalue_loc (loc, tem); return NULL_TREE; } case GE_EXPR: return - fold_build2 (TRUTH_ORIF_EXPR, type, + fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, optimize_minmax_comparison - (EQ_EXPR, type, arg0, comp_const), + (loc, EQ_EXPR, type, arg0, comp_const), optimize_minmax_comparison - (GT_EXPR, type, arg0, comp_const)); + (loc, GT_EXPR, type, arg0, comp_const)); case EQ_EXPR: if (op_code == MAX_EXPR && consts_equal) /* MAX (X, 0) == 0 -> X <= 0 */ - return fold_build2 (LE_EXPR, type, inner, comp_const); + return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const); else if (op_code == MAX_EXPR && consts_lt) /* MAX (X, 0) == 5 -> X == 5 */ - return fold_build2 (EQ_EXPR, type, inner, comp_const); + return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); else if (op_code == MAX_EXPR) /* MAX (X, 0) == -1 -> false */ - return omit_one_operand (type, integer_zero_node, inner); + return 
omit_one_operand_loc (loc, type, integer_zero_node, inner); else if (consts_equal) /* MIN (X, 0) == 0 -> X >= 0 */ - return fold_build2 (GE_EXPR, type, inner, comp_const); + return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const); else if (consts_lt) /* MIN (X, 0) == 5 -> false */ - return omit_one_operand (type, integer_zero_node, inner); + return omit_one_operand_loc (loc, type, integer_zero_node, inner); else /* MIN (X, 0) == -1 -> X == -1 */ - return fold_build2 (EQ_EXPR, type, inner, comp_const); + return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const); case GT_EXPR: if (op_code == MAX_EXPR && (consts_equal || consts_lt)) /* MAX (X, 0) > 0 -> X > 0 MAX (X, 0) > 5 -> X > 5 */ - return fold_build2 (GT_EXPR, type, inner, comp_const); + return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); else if (op_code == MAX_EXPR) /* MAX (X, 0) > -1 -> true */ - return omit_one_operand (type, integer_one_node, inner); + return omit_one_operand_loc (loc, type, integer_one_node, inner); else if (op_code == MIN_EXPR && (consts_equal || consts_lt)) /* MIN (X, 0) > 0 -> false MIN (X, 0) > 5 -> false */ - return omit_one_operand (type, integer_zero_node, inner); + return omit_one_operand_loc (loc, type, integer_zero_node, inner); else /* MIN (X, 0) > -1 -> X > -1 */ - return fold_build2 (GT_EXPR, type, inner, comp_const); + return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const); default: return NULL_TREE; @@ -6235,7 +6403,9 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, && !TREE_OVERFLOW (t1)) return extract_muldiv (build2 (tcode == LSHIFT_EXPR ? MULT_EXPR : FLOOR_DIV_EXPR, - ctype, fold_convert (ctype, op0), t1), + ctype, + fold_convert (ctype, op0), + t1), c, code, wide_type, strict_overflow_p); break; @@ -6365,7 +6535,8 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, /* If these are the same operation types, we can associate them assuming no overflow. 
*/ if (tcode == code - && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1), + && 0 != (t1 = int_const_binop (MULT_EXPR, + fold_convert (ctype, op1), fold_convert (ctype, c), 1)) && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1), TREE_INT_CST_HIGH (t1), @@ -6444,7 +6615,8 @@ constant_boolean_node (int value, tree type) possible. */ static tree -fold_binary_op_with_conditional_arg (enum tree_code code, +fold_binary_op_with_conditional_arg (location_t loc, + enum tree_code code, tree type, tree op0, tree op1, tree cond, tree arg, int cond_first_p) { @@ -6481,26 +6653,26 @@ fold_binary_op_with_conditional_arg (enum tree_code code, false_value = constant_boolean_node (false, testtype); } - arg = fold_convert (arg_type, arg); + arg = fold_convert_loc (loc, arg_type, arg); if (lhs == 0) { - true_value = fold_convert (cond_type, true_value); + true_value = fold_convert_loc (loc, cond_type, true_value); if (cond_first_p) - lhs = fold_build2 (code, type, true_value, arg); + lhs = fold_build2_loc (loc, code, type, true_value, arg); else - lhs = fold_build2 (code, type, arg, true_value); + lhs = fold_build2_loc (loc, code, type, arg, true_value); } if (rhs == 0) { - false_value = fold_convert (cond_type, false_value); + false_value = fold_convert_loc (loc, cond_type, false_value); if (cond_first_p) - rhs = fold_build2 (code, type, false_value, arg); + rhs = fold_build2_loc (loc, code, type, false_value, arg); else - rhs = fold_build2 (code, type, arg, false_value); + rhs = fold_build2_loc (loc, code, type, arg, false_value); } - test = fold_build3 (COND_EXPR, type, test, lhs, rhs); - return fold_convert (type, test); + test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs); + return fold_convert_loc (loc, type, test); } @@ -6554,7 +6726,8 @@ fold_real_zero_addition_p (const_tree type, const_tree addend, int negate) can be made, and NULL_TREE otherwise. 
*/ static tree -fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, +fold_mathfn_compare (location_t loc, + enum built_in_function fcode, enum tree_code code, tree type, tree arg0, tree arg1) { REAL_VALUE_TYPE c; @@ -6569,15 +6742,15 @@ fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, { /* sqrt(x) < y is always false, if y is negative. */ if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR) - return omit_one_operand (type, integer_zero_node, arg); + return omit_one_operand_loc (loc, type, integer_zero_node, arg); /* sqrt(x) > y is always true, if y is negative and we don't care about NaNs, i.e. negative values of x. */ if (code == NE_EXPR || !HONOR_NANS (mode)) - return omit_one_operand (type, integer_one_node, arg); + return omit_one_operand_loc (loc, type, integer_one_node, arg); /* sqrt(x) > y is the same as x >= 0, if y is negative. */ - return fold_build2 (GE_EXPR, type, arg, + return fold_build2_loc (loc, GE_EXPR, type, arg, build_real (TREE_TYPE (arg), dconst0)); } else if (code == GT_EXPR || code == GE_EXPR) @@ -6591,16 +6764,16 @@ fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, { /* sqrt(x) > y is x == +Inf, when y is very large. */ if (HONOR_INFINITIES (mode)) - return fold_build2 (EQ_EXPR, type, arg, + return fold_build2_loc (loc, EQ_EXPR, type, arg, build_real (TREE_TYPE (arg), c2)); /* sqrt(x) > y is always false, when y is very large and we don't care about infinities. */ - return omit_one_operand (type, integer_zero_node, arg); + return omit_one_operand_loc (loc, type, integer_zero_node, arg); } /* sqrt(x) > c is the same as x > c*c. 
*/ - return fold_build2 (code, type, arg, + return fold_build2_loc (loc, code, type, arg, build_real (TREE_TYPE (arg), c2)); } else if (code == LT_EXPR || code == LE_EXPR) @@ -6615,18 +6788,18 @@ fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, /* sqrt(x) < y is always true, when y is a very large value and we don't care about NaNs or Infinities. */ if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode)) - return omit_one_operand (type, integer_one_node, arg); + return omit_one_operand_loc (loc, type, integer_one_node, arg); /* sqrt(x) < y is x != +Inf when y is very large and we don't care about NaNs. */ if (! HONOR_NANS (mode)) - return fold_build2 (NE_EXPR, type, arg, + return fold_build2_loc (loc, NE_EXPR, type, arg, build_real (TREE_TYPE (arg), c2)); /* sqrt(x) < y is x >= 0 when y is very large and we don't care about Infinities. */ if (! HONOR_INFINITIES (mode)) - return fold_build2 (GE_EXPR, type, arg, + return fold_build2_loc (loc, GE_EXPR, type, arg, build_real (TREE_TYPE (arg), dconst0)); /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */ @@ -6635,18 +6808,18 @@ fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, return NULL_TREE; arg = save_expr (arg); - return fold_build2 (TRUTH_ANDIF_EXPR, type, - fold_build2 (GE_EXPR, type, arg, + return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, + fold_build2_loc (loc, GE_EXPR, type, arg, build_real (TREE_TYPE (arg), dconst0)), - fold_build2 (NE_EXPR, type, arg, + fold_build2_loc (loc, NE_EXPR, type, arg, build_real (TREE_TYPE (arg), c2))); } /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */ if (! HONOR_NANS (mode)) - return fold_build2 (code, type, arg, + return fold_build2_loc (loc, code, type, arg, build_real (TREE_TYPE (arg), c2)); /* sqrt(x) < c is the same as x >= 0 && x < c*c. */ @@ -6654,11 +6827,11 @@ fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, && ! 
CONTAINS_PLACEHOLDER_P (arg)) { arg = save_expr (arg); - return fold_build2 (TRUTH_ANDIF_EXPR, type, - fold_build2 (GE_EXPR, type, arg, + return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, + fold_build2_loc (loc, GE_EXPR, type, arg, build_real (TREE_TYPE (arg), dconst0)), - fold_build2 (code, type, arg, + fold_build2_loc (loc, code, type, arg, build_real (TREE_TYPE (arg), c2))); } @@ -6679,7 +6852,8 @@ fold_mathfn_compare (enum built_in_function fcode, enum tree_code code, can be made, and NULL_TREE otherwise. */ static tree -fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1) +fold_inf_compare (location_t loc, enum tree_code code, tree type, + tree arg0, tree arg1) { enum machine_mode mode; REAL_VALUE_TYPE max; @@ -6699,19 +6873,19 @@ fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1) /* x > +Inf is always false, if with ignore sNANs. */ if (HONOR_SNANS (mode)) return NULL_TREE; - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); case LE_EXPR: /* x <= +Inf is always true, if we don't case about NaNs. */ if (! HONOR_NANS (mode)) - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); /* x <= +Inf is the same as x == x, i.e. isfinite(x). */ if (lang_hooks.decls.global_bindings_p () == 0 && ! CONTAINS_PLACEHOLDER_P (arg0)) { arg0 = save_expr (arg0); - return fold_build2 (EQ_EXPR, type, arg0, arg0); + return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0); } break; @@ -6719,25 +6893,25 @@ fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1) case GE_EXPR: /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */ real_maxval (&max, neg, mode); - return fold_build2 (neg ? LT_EXPR : GT_EXPR, type, + return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, arg0, build_real (TREE_TYPE (arg0), max)); case LT_EXPR: /* x < +Inf is always equal to x <= DBL_MAX. 
*/ real_maxval (&max, neg, mode); - return fold_build2 (neg ? GE_EXPR : LE_EXPR, type, + return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, arg0, build_real (TREE_TYPE (arg0), max)); case NE_EXPR: /* x != +Inf is always equal to !(x > DBL_MAX). */ real_maxval (&max, neg, mode); if (! HONOR_NANS (mode)) - return fold_build2 (neg ? GE_EXPR : LE_EXPR, type, + return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type, arg0, build_real (TREE_TYPE (arg0), max)); - temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type, + temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type, arg0, build_real (TREE_TYPE (arg0), max)); - return fold_build1 (TRUTH_NOT_EXPR, type, temp); + return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp); default: break; @@ -6758,7 +6932,8 @@ fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1) can be made, and NULL_TREE otherwise. */ static tree -fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1) +fold_div_compare (location_t loc, + enum tree_code code, tree type, tree arg0, tree arg1) { tree prod, tmp, hi, lo; tree arg00 = TREE_OPERAND (arg0, 0); @@ -6855,53 +7030,53 @@ fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1) { case EQ_EXPR: if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) - return omit_one_operand (type, integer_zero_node, arg00); + return omit_one_operand_loc (loc, type, integer_zero_node, arg00); if (TREE_OVERFLOW (hi)) - return fold_build2 (GE_EXPR, type, arg00, lo); + return fold_build2_loc (loc, GE_EXPR, type, arg00, lo); if (TREE_OVERFLOW (lo)) - return fold_build2 (LE_EXPR, type, arg00, hi); - return build_range_check (type, arg00, 1, lo, hi); + return fold_build2_loc (loc, LE_EXPR, type, arg00, hi); + return build_range_check (loc, type, arg00, 1, lo, hi); case NE_EXPR: if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi)) - return omit_one_operand (type, integer_one_node, arg00); + return omit_one_operand_loc (loc, type, integer_one_node, arg00); if (TREE_OVERFLOW 
(hi)) - return fold_build2 (LT_EXPR, type, arg00, lo); + return fold_build2_loc (loc, LT_EXPR, type, arg00, lo); if (TREE_OVERFLOW (lo)) - return fold_build2 (GT_EXPR, type, arg00, hi); - return build_range_check (type, arg00, 0, lo, hi); + return fold_build2_loc (loc, GT_EXPR, type, arg00, hi); + return build_range_check (loc, type, arg00, 0, lo, hi); case LT_EXPR: if (TREE_OVERFLOW (lo)) { tmp = neg_overflow ? integer_zero_node : integer_one_node; - return omit_one_operand (type, tmp, arg00); + return omit_one_operand_loc (loc, type, tmp, arg00); } - return fold_build2 (LT_EXPR, type, arg00, lo); + return fold_build2_loc (loc, LT_EXPR, type, arg00, lo); case LE_EXPR: if (TREE_OVERFLOW (hi)) { tmp = neg_overflow ? integer_zero_node : integer_one_node; - return omit_one_operand (type, tmp, arg00); + return omit_one_operand_loc (loc, type, tmp, arg00); } - return fold_build2 (LE_EXPR, type, arg00, hi); + return fold_build2_loc (loc, LE_EXPR, type, arg00, hi); case GT_EXPR: if (TREE_OVERFLOW (hi)) { tmp = neg_overflow ? integer_one_node : integer_zero_node; - return omit_one_operand (type, tmp, arg00); + return omit_one_operand_loc (loc, type, tmp, arg00); } - return fold_build2 (GT_EXPR, type, arg00, hi); + return fold_build2_loc (loc, GT_EXPR, type, arg00, hi); case GE_EXPR: if (TREE_OVERFLOW (lo)) { tmp = neg_overflow ? integer_one_node : integer_zero_node; - return omit_one_operand (type, tmp, arg00); + return omit_one_operand_loc (loc, type, tmp, arg00); } - return fold_build2 (GE_EXPR, type, arg00, lo); + return fold_build2_loc (loc, GE_EXPR, type, arg00, lo); default: break; @@ -6917,7 +7092,8 @@ fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1) result type. */ static tree -fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1, +fold_single_bit_test_into_sign_test (location_t loc, + enum tree_code code, tree arg0, tree arg1, tree result_type) { /* If this is testing a single bit, we can optimize the test. 
*/ @@ -6936,8 +7112,9 @@ fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1, == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00)))) { tree stype = signed_type_for (TREE_TYPE (arg00)); - return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, - result_type, fold_convert (stype, arg00), + return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, + result_type, + fold_convert_loc (loc, stype, arg00), build_int_cst (stype, 0)); } } @@ -6951,8 +7128,8 @@ fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1, NULL. TYPE is the desired result type. */ tree -fold_single_bit_test (enum tree_code code, tree arg0, tree arg1, - tree result_type) +fold_single_bit_test (location_t loc, enum tree_code code, + tree arg0, tree arg1, tree result_type) { /* If this is testing a single bit, we can optimize the test. */ if ((code == NE_EXPR || code == EQ_EXPR) @@ -6969,7 +7146,7 @@ fold_single_bit_test (enum tree_code code, tree arg0, tree arg1, /* First, see if we can fold the single bit test into a sign-bit test. */ - tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, + tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, result_type); if (tem) return tem; @@ -7004,7 +7181,7 @@ fold_single_bit_test (enum tree_code code, tree arg0, tree arg1, signed_type = lang_hooks.types.type_for_mode (operand_mode, 0); unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1); intermediate_type = ops_unsigned ? 
unsigned_type : signed_type; - inner = fold_convert (intermediate_type, inner); + inner = fold_convert_loc (loc, intermediate_type, inner); if (bitnum != 0) inner = build2 (RSHIFT_EXPR, intermediate_type, @@ -7013,13 +7190,13 @@ fold_single_bit_test (enum tree_code code, tree arg0, tree arg1, one = build_int_cst (intermediate_type, 1); if (code == EQ_EXPR) - inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one); + inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one); /* Put the AND last so it can combine with more things. */ inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one); /* Make sure to return the proper type. */ - inner = fold_convert (result_type, inner); + inner = fold_convert_loc (loc, result_type, inner); return inner; } @@ -7111,7 +7288,8 @@ tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder) ARG0 is extended to a wider type. */ static tree -fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1) +fold_widened_comparison (location_t loc, enum tree_code code, + tree type, tree arg0, tree arg1) { tree arg0_unw = get_unwidened (arg0, NULL_TREE); tree arg1_unw; @@ -7149,8 +7327,8 @@ fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1) && (TREE_CODE (shorter_type) == INTEGER_TYPE || TREE_CODE (shorter_type) == BOOLEAN_TYPE) && int_fits_type_p (arg1_unw, shorter_type)))) - return fold_build2 (code, type, arg0_unw, - fold_convert (shorter_type, arg1_unw)); + return fold_build2_loc (loc, code, type, arg0_unw, + fold_convert_loc (loc, shorter_type, arg1_unw)); if (TREE_CODE (arg1_unw) != INTEGER_CST || TREE_CODE (shorter_type) != INTEGER_TYPE @@ -7172,27 +7350,27 @@ fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1) { case EQ_EXPR: if (above || below) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); break; case NE_EXPR: if (above || below) - return 
omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); break; case LT_EXPR: case LE_EXPR: if (above) - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); else if (below) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); case GT_EXPR: case GE_EXPR: if (above) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); else if (below) - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); default: break; @@ -7205,7 +7383,7 @@ fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1) ARG0 just the signedness is changed. */ static tree -fold_sign_changed_comparison (enum tree_code code, tree type, +fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type, tree arg0, tree arg1) { tree arg0_inner; @@ -7246,20 +7424,21 @@ fold_sign_changed_comparison (enum tree_code code, tree type, TREE_INT_CST_HIGH (arg1), 0, TREE_OVERFLOW (arg1)); else - arg1 = fold_convert (inner_type, arg1); + arg1 = fold_convert_loc (loc, inner_type, arg1); - return fold_build2 (code, type, arg0_inner, arg1); + return fold_build2_loc (loc, code, type, arg0_inner, arg1); } /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is - step of the array. Reconstructs s and delta in the case of s * delta - being an integer constant (and thus already folded). - ADDR is the address. MULT is the multiplicative expression. - If the function succeeds, the new address expression is returned. Otherwise - NULL_TREE is returned. */ + step of the array. Reconstructs s and delta in the case of s * + delta being an integer constant (and thus already folded). ADDR is + the address. MULT is the multiplicative expression. 
If the + function succeeds, the new address expression is returned. + Otherwise NULL_TREE is returned. LOC is the location of the + resulting expression. */ static tree -try_move_mult_to_index (tree addr, tree op1) +try_move_mult_to_index (location_t loc, tree addr, tree op1) { tree s, delta, step; tree ref = TREE_OPERAND (addr, 0), pref; @@ -7346,10 +7525,10 @@ try_move_mult_to_index (tree addr, tree op1) || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST) continue; - tmp = fold_binary (PLUS_EXPR, itype, - fold_convert (itype, - TREE_OPERAND (ref, 1)), - fold_convert (itype, delta)); + tmp = fold_binary_loc (loc, PLUS_EXPR, itype, + fold_convert_loc (loc, itype, + TREE_OPERAND (ref, 1)), + fold_convert_loc (loc, itype, delta)); if (!tmp || TREE_CODE (tmp) != INTEGER_CST || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp)) @@ -7370,6 +7549,7 @@ try_move_mult_to_index (tree addr, tree op1) pref = TREE_OPERAND (addr, 0); ret = copy_node (pref); + SET_EXPR_LOCATION (ret, loc); pos = ret; while (pref != ref) @@ -7379,12 +7559,12 @@ try_move_mult_to_index (tree addr, tree op1) pos = TREE_OPERAND (pos, 0); } - TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype, - fold_convert (itype, - TREE_OPERAND (pos, 1)), - fold_convert (itype, delta)); + TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype, + fold_convert_loc (loc, itype, + TREE_OPERAND (pos, 1)), + fold_convert_loc (loc, itype, delta)); - return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret); + return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret); } @@ -7393,7 +7573,7 @@ try_move_mult_to_index (tree addr, tree op1) A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. 
*/ static tree -fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound) +fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound) { tree a, typea, type = TREE_TYPE (ineq), a1, diff, y; @@ -7428,24 +7608,25 @@ fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound) if (POINTER_TYPE_P (typea)) { /* Convert the pointer types into integer before taking the difference. */ - tree ta = fold_convert (ssizetype, a); - tree ta1 = fold_convert (ssizetype, a1); - diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta); + tree ta = fold_convert_loc (loc, ssizetype, a); + tree ta1 = fold_convert_loc (loc, ssizetype, a1); + diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta); } else - diff = fold_binary (MINUS_EXPR, typea, a1, a); + diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a); if (!diff || !integer_onep (diff)) return NULL_TREE; - return fold_build2 (GE_EXPR, type, a, y); + return fold_build2_loc (loc, GE_EXPR, type, a, y); } /* Fold a sum or difference of at least one multiplication. Returns the folded tree or NULL if no simplification could be made. */ static tree -fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1) +fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type, + tree arg0, tree arg1) { tree arg00, arg01, arg10, arg11; tree alt0 = NULL_TREE, alt1 = NULL_TREE, same; @@ -7543,7 +7724,7 @@ fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1) increased the number of multiplications necessary. 
*/ && TREE_CODE (arg10) != INTEGER_CST) { - alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00, + alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00, build_int_cst (TREE_TYPE (arg00), int01 / int11)); alt1 = arg10; @@ -7554,11 +7735,11 @@ fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1) } if (same) - return fold_build2 (MULT_EXPR, type, - fold_build2 (code, type, - fold_convert (type, alt0), - fold_convert (type, alt1)), - fold_convert (type, same)); + return fold_build2_loc (loc, MULT_EXPR, type, + fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, alt0), + fold_convert_loc (loc, type, alt1)), + fold_convert_loc (loc, type, same)); return NULL_TREE; } @@ -7991,7 +8172,7 @@ fold_view_convert_expr (tree type, tree expr) to avoid confusing the gimplify process. */ tree -build_fold_addr_expr_with_type (tree t, tree ptrtype) +build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype) { /* The size of the object is not relevant when talking about its address. */ if (TREE_CODE (t) == WITH_SIZE_EXPR) @@ -8004,17 +8185,23 @@ build_fold_addr_expr_with_type (tree t, tree ptrtype) t = TREE_OPERAND (t, 0); if (TREE_TYPE (t) != ptrtype) - t = build1 (NOP_EXPR, ptrtype, t); + { + t = build1 (NOP_EXPR, ptrtype, t); + SET_EXPR_LOCATION (t, loc); + } } else if (TREE_CODE (t) == VIEW_CONVERT_EXPR) { - t = build_fold_addr_expr (TREE_OPERAND (t, 0)); + t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0)); if (TREE_TYPE (t) != ptrtype) - t = fold_convert (ptrtype, t); + t = fold_convert_loc (loc, ptrtype, t); } else - t = build1 (ADDR_EXPR, ptrtype, t); + { + t = build1 (ADDR_EXPR, ptrtype, t); + SET_EXPR_LOCATION (t, loc); + } return t; } @@ -8022,11 +8209,11 @@ build_fold_addr_expr_with_type (tree t, tree ptrtype) /* Build an expression for the address of T. 
*/ tree -build_fold_addr_expr (tree t) +build_fold_addr_expr_loc (location_t loc, tree t) { tree ptrtype = build_pointer_type (TREE_TYPE (t)); - return build_fold_addr_expr_with_type (t, ptrtype); + return build_fold_addr_expr_with_type_loc (loc, t, ptrtype); } /* Fold a unary expression of code CODE and type TYPE with operand @@ -8034,7 +8221,7 @@ build_fold_addr_expr (tree t) Otherwise, return NULL_TREE. */ tree -fold_unary (enum tree_code code, tree type, tree op0) +fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) { tree tem; tree arg0; @@ -8074,20 +8261,22 @@ fold_unary (enum tree_code code, tree type, tree op0) { if (TREE_CODE (arg0) == COMPOUND_EXPR) return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), - fold_build1 (code, type, - fold_convert (TREE_TYPE (op0), - TREE_OPERAND (arg0, 1)))); + fold_build1_loc (loc, code, type, + fold_convert_loc (loc, TREE_TYPE (op0), + TREE_OPERAND (arg0, 1)))); else if (TREE_CODE (arg0) == COND_EXPR) { tree arg01 = TREE_OPERAND (arg0, 1); tree arg02 = TREE_OPERAND (arg0, 2); if (! VOID_TYPE_P (TREE_TYPE (arg01))) - arg01 = fold_build1 (code, type, - fold_convert (TREE_TYPE (op0), arg01)); + arg01 = fold_build1_loc (loc, code, type, + fold_convert_loc (loc, + TREE_TYPE (op0), arg01)); if (! 
VOID_TYPE_P (TREE_TYPE (arg02))) - arg02 = fold_build1 (code, type, - fold_convert (TREE_TYPE (op0), arg02)); - tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0), + arg02 = fold_build1_loc (loc, code, type, + fold_convert_loc (loc, + TREE_TYPE (op0), arg02)); + tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0), arg01, arg02); /* If this was a conversion, and all we did was to move into @@ -8113,13 +8302,16 @@ fold_unary (enum tree_code code, tree type, tree op0) (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0)))) && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD) || flag_syntax_only)) - tem = build1 (code, type, - build3 (COND_EXPR, - TREE_TYPE (TREE_OPERAND - (TREE_OPERAND (tem, 1), 0)), - TREE_OPERAND (tem, 0), - TREE_OPERAND (TREE_OPERAND (tem, 1), 0), - TREE_OPERAND (TREE_OPERAND (tem, 2), 0))); + { + tem = build1 (code, type, + build3 (COND_EXPR, + TREE_TYPE (TREE_OPERAND + (TREE_OPERAND (tem, 1), 0)), + TREE_OPERAND (tem, 0), + TREE_OPERAND (TREE_OPERAND (tem, 1), 0), + TREE_OPERAND (TREE_OPERAND (tem, 2), 0))); + SET_EXPR_LOCATION (tem, loc); + } return tem; } else if (COMPARISON_CLASS_P (arg0)) @@ -8131,10 +8323,10 @@ fold_unary (enum tree_code code, tree type, tree op0) return arg0; } else if (TREE_CODE (type) != INTEGER_TYPE) - return fold_build3 (COND_EXPR, type, arg0, - fold_build1 (code, type, + return fold_build3_loc (loc, COND_EXPR, type, arg0, + fold_build1_loc (loc, code, type, integer_one_node), - fold_build1 (code, type, + fold_build1_loc (loc, code, type, integer_zero_node)); } } @@ -8146,7 +8338,7 @@ fold_unary (enum tree_code code, tree type, tree op0) barriers can be removed. 
*/ if (CONSTANT_CLASS_P (op0) || TREE_CODE (op0) == PAREN_EXPR) - return fold_convert (type, op0); + return fold_convert_loc (loc, type, op0); return NULL_TREE; CASE_CONVERT: @@ -8158,7 +8350,7 @@ fold_unary (enum tree_code code, tree type, tree op0) /* If we have (type) (a CMP b) and type is an integral type, return new expression involving the new type. */ if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type)) - return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0), + return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1)); /* Handle cases of two conversions in a row. */ @@ -8193,7 +8385,7 @@ fold_unary (enum tree_code code, tree type, tree op0) && (((inter_int || inter_ptr) && final_int) || (inter_float && final_float)) && inter_prec >= final_prec) - return fold_build1 (code, type, TREE_OPERAND (op0, 0)); + return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); /* Likewise, if the intermediate and initial types are either both float or both integer, we don't need the middle conversion if the @@ -8211,14 +8403,14 @@ fold_unary (enum tree_code code, tree type, tree op0) && TYPE_MODE (type) == TYPE_MODE (inter_type)) && ! final_ptr && (! final_vec || inter_prec == inside_prec)) - return fold_build1 (code, type, TREE_OPERAND (op0, 0)); + return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); /* If we have a sign-extension of a zero-extended value, we can replace that by a single zero-extension. */ if (inside_int && inter_int && final_int && inside_prec < inter_prec && inter_prec < final_prec && inside_unsignedp && !inter_unsignedp) - return fold_build1 (code, type, TREE_OPERAND (op0, 0)); + return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); /* Two conversions in a row are not needed unless: - some conversion is floating-point (overstrict for now), or @@ -8243,7 +8435,7 @@ fold_unary (enum tree_code code, tree type, tree op0) && ! (final_ptr && inside_prec != inter_prec) && ! 
(final_prec != GET_MODE_BITSIZE (TYPE_MODE (type)) && TYPE_MODE (type) == TYPE_MODE (inter_type))) - return fold_build1 (code, type, TREE_OPERAND (op0, 0)); + return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0)); } /* Handle (T *)&A.B.C for A being of type T and B and C @@ -8266,7 +8458,8 @@ fold_unary (enum tree_code code, tree type, tree op0) if (! offset && bitpos == 0 && TYPE_MAIN_VARIANT (TREE_TYPE (type)) == TYPE_MAIN_VARIANT (TREE_TYPE (base))) - return fold_convert (type, build_fold_addr_expr (base)); + return fold_convert_loc (loc, type, + build_fold_addr_expr_loc (loc, base)); } if (TREE_CODE (op0) == MODIFY_EXPR @@ -8278,11 +8471,12 @@ fold_unary (enum tree_code code, tree type, tree op0) { /* Don't leave an assignment inside a conversion unless assigning a bitfield. */ - tem = fold_build1 (code, type, TREE_OPERAND (op0, 1)); + tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1)); /* First do the assignment, then return converted constant. */ tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem); TREE_NO_WARNING (tem) = 1; TREE_USED (tem) = 1; + SET_EXPR_LOCATION (tem, loc); return tem; } @@ -8322,8 +8516,8 @@ fold_unary (enum tree_code code, tree type, tree op0) == ZERO_EXTEND)) { tree uns = unsigned_type_for (TREE_TYPE (and0)); - and0 = fold_convert (uns, and0); - and1 = fold_convert (uns, and1); + and0 = fold_convert_loc (loc, uns, and0); + and1 = fold_convert_loc (loc, uns, and1); } #endif } @@ -8332,8 +8526,8 @@ fold_unary (enum tree_code code, tree type, tree op0) tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1), TREE_INT_CST_HIGH (and1), 0, TREE_OVERFLOW (and1)); - return fold_build2 (BIT_AND_EXPR, type, - fold_convert (type, and0), tem); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, and0), tem); } } @@ -8349,8 +8543,10 @@ fold_unary (enum tree_code code, tree type, tree op0) tree arg00 = TREE_OPERAND (arg0, 0); tree arg01 = TREE_OPERAND (arg0, 1); - return fold_build2 (TREE_CODE 
(arg0), type, fold_convert (type, arg00), - fold_convert (sizetype, arg01)); + return fold_build2_loc (loc, + TREE_CODE (arg0), type, + fold_convert_loc (loc, type, arg00), + fold_convert_loc (loc, sizetype, arg01)); } /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types @@ -8365,7 +8561,8 @@ fold_unary (enum tree_code code, tree type, tree op0) tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0); if (INTEGRAL_TYPE_P (TREE_TYPE (tem)) && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem))) - return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem)); + return fold_build1_loc (loc, BIT_NOT_EXPR, type, + fold_convert_loc (loc, type, tem)); } /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the @@ -8384,12 +8581,12 @@ fold_unary (enum tree_code code, tree type, tree op0) if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0))) { - tem = fold_build2 (MULT_EXPR, mult_type, - fold_convert (mult_type, - TREE_OPERAND (op0, 0)), - fold_convert (mult_type, - TREE_OPERAND (op0, 1))); - return fold_convert (type, tem); + tem = fold_build2_loc (loc, MULT_EXPR, mult_type, + fold_convert_loc (loc, mult_type, + TREE_OPERAND (op0, 0)), + fold_convert_loc (loc, mult_type, + TREE_OPERAND (op0, 1))); + return fold_convert_loc (loc, type, tem); } } @@ -8404,7 +8601,8 @@ fold_unary (enum tree_code code, tree type, tree op0) if (TREE_TYPE (op0) == type) return op0; if (TREE_CODE (op0) == VIEW_CONVERT_EXPR) - return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0)); + return fold_build1_loc (loc, VIEW_CONVERT_EXPR, + type, TREE_OPERAND (op0, 0)); /* For integral conversions with the same precision or pointer conversions use a NOP_EXPR instead. 
*/ @@ -8413,7 +8611,7 @@ fold_unary (enum tree_code code, tree type, tree op0) && (INTEGRAL_TYPE_P (TREE_TYPE (op0)) || POINTER_TYPE_P (TREE_TYPE (op0))) && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))) - return fold_convert (type, op0); + return fold_convert_loc (loc, type, op0); /* Strip inner integral conversions that do not change the precision. */ if (CONVERT_EXPR_P (op0) @@ -8423,30 +8621,32 @@ fold_unary (enum tree_code code, tree type, tree op0) || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))) && (TYPE_PRECISION (TREE_TYPE (op0)) == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0))))) - return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0)); + return fold_build1_loc (loc, VIEW_CONVERT_EXPR, + type, TREE_OPERAND (op0, 0)); return fold_view_convert_expr (type, op0); case NEGATE_EXPR: - tem = fold_negate_expr (arg0); + tem = fold_negate_expr (loc, arg0); if (tem) - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); return NULL_TREE; case ABS_EXPR: if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST) return fold_abs_const (arg0, type); else if (TREE_CODE (arg0) == NEGATE_EXPR) - return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)); + return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0)); /* Convert fabs((double)float) into (double)fabsf(float). */ else if (TREE_CODE (arg0) == NOP_EXPR && TREE_CODE (type) == REAL_TYPE) { tree targ0 = strip_float_extensions (arg0); if (targ0 != arg0) - return fold_convert (type, fold_build1 (ABS_EXPR, - TREE_TYPE (targ0), - targ0)); + return fold_convert_loc (loc, type, + fold_build1_loc (loc, ABS_EXPR, + TREE_TYPE (targ0), + targ0)); } /* ABS_EXPR> = ABS_EXPR even if flag_wrapv is on. 
*/ else if (TREE_CODE (arg0) == ABS_EXPR) @@ -8459,40 +8659,42 @@ fold_unary (enum tree_code code, tree type, tree op0) { tem = fold_strip_sign_ops (arg0); if (tem) - return fold_build1 (ABS_EXPR, type, fold_convert (type, tem)); + return fold_build1_loc (loc, ABS_EXPR, type, + fold_convert_loc (loc, type, tem)); } return NULL_TREE; case CONJ_EXPR: if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) - return fold_convert (type, arg0); + return fold_convert_loc (loc, type, arg0); if (TREE_CODE (arg0) == COMPLEX_EXPR) { tree itype = TREE_TYPE (type); - tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0)); - tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1)); - return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart)); + tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0)); + tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1)); + return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, + negate_expr (ipart)); } if (TREE_CODE (arg0) == COMPLEX_CST) { tree itype = TREE_TYPE (type); - tree rpart = fold_convert (itype, TREE_REALPART (arg0)); - tree ipart = fold_convert (itype, TREE_IMAGPART (arg0)); + tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0)); + tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0)); return build_complex (type, rpart, negate_expr (ipart)); } if (TREE_CODE (arg0) == CONJ_EXPR) - return fold_convert (type, TREE_OPERAND (arg0, 0)); + return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); return NULL_TREE; case BIT_NOT_EXPR: if (TREE_CODE (arg0) == INTEGER_CST) return fold_not_const (arg0, type); else if (TREE_CODE (arg0) == BIT_NOT_EXPR) - return fold_convert (type, TREE_OPERAND (arg0, 0)); + return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); /* Convert ~ (-A) to A - 1. 
*/ else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR) - return fold_build2 (MINUS_EXPR, type, - fold_convert (type, TREE_OPERAND (arg0, 0)), + return fold_build2_loc (loc, MINUS_EXPR, type, + fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)), build_int_cst (type, 1)); /* Convert ~ (A - 1) or ~ (A + -1) to -A. */ else if (INTEGRAL_TYPE_P (type) @@ -8500,21 +8702,24 @@ fold_unary (enum tree_code code, tree type, tree op0) && integer_onep (TREE_OPERAND (arg0, 1))) || (TREE_CODE (arg0) == PLUS_EXPR && integer_all_onesp (TREE_OPERAND (arg0, 1))))) - return fold_build1 (NEGATE_EXPR, type, - fold_convert (type, TREE_OPERAND (arg0, 0))); + return fold_build1_loc (loc, NEGATE_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0))); /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */ else if (TREE_CODE (arg0) == BIT_XOR_EXPR - && (tem = fold_unary (BIT_NOT_EXPR, type, - fold_convert (type, - TREE_OPERAND (arg0, 0))))) - return fold_build2 (BIT_XOR_EXPR, type, tem, - fold_convert (type, TREE_OPERAND (arg0, 1))); + && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0))))) + return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 1))); else if (TREE_CODE (arg0) == BIT_XOR_EXPR - && (tem = fold_unary (BIT_NOT_EXPR, type, - fold_convert (type, - TREE_OPERAND (arg0, 1))))) - return fold_build2 (BIT_XOR_EXPR, type, - fold_convert (type, TREE_OPERAND (arg0, 0)), tem); + && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 1))))) + return fold_build2_loc (loc, BIT_XOR_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), tem); /* Perform BIT_NOT_EXPR on each element individually. 
*/ else if (TREE_CODE (arg0) == VECTOR_CST) { @@ -8526,7 +8731,7 @@ fold_unary (enum tree_code code, tree type, tree op0) if (elements) { elem = TREE_VALUE (elements); - elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem); + elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem); if (elem == NULL_TREE) break; elements = TREE_CHAIN (elements); @@ -8544,40 +8749,41 @@ fold_unary (enum tree_code code, tree type, tree op0) case TRUTH_NOT_EXPR: /* The argument to invert_truthvalue must have Boolean type. */ if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) - arg0 = fold_convert (boolean_type_node, arg0); + arg0 = fold_convert_loc (loc, boolean_type_node, arg0); /* Note that the operand of this must be an int and its values must be 0 or 1. ("true" is a fixed value perhaps depending on the language, but we don't handle values other than 1 correctly yet.) */ - tem = fold_truth_not_expr (arg0); + tem = fold_truth_not_expr (loc, arg0); if (!tem) return NULL_TREE; - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); case REALPART_EXPR: if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) - return fold_convert (type, arg0); + return fold_convert_loc (loc, type, arg0); if (TREE_CODE (arg0) == COMPLEX_EXPR) - return omit_one_operand (type, TREE_OPERAND (arg0, 0), + return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); if (TREE_CODE (arg0) == COMPLEX_CST) - return fold_convert (type, TREE_REALPART (arg0)); + return fold_convert_loc (loc, type, TREE_REALPART (arg0)); if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) { tree itype = TREE_TYPE (TREE_TYPE (arg0)); - tem = fold_build2 (TREE_CODE (arg0), itype, - fold_build1 (REALPART_EXPR, itype, + tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, + fold_build1_loc (loc, REALPART_EXPR, itype, TREE_OPERAND (arg0, 0)), - fold_build1 (REALPART_EXPR, itype, + fold_build1_loc (loc, REALPART_EXPR, itype, TREE_OPERAND (arg0, 1))); - return 
fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); } if (TREE_CODE (arg0) == CONJ_EXPR) { tree itype = TREE_TYPE (TREE_TYPE (arg0)); - tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0)); - return fold_convert (type, tem); + tem = fold_build1_loc (loc, REALPART_EXPR, itype, + TREE_OPERAND (arg0, 0)); + return fold_convert_loc (loc, type, tem); } if (TREE_CODE (arg0) == CALL_EXPR) { @@ -8588,7 +8794,7 @@ fold_unary (enum tree_code code, tree type, tree op0) CASE_FLT_FN (BUILT_IN_CEXPI): fn = mathfn_built_in (type, BUILT_IN_COS); if (fn) - return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0)); + return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); break; default: @@ -8599,27 +8805,27 @@ fold_unary (enum tree_code code, tree type, tree op0) case IMAGPART_EXPR: if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE) - return fold_convert (type, integer_zero_node); + return fold_convert_loc (loc, type, integer_zero_node); if (TREE_CODE (arg0) == COMPLEX_EXPR) - return omit_one_operand (type, TREE_OPERAND (arg0, 1), + return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1), TREE_OPERAND (arg0, 0)); if (TREE_CODE (arg0) == COMPLEX_CST) - return fold_convert (type, TREE_IMAGPART (arg0)); + return fold_convert_loc (loc, type, TREE_IMAGPART (arg0)); if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) { tree itype = TREE_TYPE (TREE_TYPE (arg0)); - tem = fold_build2 (TREE_CODE (arg0), itype, - fold_build1 (IMAGPART_EXPR, itype, + tem = fold_build2_loc (loc, TREE_CODE (arg0), itype, + fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)), - fold_build1 (IMAGPART_EXPR, itype, + fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 1))); - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); } if (TREE_CODE (arg0) == CONJ_EXPR) { tree itype = TREE_TYPE (TREE_TYPE (arg0)); - tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)); - return fold_convert (type, 
negate_expr (tem)); + tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0)); + return fold_convert_loc (loc, type, negate_expr (tem)); } if (TREE_CODE (arg0) == CALL_EXPR) { @@ -8630,7 +8836,7 @@ fold_unary (enum tree_code code, tree type, tree op0) CASE_FLT_FN (BUILT_IN_CEXPI): fn = mathfn_built_in (type, BUILT_IN_SIN); if (fn) - return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0)); + return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0)); break; default: @@ -8650,9 +8856,10 @@ fold_unary (enum tree_code code, tree type, tree op0) have implementation defined behavior and retaining the TREE_OVERFLOW flag here would confuse later passes such as VRP. */ tree -fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0) +fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code, + tree type, tree op0) { - tree res = fold_unary (code, type, op0); + tree res = fold_unary_loc (loc, code, type, op0); if (res && TREE_CODE (res) == INTEGER_CST && TREE_CODE (op0) == INTEGER_CST @@ -8668,7 +8875,7 @@ fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0) return NULL_TREE. */ static tree -fold_minmax (enum tree_code code, tree type, tree op0, tree op1) +fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1) { enum tree_code compl_code; @@ -8682,25 +8889,25 @@ fold_minmax (enum tree_code code, tree type, tree op0, tree op1) /* MIN (MAX (a, b), b) == b. */ if (TREE_CODE (op0) == compl_code && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0)) - return omit_one_operand (type, op1, TREE_OPERAND (op0, 0)); + return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0)); /* MIN (MAX (b, a), b) == b. 
*/ if (TREE_CODE (op0) == compl_code && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0) && reorder_operands_p (TREE_OPERAND (op0, 1), op1)) - return omit_one_operand (type, op1, TREE_OPERAND (op0, 1)); + return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1)); /* MIN (a, MAX (a, b)) == a. */ if (TREE_CODE (op1) == compl_code && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0) && reorder_operands_p (op0, TREE_OPERAND (op1, 1))) - return omit_one_operand (type, op0, TREE_OPERAND (op1, 1)); + return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1)); /* MIN (a, MAX (b, a)) == a. */ if (TREE_CODE (op1) == compl_code && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0) && reorder_operands_p (op0, TREE_OPERAND (op1, 0))) - return omit_one_operand (type, op0, TREE_OPERAND (op1, 0)); + return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0)); return NULL_TREE; } @@ -8714,7 +8921,7 @@ fold_minmax (enum tree_code code, tree type, tree op0, tree op1) valid if signed overflow is undefined. */ static tree -maybe_canonicalize_comparison_1 (enum tree_code code, tree type, +maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type, tree arg0, tree arg1, bool *strict_overflow_p) { @@ -8807,13 +9014,13 @@ maybe_canonicalize_comparison_1 (enum tree_code code, tree type, t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR, cst0, build_int_cst (TREE_TYPE (cst0), 1), 0); if (code0 != INTEGER_CST) - t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t); + t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t); /* If swapping might yield to a more canonical form, do so. 
*/ if (swap) - return fold_build2 (swap_tree_comparison (code), type, arg1, t); + return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t); else - return fold_build2 (code, type, t, arg1); + return fold_build2_loc (loc, code, type, t, arg1); } /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined @@ -8823,7 +9030,7 @@ maybe_canonicalize_comparison_1 (enum tree_code code, tree type, Returns the canonicalized tree if changed, otherwise NULL_TREE. */ static tree -maybe_canonicalize_comparison (enum tree_code code, tree type, +maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type, tree arg0, tree arg1) { tree t; @@ -8833,7 +9040,7 @@ maybe_canonicalize_comparison (enum tree_code code, tree type, /* Try canonicalization by simplifying arg0. */ strict_overflow_p = false; - t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1, + t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1, &strict_overflow_p); if (t) { @@ -8846,7 +9053,7 @@ maybe_canonicalize_comparison (enum tree_code code, tree type, comparison. */ code = swap_tree_comparison (code); strict_overflow_p = false; - t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0, + t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0, &strict_overflow_p); if (t && strict_overflow_p) fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE); @@ -8918,7 +9125,8 @@ pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos) the folded comparison or NULL_TREE. */ static tree -fold_comparison (enum tree_code code, tree type, tree op0, tree op1) +fold_comparison (location_t loc, enum tree_code code, tree type, + tree op0, tree op1) { tree arg0, arg1, tem; @@ -8934,7 +9142,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) /* If one arg is a real or integer constant, put it last. 
*/ if (tree_swap_operands_p (arg0, arg1, true)) - return fold_build2 (swap_tree_comparison (code), type, op1, op0); + return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0); /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */ if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR) @@ -8951,7 +9159,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) int lhs_add; lhs_add = TREE_CODE (arg0) != PLUS_EXPR; - lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR, + lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR, TREE_TYPE (arg1), const2, const1); /* If the constant operation overflowed this can be @@ -8980,11 +9188,11 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) if (code2 == LT_EXPR || code2 == LE_EXPR || code2 == EQ_EXPR) - return omit_one_operand (type, boolean_false_node, variable); + return omit_one_operand_loc (loc, type, boolean_false_node, variable); else if (code2 == NE_EXPR || code2 == GE_EXPR || code2 == GT_EXPR) - return omit_one_operand (type, boolean_true_node, variable); + return omit_one_operand_loc (loc, type, boolean_true_node, variable); } if (TREE_CODE (lhs) == TREE_CODE (arg1) @@ -8995,7 +9203,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) "when changing X +- C1 cmp C2 to " "X cmp C1 +- C2"), WARN_STRICT_OVERFLOW_COMPARISON); - return fold_build2 (code, type, variable, lhs); + return fold_build2_loc (loc, code, type, variable, lhs); } } @@ -9116,11 +9324,13 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) if (offset0 == NULL_TREE) offset0 = build_int_cst (signed_size_type_node, 0); else - offset0 = fold_convert (signed_size_type_node, offset0); + offset0 = fold_convert_loc (loc, signed_size_type_node, + offset0); if (offset1 == NULL_TREE) offset1 = build_int_cst (signed_size_type_node, 0); else - offset1 = fold_convert (signed_size_type_node, offset1); + offset1 = fold_convert_loc (loc, 
signed_size_type_node, + offset1); if (code != EQ_EXPR && code != NE_EXPR @@ -9131,7 +9341,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) "P +- C2"), WARN_STRICT_OVERFLOW_COMPARISON); - return fold_build2 (code, type, offset0, offset1); + return fold_build2_loc (loc, code, type, offset0, offset1); } } /* For non-equal bases we can simplify if they are addresses @@ -9155,9 +9365,11 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) || CONSTANT_CLASS_P (base1))) { if (code == EQ_EXPR) - return omit_two_operands (type, boolean_false_node, arg0, arg1); + return omit_two_operands_loc (loc, type, boolean_false_node, + arg0, arg1); else if (code == NE_EXPR) - return omit_two_operands (type, boolean_true_node, arg0, arg1); + return omit_two_operands_loc (loc, type, boolean_true_node, + arg0, arg1); } /* For equal offsets we can simplify to a comparison of the base addresses. */ @@ -9171,10 +9383,10 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && operand_equal_p (offset0, offset1, 0)))) { if (indirect_base0) - base0 = build_fold_addr_expr (base0); + base0 = build_fold_addr_expr_loc (loc, base0); if (indirect_base1) - base1 = build_fold_addr_expr (base1); - return fold_build2 (code, type, base0, base1); + base1 = build_fold_addr_expr_loc (loc, base1); + return fold_build2_loc (loc, code, type, base0, base1); } } @@ -9208,9 +9420,10 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)) { fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); - return fold_build2 (code, type, + return fold_build2_loc (loc, code, type, variable1, - fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1), + fold_build2_loc (loc, + TREE_CODE (arg1), TREE_TYPE (arg1), variable2, cst)); } @@ -9221,8 +9434,8 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn 
(const1)) { fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON); - return fold_build2 (code, type, - fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0), + return fold_build2_loc (loc, code, type, + fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0), variable1, cst), variable2); } @@ -9254,10 +9467,10 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) if (tree_int_cst_sgn (const1) < 0) cmp_code = swap_tree_comparison (cmp_code); - return fold_build2 (cmp_code, type, variable1, const2); + return fold_build2_loc (loc, cmp_code, type, variable1, const2); } - tem = maybe_canonicalize_comparison (code, type, op0, op1); + tem = maybe_canonicalize_comparison (loc, code, type, op0, op1); if (tem) return tem; @@ -9272,13 +9485,14 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */ if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) - return fold_build2 (code, type, fold_convert (newtype, targ0), - fold_convert (newtype, targ1)); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, newtype, targ0), + fold_convert_loc (loc, newtype, targ1)); /* (-a) CMP (-b) -> b CMP a */ if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR) - return fold_build2 (code, type, TREE_OPERAND (arg1, 0), + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0), TREE_OPERAND (arg0, 0)); if (TREE_CODE (arg1) == REAL_CST) @@ -9288,7 +9502,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) /* (-a) CMP CST -> a swap(CMP) (-CST) */ if (TREE_CODE (arg0) == NEGATE_EXPR) - return fold_build2 (swap_tree_comparison (code), type, + return fold_build2_loc (loc, swap_tree_comparison (code), type, TREE_OPERAND (arg0, 0), build_real (TREE_TYPE (arg1), REAL_VALUE_NEGATE (cst))); @@ -9296,7 +9510,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) /* IEEE doesn't distinguish +0 and -0 in 
comparisons. */ /* a CMP (-0) -> a CMP 0 */ if (REAL_VALUE_MINUS_ZERO (cst)) - return fold_build2 (code, type, arg0, + return fold_build2_loc (loc, code, type, arg0, build_real (TREE_TYPE (arg1), dconst0)); /* x != NaN is always true, other ops are always false. */ @@ -9304,14 +9518,14 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))) { tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node; - return omit_one_operand (type, tem, arg0); + return omit_one_operand_loc (loc, type, tem, arg0); } /* Fold comparisons against infinity. */ if (REAL_VALUE_ISINF (cst) && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))) { - tem = fold_inf_compare (code, type, arg0, arg1); + tem = fold_inf_compare (loc, code, type, arg0, arg1); if (tem != NULL_TREE) return tem; } @@ -9330,7 +9544,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) ? MINUS_EXPR : PLUS_EXPR, arg1, TREE_OPERAND (arg0, 1), 0)) && !TREE_OVERFLOW (tem)) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); /* Likewise, we can simplify a comparison of a real constant with a MINUS_EXPR whose first operand is also a real constant, i.e. @@ -9343,7 +9557,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0), arg1, 0)) && !TREE_OVERFLOW (tem)) - return fold_build2 (swap_tree_comparison (code), type, + return fold_build2_loc (loc, swap_tree_comparison (code), type, TREE_OPERAND (arg0, 1), tem); /* Fold comparisons against built-in math functions. 
*/ @@ -9355,7 +9569,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) if (fcode != END_BUILTINS) { - tem = fold_mathfn_compare (fcode, code, type, arg0, arg1); + tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1); if (tem != NULL_TREE) return tem; } @@ -9368,12 +9582,12 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) /* If we are widening one operand of an integer comparison, see if the other operand is similarly being widened. Perhaps we can do the comparison in the narrower type. */ - tem = fold_widened_comparison (code, type, arg0, arg1); + tem = fold_widened_comparison (loc, code, type, arg0, arg1); if (tem) return tem; /* Or if we are changing signedness. */ - tem = fold_sign_changed_comparison (code, type, arg0, arg1); + tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1); if (tem) return tem; } @@ -9385,7 +9599,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) || TREE_CODE (arg0) == MAX_EXPR) && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) { - tem = optimize_minmax_comparison (code, type, op0, op1); + tem = optimize_minmax_comparison (loc, code, type, op0, op1); if (tem) return tem; } @@ -9407,7 +9621,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) if (! FLOAT_TYPE_P (TREE_TYPE (arg0)) || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))) return constant_boolean_node (1, type); - return fold_build2 (EQ_EXPR, type, arg0, arg1); + return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1); case NE_EXPR: /* For NE, we can only do this simplification if integer @@ -9461,18 +9675,18 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) was the same as ARG1. 
*/ tree high_result - = fold_build2 (code, type, - eval_subst (arg0, cval1, maxval, + = fold_build2_loc (loc, code, type, + eval_subst (loc, arg0, cval1, maxval, cval2, minval), arg1); tree equal_result - = fold_build2 (code, type, - eval_subst (arg0, cval1, maxval, + = fold_build2_loc (loc, code, type, + eval_subst (loc, arg0, cval1, maxval, cval2, maxval), arg1); tree low_result - = fold_build2 (code, type, - eval_subst (arg0, cval1, minval, + = fold_build2_loc (loc, code, type, + eval_subst (loc, arg0, cval1, minval, cval2, maxval), arg1); @@ -9491,7 +9705,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) { case 0: /* Always false. */ - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); case 1: code = LT_EXPR; break; @@ -9512,12 +9726,16 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) break; case 7: /* Always true. */ - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); } if (save_p) - return save_expr (build2 (code, type, cval1, cval2)); - return fold_build2 (code, type, cval1, cval2); + { + tem = save_expr (build2 (code, type, cval1, cval2)); + SET_EXPR_LOCATION (tem, loc); + return tem; + } + return fold_build2_loc (loc, code, type, cval1, cval2); } } } @@ -9532,7 +9750,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)) && !TREE_OVERFLOW (arg1)) { - tem = fold_div_compare (code, type, arg0, arg1); + tem = fold_div_compare (loc, code, type, arg0, arg1); if (tem != NULL_TREE) return tem; } @@ -9542,8 +9760,9 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == BIT_NOT_EXPR) { tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); - return fold_build2 (code, type, - fold_convert (cmp_type, TREE_OPERAND (arg1, 0)), + return fold_build2_loc (loc, code, type, + 
fold_convert_loc (loc, cmp_type, + TREE_OPERAND (arg1, 0)), TREE_OPERAND (arg0, 0)); } @@ -9552,10 +9771,10 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == INTEGER_CST) { tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0)); - return fold_build2 (swap_tree_comparison (code), type, + return fold_build2_loc (loc, swap_tree_comparison (code), type, TREE_OPERAND (arg0, 0), - fold_build1 (BIT_NOT_EXPR, cmp_type, - fold_convert (cmp_type, arg1))); + fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type, + fold_convert_loc (loc, cmp_type, arg1))); } return NULL_TREE; @@ -9567,7 +9786,7 @@ fold_comparison (enum tree_code code, tree type, tree op0, tree op1) argument EXPR represents the expression "z" of type TYPE. */ static tree -fold_mult_zconjz (tree type, tree expr) +fold_mult_zconjz (location_t loc, tree type, tree expr) { tree itype = TREE_TYPE (type); tree rpart, ipart, tem; @@ -9585,17 +9804,17 @@ fold_mult_zconjz (tree type, tree expr) else { expr = save_expr (expr); - rpart = fold_build1 (REALPART_EXPR, itype, expr); - ipart = fold_build1 (IMAGPART_EXPR, itype, expr); + rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr); + ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr); } rpart = save_expr (rpart); ipart = save_expr (ipart); - tem = fold_build2 (PLUS_EXPR, itype, - fold_build2 (MULT_EXPR, itype, rpart, rpart), - fold_build2 (MULT_EXPR, itype, ipart, ipart)); - return fold_build2 (COMPLEX_EXPR, type, tem, - fold_convert (itype, integer_zero_node)); + tem = fold_build2_loc (loc, PLUS_EXPR, itype, + fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart), + fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart)); + return fold_build2_loc (loc, COMPLEX_EXPR, type, tem, + fold_convert_loc (loc, itype, integer_zero_node)); } @@ -9698,11 +9917,13 @@ get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue, /* Fold a binary expression of code CODE and type TYPE with operands - OP0 and OP1. 
Return the folded expression if folding is - successful. Otherwise, return NULL_TREE. */ + OP0 and OP1. LOC is the location of the resulting expression. + Return the folded expression if folding is successful. Otherwise, + return NULL_TREE. */ tree -fold_binary (enum tree_code code, tree type, tree op0, tree op1) +fold_binary_loc (location_t loc, + enum tree_code code, tree type, tree op0, tree op1) { enum tree_code_class kind = TREE_CODE_CLASS (code); tree arg0, arg1, tem; @@ -9765,7 +9986,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (tem != NULL_TREE) { if (TREE_TYPE (tem) != type) - tem = fold_convert (type, tem); + tem = fold_convert_loc (loc, type, tem); return tem; } } @@ -9774,7 +9995,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) to ARG1 to reduce the number of tests below. */ if (commutative_tree_code (code) && tree_swap_operands_p (arg0, arg1, true)) - return fold_build2 (code, type, op1, op0); + return fold_build2_loc (loc, code, type, op1, op0); /* ARG0 is the first operand of EXPR, and ARG1 is the second operand. @@ -9803,38 +10024,45 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || (TREE_CODE (arg0) == BIT_AND_EXPR && integer_onep (TREE_OPERAND (arg0, 1))))))) { - tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR + tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR : code == BIT_IOR_EXPR ? 
TRUTH_OR_EXPR : TRUTH_XOR_EXPR, boolean_type_node, - fold_convert (boolean_type_node, arg0), - fold_convert (boolean_type_node, arg1)); + fold_convert_loc (loc, boolean_type_node, arg0), + fold_convert_loc (loc, boolean_type_node, arg1)); if (code == EQ_EXPR) - tem = invert_truthvalue (tem); + tem = invert_truthvalue_loc (loc, tem); - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); } if (TREE_CODE_CLASS (code) == tcc_binary || TREE_CODE_CLASS (code) == tcc_comparison) { if (TREE_CODE (arg0) == COMPOUND_EXPR) - return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), - fold_build2 (code, type, - fold_convert (TREE_TYPE (op0), - TREE_OPERAND (arg0, 1)), - op1)); + { + tem = fold_build2_loc (loc, code, type, + fold_convert_loc (loc, TREE_TYPE (op0), + TREE_OPERAND (arg0, 1)), op1); + protected_set_expr_location (tem, loc); + tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem); + goto fold_binary_exit; + } if (TREE_CODE (arg1) == COMPOUND_EXPR && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) - return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), - fold_build2 (code, type, op0, - fold_convert (TREE_TYPE (op1), - TREE_OPERAND (arg1, 1)))); + { + tem = fold_build2_loc (loc, code, type, op0, + fold_convert_loc (loc, TREE_TYPE (op1), + TREE_OPERAND (arg1, 1))); + protected_set_expr_location (tem, loc); + tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem); + goto fold_binary_exit; + } if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0)) { - tem = fold_binary_op_with_conditional_arg (code, type, op0, op1, + tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1, arg0, arg1, /*cond_first_p=*/1); if (tem != NULL_TREE) @@ -9843,7 +10071,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1)) { - tem = fold_binary_op_with_conditional_arg (code, type, op0, op1, + tem = fold_binary_op_with_conditional_arg (loc, 
code, type, op0, op1, arg1, arg0, /*cond_first_p=*/0); if (tem != NULL_TREE) @@ -9856,51 +10084,57 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case POINTER_PLUS_EXPR: /* 0 +p index -> (type)index */ if (integer_zerop (arg0)) - return non_lvalue (fold_convert (type, arg1)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); /* PTR +p 0 -> PTR */ if (integer_zerop (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */ if (INTEGRAL_TYPE_P (TREE_TYPE (arg1)) && INTEGRAL_TYPE_P (TREE_TYPE (arg0))) - return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype, - fold_convert (sizetype, arg1), - fold_convert (sizetype, arg0))); + return fold_convert_loc (loc, type, + fold_build2_loc (loc, PLUS_EXPR, sizetype, + fold_convert_loc (loc, sizetype, + arg1), + fold_convert_loc (loc, sizetype, + arg0))); /* index +p PTR -> PTR +p index */ if (POINTER_TYPE_P (TREE_TYPE (arg1)) && INTEGRAL_TYPE_P (TREE_TYPE (arg0))) - return fold_build2 (POINTER_PLUS_EXPR, type, - fold_convert (type, arg1), - fold_convert (sizetype, arg0)); + return fold_build2_loc (loc, POINTER_PLUS_EXPR, type, + fold_convert_loc (loc, type, arg1), + fold_convert_loc (loc, sizetype, arg0)); /* (PTR +p B) +p A -> PTR +p (B + A) */ if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) { tree inner; - tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1)); + tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1)); tree arg00 = TREE_OPERAND (arg0, 0); - inner = fold_build2 (PLUS_EXPR, sizetype, - arg01, fold_convert (sizetype, arg1)); - return fold_convert (type, - fold_build2 (POINTER_PLUS_EXPR, - TREE_TYPE (arg00), arg00, inner)); + inner = fold_build2_loc (loc, PLUS_EXPR, sizetype, + arg01, fold_convert_loc (loc, sizetype, arg1)); + return fold_convert_loc (loc, type, + fold_build2_loc (loc, POINTER_PLUS_EXPR, + TREE_TYPE 
(arg00), + arg00, inner)); } /* PTR_CST +p CST -> CST1 */ if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) - return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1)); + return fold_build2_loc (loc, PLUS_EXPR, type, arg0, + fold_convert_loc (loc, type, arg1)); /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step of the array. Loop optimizer sometimes produce this type of expressions. */ if (TREE_CODE (arg0) == ADDR_EXPR) { - tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1)); + tem = try_move_mult_to_index (loc, arg0, + fold_convert_loc (loc, sizetype, arg1)); if (tem) - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); } return NULL_TREE; @@ -9908,23 +10142,26 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case PLUS_EXPR: /* A + (-B) -> A - B */ if (TREE_CODE (arg1) == NEGATE_EXPR) - return fold_build2 (MINUS_EXPR, type, - fold_convert (type, arg0), - fold_convert (type, TREE_OPERAND (arg1, 0))); + return fold_build2_loc (loc, MINUS_EXPR, type, + fold_convert_loc (loc, type, arg0), + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0))); /* (-A) + B -> B - A */ if (TREE_CODE (arg0) == NEGATE_EXPR && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1)) - return fold_build2 (MINUS_EXPR, type, - fold_convert (type, arg1), - fold_convert (type, TREE_OPERAND (arg0, 0))); + return fold_build2_loc (loc, MINUS_EXPR, type, + fold_convert_loc (loc, type, arg1), + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0))); if (INTEGRAL_TYPE_P (type)) { /* Convert ~A + 1 to -A. */ if (TREE_CODE (arg0) == BIT_NOT_EXPR && integer_onep (arg1)) - return fold_build1 (NEGATE_EXPR, type, - fold_convert (type, TREE_OPERAND (arg0, 0))); + return fold_build1_loc (loc, NEGATE_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0))); /* ~X + X is -1. 
*/ if (TREE_CODE (arg0) == BIT_NOT_EXPR @@ -9936,7 +10173,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (operand_equal_p (tem, arg1, 0)) { t1 = build_int_cst_type (type, -1); - return omit_one_operand (type, t1, arg1); + return omit_one_operand_loc (loc, type, t1, arg1); } } @@ -9950,7 +10187,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (operand_equal_p (arg0, tem, 0)) { t1 = build_int_cst_type (type, -1); - return omit_one_operand (type, t1, arg0); + return omit_one_operand_loc (loc, type, t1, arg0); } } @@ -9962,11 +10199,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1); tree cst1 = TREE_OPERAND (arg1, 1); - tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0); + tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1), + cst1, cst0); if (sum && integer_zerop (sum)) - return fold_convert (type, - fold_build2 (TRUNC_MOD_EXPR, - TREE_TYPE (arg0), arg0, cst0)); + return fold_convert_loc (loc, type, + fold_build2_loc (loc, TRUNC_MOD_EXPR, + TREE_TYPE (arg0), arg0, + cst0)); } } @@ -9978,7 +10217,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && !TYPE_SATURATING (type) && (!FLOAT_TYPE_P (type) || flag_associative_math)) { - tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1); + tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1); if (tem) return tem; } @@ -9986,7 +10225,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (! 
FLOAT_TYPE_P (type)) { if (integer_zerop (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* If we are adding two BIT_AND_EXPR's, both of which are and'ing with a constant, and the two constants have no bits in common, @@ -10029,30 +10268,33 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (parg0) == MULT_EXPR && TREE_CODE (parg1) != MULT_EXPR) - return fold_build2 (pcode, type, - fold_build2 (PLUS_EXPR, type, - fold_convert (type, parg0), - fold_convert (type, marg)), - fold_convert (type, parg1)); + return fold_build2_loc (loc, pcode, type, + fold_build2_loc (loc, PLUS_EXPR, type, + fold_convert_loc (loc, type, + parg0), + fold_convert_loc (loc, type, + marg)), + fold_convert_loc (loc, type, parg1)); if (TREE_CODE (parg0) != MULT_EXPR && TREE_CODE (parg1) == MULT_EXPR) - return fold_build2 (PLUS_EXPR, type, - fold_convert (type, parg0), - fold_build2 (pcode, type, - fold_convert (type, marg), - fold_convert (type, - parg1))); + return + fold_build2_loc (loc, PLUS_EXPR, type, + fold_convert_loc (loc, type, parg0), + fold_build2_loc (loc, pcode, type, + fold_convert_loc (loc, type, marg), + fold_convert_loc (loc, type, + parg1))); } } else { /* See if ARG1 is zero and X + ARG1 reduces to X. */ if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* Likewise if the operands are reversed. */ if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) - return non_lvalue (fold_convert (type, arg1)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); /* Convert X + -C into X - C. 
*/ if (TREE_CODE (arg1) == REAL_CST @@ -10060,9 +10302,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tem = fold_negate_const (arg1, type); if (!TREE_OVERFLOW (arg1) || !flag_trapping_math) - return fold_build2 (MINUS_EXPR, type, - fold_convert (type, arg0), - fold_convert (type, tem)); + return fold_build2_loc (loc, MINUS_EXPR, type, + fold_convert_loc (loc, type, arg0), + fold_convert_loc (loc, type, tem)); } /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y ) @@ -10073,21 +10315,21 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) { tree rtype = TREE_TYPE (TREE_TYPE (arg0)); - tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0); - tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0); + tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0); + tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0); bool arg0rz = false, arg0iz = false; if ((arg0r && (arg0rz = real_zerop (arg0r))) || (arg0i && (arg0iz = real_zerop (arg0i)))) { - tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1); - tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1); + tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1); + tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1); if (arg0rz && arg1i && real_zerop (arg1i)) { tree rp = arg1r ? arg1r : build1 (REALPART_EXPR, rtype, arg1); tree ip = arg0i ? arg0i : build1 (IMAGPART_EXPR, rtype, arg0); - return fold_build2 (COMPLEX_EXPR, type, rp, ip); + return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); } else if (arg0iz && arg1r && real_zerop (arg1r)) { @@ -10095,7 +10337,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) : build1 (REALPART_EXPR, rtype, arg0); tree ip = arg1i ? 
arg1i : build1 (IMAGPART_EXPR, rtype, arg1); - return fold_build2 (COMPLEX_EXPR, type, rp, ip); + return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); } } } @@ -10103,13 +10345,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (flag_unsafe_math_optimizations && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) - && (tem = distribute_real_division (code, type, arg0, arg1))) + && (tem = distribute_real_division (loc, code, type, arg0, arg1))) return tem; /* Convert x+x into x*2.0. */ if (operand_equal_p (arg0, arg1, 0) && SCALAR_FLOAT_TYPE_P (type)) - return fold_build2 (MULT_EXPR, type, arg0, + return fold_build2_loc (loc, MULT_EXPR, type, arg0, build_real (type, dconst2)); /* Convert a + (b*c + d*e) into (a + b*c) + d*e. @@ -10125,8 +10367,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (tree10) == MULT_EXPR) { tree tree0; - tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10); - return fold_build2 (PLUS_EXPR, type, tree0, tree11); + tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10); + return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11); } } /* Convert (b*c + d*e) + a into b*c + (d*e +a). 
@@ -10142,8 +10384,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (tree00) == MULT_EXPR) { tree tree0; - tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1); - return fold_build2 (PLUS_EXPR, type, tree00, tree0); + tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1); + return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0); } } } @@ -10183,12 +10425,15 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_INT_CST_HIGH (tree11) == 0 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11)) == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))) - return fold_convert (type, - build2 (LROTATE_EXPR, - TREE_TYPE (TREE_OPERAND (arg0, 0)), - TREE_OPERAND (arg0, 0), - code0 == LSHIFT_EXPR - ? tree01 : tree11)); + { + tem = build2 (LROTATE_EXPR, + TREE_TYPE (TREE_OPERAND (arg0, 0)), + TREE_OPERAND (arg0, 0), + code0 == LSHIFT_EXPR + ? tree01 : tree11); + SET_EXPR_LOCATION (tem, loc); + return fold_convert_loc (loc, type, tem); + } else if (code11 == MINUS_EXPR) { tree tree110, tree111; @@ -10202,12 +10447,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) (TREE_TYPE (TREE_OPERAND (arg0, 0)))) && operand_equal_p (tree01, tree111, 0)) - return fold_convert (type, - build2 ((code0 == LSHIFT_EXPR - ? LROTATE_EXPR - : RROTATE_EXPR), - TREE_TYPE (TREE_OPERAND (arg0, 0)), - TREE_OPERAND (arg0, 0), tree01)); + return + fold_convert_loc (loc, type, + build2 ((code0 == LSHIFT_EXPR + ? LROTATE_EXPR + : RROTATE_EXPR), + TREE_TYPE (TREE_OPERAND (arg0, 0)), + TREE_OPERAND (arg0, 0), tree01)); } else if (code01 == MINUS_EXPR) { @@ -10222,12 +10468,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) (TREE_TYPE (TREE_OPERAND (arg0, 0)))) && operand_equal_p (tree11, tree011, 0)) - return fold_convert (type, - build2 ((code0 != LSHIFT_EXPR - ? 
LROTATE_EXPR - : RROTATE_EXPR), - TREE_TYPE (TREE_OPERAND (arg0, 0)), - TREE_OPERAND (arg0, 0), tree11)); + return fold_convert_loc + (loc, type, + build2 ((code0 != LSHIFT_EXPR + ? LROTATE_EXPR + : RROTATE_EXPR), + TREE_TYPE (TREE_OPERAND (arg0, 0)), + TREE_OPERAND (arg0, 0), tree11)); } } } @@ -10286,10 +10533,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (code == MINUS_EXPR) code = PLUS_EXPR; - var0 = associate_trees (var0, var1, code, type); - con0 = associate_trees (con0, con1, code, type); - lit0 = associate_trees (lit0, lit1, code, type); - minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type); + var0 = associate_trees (loc, var0, var1, code, type); + con0 = associate_trees (loc, con0, con1, code, type); + lit0 = associate_trees (loc, lit0, lit1, code, type); + minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type); /* Preserve the MINUS_EXPR if the negative part of the literal is greater than the positive part. Otherwise, the multiplicative @@ -10302,13 +10549,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (minus_lit0) == INTEGER_CST && tree_int_cst_lt (lit0, minus_lit0)) { - minus_lit0 = associate_trees (minus_lit0, lit0, + minus_lit0 = associate_trees (loc, minus_lit0, lit0, MINUS_EXPR, type); lit0 = 0; } else { - lit0 = associate_trees (lit0, minus_lit0, + lit0 = associate_trees (loc, lit0, minus_lit0, MINUS_EXPR, type); minus_lit0 = 0; } @@ -10316,22 +10563,25 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (minus_lit0) { if (con0 == 0) - return fold_convert (type, - associate_trees (var0, minus_lit0, - MINUS_EXPR, type)); + return + fold_convert_loc (loc, type, + associate_trees (loc, var0, minus_lit0, + MINUS_EXPR, type)); else { - con0 = associate_trees (con0, minus_lit0, + con0 = associate_trees (loc, con0, minus_lit0, MINUS_EXPR, type); - return fold_convert (type, - associate_trees (var0, con0, - PLUS_EXPR, type)); + return + 
fold_convert_loc (loc, type, + associate_trees (loc, var0, con0, + PLUS_EXPR, type)); } } - con0 = associate_trees (con0, lit0, code, type); - return fold_convert (type, associate_trees (var0, con0, - code, type)); + con0 = associate_trees (loc, con0, lit0, code, type); + return + fold_convert_loc (loc, type, associate_trees (loc, var0, con0, + code, type)); } } @@ -10345,49 +10595,56 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg0) == POINTER_PLUS_EXPR && TREE_CODE (arg1) == POINTER_PLUS_EXPR) { - tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0)); - tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1)); - tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0)); - tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1)); - return fold_build2 (PLUS_EXPR, type, - fold_build2 (MINUS_EXPR, type, arg00, arg10), - fold_build2 (MINUS_EXPR, type, arg01, arg11)); + tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); + tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); + tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); + tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); + return fold_build2_loc (loc, PLUS_EXPR, type, + fold_build2_loc (loc, MINUS_EXPR, type, + arg00, arg10), + fold_build2_loc (loc, MINUS_EXPR, type, + arg01, arg11)); } /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. 
*/ else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) { - tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0)); - tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1)); - tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1)); + tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); + tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); + tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00, + fold_convert_loc (loc, type, arg1)); if (tmp) - return fold_build2 (PLUS_EXPR, type, tmp, arg01); + return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01); } } /* A - (-B) -> A + B */ if (TREE_CODE (arg1) == NEGATE_EXPR) - return fold_build2 (PLUS_EXPR, type, op0, - fold_convert (type, TREE_OPERAND (arg1, 0))); + return fold_build2_loc (loc, PLUS_EXPR, type, op0, + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0))); /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */ if (TREE_CODE (arg0) == NEGATE_EXPR && (FLOAT_TYPE_P (type) || INTEGRAL_TYPE_P (type)) && negate_expr_p (arg1) && reorder_operands_p (arg0, arg1)) - return fold_build2 (MINUS_EXPR, type, - fold_convert (type, negate_expr (arg1)), - fold_convert (type, TREE_OPERAND (arg0, 0))); + return fold_build2_loc (loc, MINUS_EXPR, type, + fold_convert_loc (loc, type, + negate_expr (arg1)), + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0))); /* Convert -A - 1 to ~A. */ if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR && integer_onep (arg1) && !TYPE_OVERFLOW_TRAPS (type)) - return fold_build1 (BIT_NOT_EXPR, type, - fold_convert (type, TREE_OPERAND (arg0, 0))); + return fold_build1_loc (loc, BIT_NOT_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0))); /* Convert -1 - A to ~A. */ if (INTEGRAL_TYPE_P (type) && integer_all_onesp (arg0)) - return fold_build1 (BIT_NOT_EXPR, type, op1); + return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1); /* X - (X / CST) * CST is X % CST. 
*/ @@ -10398,16 +10655,17 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0) && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1), TREE_OPERAND (arg1, 1), 0)) - return fold_convert (type, - fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0), - arg0, TREE_OPERAND (arg1, 1))); + return + fold_convert_loc (loc, type, + fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0), + arg0, TREE_OPERAND (arg1, 1))); if (! FLOAT_TYPE_P (type)) { if (integer_zerop (arg0)) - return negate_expr (fold_convert (type, arg1)); + return negate_expr (fold_convert_loc (loc, type, arg1)); if (integer_zerop (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* Fold A - (A & B) into ~B & A. */ if (!TREE_SIDE_EFFECTS (arg0) @@ -10415,17 +10673,21 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)) { - tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, arg10), - fold_convert (type, arg0)); + tree arg10 = fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, + type, arg10), + fold_convert_loc (loc, type, arg0)); } if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) { - tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, arg11), - fold_convert (type, arg0)); + tree arg11 = fold_convert_loc (loc, + type, TREE_OPERAND (arg1, 1)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, + type, arg11), + fold_convert_loc (loc, type, arg0)); } } @@ -10438,26 +10700,26 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree mask0 = TREE_OPERAND (arg0, 1); tree mask1 = TREE_OPERAND 
(arg1, 1); - tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0); + tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0); if (operand_equal_p (tem, mask1, 0)) { - tem = fold_build2 (BIT_XOR_EXPR, type, + tem = fold_build2_loc (loc, BIT_XOR_EXPR, type, TREE_OPERAND (arg0, 0), mask1); - return fold_build2 (MINUS_EXPR, type, tem, mask1); + return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1); } } } /* See if ARG1 is zero and X - ARG1 reduces to X. */ else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether ARG0 is zero and X + ARG0 reduces to X, since that would mean (-ARG1 + ARG0) reduces to -ARG1. */ else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0)) - return negate_expr (fold_convert (type, arg1)); + return negate_expr (fold_convert_loc (loc, type, arg1)); /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to __complex__ ( x, -y ). 
This is not the same for SNaNs or if @@ -10467,31 +10729,31 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) { tree rtype = TREE_TYPE (TREE_TYPE (arg0)); - tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0); - tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0); + tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0); + tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0); bool arg0rz = false, arg0iz = false; if ((arg0r && (arg0rz = real_zerop (arg0r))) || (arg0i && (arg0iz = real_zerop (arg0i)))) { - tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1); - tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1); + tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1); + tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1); if (arg0rz && arg1i && real_zerop (arg1i)) { - tree rp = fold_build1 (NEGATE_EXPR, rtype, + tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype, arg1r ? arg1r : build1 (REALPART_EXPR, rtype, arg1)); tree ip = arg0i ? arg0i : build1 (IMAGPART_EXPR, rtype, arg0); - return fold_build2 (COMPLEX_EXPR, type, rp, ip); + return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); } else if (arg0iz && arg1r && real_zerop (arg1r)) { tree rp = arg0r ? arg0r : build1 (REALPART_EXPR, rtype, arg0); - tree ip = fold_build1 (NEGATE_EXPR, rtype, + tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype, arg1i ? arg1i : build1 (IMAGPART_EXPR, rtype, arg1)); - return fold_build2 (COMPLEX_EXPR, type, rp, ip); + return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip); } } } @@ -10504,7 +10766,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type))) && operand_equal_p (arg0, arg1, 0)) - return fold_convert (type, integer_zero_node); + return fold_convert_loc (loc, type, integer_zero_node); /* A - B -> A + (-B) if B is easily negatable. 
*/ if (negate_expr_p (arg1) @@ -10513,9 +10775,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && (TREE_CODE (arg1) != REAL_CST || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))) || INTEGRAL_TYPE_P (type))) - return fold_build2 (PLUS_EXPR, type, - fold_convert (type, arg0), - fold_convert (type, negate_expr (arg1))); + return fold_build2_loc (loc, PLUS_EXPR, type, + fold_convert_loc (loc, type, arg0), + fold_convert_loc (loc, type, + negate_expr (arg1))); /* Try folding difference of addresses. */ { @@ -10538,12 +10801,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (operand_equal_p (TREE_OPERAND (aref0, 0), TREE_OPERAND (aref1, 0), 0)) { - tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1)); - tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1)); + tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1)); + tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1)); tree esz = array_ref_element_size (aref0); tree diff = build2 (MINUS_EXPR, type, op0, op1); - return fold_build2 (MULT_EXPR, type, diff, - fold_convert (type, esz)); + return fold_build2_loc (loc, MULT_EXPR, type, diff, + fold_convert_loc (loc, type, esz)); } } @@ -10552,7 +10815,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && flag_unsafe_math_optimizations && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR) && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR) - && (tem = distribute_real_division (code, type, arg0, arg1))) + && (tem = distribute_real_division (loc, code, type, arg0, arg1))) return tem; /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the @@ -10563,7 +10826,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && !TYPE_SATURATING (type) && (!FLOAT_TYPE_P (type) || flag_associative_math)) { - tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1); + tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1); if (tem) return 
tem; } @@ -10573,42 +10836,48 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case MULT_EXPR: /* (-A) * (-B) -> A * B */ if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) - return fold_build2 (MULT_EXPR, type, - fold_convert (type, TREE_OPERAND (arg0, 0)), - fold_convert (type, negate_expr (arg1))); + return fold_build2_loc (loc, MULT_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), + fold_convert_loc (loc, type, + negate_expr (arg1))); if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) - return fold_build2 (MULT_EXPR, type, - fold_convert (type, negate_expr (arg0)), - fold_convert (type, TREE_OPERAND (arg1, 0))); + return fold_build2_loc (loc, MULT_EXPR, type, + fold_convert_loc (loc, type, + negate_expr (arg0)), + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0))); if (! FLOAT_TYPE_P (type)) { if (integer_zerop (arg1)) - return omit_one_operand (type, arg1, arg0); + return omit_one_operand_loc (loc, type, arg1, arg0); if (integer_onep (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* Transform x * -1 into -x. Make sure to do the negation on the original operand with conversions not stripped because we can only strip non-sign-changing conversions. */ if (integer_all_onesp (arg1)) - return fold_convert (type, negate_expr (op0)); + return fold_convert_loc (loc, type, negate_expr (op0)); /* Transform x * -C into -x * C if x is easily negatable. 
*/ if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) == -1 && negate_expr_p (arg0) && (tem = negate_expr (arg1)) != arg1 && !TREE_OVERFLOW (tem)) - return fold_build2 (MULT_EXPR, type, - fold_convert (type, negate_expr (arg0)), tem); + return fold_build2_loc (loc, MULT_EXPR, type, + fold_convert_loc (loc, type, + negate_expr (arg0)), + tem); /* (a * (1 << b)) is (a << b) */ if (TREE_CODE (arg1) == LSHIFT_EXPR && integer_onep (TREE_OPERAND (arg1, 0))) - return fold_build2 (LSHIFT_EXPR, type, op0, + return fold_build2_loc (loc, LSHIFT_EXPR, type, op0, TREE_OPERAND (arg1, 1)); if (TREE_CODE (arg0) == LSHIFT_EXPR && integer_onep (TREE_OPERAND (arg0, 0))) - return fold_build2 (LSHIFT_EXPR, type, op1, + return fold_build2_loc (loc, LSHIFT_EXPR, type, op1, TREE_OPERAND (arg0, 1)); /* (A + A) * C -> A * 2 * C */ @@ -10616,10 +10885,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == INTEGER_CST && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1), 0)) - return fold_build2 (MULT_EXPR, type, - omit_one_operand (type, TREE_OPERAND (arg0, 0), + return fold_build2_loc (loc, MULT_EXPR, type, + omit_one_operand_loc (loc, type, + TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)), - fold_build2 (MULT_EXPR, type, + fold_build2_loc (loc, MULT_EXPR, type, build_int_cst (type, 2) , arg1)); strict_overflow_p = false; @@ -10632,16 +10902,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) "occur when simplifying " "multiplication"), WARN_STRICT_OVERFLOW_MISC); - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); } /* Optimize z * conj(z) for integer complex numbers. 
*/ if (TREE_CODE (arg0) == CONJ_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) - return fold_mult_zconjz (type, arg1); + return fold_mult_zconjz (loc, type, arg1); if (TREE_CODE (arg1) == CONJ_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) - return fold_mult_zconjz (type, arg0); + return fold_mult_zconjz (loc, type, arg0); } else { @@ -10652,21 +10922,21 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) && real_zerop (arg1)) - return omit_one_operand (type, arg1, arg0); + return omit_one_operand_loc (loc, type, arg1, arg0); /* In IEEE floating point, x*1 is not equivalent to x for snans. Likewise for complex arithmetic with signed zeros. */ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) && real_onep (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* Transform x * -1.0 into -x. */ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))) || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))) && real_minus_onep (arg1)) - return fold_convert (type, negate_expr (arg0)); + return fold_convert_loc (loc, type, negate_expr (arg0)); /* Convert (C1/X)*C2 into (C1*C2)/X. 
This transformation may change the result for floating point types due to rounding so it is applied @@ -10679,7 +10949,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0), arg1, 0); if (tem) - return fold_build2 (RDIV_EXPR, type, tem, + return fold_build2_loc (loc, RDIV_EXPR, type, tem, TREE_OPERAND (arg0, 1)); } @@ -10689,8 +10959,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree tem = fold_strip_sign_ops (arg0); if (tem != NULL_TREE) { - tem = fold_convert (type, tem); - return fold_build2 (MULT_EXPR, type, tem, tem); + tem = fold_convert_loc (loc, type, tem); + return fold_build2_loc (loc, MULT_EXPR, type, tem, tem); } } @@ -10705,15 +10975,17 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree rtype = TREE_TYPE (TREE_TYPE (arg0)); if (real_onep (TREE_IMAGPART (arg1))) - return fold_build2 (COMPLEX_EXPR, type, - negate_expr (fold_build1 (IMAGPART_EXPR, - rtype, arg0)), - fold_build1 (REALPART_EXPR, rtype, arg0)); + return + fold_build2_loc (loc, COMPLEX_EXPR, type, + negate_expr (fold_build1_loc (loc, IMAGPART_EXPR, + rtype, arg0)), + fold_build1_loc (loc, REALPART_EXPR, rtype, arg0)); else if (real_minus_onep (TREE_IMAGPART (arg1))) - return fold_build2 (COMPLEX_EXPR, type, - fold_build1 (IMAGPART_EXPR, rtype, arg0), - negate_expr (fold_build1 (REALPART_EXPR, - rtype, arg0))); + return + fold_build2_loc (loc, COMPLEX_EXPR, type, + fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0), + negate_expr (fold_build1_loc (loc, REALPART_EXPR, + rtype, arg0))); } /* Optimize z * conj(z) for floating point complex numbers. 
@@ -10722,11 +10994,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (flag_unsafe_math_optimizations && TREE_CODE (arg0) == CONJ_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) - return fold_mult_zconjz (type, arg1); + return fold_mult_zconjz (loc, type, arg1); if (flag_unsafe_math_optimizations && TREE_CODE (arg1) == CONJ_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) - return fold_mult_zconjz (type, arg0); + return fold_mult_zconjz (loc, type, arg0); if (flag_unsafe_math_optimizations) { @@ -10748,18 +11020,18 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* Optimize root(x)*root(y) as root(x*y). */ rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); - arg = fold_build2 (MULT_EXPR, type, arg00, arg10); - return build_call_expr (rootfn, 1, arg); + arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10); + return build_call_expr_loc (loc, rootfn, 1, arg); } /* Optimize expN(x)*expN(y) as expN(x+y). */ if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0)) { tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); - tree arg = fold_build2 (PLUS_EXPR, type, + tree arg = fold_build2_loc (loc, PLUS_EXPR, type, CALL_EXPR_ARG (arg0, 0), CALL_EXPR_ARG (arg1, 0)); - return build_call_expr (expfn, 1, arg); + return build_call_expr_loc (loc, expfn, 1, arg); } /* Optimizations of pow(...)*pow(...). */ @@ -10776,16 +11048,18 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (operand_equal_p (arg01, arg11, 0)) { tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); - tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10); - return build_call_expr (powfn, 2, arg, arg01); + tree arg = fold_build2_loc (loc, MULT_EXPR, type, + arg00, arg10); + return build_call_expr_loc (loc, powfn, 2, arg, arg01); } /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). 
*/ if (operand_equal_p (arg00, arg10, 0)) { tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0); - tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11); - return build_call_expr (powfn, 2, arg00, arg); + tree arg = fold_build2_loc (loc, PLUS_EXPR, type, + arg01, arg11); + return build_call_expr_loc (loc, powfn, 2, arg00, arg); } } @@ -10802,7 +11076,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree sinfn = mathfn_built_in (type, BUILT_IN_SIN); if (sinfn != NULL_TREE) - return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0)); + return build_call_expr_loc (loc, sinfn, 1, + CALL_EXPR_ARG (arg0, 0)); } /* Optimize x*pow(x,c) as pow(x,c+1). */ @@ -10823,7 +11098,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) c = TREE_REAL_CST (arg11); real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); arg = build_real (type, c); - return build_call_expr (powfn, 2, arg0, arg); + return build_call_expr_loc (loc, powfn, 2, arg0, arg); } } @@ -10845,7 +11120,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) c = TREE_REAL_CST (arg01); real_arithmetic (&c, PLUS_EXPR, &c, &dconst1); arg = build_real (type, c); - return build_call_expr (powfn, 2, arg1, arg); + return build_call_expr_loc (loc, powfn, 2, arg1, arg); } } @@ -10858,7 +11133,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (powfn) { tree arg = build_real (type, dconst2); - return build_call_expr (powfn, 2, arg0, arg); + return build_call_expr_loc (loc, powfn, 2, arg0, arg); } } } @@ -10868,28 +11143,28 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case BIT_IOR_EXPR: bit_ior: if (integer_all_onesp (arg1)) - return omit_one_operand (type, arg1, arg0); + return omit_one_operand_loc (loc, type, arg1, arg0); if (integer_zerop (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); if (operand_equal_p (arg0, arg1, 0)) - return non_lvalue (fold_convert (type, 
arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* ~X | X is -1. */ if (TREE_CODE (arg0) == BIT_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) { - t1 = fold_convert (type, integer_zero_node); - t1 = fold_unary (BIT_NOT_EXPR, type, t1); - return omit_one_operand (type, t1, arg1); + t1 = fold_convert_loc (loc, type, integer_zero_node); + t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); + return omit_one_operand_loc (loc, type, t1, arg1); } /* X | ~X is -1. */ if (TREE_CODE (arg1) == BIT_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) { - t1 = fold_convert (type, integer_zero_node); - t1 = fold_unary (BIT_NOT_EXPR, type, t1); - return omit_one_operand (type, t1, arg0); + t1 = fold_convert_loc (loc, type, integer_zero_node); + t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); + return omit_one_operand_loc (loc, type, t1, arg0); } /* Canonicalize (X & C1) | C2. */ @@ -10906,7 +11181,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */ if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1) - return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0)); + return omit_one_operand_loc (loc, type, arg1, + TREE_OPERAND (arg0, 0)); if (width > HOST_BITS_PER_WIDE_INT) { @@ -10923,7 +11199,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */ if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0) - return fold_build2 (BIT_IOR_EXPR, type, + return fold_build2_loc (loc, BIT_IOR_EXPR, type, TREE_OPERAND (arg0, 0), arg1); /* Minimize the number of bits set in C1, i.e. 
C1 := C1 & ~C2, @@ -10950,8 +11226,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) } } if (hi3 != hi1 || lo3 != lo1) - return fold_build2 (BIT_IOR_EXPR, type, - fold_build2 (BIT_AND_EXPR, type, + return fold_build2_loc (loc, BIT_IOR_EXPR, type, + fold_build2_loc (loc, BIT_AND_EXPR, type, TREE_OPERAND (arg0, 0), build_int_cst_wide (type, lo3, hi3)), @@ -10961,24 +11237,24 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* (X & Y) | Y is (X, Y). */ if (TREE_CODE (arg0) == BIT_AND_EXPR && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) - return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0)); + return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0)); /* (X & Y) | X is (Y, X). */ if (TREE_CODE (arg0) == BIT_AND_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) - return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1)); + return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1)); /* X | (X & Y) is (Y, X). */ if (TREE_CODE (arg1) == BIT_AND_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) - return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1)); + return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1)); /* X | (Y & X) is (Y, X). 
*/ if (TREE_CODE (arg1) == BIT_AND_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) - return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0)); + return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0)); - t1 = distribute_bit_expr (code, type, arg0, arg1); + t1 = distribute_bit_expr (loc, code, type, arg0, arg1); if (t1 != NULL_TREE) return t1; @@ -10991,12 +11267,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == BIT_NOT_EXPR) { - return fold_build1 (BIT_NOT_EXPR, type, - build2 (BIT_AND_EXPR, type, - fold_convert (type, - TREE_OPERAND (arg0, 0)), - fold_convert (type, - TREE_OPERAND (arg1, 0)))); + return + fold_build1_loc (loc, BIT_NOT_EXPR, type, + build2 (BIT_AND_EXPR, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0)))); } /* See if this can be simplified into a rotate first. If that @@ -11005,28 +11282,28 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case BIT_XOR_EXPR: if (integer_zerop (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); if (integer_all_onesp (arg1)) - return fold_build1 (BIT_NOT_EXPR, type, op0); + return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0); if (operand_equal_p (arg0, arg1, 0)) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); /* ~X ^ X is -1. 
*/ if (TREE_CODE (arg0) == BIT_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) { - t1 = fold_convert (type, integer_zero_node); - t1 = fold_unary (BIT_NOT_EXPR, type, t1); - return omit_one_operand (type, t1, arg1); + t1 = fold_convert_loc (loc, type, integer_zero_node); + t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); + return omit_one_operand_loc (loc, type, t1, arg1); } /* X ^ ~X is -1. */ if (TREE_CODE (arg1) == BIT_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) { - t1 = fold_convert (type, integer_zero_node); - t1 = fold_unary (BIT_NOT_EXPR, type, t1); - return omit_one_operand (type, t1, arg0); + t1 = fold_convert_loc (loc, type, integer_zero_node); + t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1); + return omit_one_operand_loc (loc, type, t1, arg0); } /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing @@ -11050,10 +11327,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) { tree t2 = TREE_OPERAND (arg0, 1); - t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), + t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1); - t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), - fold_convert (type, t1)); + t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, t2), + fold_convert_loc (loc, type, t1)); return t1; } @@ -11062,10 +11340,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) { tree t2 = TREE_OPERAND (arg0, 0); - t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), + t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1); - t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), - fold_convert (type, t1)); + t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, t2), + fold_convert_loc (loc, type, t1)); return t1; } @@ -11074,10 +11353,11 @@ fold_binary (enum tree_code code, tree 
type, tree op0, tree op1) && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0)) { tree t2 = TREE_OPERAND (arg1, 1); - t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), + t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), arg0); - t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), - fold_convert (type, t1)); + t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, t2), + fold_convert_loc (loc, type, t1)); return t1; } @@ -11086,71 +11366,75 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0)) { tree t2 = TREE_OPERAND (arg1, 0); - t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0), + t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0), arg0); - t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2), - fold_convert (type, t1)); + t1 = fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, t2), + fold_convert_loc (loc, type, t1)); return t1; } /* Convert ~X ^ ~Y to X ^ Y. */ if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == BIT_NOT_EXPR) - return fold_build2 (code, type, - fold_convert (type, TREE_OPERAND (arg0, 0)), - fold_convert (type, TREE_OPERAND (arg1, 0))); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0))); /* Convert ~X ^ C to X ^ ~C. */ if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST) - return fold_build2 (code, type, - fold_convert (type, TREE_OPERAND (arg0, 0)), - fold_build1 (BIT_NOT_EXPR, type, arg1)); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), + fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1)); /* Fold (X & 1) ^ 1 as (X & 1) == 0. 
*/ if (TREE_CODE (arg0) == BIT_AND_EXPR && integer_onep (TREE_OPERAND (arg0, 1)) && integer_onep (arg1)) - return fold_build2 (EQ_EXPR, type, arg0, + return fold_build2_loc (loc, EQ_EXPR, type, arg0, build_int_cst (TREE_TYPE (arg0), 0)); /* Fold (X & Y) ^ Y as ~X & Y. */ if (TREE_CODE (arg0) == BIT_AND_EXPR && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) { - tem = fold_convert (type, TREE_OPERAND (arg0, 0)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, tem), - fold_convert (type, arg1)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), + fold_convert_loc (loc, type, arg1)); } /* Fold (X & Y) ^ X as ~Y & X. */ if (TREE_CODE (arg0) == BIT_AND_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) { - tem = fold_convert (type, TREE_OPERAND (arg0, 1)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, tem), - fold_convert (type, arg1)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), + fold_convert_loc (loc, type, arg1)); } /* Fold X ^ (X & Y) as X & ~Y. */ if (TREE_CODE (arg1) == BIT_AND_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) { - tem = fold_convert (type, TREE_OPERAND (arg1, 1)); - return fold_build2 (BIT_AND_EXPR, type, - fold_convert (type, arg0), - fold_build1 (BIT_NOT_EXPR, type, tem)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, arg0), + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); } /* Fold X ^ (Y & X) as ~Y & X. 
*/ if (TREE_CODE (arg1) == BIT_AND_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) { - tem = fold_convert (type, TREE_OPERAND (arg1, 0)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, tem), - fold_convert (type, arg0)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), + fold_convert_loc (loc, type, arg0)); } /* See if this can be simplified into a rotate first. If that @@ -11159,55 +11443,57 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case BIT_AND_EXPR: if (integer_all_onesp (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); if (integer_zerop (arg1)) - return omit_one_operand (type, arg1, arg0); + return omit_one_operand_loc (loc, type, arg1, arg0); if (operand_equal_p (arg0, arg1, 0)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* ~X & X is always zero. */ if (TREE_CODE (arg0) == BIT_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) - return omit_one_operand (type, integer_zero_node, arg1); + return omit_one_operand_loc (loc, type, integer_zero_node, arg1); /* X & ~X is always zero. */ if (TREE_CODE (arg1) == BIT_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). 
*/ if (TREE_CODE (arg0) == BIT_IOR_EXPR && TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) { - tree tmp1 = fold_convert (type, arg1); - tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0)); - tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1)); - tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1); - tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1); - return fold_convert (type, - fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3)); + tree tmp1 = fold_convert_loc (loc, type, arg1); + tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); + tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); + tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1); + tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1); + return + fold_convert_loc (loc, type, + fold_build2_loc (loc, BIT_IOR_EXPR, + type, tmp2, tmp3)); } /* (X | Y) & Y is (X, Y). */ if (TREE_CODE (arg0) == BIT_IOR_EXPR && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) - return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0)); + return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0)); /* (X | Y) & X is (Y, X). */ if (TREE_CODE (arg0) == BIT_IOR_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) - return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1)); + return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1)); /* X & (X | Y) is (Y, X). */ if (TREE_CODE (arg1) == BIT_IOR_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0) && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1))) - return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1)); + return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1)); /* X & (Y | X) is (Y, X). 
*/ if (TREE_CODE (arg1) == BIT_IOR_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) - return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0)); + return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0)); /* Fold (X ^ 1) & 1 as (X & 1) == 0. */ if (TREE_CODE (arg0) == BIT_XOR_EXPR @@ -11215,8 +11501,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_onep (arg1)) { tem = TREE_OPERAND (arg0, 0); - return fold_build2 (EQ_EXPR, type, - fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem, + return fold_build2_loc (loc, EQ_EXPR, type, + fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem, build_int_cst (TREE_TYPE (tem), 1)), build_int_cst (TREE_TYPE (tem), 0)); } @@ -11225,8 +11511,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_onep (arg1)) { tem = TREE_OPERAND (arg0, 0); - return fold_build2 (EQ_EXPR, type, - fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem, + return fold_build2_loc (loc, EQ_EXPR, type, + fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem, build_int_cst (TREE_TYPE (tem), 1)), build_int_cst (TREE_TYPE (tem), 0)); } @@ -11235,42 +11521,42 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg0) == BIT_XOR_EXPR && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) { - tem = fold_convert (type, TREE_OPERAND (arg0, 0)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, tem), - fold_convert (type, arg1)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), + fold_convert_loc (loc, type, arg1)); } /* Fold (X ^ Y) & X as ~Y & X. 
*/ if (TREE_CODE (arg0) == BIT_XOR_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) { - tem = fold_convert (type, TREE_OPERAND (arg0, 1)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, tem), - fold_convert (type, arg1)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), + fold_convert_loc (loc, type, arg1)); } /* Fold X & (X ^ Y) as X & ~Y. */ if (TREE_CODE (arg1) == BIT_XOR_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) { - tem = fold_convert (type, TREE_OPERAND (arg1, 1)); - return fold_build2 (BIT_AND_EXPR, type, - fold_convert (type, arg0), - fold_build1 (BIT_NOT_EXPR, type, tem)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, arg0), + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem)); } /* Fold X & (Y ^ X) as ~Y & X. */ if (TREE_CODE (arg1) == BIT_XOR_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0) && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0))) { - tem = fold_convert (type, TREE_OPERAND (arg1, 0)); - return fold_build2 (BIT_AND_EXPR, type, - fold_build1 (BIT_NOT_EXPR, type, tem), - fold_convert (type, arg0)); + tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), + fold_convert_loc (loc, type, arg0)); } - t1 = distribute_bit_expr (code, type, arg0, arg1); + t1 = distribute_bit_expr (loc, code, type, arg0, arg1); if (t1 != NULL_TREE) return t1; /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. 
*/ @@ -11283,7 +11569,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT && (~TREE_INT_CST_LOW (arg1) & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0) - return fold_convert (type, TREE_OPERAND (arg0, 0)); + return + fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); } /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))). @@ -11295,12 +11582,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == BIT_NOT_EXPR) { - return fold_build1 (BIT_NOT_EXPR, type, + return fold_build1_loc (loc, BIT_NOT_EXPR, type, build2 (BIT_IOR_EXPR, type, - fold_convert (type, - TREE_OPERAND (arg0, 0)), - fold_convert (type, - TREE_OPERAND (arg1, 0)))); + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0)))); } /* If arg0 is derived from the address of an object or function, we may @@ -11382,7 +11669,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* ((X << 16) & 0xff00) is (X, 0). 
*/ if ((mask & zerobits) == mask) - return omit_one_operand (type, build_int_cst (type, 0), arg0); + return omit_one_operand_loc (loc, type, + build_int_cst (type, 0), arg0); newmask = mask | zerobits; if (newmask != mask && (newmask & (newmask + 1)) == 0) @@ -11402,17 +11690,17 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (shift_type != TREE_TYPE (arg0)) { - tem = fold_build2 (TREE_CODE (arg0), shift_type, - fold_convert (shift_type, - TREE_OPERAND (arg0, 0)), + tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type, + fold_convert_loc (loc, shift_type, + TREE_OPERAND (arg0, 0)), TREE_OPERAND (arg0, 1)); - tem = fold_convert (type, tem); + tem = fold_convert_loc (loc, type, tem); } else tem = op0; newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask); if (!tree_int_cst_equal (newmaskt, arg1)) - return fold_build2 (BIT_AND_EXPR, type, tem, newmaskt); + return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt); } } } @@ -11437,7 +11725,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree r = build_real (TREE_TYPE (arg0), dconst1); - return omit_two_operands (type, r, arg0, arg1); + return omit_two_operands_loc (loc, type, r, arg0, arg1); } /* The complex version of the above A / A optimization. */ @@ -11450,29 +11738,30 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree r = build_real (elem_type, dconst1); /* omit_two_operands will call fold_convert for us. 
*/ - return omit_two_operands (type, r, arg0, arg1); + return omit_two_operands_loc (loc, type, r, arg0, arg1); } } /* (-A) / (-B) -> A / B */ if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1)) - return fold_build2 (RDIV_EXPR, type, + return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0), negate_expr (arg1)); if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0)) - return fold_build2 (RDIV_EXPR, type, + return fold_build2_loc (loc, RDIV_EXPR, type, negate_expr (arg0), TREE_OPERAND (arg1, 0)); /* In IEEE floating point, x/1 is not equivalent to x for snans. */ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) && real_onep (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */ if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))) && real_minus_onep (arg1)) - return non_lvalue (fold_convert (type, negate_expr (arg0))); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, + negate_expr (arg0))); /* If ARG1 is a constant, we can convert this to a multiply by the reciprocal. This does not have the same rounding properties, @@ -11484,7 +11773,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (flag_reciprocal_math && 0 != (tem = const_binop (code, build_real (type, dconst1), arg1, 0))) - return fold_build2 (MULT_EXPR, type, arg0, tem); + return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem); /* Find the reciprocal if optimizing and the result is exact. */ if (optimize) { @@ -11493,23 +11782,23 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r)) { tem = build_real (type, r); - return fold_build2 (MULT_EXPR, type, - fold_convert (type, arg0), tem); + return fold_build2_loc (loc, MULT_EXPR, type, + fold_convert_loc (loc, type, arg0), tem); } } } /* Convert A/B/C to A/(B*C). 
*/ if (flag_reciprocal_math && TREE_CODE (arg0) == RDIV_EXPR) - return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0), - fold_build2 (MULT_EXPR, type, + return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0), + fold_build2_loc (loc, MULT_EXPR, type, TREE_OPERAND (arg0, 1), arg1)); /* Convert A/(B/C) to (A/B)*C. */ if (flag_reciprocal_math && TREE_CODE (arg1) == RDIV_EXPR) - return fold_build2 (MULT_EXPR, type, - fold_build2 (RDIV_EXPR, type, arg0, + return fold_build2_loc (loc, MULT_EXPR, type, + fold_build2_loc (loc, RDIV_EXPR, type, arg0, TREE_OPERAND (arg1, 0)), TREE_OPERAND (arg1, 1)); @@ -11522,7 +11811,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree tem = const_binop (RDIV_EXPR, arg0, TREE_OPERAND (arg1, 1), 0); if (tem) - return fold_build2 (RDIV_EXPR, type, tem, + return fold_build2_loc (loc, RDIV_EXPR, type, tem, TREE_OPERAND (arg1, 0)); } @@ -11541,7 +11830,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree tanfn = mathfn_built_in (type, BUILT_IN_TAN); if (tanfn != NULL_TREE) - return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0)); + return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0)); } /* Optimize cos(x)/sin(x) as 1.0/tan(x). 
*/ @@ -11555,8 +11844,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (tanfn != NULL_TREE) { - tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0)); - return fold_build2 (RDIV_EXPR, type, + tree tmp = build_call_expr_loc (loc, tanfn, 1, + CALL_EXPR_ARG (arg0, 0)); + return fold_build2_loc (loc, RDIV_EXPR, type, build_real (type, dconst1), tmp); } } @@ -11577,7 +11867,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree cosfn = mathfn_built_in (type, BUILT_IN_COS); if (cosfn != NULL_TREE) - return build_call_expr (cosfn, 1, arg00); + return build_call_expr_loc (loc, cosfn, 1, arg00); } } @@ -11598,8 +11888,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (cosfn != NULL_TREE) { - tree tmp = build_call_expr (cosfn, 1, arg00); - return fold_build2 (RDIV_EXPR, type, + tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00); + return fold_build2_loc (loc, RDIV_EXPR, type, build_real (type, dconst1), tmp); } @@ -11624,7 +11914,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) c = TREE_REAL_CST (arg01); real_arithmetic (&c, MINUS_EXPR, &c, &dconst1); arg = build_real (type, c); - return build_call_expr (powfn, 2, arg1, arg); + return build_call_expr_loc (loc, powfn, 2, arg1, arg); } } @@ -11639,10 +11929,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree b = TREE_OPERAND (rootarg, 0); tree c = TREE_OPERAND (rootarg, 1); - tree tmp = fold_build2 (RDIV_EXPR, type, c, b); + tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b); - tmp = build_call_expr (rootfn, 1, tmp); - return fold_build2 (MULT_EXPR, type, arg0, tmp); + tmp = build_call_expr_loc (loc, rootfn, 1, tmp); + return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp); } } @@ -11651,8 +11941,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0)); - arg1 = build_call_expr 
(expfn, 1, fold_convert (type, arg)); - return fold_build2 (MULT_EXPR, type, arg0, arg1); + arg1 = build_call_expr_loc (loc, + expfn, 1, + fold_convert_loc (loc, type, arg)); + return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); } /* Optimize x/pow(y,z) into x*pow(y,-z). */ @@ -11663,9 +11955,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0); tree arg10 = CALL_EXPR_ARG (arg1, 0); tree arg11 = CALL_EXPR_ARG (arg1, 1); - tree neg11 = fold_convert (type, negate_expr (arg11)); - arg1 = build_call_expr (powfn, 2, arg10, neg11); - return fold_build2 (MULT_EXPR, type, arg0, arg1); + tree neg11 = fold_convert_loc (loc, type, + negate_expr (arg11)); + arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11); + return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1); } } return NULL_TREE; @@ -11690,10 +11983,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) "occur when simplifying A / (B << N)"), WARN_STRICT_OVERFLOW_MISC); - sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt), + sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt), sh_cnt, build_int_cst (NULL_TREE, pow2)); - return fold_build2 (RSHIFT_EXPR, type, - fold_convert (type, arg0), sh_cnt); + return fold_build2_loc (loc, RSHIFT_EXPR, type, + fold_convert_loc (loc, type, arg0), sh_cnt); } } @@ -11702,7 +11995,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type) && code == FLOOR_DIV_EXPR) - return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1); + return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1); /* Fall thru */ @@ -11710,7 +12003,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case CEIL_DIV_EXPR: case EXACT_DIV_EXPR: if (integer_onep (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); if (integer_zerop (arg1)) return NULL_TREE; /* X / -1 
is -X. */ @@ -11718,7 +12011,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == INTEGER_CST && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 && TREE_INT_CST_HIGH (arg1) == -1) - return fold_convert (type, negate_expr (arg0)); + return fold_convert_loc (loc, type, negate_expr (arg0)); /* Convert -A / -B to A / B when the type is signed and overflow is undefined. */ @@ -11731,9 +12024,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) "when distributing negation across " "division"), WARN_STRICT_OVERFLOW_MISC); - return fold_build2 (code, type, - fold_convert (type, TREE_OPERAND (arg0, 0)), - fold_convert (type, negate_expr (arg1))); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), + fold_convert_loc (loc, type, + negate_expr (arg1))); } if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type)) && TREE_CODE (arg1) == NEGATE_EXPR @@ -11744,9 +12039,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) "when distributing negation across " "division"), WARN_STRICT_OVERFLOW_MISC); - return fold_build2 (code, type, - fold_convert (type, negate_expr (arg0)), - fold_convert (type, TREE_OPERAND (arg1, 0))); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, + negate_expr (arg0)), + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0))); } /* If arg0 is a multiple of arg1, then rewrite to the fastest div @@ -11757,7 +12054,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) after the last round to changes to the DIV code in expmed.c. 
*/ if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR) && multiple_of_p (type, arg0, arg1)) - return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1); + return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1); strict_overflow_p = false; if (TREE_CODE (arg1) == INTEGER_CST @@ -11768,7 +12065,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) fold_overflow_warning (("assuming signed overflow does not occur " "when simplifying division"), WARN_STRICT_OVERFLOW_MISC); - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); } return NULL_TREE; @@ -11780,7 +12077,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* X % 1 is always zero, but be sure to preserve any side effects in X. */ if (integer_onep (arg1)) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); /* X % 0, return X % 0 unchanged so that we can get the proper warnings and errors. */ @@ -11790,14 +12087,14 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* 0 % X is always zero, but be sure to preserve any side effects in X. Place this after checking for X == 0. */ if (integer_zerop (arg0)) - return omit_one_operand (type, integer_zero_node, arg1); + return omit_one_operand_loc (loc, type, integer_zero_node, arg1); /* X % -1 is zero. */ if (!TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1 && TREE_INT_CST_HIGH (arg1) == -1) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, i.e. "X % C" into "X & (C - 1)", if X and C are positive. 
*/ @@ -11814,16 +12111,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) { - tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1, + tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1, build_int_cst (TREE_TYPE (arg1), 1)); if (strict_overflow_p) fold_overflow_warning (("assuming signed overflow does not " "occur when simplifying " "X % (power of two)"), WARN_STRICT_OVERFLOW_MISC); - return fold_build2 (BIT_AND_EXPR, type, - fold_convert (type, arg0), - fold_convert (type, mask)); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, arg0), + fold_convert_loc (loc, type, mask)); } } @@ -11836,16 +12133,19 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && !TYPE_OVERFLOW_TRAPS (type) /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */ && !sign_bit_p (arg1, arg1)) - return fold_build2 (code, type, fold_convert (type, arg0), - fold_convert (type, negate_expr (arg1))); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, arg0), + fold_convert_loc (loc, type, + negate_expr (arg1))); /* X % -Y is the same as X % Y. 
*/ if (code == TRUNC_MOD_EXPR && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == NEGATE_EXPR && !TYPE_OVERFLOW_TRAPS (type)) - return fold_build2 (code, type, fold_convert (type, arg0), - fold_convert (type, TREE_OPERAND (arg1, 0))); + return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0), + fold_convert_loc (loc, type, + TREE_OPERAND (arg1, 0))); if (TREE_CODE (arg1) == INTEGER_CST && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, @@ -11855,7 +12155,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) fold_overflow_warning (("assuming signed overflow does not occur " "when simplifying modulus"), WARN_STRICT_OVERFLOW_MISC); - return fold_convert (type, tem); + return fold_convert_loc (loc, type, tem); } return NULL_TREE; @@ -11863,22 +12163,22 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case LROTATE_EXPR: case RROTATE_EXPR: if (integer_all_onesp (arg0)) - return omit_one_operand (type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg0, arg1); goto shift; case RSHIFT_EXPR: /* Optimize -1 >> x for arithmetic right shifts. */ if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type) && tree_expr_nonnegative_p (arg1)) - return omit_one_operand (type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg0, arg1); /* ... fall through ... */ case LSHIFT_EXPR: shift: if (integer_zerop (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); if (integer_zerop (arg0)) - return omit_one_operand (type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg0, arg1); /* Since negative shift count is not well-defined, don't try to compute it in the compiler. 
*/ @@ -11901,13 +12201,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (code == LROTATE_EXPR || code == RROTATE_EXPR) low = low % TYPE_PRECISION (type); else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR) - return omit_one_operand (type, build_int_cst (type, 0), + return omit_one_operand_loc (loc, type, build_int_cst (type, 0), TREE_OPERAND (arg0, 0)); else low = TYPE_PRECISION (type) - 1; } - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), build_int_cst (type, low)); } @@ -11928,12 +12228,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (low0 == low1) { - arg00 = fold_convert (type, TREE_OPERAND (arg0, 0)); + arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); lshift = build_int_cst (type, -1); lshift = int_const_binop (code, lshift, arg1, 0); - return fold_build2 (BIT_AND_EXPR, type, arg00, lshift); + return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift); } } @@ -11944,7 +12244,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree tem = build_int_cst (TREE_TYPE (arg1), TYPE_PRECISION (type)); tem = const_binop (MINUS_EXPR, tem, arg1, 0); - return fold_build2 (RROTATE_EXPR, type, op0, tem); + return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem); } /* If we have a rotate of a bit operation with the rotate count and @@ -11955,10 +12255,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || TREE_CODE (arg0) == BIT_IOR_EXPR || TREE_CODE (arg0) == BIT_XOR_EXPR) && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) - return fold_build2 (TREE_CODE (arg0), type, - fold_build2 (code, type, + return fold_build2_loc (loc, TREE_CODE (arg0), type, + fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1), - fold_build2 (code, type, + fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1), arg1)); /* Two consecutive rotates adding up to the precision of the @@ -11981,13 
+12281,15 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) { - tree mask = fold_build2 (code, type, - fold_convert (type, TREE_OPERAND (arg0, 1)), + tree mask = fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 1)), arg1); - tree shift = fold_build2 (code, type, - fold_convert (type, TREE_OPERAND (arg0, 0)), + tree shift = fold_build2_loc (loc, code, type, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0)), arg1); - tem = fold_binary (BIT_AND_EXPR, type, shift, mask); + tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask); if (tem) return tem; } @@ -11996,23 +12298,23 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case MIN_EXPR: if (operand_equal_p (arg0, arg1, 0)) - return omit_one_operand (type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg0, arg1); if (INTEGRAL_TYPE_P (type) && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST)) - return omit_one_operand (type, arg1, arg0); - tem = fold_minmax (MIN_EXPR, type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg1, arg0); + tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1); if (tem) return tem; goto associate; case MAX_EXPR: if (operand_equal_p (arg0, arg1, 0)) - return omit_one_operand (type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg0, arg1); if (INTEGRAL_TYPE_P (type) && TYPE_MAX_VALUE (type) && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST)) - return omit_one_operand (type, arg1, arg0); - tem = fold_minmax (MAX_EXPR, type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg1, arg0); + tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1); if (tem) return tem; goto associate; @@ -12023,32 +12325,32 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) ("true" is a fixed value perhaps depending on the language.) 
*/ /* If first arg is constant zero, return it. */ if (integer_zerop (arg0)) - return fold_convert (type, arg0); + return fold_convert_loc (loc, type, arg0); case TRUTH_AND_EXPR: /* If either arg is constant true, drop it. */ if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) - return non_lvalue (fold_convert (type, arg1)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1) /* Preserve sequence points. */ && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* If second arg is constant zero, result is zero, but first arg must be evaluated. */ if (integer_zerop (arg1)) - return omit_one_operand (type, arg1, arg0); + return omit_one_operand_loc (loc, type, arg1, arg0); /* Likewise for first arg, but note that only the TRUTH_AND_EXPR case will be handled here. */ if (integer_zerop (arg0)) - return omit_one_operand (type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg0, arg1); /* !X && X is always false. */ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) - return omit_one_operand (type, integer_zero_node, arg1); + return omit_one_operand_loc (loc, type, integer_zero_node, arg1); /* X && !X is always false. */ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); /* A < X && A + 1 > Y ==> A < X && A >= Y. 
Normally A + 1 > Y means A >= Y && A != MAX, but in this case we know that @@ -12057,13 +12359,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (!TREE_SIDE_EFFECTS (arg0) && !TREE_SIDE_EFFECTS (arg1)) { - tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1); + tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1); if (tem && !operand_equal_p (tem, arg0, 0)) - return fold_build2 (code, type, tem, arg1); + return fold_build2_loc (loc, code, type, tem, arg1); - tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0); + tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0); if (tem && !operand_equal_p (tem, arg1, 0)) - return fold_build2 (code, type, arg0, tem); + return fold_build2_loc (loc, code, type, arg0, tem); } truth_andor: @@ -12094,38 +12396,38 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || code == TRUTH_OR_EXPR)); if (operand_equal_p (a00, a10, 0)) - return fold_build2 (TREE_CODE (arg0), type, a00, - fold_build2 (code, type, a01, a11)); + return fold_build2_loc (loc, TREE_CODE (arg0), type, a00, + fold_build2_loc (loc, code, type, a01, a11)); else if (commutative && operand_equal_p (a00, a11, 0)) - return fold_build2 (TREE_CODE (arg0), type, a00, - fold_build2 (code, type, a01, a10)); + return fold_build2_loc (loc, TREE_CODE (arg0), type, a00, + fold_build2_loc (loc, code, type, a01, a10)); else if (commutative && operand_equal_p (a01, a10, 0)) - return fold_build2 (TREE_CODE (arg0), type, a01, - fold_build2 (code, type, a00, a11)); + return fold_build2_loc (loc, TREE_CODE (arg0), type, a01, + fold_build2_loc (loc, code, type, a00, a11)); /* This case if tricky because we must either have commutative operators or else A10 must not have side-effects. */ else if ((commutative || ! 
TREE_SIDE_EFFECTS (a10)) && operand_equal_p (a01, a11, 0)) - return fold_build2 (TREE_CODE (arg0), type, - fold_build2 (code, type, a00, a10), + return fold_build2_loc (loc, TREE_CODE (arg0), type, + fold_build2_loc (loc, code, type, a00, a10), a01); } /* See if we can build a range comparison. */ - if (0 != (tem = fold_range_test (code, type, op0, op1))) + if (0 != (tem = fold_range_test (loc, code, type, op0, op1))) return tem; /* Check for the possibility of merging component references. If our lhs is another similar operation, try to merge its rhs with our rhs. Then try to merge our lhs and rhs. */ if (TREE_CODE (arg0) == code - && 0 != (tem = fold_truthop (code, type, + && 0 != (tem = fold_truthop (loc, code, type, TREE_OPERAND (arg0, 1), arg1))) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); - if ((tem = fold_truthop (code, type, arg0, arg1)) != 0) + if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0) return tem; return NULL_TREE; @@ -12136,90 +12438,92 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) ("true" is a fixed value perhaps depending on the language.) */ /* If first arg is constant true, return it. */ if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) - return fold_convert (type, arg0); + return fold_convert_loc (loc, type, arg0); case TRUTH_OR_EXPR: /* If either arg is constant zero, drop it. */ if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0)) - return non_lvalue (fold_convert (type, arg1)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1)); if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1) /* Preserve sequence points. */ && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0))) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* If second arg is constant true, result is true, but we must evaluate first arg. 
*/ if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)) - return omit_one_operand (type, arg1, arg0); + return omit_one_operand_loc (loc, type, arg1, arg0); /* Likewise for first arg, but note this only occurs here for TRUTH_OR_EXPR. */ if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0)) - return omit_one_operand (type, arg0, arg1); + return omit_one_operand_loc (loc, type, arg0, arg1); /* !X || X is always true. */ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) - return omit_one_operand (type, integer_one_node, arg1); + return omit_one_operand_loc (loc, type, integer_one_node, arg1); /* X || !X is always true. */ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); goto truth_andor; case TRUTH_XOR_EXPR: /* If the second arg is constant zero, drop it. */ if (integer_zerop (arg1)) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* If the second arg is constant true, this is a logical inversion. */ if (integer_onep (arg1)) { /* Only call invert_truthvalue if operand is a truth value. */ if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE) - tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0); + tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0); else - tem = invert_truthvalue (arg0); - return non_lvalue (fold_convert (type, tem)); + tem = invert_truthvalue_loc (loc, arg0); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); } /* Identical arguments cancel to zero. */ if (operand_equal_p (arg0, arg1, 0)) - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); /* !X ^ X is always true. 
*/ if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)) - return omit_one_operand (type, integer_one_node, arg1); + return omit_one_operand_loc (loc, type, integer_one_node, arg1); /* X ^ !X is always true. */ if (TREE_CODE (arg1) == TRUTH_NOT_EXPR && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)) - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); return NULL_TREE; case EQ_EXPR: case NE_EXPR: - tem = fold_comparison (code, type, op0, op1); + tem = fold_comparison (loc, code, type, op0, op1); if (tem != NULL_TREE) return tem; /* bool_var != 0 becomes bool_var. */ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) && code == NE_EXPR) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* bool_var == 1 becomes bool_var. */ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) && code == EQ_EXPR) - return non_lvalue (fold_convert (type, arg0)); + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); /* bool_var != 1 becomes !bool_var. */ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1) && code == NE_EXPR) - return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0)); + return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, + fold_convert_loc (loc, type, arg0)); /* bool_var == 0 becomes !bool_var. 
*/ if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1) && code == EQ_EXPR) - return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0)); + return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, + fold_convert_loc (loc, type, arg0)); /* If this is an equality comparison of the address of two non-weak, unaliased symbols neither of which are extern (since we do not @@ -12263,10 +12567,11 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR, - fold_convert (TREE_TYPE (arg0), arg1), + fold_convert_loc (loc, TREE_TYPE (arg0), + arg1), TREE_OPERAND (arg0, 1), 0)) && !TREE_OVERFLOW (tem)) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); /* Similarly for a NEGATE_EXPR. */ if (TREE_CODE (arg0) == NEGATE_EXPR @@ -12274,15 +12579,17 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && 0 != (tem = negate_expr (arg1)) && TREE_CODE (tem) == INTEGER_CST && !TREE_OVERFLOW (tem)) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem); + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */ if (TREE_CODE (arg0) == BIT_XOR_EXPR && TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), - fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0), - fold_convert (TREE_TYPE (arg0), arg1), + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), + fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0), + fold_convert_loc (loc, + TREE_TYPE (arg0), + arg1), TREE_OPERAND (arg0, 1))); /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. 
*/ @@ -12294,8 +12601,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) || POINTER_TYPE_P (TREE_TYPE (arg0)))) { tree val = TREE_OPERAND (arg0, 1); - return omit_two_operands (type, - fold_build2 (code, type, + return omit_two_operands_loc (loc, type, + fold_build2_loc (loc, code, type, val, build_int_cst (TREE_TYPE (val), 0)), @@ -12308,7 +12615,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0) && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1) { - return omit_two_operands (type, + return omit_two_operands_loc (loc, type, code == NE_EXPR ? boolean_true_node : boolean_false_node, TREE_OPERAND (arg0, 1), arg1); @@ -12318,13 +12625,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) for !=. Don't do this for ordered comparisons due to overflow. */ if (TREE_CODE (arg0) == MINUS_EXPR && integer_zerop (arg1)) - return fold_build2 (code, type, + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); /* Convert ABS_EXPR == 0 or ABS_EXPR != 0 to x == 0 or x != 0. */ if (TREE_CODE (arg0) == ABS_EXPR && (integer_zerop (arg1) || real_zerop (arg1))) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1); + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1); /* If this is an EQ or NE comparison with zero and ARG0 is (1 << foo) & bar, convert it to (bar >> foo) & 1. 
Both require @@ -12339,22 +12646,24 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TREE_CODE (arg00) == LSHIFT_EXPR && integer_onep (TREE_OPERAND (arg00, 0))) { - tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00), + tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00), arg01, TREE_OPERAND (arg00, 1)); - tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem, + tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, build_int_cst (TREE_TYPE (arg0), 1)); - return fold_build2 (code, type, - fold_convert (TREE_TYPE (arg1), tem), arg1); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, TREE_TYPE (arg1), tem), + arg1); } else if (TREE_CODE (arg01) == LSHIFT_EXPR && integer_onep (TREE_OPERAND (arg01, 0))) { - tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01), + tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01), arg00, TREE_OPERAND (arg01, 1)); - tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem, + tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem, build_int_cst (TREE_TYPE (arg0), 1)); - return fold_build2 (code, type, - fold_convert (TREE_TYPE (arg1), tem), arg1); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, TREE_TYPE (arg1), tem), + arg1); } } @@ -12370,14 +12679,14 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_pow2p (TREE_OPERAND (arg0, 1))) { tree newtype = unsigned_type_for (TREE_TYPE (arg0)); - tree newmod = fold_build2 (TREE_CODE (arg0), newtype, - fold_convert (newtype, - TREE_OPERAND (arg0, 0)), - fold_convert (newtype, - TREE_OPERAND (arg0, 1))); + tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype, + fold_convert_loc (loc, newtype, + TREE_OPERAND (arg0, 0)), + fold_convert_loc (loc, newtype, + TREE_OPERAND (arg0, 1))); - return fold_build2 (code, type, newmod, - fold_convert (newtype, arg1)); + return fold_build2_loc (loc, code, type, newmod, + fold_convert_loc (loc, newtype, arg1)); } /* 
Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where @@ -12405,21 +12714,21 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) can be rewritten as (X & (C2 << C1)) != 0. */ if ((log2 + TREE_INT_CST_LOW (arg001)) < prec) { - tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001); - tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem); - return fold_build2 (code, type, tem, arg1); + tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001); + tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem); + return fold_build2_loc (loc, code, type, tem, arg1); } /* Otherwise, for signed (arithmetic) shifts, ((X >> C1) & C2) != 0 is rewritten as X < 0, and ((X >> C1) & C2) == 0 is rewritten as X >= 0. */ else if (!TYPE_UNSIGNED (itype)) - return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type, + return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type, arg000, build_int_cst (itype, 0)); /* Otherwise, of unsigned (logical) shifts, ((X >> C1) & C2) != 0 is rewritten as (X,false), and ((X >> C1) & C2) == 0 is rewritten as (X,true). */ else - return omit_one_operand (type, + return omit_one_operand_loc (loc, type, code == EQ_EXPR ? integer_one_node : integer_zero_node, arg000); @@ -12432,20 +12741,20 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_zerop (arg1) && TREE_CODE (arg0) == BIT_AND_EXPR && integer_onep (TREE_OPERAND (arg0, 1))) - return fold_convert (type, arg0); + return fold_convert_loc (loc, type, arg0); /* If we have (A & C) == C where C is a power of 2, convert this into (A & C) != 0. Similarly for NE_EXPR. */ if (TREE_CODE (arg0) == BIT_AND_EXPR && integer_pow2p (TREE_OPERAND (arg0, 1)) && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) - return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, - arg0, fold_convert (TREE_TYPE (arg0), - integer_zero_node)); + return fold_build2_loc (loc, code == EQ_EXPR ? 
NE_EXPR : EQ_EXPR, type, + arg0, fold_convert_loc (loc, TREE_TYPE (arg0), + integer_zero_node)); /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign bit, then fold the expression into A < 0 or A >= 0. */ - tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type); + tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type); if (tem) return tem; @@ -12455,14 +12764,14 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) { - tree notc = fold_build1 (BIT_NOT_EXPR, + tree notc = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (TREE_OPERAND (arg0, 1)), TREE_OPERAND (arg0, 1)); - tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), + tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), arg1, notc); tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; if (integer_nonzerop (dandnotc)) - return omit_one_operand (type, rslt, arg0); + return omit_one_operand_loc (loc, type, rslt, arg0); } /* If we have (A | C) == D where C & ~D != 0, convert this into 0. @@ -12471,12 +12780,12 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) { - tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1); - tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), + tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1); + tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), TREE_OPERAND (arg0, 1), notd); tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node; if (integer_nonzerop (candnotd)) - return omit_one_operand (type, rslt, arg0); + return omit_one_operand_loc (loc, type, rslt, arg0); } /* If this is a comparison of a field, we may be able to simplify it. 
*/ @@ -12486,7 +12795,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) to make sure the warnings are given. */ && (optimize || TREE_CODE (arg1) == INTEGER_CST)) { - t1 = optimize_bit_field_compare (code, type, arg0, arg1); + t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1); if (t1) return t1; } @@ -12508,8 +12817,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && call_expr_nargs (arg0) == 1 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE) { - tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0)); - return fold_build2 (code, type, iref, + tree iref = build_fold_indirect_ref_loc (loc, + CALL_EXPR_ARG (arg0, 0)); + return fold_build2_loc (loc, code, type, iref, build_int_cst (TREE_TYPE (iref), 0)); } } @@ -12530,9 +12840,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) if (TYPE_UNSIGNED (itype)) { itype = signed_type_for (itype); - arg00 = fold_convert (itype, arg00); + arg00 = fold_convert_loc (loc, itype, arg00); } - return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, + return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type, arg00, build_int_cst (itype, 0)); } } @@ -12540,27 +12850,27 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */ if (integer_zerop (arg1) && TREE_CODE (arg0) == BIT_XOR_EXPR) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)); /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */ if (TREE_CODE (arg0) == BIT_XOR_EXPR && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), build_int_cst (TREE_TYPE (arg1), 0)); /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. 
*/ if (TREE_CODE (arg0) == BIT_XOR_EXPR && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0) && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1)) - return fold_build2 (code, type, TREE_OPERAND (arg0, 1), + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1), build_int_cst (TREE_TYPE (arg1), 0)); /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */ if (TREE_CODE (arg0) == BIT_XOR_EXPR && TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST) - return fold_build2 (code, type, TREE_OPERAND (arg0, 0), - fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1), + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), + fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1), TREE_OPERAND (arg0, 1), arg1)); /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into @@ -12570,10 +12880,10 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && integer_zerop (arg1) && integer_pow2p (TREE_OPERAND (arg0, 1))) { - tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), + tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), TREE_OPERAND (TREE_OPERAND (arg0, 0), 0), TREE_OPERAND (arg0, 1)); - return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, + return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, tem, arg1); } @@ -12587,7 +12897,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) { tree arg00 = TREE_OPERAND (arg0, 0); - return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, + return fold_build2_loc (loc, code == EQ_EXPR ? 
NE_EXPR : EQ_EXPR, type, arg00, build_int_cst (TREE_TYPE (arg00), 0)); } @@ -12601,9 +12911,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) TREE_OPERAND (arg0, 1), OEP_ONLY_CONST)) { tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0); - tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000), + tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000), arg000, TREE_OPERAND (arg0, 1)); - return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, + return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type, tem, build_int_cst (TREE_TYPE (tem), 0)); } @@ -12611,13 +12921,13 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && tree_expr_nonzero_p (arg0)) { tree res = constant_boolean_node (code==NE_EXPR, type); - return omit_one_operand (type, res, arg0); + return omit_one_operand_loc (loc, type, res, arg0); } /* Fold -X op -Y as X op Y, where op is eq/ne. */ if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR) - return fold_build2 (code, type, + return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0)); @@ -12632,33 +12942,37 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) tree itype = TREE_TYPE (arg0); if (operand_equal_p (arg01, arg11, 0)) - return fold_build2 (code, type, - fold_build2 (BIT_AND_EXPR, itype, - fold_build2 (BIT_XOR_EXPR, itype, + return fold_build2_loc (loc, code, type, + fold_build2_loc (loc, BIT_AND_EXPR, itype, + fold_build2_loc (loc, + BIT_XOR_EXPR, itype, arg00, arg10), arg01), build_int_cst (itype, 0)); if (operand_equal_p (arg01, arg10, 0)) - return fold_build2 (code, type, - fold_build2 (BIT_AND_EXPR, itype, - fold_build2 (BIT_XOR_EXPR, itype, + return fold_build2_loc (loc, code, type, + fold_build2_loc (loc, BIT_AND_EXPR, itype, + fold_build2_loc (loc, + BIT_XOR_EXPR, itype, arg00, arg11), arg01), build_int_cst (itype, 0)); if (operand_equal_p (arg00, arg11, 0)) - return fold_build2 (code, type, - fold_build2 
(BIT_AND_EXPR, itype, - fold_build2 (BIT_XOR_EXPR, itype, + return fold_build2_loc (loc, code, type, + fold_build2_loc (loc, BIT_AND_EXPR, itype, + fold_build2_loc (loc, + BIT_XOR_EXPR, itype, arg01, arg10), arg00), build_int_cst (itype, 0)); if (operand_equal_p (arg00, arg10, 0)) - return fold_build2 (code, type, - fold_build2 (BIT_AND_EXPR, itype, - fold_build2 (BIT_XOR_EXPR, itype, + return fold_build2_loc (loc, code, type, + fold_build2_loc (loc, BIT_AND_EXPR, itype, + fold_build2_loc (loc, + BIT_XOR_EXPR, itype, arg01, arg11), arg00), build_int_cst (itype, 0)); @@ -12677,20 +12991,21 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) operand_equal_p guarantees no side-effects so we don't need to use omit_one_operand on Z. */ if (operand_equal_p (arg01, arg11, 0)) - return fold_build2 (code, type, arg00, arg10); + return fold_build2_loc (loc, code, type, arg00, arg10); if (operand_equal_p (arg01, arg10, 0)) - return fold_build2 (code, type, arg00, arg11); + return fold_build2_loc (loc, code, type, arg00, arg11); if (operand_equal_p (arg00, arg11, 0)) - return fold_build2 (code, type, arg01, arg10); + return fold_build2_loc (loc, code, type, arg01, arg10); if (operand_equal_p (arg00, arg10, 0)) - return fold_build2 (code, type, arg01, arg11); + return fold_build2_loc (loc, code, type, arg01, arg11); /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. 
*/ if (TREE_CODE (arg01) == INTEGER_CST && TREE_CODE (arg11) == INTEGER_CST) - return fold_build2 (code, type, - fold_build2 (BIT_XOR_EXPR, itype, arg00, - fold_build2 (BIT_XOR_EXPR, itype, + return fold_build2_loc (loc, code, type, + fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, + fold_build2_loc (loc, + BIT_XOR_EXPR, itype, arg01, arg11)), arg10); } @@ -12728,41 +13043,41 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) imag1 = TREE_IMAGPART (arg1); } - rcond = fold_binary (code, type, real0, real1); + rcond = fold_binary_loc (loc, code, type, real0, real1); if (rcond && TREE_CODE (rcond) == INTEGER_CST) { if (integer_zerop (rcond)) { if (code == EQ_EXPR) - return omit_two_operands (type, boolean_false_node, + return omit_two_operands_loc (loc, type, boolean_false_node, imag0, imag1); - return fold_build2 (NE_EXPR, type, imag0, imag1); + return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1); } else { if (code == NE_EXPR) - return omit_two_operands (type, boolean_true_node, + return omit_two_operands_loc (loc, type, boolean_true_node, imag0, imag1); - return fold_build2 (EQ_EXPR, type, imag0, imag1); + return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1); } } - icond = fold_binary (code, type, imag0, imag1); + icond = fold_binary_loc (loc, code, type, imag0, imag1); if (icond && TREE_CODE (icond) == INTEGER_CST) { if (integer_zerop (icond)) { if (code == EQ_EXPR) - return omit_two_operands (type, boolean_false_node, + return omit_two_operands_loc (loc, type, boolean_false_node, real0, real1); - return fold_build2 (NE_EXPR, type, real0, real1); + return fold_build2_loc (loc, NE_EXPR, type, real0, real1); } else { if (code == NE_EXPR) - return omit_two_operands (type, boolean_true_node, + return omit_two_operands_loc (loc, type, boolean_true_node, real0, real1); - return fold_build2 (EQ_EXPR, type, real0, real1); + return fold_build2_loc (loc, EQ_EXPR, type, real0, real1); } } } @@ -12773,7 +13088,7 @@ fold_binary (enum tree_code 
code, tree type, tree op0, tree op1) case GT_EXPR: case LE_EXPR: case GE_EXPR: - tem = fold_comparison (code, type, op0, op1); + tem = fold_comparison (loc, code, type, op0, op1); if (tem != NULL_TREE) return tem; @@ -12966,16 +13281,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) switch (code) { case GT_EXPR: - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); case GE_EXPR: - return fold_build2 (EQ_EXPR, type, op0, op1); + return fold_build2_loc (loc, EQ_EXPR, type, op0, op1); case LE_EXPR: - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); case LT_EXPR: - return fold_build2 (NE_EXPR, type, op0, op1); + return fold_build2_loc (loc, NE_EXPR, type, op0, op1); /* The GE_EXPR and LT_EXPR cases above are not normally reached because of previous transformations. */ @@ -12991,14 +13306,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) case GT_EXPR: arg1 = const_binop (PLUS_EXPR, arg1, build_int_cst (TREE_TYPE (arg1), 1), 0); - return fold_build2 (EQ_EXPR, type, - fold_convert (TREE_TYPE (arg1), arg0), + return fold_build2_loc (loc, EQ_EXPR, type, + fold_convert_loc (loc, + TREE_TYPE (arg1), arg0), arg1); case LE_EXPR: arg1 = const_binop (PLUS_EXPR, arg1, build_int_cst (TREE_TYPE (arg1), 1), 0); - return fold_build2 (NE_EXPR, type, - fold_convert (TREE_TYPE (arg1), arg0), + return fold_build2_loc (loc, NE_EXPR, type, + fold_convert_loc (loc, TREE_TYPE (arg1), + arg0), arg1); default: break; @@ -13009,16 +13326,16 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) switch (code) { case LT_EXPR: - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); case LE_EXPR: - return fold_build2 (EQ_EXPR, type, op0, op1); + return fold_build2_loc (loc, EQ_EXPR, type, op0, op1); case GE_EXPR: - return 
omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); case GT_EXPR: - return fold_build2 (NE_EXPR, type, op0, op1); + return fold_build2_loc (loc, NE_EXPR, type, op0, op1); default: break; @@ -13030,13 +13347,15 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { case GE_EXPR: arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); - return fold_build2 (NE_EXPR, type, - fold_convert (TREE_TYPE (arg1), arg0), + return fold_build2_loc (loc, NE_EXPR, type, + fold_convert_loc (loc, + TREE_TYPE (arg1), arg0), arg1); case LT_EXPR: arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); - return fold_build2 (EQ_EXPR, type, - fold_convert (TREE_TYPE (arg1), arg0), + return fold_build2_loc (loc, EQ_EXPR, type, + fold_convert_loc (loc, TREE_TYPE (arg1), + arg0), arg1); default: break; @@ -13059,8 +13378,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) { tree st; st = signed_type_for (TREE_TYPE (arg1)); - return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR, - type, fold_convert (st, arg0), + return fold_build2_loc (loc, + code == LE_EXPR ? GE_EXPR : LT_EXPR, + type, fold_convert_loc (loc, st, arg0), build_int_cst (st, 0)); } } @@ -13079,7 +13399,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && (0 != (tem = negate_expr (arg1))) && TREE_CODE (tem) == INTEGER_CST && !TREE_OVERFLOW (tem)) - return fold_build2 (TRUTH_ANDIF_EXPR, type, + return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, build2 (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem), build2 (LE_EXPR, type, @@ -13098,7 +13418,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) "when simplifying comparison of " "absolute value and zero"), WARN_STRICT_OVERFLOW_CONDITIONAL); - return omit_one_operand (type, integer_one_node, arg0); + return omit_one_operand_loc (loc, type, integer_one_node, arg0); } /* Convert ABS_EXPR < 0 to false. 
*/ @@ -13112,7 +13432,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) "when simplifying comparison of " "absolute value and zero"), WARN_STRICT_OVERFLOW_CONDITIONAL); - return omit_one_operand (type, integer_zero_node, arg0); + return omit_one_operand_loc (loc, type, integer_zero_node, arg0); } /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0 @@ -13121,23 +13441,29 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) && TYPE_UNSIGNED (TREE_TYPE (arg0)) && TREE_CODE (arg1) == LSHIFT_EXPR && integer_onep (TREE_OPERAND (arg1, 0))) - return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, - build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, - TREE_OPERAND (arg1, 1)), - build_int_cst (TREE_TYPE (arg0), 0)); + { + tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, + build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, + TREE_OPERAND (arg1, 1)), + build_int_cst (TREE_TYPE (arg0), 0)); + goto fold_binary_exit; + } if ((code == LT_EXPR || code == GE_EXPR) && TYPE_UNSIGNED (TREE_TYPE (arg0)) && CONVERT_EXPR_P (arg1) && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0))) - return - build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, - fold_convert (TREE_TYPE (arg0), - build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0, - TREE_OPERAND (TREE_OPERAND (arg1, 0), - 1))), - build_int_cst (TREE_TYPE (arg0), 0)); + { + tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type, + fold_convert_loc (loc, TREE_TYPE (arg0), + build2 (RSHIFT_EXPR, + TREE_TYPE (arg0), arg0, + TREE_OPERAND (TREE_OPERAND (arg1, 0), + 1))), + build_int_cst (TREE_TYPE (arg0), 0)); + goto fold_binary_exit; + } return NULL_TREE; @@ -13164,7 +13490,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) ? 
integer_zero_node : integer_one_node; - return omit_one_operand (type, t1, arg1); + return omit_one_operand_loc (loc, type, t1, arg1); } /* If the second operand is NaN, the result is constant. */ @@ -13175,7 +13501,7 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) t1 = (code == ORDERED_EXPR || code == LTGT_EXPR) ? integer_zero_node : integer_one_node; - return omit_one_operand (type, t1, arg0); + return omit_one_operand_loc (loc, type, t1, arg0); } /* Simplify unordered comparison of something with itself. */ @@ -13198,8 +13524,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) newtype = TREE_TYPE (targ1); if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0))) - return fold_build2 (code, type, fold_convert (newtype, targ0), - fold_convert (newtype, targ1)); + return fold_build2_loc (loc, code, type, + fold_convert_loc (loc, newtype, targ0), + fold_convert_loc (loc, newtype, targ1)); } return NULL_TREE; @@ -13211,8 +13538,8 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) return NULL_TREE; /* Don't let (0, 0) be null pointer constant. */ tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1) - : fold_convert (type, arg1); - return pedantic_non_lvalue (tem); + : fold_convert_loc (loc, type, arg1); + return pedantic_non_lvalue_loc (loc, tem); case COMPLEX_EXPR: if ((TREE_CODE (arg0) == REAL_CST @@ -13229,6 +13556,9 @@ fold_binary (enum tree_code code, tree type, tree op0, tree op1) default: return NULL_TREE; } /* switch (code) */ + fold_binary_exit: + protected_set_expr_location (tem, loc); + return tem; } /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is @@ -13268,7 +13598,8 @@ contains_label_p (tree st) successful. Otherwise, return NULL_TREE. 
*/ tree -fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) +fold_ternary_loc (location_t loc, enum tree_code code, tree type, + tree op0, tree op1, tree op2) { tree tem; tree arg0 = NULL_TREE, arg1 = NULL_TREE; @@ -13328,11 +13659,11 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) || !contains_label_p (unused_op)) && (! VOID_TYPE_P (TREE_TYPE (tem)) || VOID_TYPE_P (type))) - return pedantic_non_lvalue (tem); + return pedantic_non_lvalue_loc (loc, tem); return NULL_TREE; } if (operand_equal_p (arg1, op2, 0)) - return pedantic_omit_one_operand (type, arg1, arg0); + return pedantic_omit_one_operand_loc (loc, type, arg1, arg0); /* If we have A op B ? A : C, we may be able to convert this to a simpler expression, depending on the operation and the values @@ -13345,7 +13676,7 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) arg1, TREE_OPERAND (arg0, 1)) && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1)))) { - tem = fold_cond_expr_with_comparison (type, arg0, op1, op2); + tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2); if (tem) return tem; } @@ -13356,10 +13687,10 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) TREE_OPERAND (arg0, 1)) && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2)))) { - tem = fold_truth_not_expr (arg0); + tem = fold_truth_not_expr (loc, arg0); if (tem && COMPARISON_CLASS_P (tem)) { - tem = fold_cond_expr_with_comparison (type, tem, op2, op1); + tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1); if (tem) return tem; } @@ -13373,9 +13704,9 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) /* See if this can be inverted. If it can't, possibly because it was a floating-point inequality comparison, don't do anything. 
*/ - tem = fold_truth_not_expr (arg0); + tem = fold_truth_not_expr (loc, arg0); if (tem) - return fold_build3 (code, type, tem, op2, op1); + return fold_build3_loc (loc, code, type, tem, op2, op1); } /* Convert A ? 1 : 0 to simply A. */ @@ -13386,15 +13717,17 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) a COND, which will recurse. In that case, the COND_EXPR is probably the best choice, so leave it alone. */ && type == TREE_TYPE (arg0)) - return pedantic_non_lvalue (arg0); + return pedantic_non_lvalue_loc (loc, arg0); /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR over COND_EXPR in cases such as floating point comparisons. */ if (integer_zerop (op1) && integer_onep (op2) && truth_value_p (TREE_CODE (arg0))) - return pedantic_non_lvalue (fold_convert (type, - invert_truthvalue (arg0))); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, + invert_truthvalue_loc (loc, + arg0))); /* A < 0 ? : 0 is simply (A & ). */ if (TREE_CODE (arg0) == LT_EXPR @@ -13449,24 +13782,26 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo) { tem_type = signed_type_for (TREE_TYPE (tem)); - tem = fold_convert (tem_type, tem); + tem = fold_convert_loc (loc, tem_type, tem); } else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0) { tem_type = unsigned_type_for (TREE_TYPE (tem)); - tem = fold_convert (tem_type, tem); + tem = fold_convert_loc (loc, tem_type, tem); } else tem = NULL; } if (tem) - return fold_convert (type, - fold_build2 (BIT_AND_EXPR, - TREE_TYPE (tem), tem, - fold_convert (TREE_TYPE (tem), - arg1))); + return + fold_convert_loc (loc, type, + fold_build2_loc (loc, BIT_AND_EXPR, + TREE_TYPE (tem), tem, + fold_convert_loc (loc, + TREE_TYPE (tem), + arg1))); } /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). 
A & 1 was @@ -13482,7 +13817,7 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST && (unsigned HOST_WIDE_INT) tree_log2 (arg1) == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))) - return fold_build2 (BIT_AND_EXPR, type, + return fold_build2_loc (loc, BIT_AND_EXPR, type, TREE_OPERAND (tem, 0), arg1); } @@ -13497,15 +13832,16 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1), arg1, OEP_ONLY_CONST)) - return pedantic_non_lvalue (fold_convert (type, - TREE_OPERAND (arg0, 0))); + return pedantic_non_lvalue_loc (loc, + fold_convert_loc (loc, type, + TREE_OPERAND (arg0, 0))); /* Convert A ? B : 0 into A && B if A and B are truth values. */ if (integer_zerop (op2) && truth_value_p (TREE_CODE (arg0)) && truth_value_p (TREE_CODE (arg1))) - return fold_build2 (TRUTH_ANDIF_EXPR, type, - fold_convert (type, arg0), + return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, + fold_convert_loc (loc, type, arg0), arg1); /* Convert A ? B : 1 into !A || B if A and B are truth values. */ @@ -13514,10 +13850,10 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) && truth_value_p (TREE_CODE (arg1))) { /* Only perform transformation if ARG0 is easily inverted. */ - tem = fold_truth_not_expr (arg0); + tem = fold_truth_not_expr (loc, arg0); if (tem) - return fold_build2 (TRUTH_ORIF_EXPR, type, - fold_convert (type, tem), + return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, + fold_convert_loc (loc, type, tem), arg1); } @@ -13527,10 +13863,10 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) && truth_value_p (TREE_CODE (op2))) { /* Only perform transformation if ARG0 is easily inverted. 
*/ - tem = fold_truth_not_expr (arg0); + tem = fold_truth_not_expr (loc, arg0); if (tem) - return fold_build2 (TRUTH_ANDIF_EXPR, type, - fold_convert (type, tem), + return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type, + fold_convert_loc (loc, type, tem), op2); } @@ -13538,8 +13874,8 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) if (integer_onep (arg1) && truth_value_p (TREE_CODE (arg0)) && truth_value_p (TREE_CODE (op2))) - return fold_build2 (TRUTH_ORIF_EXPR, type, - fold_convert (type, arg0), + return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type, + fold_convert_loc (loc, type, arg0), op2); return NULL_TREE; @@ -13580,7 +13916,7 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) if (elements) return TREE_VALUE (elements); else - return fold_convert (type, integer_zero_node); + return fold_convert_loc (loc, type, integer_zero_node); } } @@ -13588,7 +13924,7 @@ fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2) if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)) && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1) && integer_zerop (op2)) - return fold_convert (type, arg0); + return fold_convert_loc (loc, type, arg0); return NULL_TREE; @@ -13617,6 +13953,7 @@ fold (tree expr) enum tree_code code = TREE_CODE (t); enum tree_code_class kind = TREE_CODE_CLASS (code); tree tem; + location_t loc = EXPR_LOCATION (expr); /* Return right away if a constant. */ if (kind == tcc_constant) @@ -13628,7 +13965,7 @@ fold (tree expr) { if (code == CALL_EXPR) { - tem = fold_call_expr (expr, false); + tem = fold_call_expr (loc, expr, false); return tem ? tem : expr; } return expr; @@ -13643,18 +13980,18 @@ fold (tree expr) { case 1: op0 = TREE_OPERAND (t, 0); - tem = fold_unary (code, type, op0); + tem = fold_unary_loc (loc, code, type, op0); return tem ? 
tem : expr; case 2: op0 = TREE_OPERAND (t, 0); op1 = TREE_OPERAND (t, 1); - tem = fold_binary (code, type, op0, op1); + tem = fold_binary_loc (loc, code, type, op0, op1); return tem ? tem : expr; case 3: op0 = TREE_OPERAND (t, 0); op1 = TREE_OPERAND (t, 1); op2 = TREE_OPERAND (t, 2); - tem = fold_ternary (code, type, op0, op1, op2); + tem = fold_ternary_loc (loc, code, type, op0, op1, op2); return tem ? tem : expr; default: break; @@ -13948,12 +14285,13 @@ debug_fold_checksum (const_tree t) #endif /* Fold a unary tree expression with code CODE of type TYPE with an - operand OP0. Return a folded expression if successful. Otherwise, - return a tree expression with code CODE of type TYPE with an - operand OP0. */ + operand OP0. LOC is the location of the resulting expression. + Return a folded expression if successful. Otherwise, return a tree + expression with code CODE of type TYPE with an operand OP0. */ tree -fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL) +fold_build1_stat_loc (location_t loc, + enum tree_code code, tree type, tree op0 MEM_STAT_DECL) { tree tem; #ifdef ENABLE_FOLD_CHECKING @@ -13968,9 +14306,12 @@ fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL) htab_empty (ht); #endif - tem = fold_unary (code, type, op0); + tem = fold_unary_loc (loc, code, type, op0); if (!tem) - tem = build1_stat (code, type, op0 PASS_MEM_STAT); + { + tem = build1_stat (code, type, op0 PASS_MEM_STAT); + SET_EXPR_LOCATION (tem, loc); + } #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); @@ -13985,13 +14326,15 @@ fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL) } /* Fold a binary tree expression with code CODE of type TYPE with - operands OP0 and OP1. Return a folded expression if successful. - Otherwise, return a tree expression with code CODE of type TYPE - with operands OP0 and OP1. */ + operands OP0 and OP1. LOC is the location of the resulting + expression. Return a folded expression if successful. 
Otherwise, + return a tree expression with code CODE of type TYPE with operands + OP0 and OP1. */ tree -fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1 - MEM_STAT_DECL) +fold_build2_stat_loc (location_t loc, + enum tree_code code, tree type, tree op0, tree op1 + MEM_STAT_DECL) { tree tem; #ifdef ENABLE_FOLD_CHECKING @@ -14014,9 +14357,12 @@ fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1 htab_empty (ht); #endif - tem = fold_binary (code, type, op0, op1); + tem = fold_binary_loc (loc, code, type, op0, op1); if (!tem) - tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT); + { + tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT); + SET_EXPR_LOCATION (tem, loc); + } #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); @@ -14044,8 +14390,8 @@ fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1 type TYPE with operands OP0, OP1, and OP2. */ tree -fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 - MEM_STAT_DECL) +fold_build3_stat_loc (location_t loc, enum tree_code code, tree type, + tree op0, tree op1, tree op2 MEM_STAT_DECL) { tree tem; #ifdef ENABLE_FOLD_CHECKING @@ -14076,9 +14422,12 @@ fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 #endif gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp); - tem = fold_ternary (code, type, op0, op1, op2); + tem = fold_ternary_loc (loc, code, type, op0, op1, op2); if (!tem) - tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT); + { + tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT); + SET_EXPR_LOCATION (tem, loc); + } #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); @@ -14114,7 +14463,8 @@ fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2 of type TYPE from the given operands as constructed by build_call_array. 
*/ tree -fold_build_call_array (tree type, tree fn, int nargs, tree *argarray) +fold_build_call_array_loc (location_t loc, tree type, tree fn, + int nargs, tree *argarray) { tree tem; #ifdef ENABLE_FOLD_CHECKING @@ -14139,7 +14489,7 @@ fold_build_call_array (tree type, tree fn, int nargs, tree *argarray) htab_empty (ht); #endif - tem = fold_builtin_call_array (type, fn, nargs, argarray); + tem = fold_builtin_call_array (loc, type, fn, nargs, argarray); #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); @@ -14186,50 +14536,52 @@ fold_build_call_array (tree type, tree fn, int nargs, tree *argarray) folding_initializer = saved_folding_initializer; tree -fold_build1_initializer (enum tree_code code, tree type, tree op) +fold_build1_initializer_loc (location_t loc, enum tree_code code, + tree type, tree op) { tree result; START_FOLD_INIT; - result = fold_build1 (code, type, op); + result = fold_build1_loc (loc, code, type, op); END_FOLD_INIT; return result; } tree -fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1) +fold_build2_initializer_loc (location_t loc, enum tree_code code, + tree type, tree op0, tree op1) { tree result; START_FOLD_INIT; - result = fold_build2 (code, type, op0, op1); + result = fold_build2_loc (loc, code, type, op0, op1); END_FOLD_INIT; return result; } tree -fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1, - tree op2) +fold_build3_initializer_loc (location_t loc, enum tree_code code, + tree type, tree op0, tree op1, tree op2) { tree result; START_FOLD_INIT; - result = fold_build3 (code, type, op0, op1, op2); + result = fold_build3_loc (loc, code, type, op0, op1, op2); END_FOLD_INIT; return result; } tree -fold_build_call_array_initializer (tree type, tree fn, - int nargs, tree *argarray) +fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn, + int nargs, tree *argarray) { tree result; START_FOLD_INIT; - result = fold_build_call_array (type, fn, nargs, argarray); + result = 
fold_build_call_array_loc (loc, type, fn, nargs, argarray); END_FOLD_INIT; return result; @@ -15242,13 +15594,14 @@ fold_read_from_constant_string (tree exp) tree exp1 = TREE_OPERAND (exp, 0); tree index; tree string; + location_t loc = EXPR_LOCATION (exp); if (TREE_CODE (exp) == INDIRECT_REF) string = string_constant (exp1, &index); else { tree low_bound = array_ref_low_bound (exp); - index = fold_convert (sizetype, TREE_OPERAND (exp, 1)); + index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1)); /* Optimize the special-case of a zero lower bound. @@ -15258,7 +15611,8 @@ fold_read_from_constant_string (tree exp) +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1)) +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */ if (! integer_zerop (low_bound)) - index = size_diffop (index, fold_convert (sizetype, low_bound)); + index = size_diffop_loc (loc, index, + fold_convert_loc (loc, sizetype, low_bound)); string = exp1; } @@ -15555,7 +15909,7 @@ fold_build_cleanup_point_expr (tree type, tree expr) possible. 
*/ tree -fold_indirect_ref_1 (tree type, tree op0) +fold_indirect_ref_1 (location_t loc, tree type, tree op0) { tree sub = op0; tree subtype; @@ -15589,19 +15943,21 @@ fold_indirect_ref_1 (tree type, tree op0) tree min_val = size_zero_node; if (type_domain && TYPE_MIN_VALUE (type_domain)) min_val = TYPE_MIN_VALUE (type_domain); - return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); + op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE); + SET_EXPR_LOCATION (op0, loc); + return op0; } /* *(foo *)&complexfoo => __real__ complexfoo */ else if (TREE_CODE (optype) == COMPLEX_TYPE && type == TREE_TYPE (optype)) - return fold_build1 (REALPART_EXPR, type, op); + return fold_build1_loc (loc, REALPART_EXPR, type, op); /* *(foo *)&vectorfoo => BIT_FIELD_REF */ else if (TREE_CODE (optype) == VECTOR_TYPE && type == TREE_TYPE (optype)) { tree part_width = TYPE_SIZE (type); tree index = bitsize_int (0); - return fold_build3 (BIT_FIELD_REF, type, op, part_width, index); + return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index); } } @@ -15626,7 +15982,8 @@ fold_indirect_ref_1 (tree type, tree op0) tree index = bitsize_int (indexi); if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type))) - return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0), + return fold_build3_loc (loc, + BIT_FIELD_REF, type, TREE_OPERAND (op00, 0), part_width, index); } @@ -15649,7 +16006,8 @@ fold_indirect_ref_1 (tree type, tree op0) { tree size = TYPE_SIZE_UNIT (type); if (tree_int_cst_equal (size, op01)) - return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0)); + return fold_build1_loc (loc, IMAGPART_EXPR, type, + TREE_OPERAND (op00, 0)); } } @@ -15659,11 +16017,13 @@ fold_indirect_ref_1 (tree type, tree op0) { tree type_domain; tree min_val = size_zero_node; - sub = build_fold_indirect_ref (sub); + sub = build_fold_indirect_ref_loc (loc, sub); type_domain = TYPE_DOMAIN (TREE_TYPE (sub)); if (type_domain && TYPE_MIN_VALUE 
(type_domain)) min_val = TYPE_MIN_VALUE (type_domain); - return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); + op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE); + SET_EXPR_LOCATION (op0, loc); + return op0; } return NULL_TREE; @@ -15673,23 +16033,25 @@ fold_indirect_ref_1 (tree type, tree op0) cases. */ tree -build_fold_indirect_ref (tree t) +build_fold_indirect_ref_loc (location_t loc, tree t) { tree type = TREE_TYPE (TREE_TYPE (t)); - tree sub = fold_indirect_ref_1 (type, t); + tree sub = fold_indirect_ref_1 (loc, type, t); if (sub) return sub; - else - return build1 (INDIRECT_REF, type, t); + + t = build1 (INDIRECT_REF, type, t); + SET_EXPR_LOCATION (t, loc); + return t; } /* Given an INDIRECT_REF T, return either T or a simplified version. */ tree -fold_indirect_ref (tree t) +fold_indirect_ref_loc (location_t loc, tree t) { - tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0)); + tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0)); if (sub) return sub; @@ -15754,7 +16116,7 @@ fold_ignored_result (tree t) This can only be applied to objects of a sizetype. 
*/ tree -round_up (tree value, int divisor) +round_up_loc (location_t loc, tree value, int divisor) { tree div = NULL_TREE; @@ -15805,17 +16167,17 @@ round_up (tree value, int divisor) tree t; t = build_int_cst (TREE_TYPE (value), divisor - 1); - value = size_binop (PLUS_EXPR, value, t); + value = size_binop_loc (loc, PLUS_EXPR, value, t); t = build_int_cst (TREE_TYPE (value), -divisor); - value = size_binop (BIT_AND_EXPR, value, t); + value = size_binop_loc (loc, BIT_AND_EXPR, value, t); } } else { if (!div) div = build_int_cst (TREE_TYPE (value), divisor); - value = size_binop (CEIL_DIV_EXPR, value, div); - value = size_binop (MULT_EXPR, value, div); + value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div); + value = size_binop_loc (loc, MULT_EXPR, value, div); } return value; @@ -15824,7 +16186,7 @@ round_up (tree value, int divisor) /* Likewise, but round down. */ tree -round_down (tree value, int divisor) +round_down_loc (location_t loc, tree value, int divisor) { tree div = NULL_TREE; @@ -15850,14 +16212,14 @@ round_down (tree value, int divisor) tree t; t = build_int_cst (TREE_TYPE (value), -divisor); - value = size_binop (BIT_AND_EXPR, value, t); + value = size_binop_loc (loc, BIT_AND_EXPR, value, t); } else { if (!div) div = build_int_cst (TREE_TYPE (value), divisor); - value = size_binop (FLOOR_DIV_EXPR, value, div); - value = size_binop (MULT_EXPR, value, div); + value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div); + value = size_binop_loc (loc, MULT_EXPR, value, div); } return value; @@ -15875,13 +16237,14 @@ split_address_to_core_and_offset (tree exp, enum machine_mode mode; int unsignedp, volatilep; HOST_WIDE_INT bitsize; + location_t loc = EXPR_LOCATION (exp); if (TREE_CODE (exp) == ADDR_EXPR) { core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos, poffset, &mode, &unsignedp, &volatilep, false); - core = build_fold_addr_expr (core); + core = build_fold_addr_expr_loc (loc, core); } else { @@ -15944,6 +16307,7 @@ tree 
fold_strip_sign_ops (tree exp) { tree arg0, arg1; + location_t loc = EXPR_LOCATION (exp); switch (TREE_CODE (exp)) { @@ -15959,7 +16323,7 @@ fold_strip_sign_ops (tree exp) arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0)); arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); if (arg0 != NULL_TREE || arg1 != NULL_TREE) - return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp), + return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp), arg0 ? arg0 : TREE_OPERAND (exp, 0), arg1 ? arg1 : TREE_OPERAND (exp, 1)); break; @@ -15968,14 +16332,15 @@ fold_strip_sign_ops (tree exp) arg0 = TREE_OPERAND (exp, 0); arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); if (arg1) - return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1); + return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1); break; case COND_EXPR: arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2)); if (arg0 || arg1) - return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0), + return fold_build3_loc (loc, + COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0), arg0 ? arg0 : TREE_OPERAND (exp, 1), arg1 ? arg1 : TREE_OPERAND (exp, 2)); break; @@ -15989,7 +16354,7 @@ fold_strip_sign_ops (tree exp) /* Strip copysign function call, return the 1st argument. */ arg0 = CALL_EXPR_ARG (exp, 0); arg1 = CALL_EXPR_ARG (exp, 1); - return omit_one_operand (TREE_TYPE (exp), arg0, arg1); + return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1); default: /* Strip sign ops from the argument of "odd" math functions. 
*/ @@ -15997,7 +16362,7 @@ fold_strip_sign_ops (tree exp) { arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0)); if (arg0) - return build_call_expr (get_callee_fndecl (exp), 1, arg0); + return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0); } break; } -- cgit v1.2.1 From 63e8969836b3c9a5e0c843659bfcae2969218999 Mon Sep 17 00:00:00 2001 From: ghazi Date: Fri, 14 Aug 2009 16:44:36 +0000 Subject: PR middle-end/30789 * builtins.c (do_mpc_arg2): Make extern, define for any MPC version. Move declaration... * real.h (do_mpc_arg2): ... here. * fold-const.c (const_binop): Use MPC for complex MULT_EXPR and RDIV_EXPR. testsuite: * gcc.dg/torture/builtin-math-7.c: New. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@150760 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 10 ++++++++++ 1 file changed, 10 insertions(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 803c7a549af..342e3760bdf 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -1972,6 +1972,11 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) break; case MULT_EXPR: +#ifdef HAVE_mpc + if (COMPLEX_FLOAT_TYPE_P (type)) + return do_mpc_arg2 (arg1, arg2, type, mpc_mul); +#endif + real = const_binop (MINUS_EXPR, const_binop (MULT_EXPR, r1, r2, notrunc), const_binop (MULT_EXPR, i1, i2, notrunc), @@ -1983,6 +1988,11 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) break; case RDIV_EXPR: +#ifdef HAVE_mpc + if (COMPLEX_FLOAT_TYPE_P (type)) + return do_mpc_arg2 (arg1, arg2, type, mpc_div); +#endif + { tree magsquared = const_binop (PLUS_EXPR, -- cgit v1.2.1 From e38def9ca7953bb5611d08ce8617249516ba5a99 Mon Sep 17 00:00:00 2001 From: rth Date: Mon, 14 Sep 2009 19:18:58 +0000 Subject: Squash commit of EH in gimple git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@151696 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 4 ---- 1 file changed, 4 deletions(-) (limited to 'gcc/fold-const.c') diff --git 
a/gcc/fold-const.c b/gcc/fold-const.c index 342e3760bdf..1ce0013ef35 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -15224,9 +15224,7 @@ tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p) case ASSERT_EXPR: case ADDR_EXPR: case WITH_SIZE_EXPR: - case EXC_PTR_EXPR: case SSA_NAME: - case FILTER_EXPR: return tree_single_nonnegative_warnv_p (t, strict_overflow_p); default: @@ -15518,9 +15516,7 @@ tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p) case ASSERT_EXPR: case ADDR_EXPR: case WITH_SIZE_EXPR: - case EXC_PTR_EXPR: case SSA_NAME: - case FILTER_EXPR: return tree_single_nonzero_warnv_p (t, strict_overflow_p); case COMPOUND_EXPR: -- cgit v1.2.1 From 652d9409a2480187eb77ca0ae3547b00a7c8d16d Mon Sep 17 00:00:00 2001 From: ghazi Date: Sun, 20 Sep 2009 15:39:22 +0000 Subject: PR middle-end/30789 * builtins.c (do_mpc_arg2): Accept DO_NONFINITE parameter. (do_mpc_ckconv): Accept FORCE_CONVERT parameter. (fold_builtin_2, do_mpc_arg1): Update accordingly. * fold-const.c (const_binop): Likewise. * real.h (do_mpc_arg2): Update prototype. testsuite: * gcc.dg/torture/builtin-math-7.c: Update for testing Annex G cases in static initializers. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@151904 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 1ce0013ef35..34e47c19e79 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -1974,7 +1974,9 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) case MULT_EXPR: #ifdef HAVE_mpc if (COMPLEX_FLOAT_TYPE_P (type)) - return do_mpc_arg2 (arg1, arg2, type, mpc_mul); + return do_mpc_arg2 (arg1, arg2, type, + /* do_nonfinite= */ folding_initializer, + mpc_mul); #endif real = const_binop (MINUS_EXPR, @@ -1990,7 +1992,9 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) case RDIV_EXPR: #ifdef HAVE_mpc if (COMPLEX_FLOAT_TYPE_P (type)) - return do_mpc_arg2 (arg1, arg2, type, mpc_div); + return do_mpc_arg2 (arg1, arg2, type, + /* do_nonfinite= */ folding_initializer, + mpc_div); #endif { -- cgit v1.2.1 From 03a7d9e96c2162143894318f0a6a9f48a8adda71 Mon Sep 17 00:00:00 2001 From: ghazi Date: Thu, 24 Sep 2009 20:44:55 +0000 Subject: PR middle-end/41435 * fold-const.c (const_binop): Handle complex int division. * tree-complex.c (expand_complex_div_straight, expand_complex_div_wide): Update comments. testsuite: * gcc.dg/torture/builtin-math-7.c: Test complex int division at compile-time. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@152145 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 71 +++++++++++++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 68 insertions(+), 3 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 34e47c19e79..d754bee2581 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -1995,9 +1995,22 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) return do_mpc_arg2 (arg1, arg2, type, /* do_nonfinite= */ folding_initializer, mpc_div); + /* Fallthru ... 
*/ #endif + case TRUNC_DIV_EXPR: + case CEIL_DIV_EXPR: + case FLOOR_DIV_EXPR: + case ROUND_DIV_EXPR: + if (flag_complex_method == 0) { + /* Keep this algorithm in sync with + tree-complex.c:expand_complex_div_straight(). + + Expand complex division to scalars, straightforward algorithm. + a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t) + t = br*br + bi*bi + */ tree magsquared = const_binop (PLUS_EXPR, const_binop (MULT_EXPR, r2, r2, notrunc), @@ -2014,12 +2027,64 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) const_binop (MULT_EXPR, r1, i2, notrunc), notrunc); - if (INTEGRAL_TYPE_P (TREE_TYPE (r1))) - code = TRUNC_DIV_EXPR; - real = const_binop (code, t1, magsquared, notrunc); imag = const_binop (code, t2, magsquared, notrunc); } + else + { + /* Keep this algorithm in sync with + tree-complex.c:expand_complex_div_wide(). + + Expand complex division to scalars, modified algorithm to minimize + overflow with wide input ranges. */ + tree inner_type = TREE_TYPE (type); + tree absr2 = fold_build1 (ABS_EXPR, inner_type, r2); + tree absi2 = fold_build1 (ABS_EXPR, inner_type, i2); + tree compare = fold_build2 (LT_EXPR, boolean_type_node, absr2, absi2); + if (integer_nonzerop (compare)) + { + /* In the TRUE branch, we compute + ratio = br/bi; + div = (br * ratio) + bi; + tr = (ar * ratio) + ai; + ti = (ai * ratio) - ar; + tr = tr / div; + ti = ti / div; */ + tree ratio = fold_build2 (code, inner_type, r2, i2); + tree div = fold_build2 (PLUS_EXPR, inner_type, i2, + fold_build2 (MULT_EXPR, inner_type, + r2, ratio)); + real = fold_build2 (MULT_EXPR, inner_type, r1, ratio); + real = fold_build2 (PLUS_EXPR, inner_type, real, i1); + real = fold_build2 (code, inner_type, real, div); + + imag = fold_build2 (MULT_EXPR, inner_type, i1, ratio); + imag = fold_build2 (MINUS_EXPR, inner_type, imag, r1); + imag = fold_build2 (code, inner_type, imag, div); + } + else + { + /* In the FALSE branch, we compute + ratio = d/c; + divisor = (d * ratio) + c; + tr = (b * 
ratio) + a; + ti = b - (a * ratio); + tr = tr / div; + ti = ti / div; */ + tree ratio = fold_build2 (code, inner_type, i2, r2); + tree div = fold_build2 (PLUS_EXPR, inner_type, r2, + fold_build2 (MULT_EXPR, inner_type, + i2, ratio)); + + real = fold_build2 (MULT_EXPR, inner_type, i1, ratio); + real = fold_build2 (PLUS_EXPR, inner_type, real, r1); + real = fold_build2 (code, inner_type, real, div); + + imag = fold_build2 (MULT_EXPR, inner_type, r1, ratio); + imag = fold_build2 (MINUS_EXPR, inner_type, i1, imag); + imag = fold_build2 (code, inner_type, imag, div); + } + } break; default: -- cgit v1.2.1 From d5518ed987a5cdbf23d5c3d9294319f532f6678a Mon Sep 17 00:00:00 2001 From: ebotcazou Date: Sun, 27 Sep 2009 12:36:43 +0000 Subject: * fold-const.c (maybe_lvalue_p): Return false for M(IN|AX)_EXPR. (extract_muldiv_1) : Swap operands if necessary. * stor-layout.c (layout_type) : Do not take the maximum of the length and zero. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@152217 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index d754bee2581..b080d85b471 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -2830,8 +2830,6 @@ maybe_lvalue_p (const_tree x) case TARGET_EXPR: case COND_EXPR: case BIND_EXPR: - case MIN_EXPR: - case MAX_EXPR: break; default: @@ -6512,7 +6510,19 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, /* If this was a subtraction, negate OP1 and set it to be an addition. This simplifies the logic below. */ if (tcode == MINUS_EXPR) - tcode = PLUS_EXPR, op1 = negate_expr (op1); + { + tcode = PLUS_EXPR, op1 = negate_expr (op1); + /* If OP1 was not easily negatable, the constant may be OP0. 
*/ + if (TREE_CODE (op0) == INTEGER_CST) + { + tree tem = op0; + op0 = op1; + op1 = tem; + tem = t1; + t1 = t2; + t2 = tem; + } + } if (TREE_CODE (op1) != INTEGER_CST) break; -- cgit v1.2.1 From a65c4d644bc63c7e9ed0df60898aac5aff8cc60c Mon Sep 17 00:00:00 2001 From: matz Date: Mon, 28 Sep 2009 12:54:23 +0000 Subject: * builtins.c (interclass_mathfn_icode): New helper. (expand_builtin_interclass_mathfn): Use it here, and split folding into ... (fold_builtin_interclass_mathfn): ... this new folder. (build_call_nofold_loc): New static helper. (build_call_nofold): New wrapper macro for above. (expand_builtin_int_roundingfn): Use it instead of build_call_expr. (expand_builtin_pow): Ditto. (expand_builtin_memset_args): Ditto. (expand_builtin_printf): Ditto. (expand_builtin_fprintf): Ditto. (expand_builtin_sprintf): Ditto. (expand_builtin_memory_chk): Ditto. (expand_builtin_mempcpy_args): Ditto and don't call folders. (expand_builtin_stpcpy): Ditto. (expand_builtin_strcmp): Ditto. (expand_builtin_strncmp): Ditto. (expand_builtin_strcpy): Remove FNDECL and MODE arguments. (expand_builtin_strcpy_args): Don't call folders. (expand_builtin_memcmp): Ditto. (expand_builtin_strncpy): Ditto, and use target. (expand_builtin_memcpy): Ditto. (expand_builtin_strstr, expand_builtin_strchr, expand_builtin_strrchr, expand_builtin_strpbrk, expand_builtin_memmove, expand_builtin_memmove_args, expand_builtin_bcopy, expand_builtin_memchr, expand_builtin_strcat, expand_builtin_strncat, expand_builtin_strspn, expand_builtin_strcspn, expand_builtin_fputs): Remove these. (expand_builtin): Don't call the above, change calls to other expanders that changed prototype. (fold_builtin_stpcpy): New folder split out from expand_builtin_stpcpy. (fold_builtin_1 ): Call fold_builtin_interclass_mathfn. (fold_builtin_2 ): Call fold_builtin_stpcpy. (fold_builtin_strcat): Add folding split from expand_builtin_strcat. * fold-const.c (fold_binary_loc ): Add !exp != 0 -> !exp. 
* passes.c (init_optimization_passes): Move pass_fold_builtins after last phiopt pass. * tree-inline.c (fold_marked_statements): When folding builtins iterate over all instruction potentially generated. * tree-ssa-ccp.c (gimplify_and_update_call_from_tree): Declare earlier. (fold_gimple_call): Use it to always fold calls (into potentially multiple instructions). * tree-ssa-dom.c (optimize_stmt): Resolve __builtin_constant_p calls into zero at this time. * tree-ssa-propagate.c (substitute_and_fold): Ignore multiple statements generated by builtin folding. testsuite/ * gcc.dg/builtins-44.c: Use __builtin_isinf_sign when checking for sign of -Inf. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@152236 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index b080d85b471..1539ad21387 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -12614,6 +12614,11 @@ fold_binary_loc (location_t loc, return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, fold_convert_loc (loc, type, arg0)); + /* !exp != 0 becomes !exp */ + if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1) + && code == NE_EXPR) + return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0)); + /* If this is an equality comparison of the address of two non-weak, unaliased symbols neither of which are extern (since we do not have access to attributes for externs), then we know the result. */ -- cgit v1.2.1 From bd1a81f7e1665d2e33cc824dd05dd7988da9f1a8 Mon Sep 17 00:00:00 2001 From: uweigand Date: Mon, 26 Oct 2009 21:55:59 +0000 Subject: 2009-10-26 Ben Elliston Michael Meissner Ulrich Weigand * doc/extend.texi (Named Address Spaces): New section. * coretypes.h (addr_space_t): New type. (ADDR_SPACE_GENERIC): New define. (ADDR_SPACE_GENERIC_P): New macro. * doc/tm.texi (Named Address Spaces): New section. (TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P): Document. 
(TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS): Document. (TARGET_ADDR_SPACE_SUBSET_P): Document. (TARGET_ADDR_SPACE_CONVERT): Document. * target.h (struct gcc_target): Add addr_space substructure. * target-def.h (TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P): Define. (TARGET_ADDR_SPACE_LEGITIMIZE_ADDRESS): Likewise. (TARGET_ADDR_SPACE_SUBSET_P): Likewise. (TARGET_ADDR_SPACE_CONVERT): Likewise. (TARGET_ADDR_SPACE_HOOKS): Likewise. (TARGET_INITIALIZER): Initialize addr_space hooks. * targhooks.c (default_addr_space_legitimate_address_p): New function. (default_addr_space_legitimize_address): Likewise. (default_addr_space_subset_p): Likewise. (default_addr_space_convert): Likewise. * targhooks.h (default_addr_space_legitimate_address_p): Add prototype. (default_addr_space_legitimize_address): Likewise. (default_addr_space_subset_p): Likewise. (default_addr_space_convert): Likewise. * doc/rtl.texi (MEM_ADDR_SPACE): Document. * rtl.h (mem_attrs): Add ADDRSPACE memory attribute. (MEM_ADDR_SPACE): New macro. * emit-rtl.c (get_mem_attrs): Add ADDRSPACE argument and set address space memory attribute. (mem_attrs_htab_hash): Handle address space memory attribute. (mem_attrs_htab_eq): Likewise. (set_mem_attributes_minus_bitpos): Likewise. (set_mem_alias_set): Likewise. (set_mem_align): Likewise. (set_mem_expr): Likewise. (set_mem_offset): Likewise. (set_mem_size): Likewise. (adjust_address_1): Likewise. (offset_address): Likewise. (widen_memoy_address): Likewise. (get_spill_slot_decl): Likewise. (set_mem_attrs_for_spill): Likewise. (set_mem_addr_space): New function. * emit-rtl.h (set_mem_addr_space): Add prototype. * print-rtl.c (print_rtx): Print address space memory attribute. * expr.c (expand_assignment): Set address space memory attribute of generated MEM RTXes as appropriate. (expand_expr_real_1): Likewise. * cfgexpand.c (expand_debug_expr): Likewise. * tree-ssa-loop-ivopts.c (produce_memory_decl_rtl): Likewise. * tree.h (struct tree_base): Add address_space bitfield. 
Reduce size of "spare" bitfield. (TYPE_ADDR_SPACE): New macro. (ENCODE_QUAL_ADDR_SPACE): Likewise. (DECODE_QUAL_ADDR_SPACE): Likewise. (CLEAR_QUAL_ADDR_SPACE): Likewise. (KEEP_QUAL_ADDR_SPACE): Likewise. (TYPE_QUALS): Encode type address space. (TYPE_QUALS_NO_ADDR_SPACE): New macro. * tree.c (set_type_quals): Set type address space. (build_array_type): Inherit array address space from element type. * print-tree.c (print_node_brief): Print type address space. (print_node): Likewise. * tree-pretty-print.c (dump_generic_node): Likewise. * explow.c (memory_address): Rename to ... (memory_address_addr_space): ... this. Add ADDRSPACE argument. Use address-space aware variants of memory address routines. * recog.c (memory_address_p): Rename to ... (memory_address_addr_space_p): ... this. Add ADDSPACE argument. Use address-space aware variants of memory address routines. (offsettable_address_p): Rename to ... (offsettable_address_addr_space_p): ... this. Add ADDRSPACE argument. Use address-space aware variants of memory address routines. * reload.c (strict_memory_address_p): Rename to ... (strict_memory_address_addr_space_p): ... this. Add ADDSPACE argument. Use address-space aware variants of memory address routines. (maybe_memory_address_p): Rename to ... (maybe_memory_address_addr_space_p): ... this. Add ADDSPACE argument. Use address-space aware variants of memory address routines. * expr.h (memory_address_addr_space): Add prototype. (memory_address): Define as macro. * recog.h (memory_address_addr_space_p): Add prototype. (memory_address_p): Define as macro. (offsettable_address_addr_space_p): Add prototype. (offsettable_address_p): Define as macro. (strict_memory_address_addr_space_p): Add prototype. (strict_memory_address_p): Define as macro. * combine.c (find_split_point): Use address-space aware variants of memory address routines. * emit-rtl.c (operand_subword): Likewise. (change_address_1): Likewise. (adjust_address_1): Likewise. (offset_address): Likewise. 
* expr.c (emit_move_insn): Likewise. (expand_assignment): Likewise. (expand_expr_real_1): Likewise. * recog.c (verify_changes): Likewise. (general_operand): Likewise. (offsettable_memref_p): Likewise. (offsettable_nonstrict_memref_p): Likewise. (constrain_operands): Likewise. * reload.c (get_secondary_mem): Likewise. (find_reloads_toplev): Likewise. (find_reloads_address): Likewise. (find_reloads_subreg_address): Likewise. * reload1.c (reload): Likewise. * rtlhooks.c (gen_lowpart_if_possible): Likewise. * rtl.h (address_cost): Add ADDRSPACE argument. * rtlanal.c (address_cost): Add ADDRSPACE argument. Use address-space aware variant of memory address routines. * loop-invariant.c (create_new_invariant): Update address_cost call. * tree-ssa-loop-ivopts.c (computation_cost): Likewise. * fwprop.c (should_replace_address): Add ADDRSPACE argument. Use address-space aware variant of memory address routines. (propagate_rtx_1): Update call to should_replace_address. * tree-flow.h (multiplier_allowed_in_address_p): Add ADDRSPACE argument. * tree-ssa-loop-ivopts.c (multiplier_allowed_in_address_p): Add ADDRSPACE argument. Use per-address-space instead of global cache. Use address-space aware variant of memory address routines. (get_address_cost): Likewise. (get_computation_cost_at): Update calls. * tree-ssa-address.c (valid_mem_ref_p): Add ADDRSPACE argument. Use address-space aware variant of memory address routines. (create_mem_ref_raw): Update call to valid_mem_ref_p. (most_expensive_mult_to_index): Update call to multiplier_allowed_in_address_p. * dwarf2out.c (modified_type_die): Output DW_AT_address_class attribute to indicate named address spaces. * varasm.c (get_variable_section): DECLs in named address spaces cannot be "common". * reload.c (find_reloads_address): Do not use LEGITIMIZE_RELOAD_ADDRESS for addresses in a non-generic address space. * expr.c (emit_block_move_hints): Do not use libcalls for memory in non-generic address spaces. 
(clear_storage_hints): Likewise. (expand_assignment): Likewise. * fold-const.c (operand_equal_p): Expressions refering to different address spaces are not equivalent. * rtl.c (rtx_equal_p_cb): MEMs refering to different address spaces are not equivalent. (rtx_equal_p): Likewise. * cse.c (exp_equiv_p): Likewise. * jump.c (rtx_renumbered_equal_p): Likewise. * reload.c (operands_match_p): Likewise. * alias.c (nonoverlapping_memrefs_p): MEMs refering to different address spaces may alias. (true_dependence): Likewise. (canon_true_dependence): Likewise. (write_dependence_p): Likewise. * dse.c (canon_address): Handle named address spaces. * ifcvt.c (noce_try_cmove_arith): Likewise. * tree.def (ADDR_SPACE_CONVERT_EXPR): New tree code. * expr.c (expand_expr_real_2): Expand ADDR_SPACE_CONVERT_EXPR. * convert.c (convert_to_pointer): Generate ADDR_SPACE_CONVERT_EXPR to handle conversions between different address spaces. * fold-const.c (fold_convert_loc): Likewise. (fold_unary_loc): Handle ADDR_SPACE_CONVERT_EXPR. * tree-pretty-print.c (dump_generic_node): Likewise. * gimple-pretty-print.c (dump_unary_rhs): Likewise. * tree-cfg.c (verify_gimple_assign_unary): Likewise. * tree-inline.c (estimate_operator_cost): Likewise. * tree-ssa.c (useless_type_conversion_p): Conversions between pointers to different address spaces are not useless. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@153572 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 1539ad21387..aab4fac9edb 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -2647,8 +2647,16 @@ fold_convert_loc (location_t loc, tree type, tree arg) switch (TREE_CODE (type)) { + case POINTER_TYPE: + case REFERENCE_TYPE: + /* Handle conversions between pointers to different address spaces. 
*/ + if (POINTER_TYPE_P (orig) + && (TYPE_ADDR_SPACE (TREE_TYPE (type)) + != TYPE_ADDR_SPACE (TREE_TYPE (orig)))) + return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg); + /* fall through */ + case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: - case POINTER_TYPE: case REFERENCE_TYPE: case OFFSET_TYPE: if (TREE_CODE (arg) == INTEGER_CST) { @@ -3179,6 +3187,12 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1))) return 0; + /* We cannot consider pointers to different address space equal. */ + if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1)) + && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))) + != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1))))) + return 0; + /* If both types don't have the same precision, then it is not safe to strip NOPs. */ if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1))) @@ -8682,6 +8696,11 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) tem = fold_convert_const (code, type, op0); return tem ? tem : NULL_TREE; + case ADDR_SPACE_CONVERT_EXPR: + if (integer_zerop (arg0)) + return fold_convert_const (code, type, arg0); + return NULL_TREE; + case FIXED_CONVERT_EXPR: tem = fold_convert_const (code, type, arg0); return tem ? tem : NULL_TREE; -- cgit v1.2.1 From 98155838dbd82b97bb7bb16dfcbf98fa2ab27ca9 Mon Sep 17 00:00:00 2001 From: uweigand Date: Mon, 26 Oct 2009 21:57:10 +0000 Subject: 2009-10-26 Ben Elliston Michael Meissner Ulrich Weigand * doc/tm.texi (TARGET_ADDR_SPACE_POINTER_MODE): Document. (TARGET_ADDR_SPACE_ADDRESS_MODE): Likewise. (TARGET_ADDR_SPACE_VALID_POINTER_MODE): Likewise. * target.h (struct target_def): Add pointer_mode, address_mode, and valid_pointer_mode to addr_space substructure. * target-def.h (TARGET_ADDR_SPACE_POINTER_MODE): Define. (TARGET_ADDR_SPACE_ADDRESS_MODE): Likewise. (TARGET_ADDR_SPACE_VALID_POINTER_MODE): Likewise. 
(TARGET_ADDR_SPACE_HOOKS): Add them. * targhooks.c (target_default_pointer_address_modes_p): New function. * target.h (target_default_pointer_address_modes_p): Add prototype. * targhooks.c (default_addr_space_pointer_mode): New function. (default_addr_space_address_mode): Likewise. (default_addr_space_valid_pointer_mode): Likewise. * targhooks.h (default_addr_space_pointer_mode): Add prototype. (default_addr_space_address_mode): Likewise. (default_addr_space_valid_pointer_mode): Likewise. * output.h (default_valid_pointer_mode): Move to ... * targhooks.h (default_valid_pointer_mode): ... here. * varasm.c (default_valid_pointer_mode): Move to ... * targhooks.c (default_valid_pointer_mode): ... here. * varasm.c (output_constant): Use targetm.addr_space.valid_pointer_mode instead of targetm.valid_pointer_mode. * fold-const.c (fit_double_type): Use int_or_pointer_precision. * tree.c (integer_pow2p): Likewise. (tree_log2): Likewise. (tree_floor_log2): Likewise. (signed_or_unsigned_type_for): Support pointer type of different size. (int_or_pointer_precision): New function. * tree.h (int_or_pointer_precision): Add prototype. * stor-layout.c (layout_type): Set TYPE_PRECISION for offset types. * varasm.c (initializer_constant_valid_p): Use TYPE_PRECISION of incoming pointer type instead of POINTER_SIZE. * tree.c (build_pointer_type): Use appropriate pointer mode instead of ptr_mode. (build_reference_type): Likewise. * expr.c (store_expr): Likewise. (expand_expr_addr_expr): Likewise. * tree-vect-data-refs.c (vect_create_data_ref_ptr): Likewise. * cfgexpand.c (expand_debug_expr): Likewise. * auto-inc-dec.c: Include "target.h". (try_merge): Use appropriate address mode instead of Pmode. (find_inc): Likewise. * combine.c (find_split_point): Likewise. * cselib.c (cselib_record_sets): Likewise. * dse.c (replace_inc_dec): Likewise. (canon_address): Likewise. * var-tracking.c (replace_expr_with_values): Likewise. (count_uses): Likewise. (add_uses): Likewise. (add_stores): Likewise. 
* emit-rtl.c: Include "target.h". (adjust_address_1): Use appropriate address mode instead of Pmode. (offset_address): Likewise. * explow.c (break_out_memory_refs): Likewise. (memory_address_addr_space): Likewise. (promote_mode): Likewise. * expr.c (move_by_pieces): Likewise. (emit_block_move_via_loop): Likewise. (store_by_pieces): Likewise. (store_by_pieces_1): Likewise. (expand_assignment): Likewise. (store_constructor): Likewise. (expand_expr_addr_expr): Likewise. (expand_expr_real_1): Likewise. * cfgexpand.c (expand_debug_expr): Likewise. * ifcvt.c (noce_try_cmove_arith): Likewise. * regcprop.c (kill_autoinc_value): Likewise. * regmove.c (try_auto_increment): Likewise. * reload.c (find_reloads): Likewise. (find_reloads_address): Likewise. (find_reloads_address_1): Likewise. * sched-deps.c: Include "target.h". (sched_analyze_1): Use appropriate address mode instead of Pmode. (sched_analyze_2): Likewise. * sel-sched-dump.c: Include "target.h". (debug_mem_addr_value): Use appropriate address mode instead of Pmode. * stor-layout.c (layout_type): Likewise. * tree-ssa-loop-ivopts.c (produce_memory_decl_rtl): Likewise. (multiplier_allowed_in_address_p): Likewise. (get_address_cost): Likewise. * varasm.c (make_decl_rtl): Likewise. * expr.c (expand_assignment): Always convert offsets to appropriate address mode. (store_expr): Likewise. (store_constructor): Likewise. (expand_expr_real_1): Likewise. * reload.h (form_sum): Add MODE argument. * reload.c (form_sum): Add MODE argument, use it instead of Pmode. Update recursive calls. (subst_indexed_address): Update calls to form_sum. * tree-flow.h (addr_for_mem_ref): Add ADDRSPACE argument. * tree-ssa-address.c: Include "target.h". (templates): Replace by ... (mem_addr_template_list): ... this new vector. (TEMPL_IDX): Handle address space numbers. (gen_addr_rtx): Add address mode argument, use it instead of Pmode. (addr_for_mem_ref): Add ADDRSPACE argument. Use per-address-space instead of global cache. 
Update call to gen_addr_rtx. (valid_mem_ref_p): Update call to addr_for_mem_ref. * expr.c (expand_expr_real_1): Update call to addr_for_mem_ref. * rtl.h (convert_memory_address_addr_space): Add prototype. (convert_memory_address): Define as macro. * explow.c (convert_memory_address): Rename to ... (convert_memory_address_addr_space): ... this. Add ADDRSPACE argument. Use appropriate pointer and address modes instead of ptr_mode / Pmode. Update recursive calls. (memory_address_addr_space): Call convert_memory_address_addr_space. * expmed.c (make_tree): Likewise. * expr.c (expand_assignment): Likewise. (expand_expr_addr_expr_1): Likewise. Also, add ADDRSPACE argument. (expand_expr_addr_expr): Likewise. Also, update call. * alias.c (find_base_value): Guard pointer size optimizations. (find_base_term): Likewise. * rtlanal.c (nonzero_bits1): Likewise. (num_sign_bit_copies1): Likewise. * simplify-rtx.c (simplify_unary_operation_1): Likewise. * Makefile.in (tree-ssa-address.o): Add $(TARGET_H) dependency. (emit-rtl.o): Likewise. (auto-inc-dec.o): Likewise. (sched-deps.o): Likewise. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@153573 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index aab4fac9edb..102929d1e20 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -206,15 +206,9 @@ fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, { unsigned HOST_WIDE_INT low0 = l1; HOST_WIDE_INT high0 = h1; - unsigned int prec; + unsigned int prec = int_or_pointer_precision (type); int sign_extended_type; - if (POINTER_TYPE_P (type) - || TREE_CODE (type) == OFFSET_TYPE) - prec = POINTER_SIZE; - else - prec = TYPE_PRECISION (type); - /* Size types *are* sign extended. 
*/ sign_extended_type = (!TYPE_UNSIGNED (type) || (TREE_CODE (type) == INTEGER_TYPE -- cgit v1.2.1 From 27cbd062e0179c0394b5f932ac561714930c7c76 Mon Sep 17 00:00:00 2001 From: aldyh Date: Tue, 27 Oct 2009 11:18:12 +0000 Subject: PR bootstrap/41451 * fold-const.c (fold_binary_loc): Do not call protected_set_expr_location. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@153588 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2 -- 1 file changed, 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 102929d1e20..3403938edc3 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -10147,7 +10147,6 @@ fold_binary_loc (location_t loc, tem = fold_build2_loc (loc, code, type, fold_convert_loc (loc, TREE_TYPE (op0), TREE_OPERAND (arg0, 1)), op1); - protected_set_expr_location (tem, loc); tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem); goto fold_binary_exit; } @@ -10157,7 +10156,6 @@ fold_binary_loc (location_t loc, tem = fold_build2_loc (loc, code, type, op0, fold_convert_loc (loc, TREE_TYPE (op1), TREE_OPERAND (arg1, 1))); - protected_set_expr_location (tem, loc); tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem); goto fold_binary_exit; } -- cgit v1.2.1 From f7d5c4ddf4e3ee0b8decf55b8043e40d327d9fcb Mon Sep 17 00:00:00 2001 From: ghazi Date: Tue, 10 Nov 2009 16:16:57 +0000 Subject: PR tree-optimization/41987 * fold-const.c (const_binop): Avoid using fold_buildN(). testsuite: * gcc.c-torture/compile/pr41987.c: New. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@154065 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 56 +++++++++++++++++++++++++++++--------------------------- 1 file changed, 29 insertions(+), 27 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 3403938edc3..c6b420bfe88 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -2031,10 +2031,10 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) Expand complex division to scalars, modified algorithm to minimize overflow with wide input ranges. */ - tree inner_type = TREE_TYPE (type); - tree absr2 = fold_build1 (ABS_EXPR, inner_type, r2); - tree absi2 = fold_build1 (ABS_EXPR, inner_type, i2); - tree compare = fold_build2 (LT_EXPR, boolean_type_node, absr2, absi2); + tree compare = fold_build2 (LT_EXPR, boolean_type_node, + fold_abs_const (r2, TREE_TYPE (type)), + fold_abs_const (i2, TREE_TYPE (type))); + if (integer_nonzerop (compare)) { /* In the TRUE branch, we compute @@ -2044,17 +2044,18 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) ti = (ai * ratio) - ar; tr = tr / div; ti = ti / div; */ - tree ratio = fold_build2 (code, inner_type, r2, i2); - tree div = fold_build2 (PLUS_EXPR, inner_type, i2, - fold_build2 (MULT_EXPR, inner_type, - r2, ratio)); - real = fold_build2 (MULT_EXPR, inner_type, r1, ratio); - real = fold_build2 (PLUS_EXPR, inner_type, real, i1); - real = fold_build2 (code, inner_type, real, div); - - imag = fold_build2 (MULT_EXPR, inner_type, i1, ratio); - imag = fold_build2 (MINUS_EXPR, inner_type, imag, r1); - imag = fold_build2 (code, inner_type, imag, div); + tree ratio = const_binop (code, r2, i2, notrunc); + tree div = const_binop (PLUS_EXPR, i2, + const_binop (MULT_EXPR, r2, ratio, + notrunc), + notrunc); + real = const_binop (MULT_EXPR, r1, ratio, notrunc); + real = const_binop (PLUS_EXPR, real, i1, notrunc); + real = const_binop (code, real, div, notrunc); + + imag = const_binop 
(MULT_EXPR, i1, ratio, notrunc); + imag = const_binop (MINUS_EXPR, imag, r1, notrunc); + imag = const_binop (code, imag, div, notrunc); } else { @@ -2065,18 +2066,19 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) ti = b - (a * ratio); tr = tr / div; ti = ti / div; */ - tree ratio = fold_build2 (code, inner_type, i2, r2); - tree div = fold_build2 (PLUS_EXPR, inner_type, r2, - fold_build2 (MULT_EXPR, inner_type, - i2, ratio)); - - real = fold_build2 (MULT_EXPR, inner_type, i1, ratio); - real = fold_build2 (PLUS_EXPR, inner_type, real, r1); - real = fold_build2 (code, inner_type, real, div); - - imag = fold_build2 (MULT_EXPR, inner_type, r1, ratio); - imag = fold_build2 (MINUS_EXPR, inner_type, i1, imag); - imag = fold_build2 (code, inner_type, imag, div); + tree ratio = const_binop (code, i2, r2, notrunc); + tree div = const_binop (PLUS_EXPR, r2, + const_binop (MULT_EXPR, i2, ratio, + notrunc), + notrunc); + + real = const_binop (MULT_EXPR, i1, ratio, notrunc); + real = const_binop (PLUS_EXPR, real, r1, notrunc); + real = const_binop (code, real, div, notrunc); + + imag = const_binop (MULT_EXPR, r1, ratio, notrunc); + imag = const_binop (MINUS_EXPR, i1, imag, notrunc); + imag = const_binop (code, imag, div, notrunc); } } break; -- cgit v1.2.1 From 48e1416a24d50cacbb2a5e06a9ee61dd8cbee313 Mon Sep 17 00:00:00 2001 From: hjl Date: Wed, 25 Nov 2009 10:55:54 +0000 Subject: Remove trailing white spaces. 2009-11-25 H.J. Lu * alias.c: Remove trailing white spaces. * alloc-pool.c: Likewise. * alloc-pool.h: Likewise. * attribs.c: Likewise. * auto-inc-dec.c: Likewise. * basic-block.h: Likewise. * bb-reorder.c: Likewise. * bt-load.c: Likewise. * builtins.c: Likewise. * builtins.def: Likewise. * c-common.c: Likewise. * c-common.h: Likewise. * c-cppbuiltin.c: Likewise. * c-decl.c: Likewise. * c-format.c: Likewise. * c-lex.c: Likewise. * c-omp.c: Likewise. * c-opts.c: Likewise. * c-parser.c: Likewise. * c-pretty-print.c: Likewise. * c-tree.h: Likewise. 
* c-typeck.c: Likewise. * caller-save.c: Likewise. * calls.c: Likewise. * cfg.c: Likewise. * cfganal.c: Likewise. * cfgexpand.c: Likewise. * cfghooks.c: Likewise. * cfghooks.h: Likewise. * cfglayout.c: Likewise. * cfgloop.c: Likewise. * cfgloop.h: Likewise. * cfgloopmanip.c: Likewise. * cfgrtl.c: Likewise. * cgraph.c: Likewise. * cgraph.h: Likewise. * cgraphbuild.c: Likewise. * cgraphunit.c: Likewise. * cif-code.def: Likewise. * collect2.c: Likewise. * combine.c: Likewise. * convert.c: Likewise. * coverage.c: Likewise. * crtstuff.c: Likewise. * cse.c: Likewise. * cselib.c: Likewise. * dbgcnt.c: Likewise. * dbgcnt.def: Likewise. * dbgcnt.h: Likewise. * dbxout.c: Likewise. * dce.c: Likewise. * ddg.c: Likewise. * ddg.h: Likewise. * defaults.h: Likewise. * df-byte-scan.c: Likewise. * df-core.c: Likewise. * df-problems.c: Likewise. * df-scan.c: Likewise. * df.h: Likewise. * dfp.c: Likewise. * diagnostic.c: Likewise. * diagnostic.h: Likewise. * dominance.c: Likewise. * domwalk.c: Likewise. * double-int.c: Likewise. * double-int.h: Likewise. * dse.c: Likewise. * dwarf2asm.c: Likewise. * dwarf2asm.h: Likewise. * dwarf2out.c: Likewise. * ebitmap.c: Likewise. * ebitmap.h: Likewise. * emit-rtl.c: Likewise. * et-forest.c: Likewise. * except.c: Likewise. * except.h: Likewise. * expmed.c: Likewise. * expr.c: Likewise. * expr.h: Likewise. * final.c: Likewise. * flags.h: Likewise. * fold-const.c: Likewise. * function.c: Likewise. * function.h: Likewise. * fwprop.c: Likewise. * gcc.c: Likewise. * gcov-dump.c: Likewise. * gcov-io.c: Likewise. * gcov-io.h: Likewise. * gcov.c: Likewise. * gcse.c: Likewise. * genattr.c: Likewise. * genattrtab.c: Likewise. * genautomata.c: Likewise. * genchecksum.c: Likewise. * genconfig.c: Likewise. * genflags.c: Likewise. * gengtype-parse.c: Likewise. * gengtype.c: Likewise. * gengtype.h: Likewise. * genmddeps.c: Likewise. * genmodes.c: Likewise. * genopinit.c: Likewise. * genpreds.c: Likewise. * gensupport.c: Likewise. * ggc-common.c: Likewise. 
* ggc-page.c: Likewise. * ggc-zone.c: Likewise. * ggc.h: Likewise. * gimple-iterator.c: Likewise. * gimple-low.c: Likewise. * gimple-pretty-print.c: Likewise. * gimple.c: Likewise. * gimple.def: Likewise. * gimple.h: Likewise. * gimplify.c: Likewise. * graphds.c: Likewise. * graphite-clast-to-gimple.c: Likewise. * gthr-nks.h: Likewise. * gthr-posix.c: Likewise. * gthr-posix.h: Likewise. * gthr-posix95.h: Likewise. * gthr-single.h: Likewise. * gthr-tpf.h: Likewise. * gthr-vxworks.h: Likewise. * gthr.h: Likewise. * haifa-sched.c: Likewise. * hard-reg-set.h: Likewise. * hooks.c: Likewise. * hooks.h: Likewise. * hosthooks.h: Likewise. * hwint.h: Likewise. * ifcvt.c: Likewise. * incpath.c: Likewise. * init-regs.c: Likewise. * integrate.c: Likewise. * ipa-cp.c: Likewise. * ipa-inline.c: Likewise. * ipa-prop.c: Likewise. * ipa-pure-const.c: Likewise. * ipa-reference.c: Likewise. * ipa-struct-reorg.c: Likewise. * ipa-struct-reorg.h: Likewise. * ipa-type-escape.c: Likewise. * ipa-type-escape.h: Likewise. * ipa-utils.c: Likewise. * ipa-utils.h: Likewise. * ipa.c: Likewise. * ira-build.c: Likewise. * ira-color.c: Likewise. * ira-conflicts.c: Likewise. * ira-costs.c: Likewise. * ira-emit.c: Likewise. * ira-int.h: Likewise. * ira-lives.c: Likewise. * ira.c: Likewise. * jump.c: Likewise. * lambda-code.c: Likewise. * lambda-mat.c: Likewise. * lambda-trans.c: Likewise. * lambda.h: Likewise. * langhooks.c: Likewise. * lcm.c: Likewise. * libgcov.c: Likewise. * lists.c: Likewise. * loop-doloop.c: Likewise. * loop-init.c: Likewise. * loop-invariant.c: Likewise. * loop-iv.c: Likewise. * loop-unroll.c: Likewise. * lower-subreg.c: Likewise. * lto-cgraph.c: Likewise. * lto-compress.c: Likewise. * lto-opts.c: Likewise. * lto-section-in.c: Likewise. * lto-section-out.c: Likewise. * lto-streamer-in.c: Likewise. * lto-streamer-out.c: Likewise. * lto-streamer.c: Likewise. * lto-streamer.h: Likewise. * lto-symtab.c: Likewise. * lto-wpa-fixup.c: Likewise. * matrix-reorg.c: Likewise. 
* mcf.c: Likewise. * mode-switching.c: Likewise. * modulo-sched.c: Likewise. * omega.c: Likewise. * omega.h: Likewise. * omp-low.c: Likewise. * optabs.c: Likewise. * optabs.h: Likewise. * opts-common.c: Likewise. * opts.c: Likewise. * params.def: Likewise. * params.h: Likewise. * passes.c: Likewise. * plugin.c: Likewise. * postreload-gcse.c: Likewise. * postreload.c: Likewise. * predict.c: Likewise. * predict.def: Likewise. * pretty-print.c: Likewise. * pretty-print.h: Likewise. * print-rtl.c: Likewise. * print-tree.c: Likewise. * profile.c: Likewise. * read-rtl.c: Likewise. * real.c: Likewise. * recog.c: Likewise. * reg-stack.c: Likewise. * regcprop.c: Likewise. * reginfo.c: Likewise. * regmove.c: Likewise. * regrename.c: Likewise. * regs.h: Likewise. * regstat.c: Likewise. * reload.c: Likewise. * reload1.c: Likewise. * resource.c: Likewise. * rtl.c: Likewise. * rtl.def: Likewise. * rtl.h: Likewise. * rtlanal.c: Likewise. * sbitmap.c: Likewise. * sched-deps.c: Likewise. * sched-ebb.c: Likewise. * sched-int.h: Likewise. * sched-rgn.c: Likewise. * sched-vis.c: Likewise. * sdbout.c: Likewise. * sel-sched-dump.c: Likewise. * sel-sched-dump.h: Likewise. * sel-sched-ir.c: Likewise. * sel-sched-ir.h: Likewise. * sel-sched.c: Likewise. * sel-sched.h: Likewise. * sese.c: Likewise. * sese.h: Likewise. * simplify-rtx.c: Likewise. * stack-ptr-mod.c: Likewise. * stmt.c: Likewise. * stor-layout.c: Likewise. * store-motion.c: Likewise. * stringpool.c: Likewise. * stub-objc.c: Likewise. * sync-builtins.def: Likewise. * target-def.h: Likewise. * target.h: Likewise. * targhooks.c: Likewise. * targhooks.h: Likewise. * timevar.c: Likewise. * tlink.c: Likewise. * toplev.c: Likewise. * toplev.h: Likewise. * tracer.c: Likewise. * tree-affine.c: Likewise. * tree-affine.h: Likewise. * tree-browser.def: Likewise. * tree-call-cdce.c: Likewise. * tree-cfg.c: Likewise. * tree-cfgcleanup.c: Likewise. * tree-chrec.c: Likewise. * tree-chrec.h: Likewise. * tree-complex.c: Likewise. 
* tree-data-ref.c: Likewise. * tree-data-ref.h: Likewise. * tree-dfa.c: Likewise. * tree-dump.c: Likewise. * tree-dump.h: Likewise. * tree-eh.c: Likewise. * tree-flow-inline.h: Likewise. * tree-flow.h: Likewise. * tree-if-conv.c: Likewise. * tree-inline.c: Likewise. * tree-into-ssa.c: Likewise. * tree-loop-distribution.c: Likewise. * tree-loop-linear.c: Likewise. * tree-mudflap.c: Likewise. * tree-nested.c: Likewise. * tree-nomudflap.c: Likewise. * tree-nrv.c: Likewise. * tree-object-size.c: Likewise. * tree-optimize.c: Likewise. * tree-outof-ssa.c: Likewise. * tree-parloops.c: Likewise. * tree-pass.h: Likewise. * tree-phinodes.c: Likewise. * tree-predcom.c: Likewise. * tree-pretty-print.c: Likewise. * tree-profile.c: Likewise. * tree-scalar-evolution.c: Likewise. * tree-ssa-address.c: Likewise. * tree-ssa-alias.c: Likewise. * tree-ssa-ccp.c: Likewise. * tree-ssa-coalesce.c: Likewise. * tree-ssa-copy.c: Likewise. * tree-ssa-copyrename.c: Likewise. * tree-ssa-dce.c: Likewise. * tree-ssa-dom.c: Likewise. * tree-ssa-dse.c: Likewise. * tree-ssa-forwprop.c: Likewise. * tree-ssa-ifcombine.c: Likewise. * tree-ssa-live.c: Likewise. * tree-ssa-live.h: Likewise. * tree-ssa-loop-ch.c: Likewise. * tree-ssa-loop-im.c: Likewise. * tree-ssa-loop-ivcanon.c: Likewise. * tree-ssa-loop-ivopts.c: Likewise. * tree-ssa-loop-manip.c: Likewise. * tree-ssa-loop-niter.c: Likewise. * tree-ssa-loop-prefetch.c: Likewise. * tree-ssa-loop-unswitch.c: Likewise. * tree-ssa-loop.c: Likewise. * tree-ssa-math-opts.c: Likewise. * tree-ssa-operands.c: Likewise. * tree-ssa-operands.h: Likewise. * tree-ssa-phiopt.c: Likewise. * tree-ssa-phiprop.c: Likewise. * tree-ssa-pre.c: Likewise. * tree-ssa-propagate.c: Likewise. * tree-ssa-reassoc.c: Likewise. * tree-ssa-sccvn.c: Likewise. * tree-ssa-sink.c: Likewise. * tree-ssa-structalias.c: Likewise. * tree-ssa-ter.c: Likewise. * tree-ssa-threadedge.c: Likewise. * tree-ssa-threadupdate.c: Likewise. * tree-ssa-uncprop.c: Likewise. * tree-ssa.c: Likewise. 
* tree-ssanames.c: Likewise. * tree-switch-conversion.c: Likewise. * tree-tailcall.c: Likewise. * tree-vect-data-refs.c: Likewise. * tree-vect-generic.c: Likewise. * tree-vect-loop-manip.c: Likewise. * tree-vect-loop.c: Likewise. * tree-vect-patterns.c: Likewise. * tree-vect-slp.c: Likewise. * tree-vect-stmts.c: Likewise. * tree-vectorizer.c: Likewise. * tree-vectorizer.h: Likewise. * tree-vrp.c: Likewise. * tree.c: Likewise. * tree.def: Likewise. * tree.h: Likewise. * treestruct.def: Likewise. * unwind-compat.c: Likewise. * unwind-dw2-fde-glibc.c: Likewise. * unwind-dw2.c: Likewise. * value-prof.c: Likewise. * value-prof.h: Likewise. * var-tracking.c: Likewise. * varasm.c: Likewise. * varpool.c: Likewise. * vec.c: Likewise. * vec.h: Likewise. * vmsdbgout.c: Likewise. * web.c: Likewise. * xcoffout.c: Likewise. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@154645 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 114 +++++++++++++++++++++++++++---------------------------- 1 file changed, 57 insertions(+), 57 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index c6b420bfe88..40a580e77a6 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -1064,7 +1064,7 @@ negate_mathfn_p (enum built_in_function code) CASE_FLT_FN (BUILT_IN_NEARBYINT): CASE_FLT_FN (BUILT_IN_RINT): return !flag_rounding_math; - + default: break; } @@ -1244,7 +1244,7 @@ fold_negate_expr (location_t loc, tree t) return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0), build_int_cst (type, 1)); break; - + case INTEGER_CST: tem = fold_negate_const (t, type); if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t) @@ -1282,7 +1282,7 @@ fold_negate_expr (location_t loc, tree t) fold_negate_expr (loc, TREE_OPERAND (t, 0)), fold_negate_expr (loc, TREE_OPERAND (t, 1))); break; - + case CONJ_EXPR: if (negate_expr_p (t)) return fold_build1_loc (loc, CONJ_EXPR, type, @@ -2034,7 +2034,7 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int 
notrunc) tree compare = fold_build2 (LT_EXPR, boolean_type_node, fold_abs_const (r2, TREE_TYPE (type)), fold_abs_const (i2, TREE_TYPE (type))); - + if (integer_nonzerop (compare)) { /* In the TRUE branch, we compute @@ -2096,17 +2096,17 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) tree type = TREE_TYPE(arg1); int count = TYPE_VECTOR_SUBPARTS (type), i; tree elements1, elements2, list = NULL_TREE; - + if(TREE_CODE(arg2) != VECTOR_CST) return NULL_TREE; - + elements1 = TREE_VECTOR_CST_ELTS (arg1); elements2 = TREE_VECTOR_CST_ELTS (arg2); for (i = 0; i < count; i++) { tree elem1, elem2, elem; - + /* The trailing elements can be empty and should be treated as 0 */ if(!elements1) elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); @@ -2114,8 +2114,8 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) { elem1 = TREE_VALUE(elements1); elements1 = TREE_CHAIN (elements1); - } - + } + if(!elements2) elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); else @@ -2123,17 +2123,17 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) elem2 = TREE_VALUE(elements2); elements2 = TREE_CHAIN (elements2); } - + elem = const_binop (code, elem1, elem2, notrunc); - + /* It is possible that const_binop cannot handle the given code and return NULL_TREE */ if(elem == NULL_TREE) return NULL_TREE; - + list = tree_cons (NULL_TREE, elem, list); } - return build_vector(type, nreverse(list)); + return build_vector(type, nreverse(list)); } return NULL_TREE; } @@ -2573,7 +2573,7 @@ build_zero_vector (tree type) elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node); units = TYPE_VECTOR_SUBPARTS (type); - + list = NULL_TREE; for (i = 0; i < units; i++) list = tree_cons (NULL_TREE, elem, list); @@ -3250,7 +3250,7 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) TREE_REAL_CST (arg1))) return 1; - + if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE 
(arg0)))) { /* If we do not distinguish between signed and unsigned zero, @@ -3409,7 +3409,7 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) case COND_EXPR: return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); - + default: return 0; } @@ -4126,7 +4126,7 @@ make_bit_field_ref (location_t loc, tree inner, tree type, tree size = TYPE_SIZE (TREE_TYPE (inner)); if ((INTEGRAL_TYPE_P (TREE_TYPE (inner)) || POINTER_TYPE_P (TREE_TYPE (inner))) - && host_integerp (size, 0) + && host_integerp (size, 0) && tree_low_cst (size, 0) == bitsize) return fold_convert_loc (loc, type, inner); } @@ -5536,7 +5536,7 @@ fold_cond_expr_with_comparison (location_t loc, tree type, tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, fold_convert_loc (loc, TREE_TYPE (arg00), arg2)); - return pedantic_non_lvalue_loc (loc, + return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem)); } break; @@ -7271,7 +7271,7 @@ fold_single_bit_test (location_t loc, enum tree_code code, operations as unsigned. If we must use the AND, we have a choice. Normally unsigned is faster, but for some machines signed is. */ #ifdef LOAD_EXTEND_OP - ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND + ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND && !flag_syntax_only) ? 0 : 1; #else ops_unsigned = 1; @@ -7556,7 +7556,7 @@ try_move_mult_to_index (location_t loc, tree addr, tree op1) STRIP_NOPS (arg0); STRIP_NOPS (arg1); - + if (TREE_CODE (arg0) == INTEGER_CST) { s = arg0; @@ -8445,7 +8445,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) case FIX_TRUNC_EXPR: if (TREE_TYPE (op0) == type) return op0; - + /* If we have (type) (a CMP b) and type is an integral type, return new expression involving the new type. 
*/ if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type)) @@ -9377,7 +9377,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type, && (code == EQ_EXPR || code == NE_EXPR || POINTER_TYPE_OVERFLOW_UNDEFINED)) - + { if (code != EQ_EXPR && code != NE_EXPR @@ -9652,7 +9652,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type, /* Likewise, we can simplify a comparison of a real constant with a MINUS_EXPR whose first operand is also a real constant, i.e. - (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on + (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on floating-point types only if -fassociative-math is set. */ if (flag_associative_math && TREE_CODE (arg1) == REAL_CST @@ -9976,7 +9976,7 @@ get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue, tree op0, op1; unsigned HOST_WIDE_INT modulus; enum tree_code inner_code; - + op0 = TREE_OPERAND (expr, 0); STRIP_NOPS (op0); modulus = get_pointer_modulus_and_residue (op0, residue, @@ -9996,7 +9996,7 @@ get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue, if (TREE_CODE (op1) == INTEGER_CST) { unsigned HOST_WIDE_INT align; - + /* Compute the greatest power-of-2 divisor of op1. 
*/ align = TREE_INT_CST_LOW (op1); align &= -align; @@ -10165,7 +10165,7 @@ fold_binary_loc (location_t loc, if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0)) { tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1, - arg0, arg1, + arg0, arg1, /*cond_first_p=*/1); if (tem != NULL_TREE) return tem; @@ -10174,7 +10174,7 @@ fold_binary_loc (location_t loc, if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1)) { tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1, - arg1, arg0, + arg1, arg0, /*cond_first_p=*/0); if (tem != NULL_TREE) return tem; @@ -10456,7 +10456,7 @@ fold_binary_loc (location_t loc, return fold_build2_loc (loc, MULT_EXPR, type, arg0, build_real (type, dconst2)); - /* Convert a + (b*c + d*e) into (a + b*c) + d*e. + /* Convert a + (b*c + d*e) into (a + b*c) + d*e. We associate floats only if the user has specified -fassociative-math. */ if (flag_associative_math @@ -10473,7 +10473,7 @@ fold_binary_loc (location_t loc, return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11); } } - /* Convert (b*c + d*e) + a into b*c + (d*e +a). + /* Convert (b*c + d*e) + a into b*c + (d*e +a). We associate floats only if the user has specified -fassociative-math. */ if (flag_associative_math @@ -10909,7 +10909,7 @@ fold_binary_loc (location_t loc, tree diff = build2 (MINUS_EXPR, type, op0, op1); return fold_build2_loc (loc, MULT_EXPR, type, diff, fold_convert_loc (loc, type, esz)); - + } } @@ -11288,7 +11288,7 @@ fold_binary_loc (location_t loc, if (width > HOST_BITS_PER_WIDE_INT) { - mhi = (unsigned HOST_WIDE_INT) -1 + mhi = (unsigned HOST_WIDE_INT) -1 >> (2 * HOST_BITS_PER_WIDE_INT - width); mlo = -1; } @@ -11475,7 +11475,7 @@ fold_binary_loc (location_t loc, fold_convert_loc (loc, type, t1)); return t1; } - + /* Convert ~X ^ ~Y to X ^ Y. 
*/ if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == BIT_NOT_EXPR) @@ -11505,7 +11505,7 @@ fold_binary_loc (location_t loc, && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) { tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); - return fold_build2_loc (loc, BIT_AND_EXPR, type, + return fold_build2_loc (loc, BIT_AND_EXPR, type, fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), fold_convert_loc (loc, type, arg1)); } @@ -11624,7 +11624,7 @@ fold_binary_loc (location_t loc, && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)) { tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)); - return fold_build2_loc (loc, BIT_AND_EXPR, type, + return fold_build2_loc (loc, BIT_AND_EXPR, type, fold_build1_loc (loc, BIT_NOT_EXPR, type, tem), fold_convert_loc (loc, type, arg1)); } @@ -11889,7 +11889,7 @@ fold_binary_loc (location_t loc, } } } - /* Convert A/B/C to A/(B*C). */ + /* Convert A/B/C to A/(B*C). */ if (flag_reciprocal_math && TREE_CODE (arg0) == RDIV_EXPR) return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0), @@ -14222,7 +14222,7 @@ fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht) enum tree_code code; union tree_node buf; int i, len; - + recursive_label: gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree) @@ -14332,7 +14332,7 @@ recursive_label: } if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS)) fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht); - + if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON)) { fold_checksum_tree (DECL_VINDEX (expr), ctx, ht); @@ -14377,7 +14377,7 @@ debug_fold_checksum (const_tree t) unsigned char checksum[16]; struct md5_ctx ctx; htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL); - + md5_init_ctx (&ctx); fold_checksum_tree (t, &ctx, ht); md5_finish_ctx (&ctx, checksum); @@ -14412,14 +14412,14 @@ fold_build1_stat_loc (location_t loc, md5_finish_ctx (&ctx, checksum_before); htab_empty (ht); #endif - + tem = fold_unary_loc (loc, 
code, type, op0); if (!tem) { tem = build1_stat (code, type, op0 PASS_MEM_STAT); SET_EXPR_LOCATION (tem, loc); } - + #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); fold_checksum_tree (op0, &ctx, ht); @@ -14470,7 +14470,7 @@ fold_build2_stat_loc (location_t loc, tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT); SET_EXPR_LOCATION (tem, loc); } - + #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); fold_checksum_tree (op0, &ctx, ht); @@ -14479,7 +14479,7 @@ fold_build2_stat_loc (location_t loc, if (memcmp (checksum_before_op0, checksum_after_op0, 16)) fold_check_failed (op0, tem); - + md5_init_ctx (&ctx); fold_checksum_tree (op1, &ctx, ht); md5_finish_ctx (&ctx, checksum_after_op1); @@ -14535,7 +14535,7 @@ fold_build3_stat_loc (location_t loc, enum tree_code code, tree type, tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT); SET_EXPR_LOCATION (tem, loc); } - + #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); fold_checksum_tree (op0, &ctx, ht); @@ -14544,7 +14544,7 @@ fold_build3_stat_loc (location_t loc, enum tree_code code, tree type, if (memcmp (checksum_before_op0, checksum_after_op0, 16)) fold_check_failed (op0, tem); - + md5_init_ctx (&ctx); fold_checksum_tree (op1, &ctx, ht); md5_finish_ctx (&ctx, checksum_after_op1); @@ -14552,7 +14552,7 @@ fold_build3_stat_loc (location_t loc, enum tree_code code, tree type, if (memcmp (checksum_before_op1, checksum_after_op1, 16)) fold_check_failed (op1, tem); - + md5_init_ctx (&ctx); fold_checksum_tree (op2, &ctx, ht); md5_finish_ctx (&ctx, checksum_after_op2); @@ -14597,7 +14597,7 @@ fold_build_call_array_loc (location_t loc, tree type, tree fn, #endif tem = fold_builtin_call_array (loc, type, fn, nargs, argarray); - + #ifdef ENABLE_FOLD_CHECKING md5_init_ctx (&ctx); fold_checksum_tree (fn, &ctx, ht); @@ -14606,7 +14606,7 @@ fold_build_call_array_loc (location_t loc, tree type, tree fn, if (memcmp (checksum_before_fn, checksum_after_fn, 16)) fold_check_failed (fn, tem); - + md5_init_ctx (&ctx); for (i = 0; i 
< nargs; i++) fold_checksum_tree (argarray[i], &ctx, ht); @@ -14952,10 +14952,10 @@ tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0, && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST) && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST)) { - tree inner0 = (TREE_CODE (op0) == NOP_EXPR) + tree inner0 = (TREE_CODE (op0) == NOP_EXPR) ? TREE_TYPE (TREE_OPERAND (op0, 0)) : TREE_TYPE (op0); - tree inner1 = (TREE_CODE (op1) == NOP_EXPR) + tree inner1 = (TREE_CODE (op1) == NOP_EXPR) ? TREE_TYPE (TREE_OPERAND (op1, 0)) : TREE_TYPE (op1); @@ -16003,7 +16003,7 @@ fold_build_cleanup_point_expr (tree type, tree expr) if (!TREE_SIDE_EFFECTS (op)) return expr; } - + return build1 (CLEANUP_POINT_EXPR, type, expr); } @@ -16067,17 +16067,17 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0) /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF */ if (TREE_CODE (sub) == POINTER_PLUS_EXPR && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST) - { + { tree op00 = TREE_OPERAND (sub, 0); tree op01 = TREE_OPERAND (sub, 1); tree op00type; - + STRIP_NOPS (op00); op00type = TREE_TYPE (op00); if (TREE_CODE (op00) == ADDR_EXPR && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE && type == TREE_TYPE (TREE_TYPE (op00type))) - { + { HOST_WIDE_INT offset = tree_low_cst (op01, 0); tree part_width = TYPE_SIZE (type); unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT; @@ -16088,7 +16088,7 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0) return fold_build3_loc (loc, BIT_FIELD_REF, type, TREE_OPERAND (op00, 0), part_width, index); - + } } @@ -16113,7 +16113,7 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0) TREE_OPERAND (op00, 0)); } } - + /* *(foo *)fooarrptr => (*fooarrptr)[0] */ if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE && type == TREE_TYPE (TREE_TYPE (subtype))) @@ -16437,7 +16437,7 @@ fold_strip_sign_ops (tree exp) if (arg1) return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), 
arg0, arg1); break; - + case COND_EXPR: arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1)); arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2)); @@ -16447,7 +16447,7 @@ fold_strip_sign_ops (tree exp) arg0 ? arg0 : TREE_OPERAND (exp, 1), arg1 ? arg1 : TREE_OPERAND (exp, 2)); break; - + case CALL_EXPR: { const enum built_in_function fcode = builtin_mathfn_code (exp); -- cgit v1.2.1 From aaf45dfea47c5442092b42425f2997f679ad8593 Mon Sep 17 00:00:00 2001 From: uweigand Date: Wed, 2 Dec 2009 13:50:52 +0000 Subject: gcc/ PR middle-end/42224 * tree.h (int_or_pointer_precision): Remove. * tree.c (int_or_pointer_precision): Remove. (integer_pow2p): Use TYPE_PRECISION instead. (tree_log2): Likewise. (tree_floor_log2): Likewise. (signed_or_unsigned_type_for): Likewise. * fold-const.c (fit_double_type): Likewise. * varasm.c (initializer_constant_valid_p): Likewise. gcc/testsuite/ PR middle-end/42224 * gcc.target/s390/pr42224.c: New test. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@154908 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 40a580e77a6..1a77dd592f1 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -206,7 +206,7 @@ fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, { unsigned HOST_WIDE_INT low0 = l1; HOST_WIDE_INT high0 = h1; - unsigned int prec = int_or_pointer_precision (type); + unsigned int prec = TYPE_PRECISION (type); int sign_extended_type; /* Size types *are* sign extended. */ -- cgit v1.2.1 From 5b36834c4a327ee1ea6d2ebcbd73d4e690619027 Mon Sep 17 00:00:00 2001 From: rguenth Date: Wed, 2 Dec 2009 18:15:17 +0000 Subject: 2009-12-02 Richard Guenther PR middle-end/41491 * fold-const.c (try_move_mult_to_index): Do not leak domain types into the IL. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@154920 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 1a77dd592f1..37475a08ce2 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -7586,13 +7586,16 @@ try_move_mult_to_index (location_t loc, tree addr, tree op1) { if (TREE_CODE (ref) == ARRAY_REF) { + tree domain; + /* Remember if this was a multi-dimensional array. */ if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF) mdim = true; - itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); - if (! itype) + domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); + if (! domain) continue; + itype = TREE_TYPE (domain); step = array_ref_element_size (ref); if (TREE_CODE (step) != INTEGER_CST) @@ -7619,18 +7622,17 @@ try_move_mult_to_index (location_t loc, tree addr, tree op1) tree tmp; if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST - || !INTEGRAL_TYPE_P (itype) - || !TYPE_MAX_VALUE (itype) - || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST) + || !TYPE_MAX_VALUE (domain) + || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST) continue; tmp = fold_binary_loc (loc, PLUS_EXPR, itype, - fold_convert_loc (loc, itype, - TREE_OPERAND (ref, 1)), - fold_convert_loc (loc, itype, delta)); + fold_convert_loc (loc, itype, + TREE_OPERAND (ref, 1)), + fold_convert_loc (loc, itype, delta)); if (!tmp || TREE_CODE (tmp) != INTEGER_CST - || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp)) + || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp)) continue; } -- cgit v1.2.1 From 8bdac511d36f9045b6ac9604219cae38c2f85b1c Mon Sep 17 00:00:00 2001 From: rguenth Date: Wed, 2 Dec 2009 22:49:43 +0000 Subject: 2009-12-02 Richard Guenther * fold-const.c (div_if_zero_remainder): Honor that sizetypes are sign-extending. Simplify. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@154926 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 24 ++++++++++-------------- 1 file changed, 10 insertions(+), 14 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 37475a08ce2..cbdaf86b22c 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -881,22 +881,18 @@ div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2) HOST_WIDE_INT int1h, int2h; unsigned HOST_WIDE_INT quol, reml; HOST_WIDE_INT quoh, remh; - tree type = TREE_TYPE (arg1); - int uns = TYPE_UNSIGNED (type); + int uns; + + /* The sign of the division is according to operand two, that + does the correct thing for POINTER_PLUS_EXPR where we want + a signed division. */ + uns = TYPE_UNSIGNED (TREE_TYPE (arg2)); + if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE + && TYPE_IS_SIZETYPE (TREE_TYPE (arg2))) + uns = false; int1l = TREE_INT_CST_LOW (arg1); int1h = TREE_INT_CST_HIGH (arg1); - /* &obj[0] + -128 really should be compiled as &obj[-8] rather than - &obj[some_exotic_number]. */ - if (POINTER_TYPE_P (type)) - { - uns = false; - type = signed_type_for (type); - fit_double_type (int1l, int1h, &int1l, &int1h, - type); - } - else - fit_double_type (int1l, int1h, &int1l, &int1h, type); int2l = TREE_INT_CST_LOW (arg2); int2h = TREE_INT_CST_HIGH (arg2); @@ -905,7 +901,7 @@ div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2) if (remh != 0 || reml != 0) return NULL_TREE; - return build_int_cst_wide (type, quol, quoh); + return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh); } /* This is nonzero if we should defer warnings about undefined -- cgit v1.2.1 From 965d0f2968633239f18f735b4eb9308ddc81898b Mon Sep 17 00:00:00 2001 From: ghazi Date: Mon, 7 Dec 2009 15:42:55 +0000 Subject: PR other/40302 * builtins.c: Remove HAVE_mpc* checks throughout. * fold-const.c: Likewise. * real.h: Likewise. * toplev.c: Likewise. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@155046 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 5 ----- 1 file changed, 5 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index cbdaf86b22c..e1126219689 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -1962,12 +1962,10 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) break; case MULT_EXPR: -#ifdef HAVE_mpc if (COMPLEX_FLOAT_TYPE_P (type)) return do_mpc_arg2 (arg1, arg2, type, /* do_nonfinite= */ folding_initializer, mpc_mul); -#endif real = const_binop (MINUS_EXPR, const_binop (MULT_EXPR, r1, r2, notrunc), @@ -1980,14 +1978,11 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) break; case RDIV_EXPR: -#ifdef HAVE_mpc if (COMPLEX_FLOAT_TYPE_P (type)) return do_mpc_arg2 (arg1, arg2, type, /* do_nonfinite= */ folding_initializer, mpc_div); /* Fallthru ... */ -#endif - case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR: -- cgit v1.2.1 From a9b39de805384725f09b3b80c928bdf8aeca8d1f Mon Sep 17 00:00:00 2001 From: rguenth Date: Wed, 13 Jan 2010 13:31:13 +0000 Subject: 2010-01-13 Richard Guenther PR middle-end/42716 * fold-const.c (fold_unary_loc): Fold INDIRECT_REFs. * gcc.c-torture/compile/pr42716.c: New testcase. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@155859 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 13 +++++++++++++ 1 file changed, 13 insertions(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index e1126219689..9e40296123c 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -8942,6 +8942,19 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) } return NULL_TREE; + case INDIRECT_REF: + /* Fold *&X to X if X is an lvalue. 
*/ + if (TREE_CODE (op0) == ADDR_EXPR) + { + tree op00 = TREE_OPERAND (op0, 0); + if ((TREE_CODE (op00) == VAR_DECL + || TREE_CODE (op00) == PARM_DECL + || TREE_CODE (op00) == RESULT_DECL) + && !TREE_READONLY (op00)) + return op00; + } + return NULL_TREE; + default: return NULL_TREE; } /* switch (code) */ -- cgit v1.2.1 From 7fa61d419d766c2b06f204a08a0b205ed91d1735 Mon Sep 17 00:00:00 2001 From: jakub Date: Thu, 14 Jan 2010 09:47:09 +0000 Subject: PR c/42721 Port from no-undefined-overflow branch 2009-03-09 Richard Guenther * fold-const.c (add_double_with_sign): Fix unsigned overflow detection. * gcc.c-torture/execute/pr42721.c: New test. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@155887 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 9e40296123c..9d249cc3b2f 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -326,13 +326,17 @@ add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT h; l = l1 + l2; - h = h1 + h2 + (l < l1); + h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1 + + (unsigned HOST_WIDE_INT) h2 + + (l < l1)); *lv = l; *hv = h; if (unsigned_p) - return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1; + return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1 + || (h == h1 + && l < l1)); else return OVERFLOW_SUM_SIGN (h1, h2, h); } -- cgit v1.2.1 From 53293165a9fdd4b820d14463b2c2f892d3fd56b6 Mon Sep 17 00:00:00 2001 From: rguenth Date: Thu, 21 Jan 2010 12:15:40 +0000 Subject: 2010-01-21 Richard Guenther PR middle-end/19988 * fold-const.c (negate_expr_p): Pretend only negative real constants are easily negatable. * gcc.dg/pr19988.c: New testcase. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@156152 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 9d249cc3b2f..e7a4b8c8f49 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -1129,10 +1129,14 @@ negate_expr_p (tree t) && TYPE_OVERFLOW_WRAPS (type)); case FIXED_CST: - case REAL_CST: case NEGATE_EXPR: return true; + case REAL_CST: + /* We want to canonicalize to positive real constants. Pretend + that only negative ones can be easily negated. */ + return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t)); + case COMPLEX_CST: return negate_expr_p (TREE_REALPART (t)) && negate_expr_p (TREE_IMAGPART (t)); -- cgit v1.2.1 From 767a8a1c56d5c6ca73112f1249730fb2d3538de9 Mon Sep 17 00:00:00 2001 From: jason Date: Wed, 3 Feb 2010 21:53:41 +0000 Subject: PR c++/40138 * fold-const.c (operand_equal_p): Handle erroneous types. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@156483 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index e7a4b8c8f49..292b89f9496 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -3165,7 +3165,9 @@ int operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) { /* If either is ERROR_MARK, they aren't equal. 
*/ - if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK) + if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK + || TREE_TYPE (arg0) == error_mark_node + || TREE_TYPE (arg1) == error_mark_node) return 0; /* Check equality of integer constants before bailing out due to -- cgit v1.2.1 From 7cf0dbf3e5eee1286c76c26a836622c9c9974736 Mon Sep 17 00:00:00 2001 From: steven Date: Fri, 2 Apr 2010 19:54:46 +0000 Subject: * ada/gcc-interface/Make-lang.in, alias.c, attribs.c, auto-inc-dec.c, basic-block.h, bb-reorder.c, calls.c, c-common.c, cgraph.h, collect2.h, config/alpha/alpha.c, config/alpha/alpha.md, config/alpha/predicates.md, config/arm/arm.md, config/arm/lib1funcs.asm, config/arm/neon-schedgen.ml, config/avr/avr.c, config/avr/avr.md, config/bfin/bfin.c, config/darwin9.h, config/darwin.c, config/darwin.h, config/h8300/h8300.c, config/i386/cpuid.h, config/i386/cygming.h, config/i386/cygwin.h, config/i386/mingw32.h, config/i386/msformat-c.c, config/i386/sol2-10.h, config/i386/xopintrin.h, config/ia64/ia64.c, config/ia64/ia64.md, config/ia64/sync.md, config/mep/mep.c, config/mips/mips.md, config/mn10300/mn10300.c, config/mn10300/mn10300.h, config/pa/pa.c, config/pa/pa.md, config/rs6000/aix.h, config/rs6000/dfp.md, config/rs6000/rs6000-builtin.def, config/rs6000/rs6000-c.c, config/rs6000/vector.md, config/rtems.h, config/rx/rx.md, config/s390/s390.md, config/sol2-c.c, config/sparc/sol2-bi.h, config/sparc/sol2-gas.h, config/sparc/sparc.h, config/sparc/sparc.md, config/sparc/sparc-protos.h, config/spu/spu.c, config/spu/spu-c.c, config/t-darwin, convert.c, c.opt, c-opts.c, cp/Make-lang.in, c-pretty-print.c, c-typeck.c, df-core.c, df-scan.c, diagnostic.c, diagnostic.h, doc/cppopts.texi, doc/cpp.texi, doc/extend.texi, doc/gimple.texi, doc/languages.texi, doc/plugins.texi, doc/rtl.texi, doc/standards.texi, doc/tree-ssa.texi, doc/trouble.texi, dominance.c, fold-const.c, fortran/Make-lang.in, fwprop.c, gcc-plugin.h, gensupport.c, gimple.h, 
gimple-iterator.c, graphite.c, graphite-clast-to-gimple.c, graphite-clast-to-gimple.h, graphite-dependences.c, graphite-poly.c, graphite-poly.h, graphite-ppl.c, graphite-ppl.h, graphite-scop-detection.c, graphite-sese-to-poly.c, graphite-sese-to-poly.h, ifcvt.c, intl.c, intl.h, ipa.c, ipa-cp.c, ipa-inline.c, ipa-prop.c, ipa-prop.h, ipa-pure-const.c, ipa-reference.c, ipa-type-escape.c, ira-color.c, ira-conflicts.c, ira-lives.c, java/Make-lang.in, lambda-code.c, loop-invariant.c, lto/Make-lang.in, lto-streamer.h, lto-streamer-in.c, objc/Make-lang.in, objcp/Make-lang.in, omp-low.c, optc-gen.awk, opt-functions.awk, opth-gen.awk, params.def, passes.c, postreload-gcse.c, print-tree.c, recog.c, regrename.c, reload.h, rtl.def, sched-int.h, sched-rgn.c, sel-sched-dump.c, sese.c, sese.h, store-motion.c, stor-layout.c, tree-cfgcleanup.c, tree-chrec.c, tree-complex.c, tree-data-ref.c, tree.def, tree-eh.c, tree-flow.h, tree-flow-inline.h, tree.h, tree-loop-distribution.c, tree-outof-ssa.c, tree-parloops.c, tree-pass.h, tree-predcom.c, tree-profile.c, tree-scalar-evolution.c, tree-ssa-address.c, tree-ssa-alias.c, tree-ssa-coalesce.c, tree-ssa-copy.c, tree-ssa-dce.c, tree-ssa-dom.c, tree-ssa-dse.c, tree-ssa-loop-im.c, tree-ssa-loop-ivcanon.c, tree-ssa-loop-manip.c, tree-ssa-math-opts.c, tree-ssa-operands.c, tree-ssa-pre.c, tree-ssa-sccvn.c, tree-ssa-structalias.c, tree-ssa-uncprop.c, tree-tailcall.c, tree-vect-data-refs.c, tree-vect-loop.c, tree-vectorizer.h, tree-vect-slp.c, tree-vrp.c, unwind-dw2-fde-darwin.c, varpool.c: Update copyright years. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@157950 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 292b89f9496..8dd68c2540f 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -1,6 +1,6 @@ /* Fold a constant sub-tree into a single node for C-compiler Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, - 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 + 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc. This file is part of GCC. -- cgit v1.2.1 From 30806cf18c1416546da47e9e4fdb23692b37a73c Mon Sep 17 00:00:00 2001 From: rguenth Date: Tue, 6 Apr 2010 10:36:57 +0000 Subject: 2010-04-06 Richard Guenther PR middle-end/43661 * fold-const.c (fold_comparison): Handle X * 0 CMP 0. * gcc.c-torture/compile/pr43661.c: New testcase. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@157984 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 8dd68c2540f..e79d934243d 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -9576,7 +9576,9 @@ fold_comparison (location_t loc, enum tree_code code, tree type, tree variable1 = TREE_OPERAND (arg0, 0); enum tree_code cmp_code = code; - gcc_assert (!integer_zerop (const1)); + /* Handle unfolded multiplication by zero. 
*/ + if (integer_zerop (const1)) + return fold_build2_loc (loc, cmp_code, type, const1, const2); fold_overflow_warning (("assuming signed overflow does not occur when " "eliminating multiplication in comparison " -- cgit v1.2.1 From 6d72287b7ad40b4ebac984afcab7c2ee3577aed4 Mon Sep 17 00:00:00 2001 From: rguenth Date: Wed, 7 Apr 2010 10:13:25 +0000 Subject: 2010-04-07 Richard Guenther PR middle-end/42617 * emit-rtl.c (set_mem_attributes_minus_bitpos): Do not discard plain indirect references. * fold-const.c (operand_equal_p): Guard against NULL_TREE type. * tree.c (tree_nop_conversion): Likewise. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158045 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 5 +++++ 1 file changed, 5 insertions(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index e79d934243d..62c86254ea1 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -3170,6 +3170,11 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) || TREE_TYPE (arg1) == error_mark_node) return 0; + /* Similar, if either does not have a type (like a released SSA name), + they aren't equal. */ + if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1)) + return 0; + /* Check equality of integer constants before bailing out due to precision differences. */ if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST) -- cgit v1.2.1 From 7f2d9047e1adab1993bd392999885c4023a176c7 Mon Sep 17 00:00:00 2001 From: rguenth Date: Wed, 7 Apr 2010 15:31:37 +0000 Subject: 2010-04-07 Richard Guenther * ipa-reference.c (mark_load): Use get_base_address. (mark_store): Likewise. * tree-ssa-ccp.c (gimplify_and_update_call_from_tree): Avoid inserting GIMPLE_NOPs into the IL. * tree-ssa-structalias.c (get_constraint_for_component_ref): Explicitly strip handled components and indirect references. * fold-const.c (fold_unary_loc): Do not strip qualifiers when folding address expressions. 
* gimple.c (gimple_ior_addresses_taken_1): Use get_base_address. * tree-ssa-alias.c (decl_refs_may_alias_p): Do not use operand_equal_p to compare decls. (ptr_deref_may_alias_decl_p): Likewise. * tree-ssa-operands.c (get_asm_expr_operands): Simplify * tree-ssa-forwprop.c (forward_propagate_into_gimple_cond): Handle reversed comparison ops. * tree-sra.c (asm_visit_addr): Use get_base_address. * ipa-prop.c (visit_store_addr_for_mod_analysis): Use get_base_address. * ipa-reference.c (mark_address): Use get_base_address. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158069 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 62c86254ea1..03598a59a6f 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -8561,10 +8561,11 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) &mode, &unsignedp, &volatilep, false); /* If the reference was to a (constant) zero offset, we can use the address of the base if it has the same base type - as the result type. */ + as the result type and the pointer type is unqualified. */ if (! offset && bitpos == 0 - && TYPE_MAIN_VARIANT (TREE_TYPE (type)) + && (TYPE_MAIN_VARIANT (TREE_TYPE (type)) == TYPE_MAIN_VARIANT (TREE_TYPE (base))) + && TYPE_QUALS (type) == TYPE_UNQUALIFIED) return fold_convert_loc (loc, type, build_fold_addr_expr_loc (loc, base)); } -- cgit v1.2.1 From c6feb9f122942ce917541aae8fead534ce6b9929 Mon Sep 17 00:00:00 2001 From: ebotcazou Date: Tue, 13 Apr 2010 15:47:38 +0000 Subject: PR middle-end/32628 * c-common.c (pointer_int_sum): Disregard overflow that occured only because of sign-extension change when converting to sizetype here... * fold-const.c (fold_convert_const_int_from_int): ...and not here. * fold-const.c (fold_binary_op_with_conditional_arg): Do not restrict the folding to constants. Remove redundant final conversion. 
(fold_binary) : Do not associate if the re-association of constants alone overflows. (fold_binary) : Move transformation into BIT_AND_EXPR to the end of the list. (multiple_of_p) : New case. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158274 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 152 +++++++++++++++++++++++++++++-------------------------- 1 file changed, 79 insertions(+), 73 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 03598a59a6f..34e5874eadd 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -2252,23 +2252,7 @@ fold_convert_const_int_from_int (tree type, const_tree arg1) appropriately sign-extended or truncated. */ t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1), TREE_INT_CST_HIGH (arg1), - /* Don't set the overflow when - converting from a pointer, */ - !POINTER_TYPE_P (TREE_TYPE (arg1)) - /* or to a sizetype with same signedness - and the precision is unchanged. - ??? sizetype is always sign-extended, - but its signedness depends on the - frontend. Thus we see spurious overflows - here if we do not check this. */ - && !((TYPE_PRECISION (TREE_TYPE (arg1)) - == TYPE_PRECISION (type)) - && (TYPE_UNSIGNED (TREE_TYPE (arg1)) - == TYPE_UNSIGNED (type)) - && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE - && TYPE_IS_SIZETYPE (TREE_TYPE (arg1))) - || (TREE_CODE (type) == INTEGER_TYPE - && TYPE_IS_SIZETYPE (type)))), + !POINTER_TYPE_P (TREE_TYPE (arg1)), (TREE_INT_CST_HIGH (arg1) < 0 && (TYPE_UNSIGNED (type) < TYPE_UNSIGNED (TREE_TYPE (arg1)))) @@ -6731,12 +6715,6 @@ fold_binary_op_with_conditional_arg (location_t loc, tree lhs = NULL_TREE; tree rhs = NULL_TREE; - /* This transformation is only worthwhile if we don't have to wrap - arg in a SAVE_EXPR, and the operation can be simplified on at least - one of the branches once its pushed inside the COND_EXPR. 
*/ - if (!TREE_CONSTANT (arg)) - return NULL_TREE; - if (TREE_CODE (cond) == COND_EXPR) { test = TREE_OPERAND (cond, 0); @@ -6758,6 +6736,14 @@ fold_binary_op_with_conditional_arg (location_t loc, false_value = constant_boolean_node (false, testtype); } + /* This transformation is only worthwhile if we don't have to wrap ARG + in a SAVE_EXPR and the operation can be simplified on at least one + of the branches once its pushed inside the COND_EXPR. */ + if (!TREE_CONSTANT (arg) + && (TREE_SIDE_EFFECTS (arg) + || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value))) + return NULL_TREE; + arg = fold_convert_loc (loc, arg_type, arg); if (lhs == 0) { @@ -6776,8 +6762,11 @@ fold_binary_op_with_conditional_arg (location_t loc, rhs = fold_build2_loc (loc, code, type, arg, false_value); } - test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs); - return fold_convert_loc (loc, type, test); + /* Check that we have simplified at least one of the branches. */ + if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs)) + return NULL_TREE; + + return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs); } @@ -10628,23 +10617,39 @@ fold_binary_loc (location_t loc, var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1, code == MINUS_EXPR); - /* With undefined overflow we can only associate constants - with one variable. */ - if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED) - || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type))) - && var0 && var1) + /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */ + if (code == MINUS_EXPR) + code = PLUS_EXPR; + + /* With undefined overflow we can only associate constants with one + variable, and constants whose association doesn't overflow. 
*/ + if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED) + || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type))) { - tree tmp0 = var0; - tree tmp1 = var1; - - if (TREE_CODE (tmp0) == NEGATE_EXPR) - tmp0 = TREE_OPERAND (tmp0, 0); - if (TREE_CODE (tmp1) == NEGATE_EXPR) - tmp1 = TREE_OPERAND (tmp1, 0); - /* The only case we can still associate with two variables - is if they are the same, modulo negation. */ - if (!operand_equal_p (tmp0, tmp1, 0)) - ok = false; + if (var0 && var1) + { + tree tmp0 = var0; + tree tmp1 = var1; + + if (TREE_CODE (tmp0) == NEGATE_EXPR) + tmp0 = TREE_OPERAND (tmp0, 0); + if (TREE_CODE (tmp1) == NEGATE_EXPR) + tmp1 = TREE_OPERAND (tmp1, 0); + /* The only case we can still associate with two variables + is if they are the same, modulo negation. */ + if (!operand_equal_p (tmp0, tmp1, 0)) + ok = false; + } + + if (ok && lit0 && lit1) + { + tree tmp0 = fold_convert (type, lit0); + tree tmp1 = fold_convert (type, lit1); + + if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1) + && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1))) + ok = false; + } } /* Only do something if we found more than two objects. Otherwise, @@ -10655,10 +10660,6 @@ fold_binary_loc (location_t loc, + (lit0 != 0) + (lit1 != 0) + (minus_lit0 != 0) + (minus_lit1 != 0)))) { - /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */ - if (code == MINUS_EXPR) - code = PLUS_EXPR; - var0 = associate_trees (loc, var0, var1, code, type); con0 = associate_trees (loc, con0, con1, code, type); lit0 = associate_trees (loc, lit0, lit1, code, type); @@ -12222,34 +12223,6 @@ fold_binary_loc (location_t loc, && TREE_INT_CST_HIGH (arg1) == -1) return omit_one_operand_loc (loc, type, integer_zero_node, arg0); - /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, - i.e. "X % C" into "X & (C - 1)", if X and C are positive. 
*/ - strict_overflow_p = false; - if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR) - && (TYPE_UNSIGNED (type) - || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p))) - { - tree c = arg1; - /* Also optimize A % (C << N) where C is a power of 2, - to A & ((C << N) - 1). */ - if (TREE_CODE (arg1) == LSHIFT_EXPR) - c = TREE_OPERAND (arg1, 0); - - if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) - { - tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1, - build_int_cst (TREE_TYPE (arg1), 1)); - if (strict_overflow_p) - fold_overflow_warning (("assuming signed overflow does not " - "occur when simplifying " - "X % (power of two)"), - WARN_STRICT_OVERFLOW_MISC); - return fold_build2_loc (loc, BIT_AND_EXPR, type, - fold_convert_loc (loc, type, arg0), - fold_convert_loc (loc, type, mask)); - } - } - /* X % -C is the same as X % C. */ if (code == TRUNC_MOD_EXPR && !TYPE_UNSIGNED (type) @@ -12273,6 +12246,7 @@ fold_binary_loc (location_t loc, fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0))); + strict_overflow_p = false; if (TREE_CODE (arg1) == INTEGER_CST && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE, &strict_overflow_p))) @@ -12284,6 +12258,34 @@ fold_binary_loc (location_t loc, return fold_convert_loc (loc, type, tem); } + /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR, + i.e. "X % C" into "X & (C - 1)", if X and C are positive. */ + if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR) + && (TYPE_UNSIGNED (type) + || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p))) + { + tree c = arg1; + /* Also optimize A % (C << N) where C is a power of 2, + to A & ((C << N) - 1). 
*/ + if (TREE_CODE (arg1) == LSHIFT_EXPR) + c = TREE_OPERAND (arg1, 0); + + if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0) + { + tree mask + = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1, + build_int_cst (TREE_TYPE (arg1), 1)); + if (strict_overflow_p) + fold_overflow_warning (("assuming signed overflow does not " + "occur when simplifying " + "X % (power of two)"), + WARN_STRICT_OVERFLOW_MISC); + return fold_build2_loc (loc, BIT_AND_EXPR, type, + fold_convert_loc (loc, type, arg0), + fold_convert_loc (loc, type, mask)); + } + } + return NULL_TREE; case LROTATE_EXPR: @@ -14820,6 +14822,10 @@ multiple_of_p (tree type, const_tree top, const_tree bottom) case SAVE_EXPR: return multiple_of_p (type, TREE_OPERAND (top, 0), bottom); + case COND_EXPR: + return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom) + && multiple_of_p (type, TREE_OPERAND (top, 2), bottom)); + case INTEGER_CST: if (TREE_CODE (bottom) != INTEGER_CST || integer_zerop (bottom) -- cgit v1.2.1 From 412839221276fd0a251ba61296262b09352c5927 Mon Sep 17 00:00:00 2001 From: aesok Date: Wed, 14 Apr 2010 22:05:32 +0000 Subject: * double-int.h (HOST_BITS_PER_DOUBLE_INT): Define. (double_int_not, double_int_lshift, double_int_rshift): Declare. (double_int_negative_p): Convert to static inline function. * double-int.c (double_int_lshift, double_int_lshift): Add new function. (double_int_negative_p): Remove. * tree.h (lshift_double, rshift_double): * tree.c (build_low_bits_mask): Clean up, use double_int_* functions. * fold-const.c (fold_convert_const_int_from_real, fold_convert_const_int_from_fixed, div_if_zero_remainder): (Ditto.). (lshift_double): Change type of arith argument to bool. (rshift_double): Change type of arith argument to bool. Correct comment. * expmed.c (mask_rtx, lshift_value): (Ditto.). 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158360 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 72 ++++++++++++++++++++++---------------------------------- 1 file changed, 28 insertions(+), 44 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 34e5874eadd..c3fcaa58c96 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -436,7 +436,7 @@ mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, void lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count, unsigned int prec, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith) + unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, bool arith) { unsigned HOST_WIDE_INT signmask; @@ -491,7 +491,7 @@ lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, } /* Shift the doubleword integer in L1, H1 right by COUNT places - keeping only PREC bits of result. COUNT must be positive. + keeping only PREC bits of result. Shift left if COUNT is negative. ARITH nonzero specifies arithmetic shifting; otherwise use logical shift. Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. 
*/ @@ -499,7 +499,7 @@ void rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, HOST_WIDE_INT count, unsigned int prec, unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, - int arith) + bool arith) { unsigned HOST_WIDE_INT signmask; @@ -881,10 +881,7 @@ div_and_round_double (enum tree_code code, int uns, tree div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2) { - unsigned HOST_WIDE_INT int1l, int2l; - HOST_WIDE_INT int1h, int2h; - unsigned HOST_WIDE_INT quol, reml; - HOST_WIDE_INT quoh, remh; + double_int quo, rem; int uns; /* The sign of the division is according to operand two, that @@ -895,17 +892,14 @@ div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2) && TYPE_IS_SIZETYPE (TREE_TYPE (arg2))) uns = false; - int1l = TREE_INT_CST_LOW (arg1); - int1h = TREE_INT_CST_HIGH (arg1); - int2l = TREE_INT_CST_LOW (arg2); - int2h = TREE_INT_CST_HIGH (arg2); + quo = double_int_divmod (tree_to_double_int (arg1), + tree_to_double_int (arg2), + uns, code, &rem); - div_and_round_double (code, uns, int1l, int1h, int2l, int2h, - &quol, &quoh, &reml, &remh); - if (remh != 0 || reml != 0) - return NULL_TREE; + if (double_int_zero_p (rem)) + return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high); - return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh); + return NULL_TREE; } /* This is nonzero if we should defer warnings about undefined @@ -2279,7 +2273,7 @@ fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg C and C++ standards that simply state that the behavior of FP-to-integer conversion is unspecified upon overflow. 
*/ - HOST_WIDE_INT high, low; + double_int val; REAL_VALUE_TYPE r; REAL_VALUE_TYPE x = TREE_REAL_CST (arg1); @@ -2297,8 +2291,7 @@ fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg if (REAL_VALUE_ISNAN (r)) { overflow = 1; - high = 0; - low = 0; + val = double_int_zero; } /* See if R is less than the lower bound or greater than the @@ -2311,8 +2304,7 @@ fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg if (REAL_VALUES_LESS (r, l)) { overflow = 1; - high = TREE_INT_CST_HIGH (lt); - low = TREE_INT_CST_LOW (lt); + val = tree_to_double_int (lt); } } @@ -2325,16 +2317,15 @@ fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg if (REAL_VALUES_LESS (u, r)) { overflow = 1; - high = TREE_INT_CST_HIGH (ut); - low = TREE_INT_CST_LOW (ut); + val = tree_to_double_int (ut); } } } if (! overflow) - REAL_VALUE_TO_INT (&low, &high, r); + real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r); - t = force_fit_type_double (type, low, high, -1, + t = force_fit_type_double (type, val.low, val.high, -1, overflow | TREE_OVERFLOW (arg1)); return t; } @@ -2354,39 +2345,32 @@ fold_convert_const_int_from_fixed (tree type, const_tree arg1) mode = TREE_FIXED_CST (arg1).mode; if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT) { - lshift_double (temp.low, temp.high, - - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT, - &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode)); + temp = double_int_rshift (temp, GET_MODE_FBIT (mode), + HOST_BITS_PER_DOUBLE_INT, + SIGNED_FIXED_POINT_MODE_P (mode)); /* Left shift temp to temp_trunc by fbit. 
*/ - lshift_double (temp.low, temp.high, - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT, - &temp_trunc.low, &temp_trunc.high, - SIGNED_FIXED_POINT_MODE_P (mode)); + temp_trunc = double_int_lshift (temp, GET_MODE_FBIT (mode), + HOST_BITS_PER_DOUBLE_INT, + SIGNED_FIXED_POINT_MODE_P (mode)); } else { - temp.low = 0; - temp.high = 0; - temp_trunc.low = 0; - temp_trunc.high = 0; + temp = double_int_zero; + temp_trunc = double_int_zero; } /* If FIXED_CST is negative, we need to round the value toward 0. By checking if the fractional bits are not zero to add 1 to temp. */ - if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0 + if (SIGNED_FIXED_POINT_MODE_P (mode) + && double_int_negative_p (temp_trunc) && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc)) - { - double_int one; - one.low = 1; - one.high = 0; - temp = double_int_add (temp, one); - } + temp = double_int_add (temp, double_int_one); /* Given a fixed-point constant, make new constant with new type, appropriately sign-extended or truncated. */ t = force_fit_type_double (type, temp.low, temp.high, -1, - (temp.high < 0 + (double_int_negative_p (temp) && (TYPE_UNSIGNED (type) < TYPE_UNSIGNED (TREE_TYPE (arg1)))) | TREE_OVERFLOW (arg1)); -- cgit v1.2.1 From b2afff2d97f09cb4c4921f5af82ea52f26b74adb Mon Sep 17 00:00:00 2001 From: rguenth Date: Thu, 15 Apr 2010 12:45:58 +0000 Subject: 2010-04-15 Richard Guenther * fold-const.c (LOWPART, HIGHPART, BASE, encode, decode, fit_double_type, force_fit_type_double, add_double_with_sign, neg_double, mul_double_with_sign, lshift_double, rshift_double, lrotate_double, rrotate_double, div_and_round_double): Move ... * double-int.c: ... here. * tree.h (force_fit_type_double, fit_double_type, add_double_with_sign, add_double, neg_double, mul_double_with_sign, mul_double, lshift_double, rshift_double, lrotate_double, rrotate_double, div_and_round_double): Move prototypes ... * double-int.h: ... here. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158372 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 717 ------------------------------------------------------- 1 file changed, 717 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index c3fcaa58c96..c1af8248a39 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -93,8 +93,6 @@ enum comparison_code { COMPCODE_TRUE = 15 }; -static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT); -static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *); static bool negate_mathfn_p (enum built_in_function); static bool negate_expr_p (tree); static tree negate_expr (tree); @@ -159,721 +157,6 @@ static tree fold_convert_const (enum tree_code, tree, tree); sign. */ #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0) -/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic. - We do that by representing the two-word integer in 4 words, with only - HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive - number. The value of the word is LOWPART + HIGHPART * BASE. */ - -#define LOWPART(x) \ - ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1)) -#define HIGHPART(x) \ - ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2) -#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2) - -/* Unpack a two-word integer into 4 words. - LOW and HI are the integer, as two `HOST_WIDE_INT' pieces. - WORDS points to the array of HOST_WIDE_INTs. */ - -static void -encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi) -{ - words[0] = LOWPART (low); - words[1] = HIGHPART (low); - words[2] = LOWPART (hi); - words[3] = HIGHPART (hi); -} - -/* Pack an array of 4 words into a two-word integer. - WORDS points to the array of words. - The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. 
*/ - -static void -decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low, - HOST_WIDE_INT *hi) -{ - *low = words[0] + words[1] * BASE; - *hi = words[2] + words[3] * BASE; -} - -/* Force the double-word integer L1, H1 to be within the range of the - integer type TYPE. Stores the properly truncated and sign-extended - double-word integer in *LV, *HV. Returns true if the operation - overflows, that is, argument and result are different. */ - -int -fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type) -{ - unsigned HOST_WIDE_INT low0 = l1; - HOST_WIDE_INT high0 = h1; - unsigned int prec = TYPE_PRECISION (type); - int sign_extended_type; - - /* Size types *are* sign extended. */ - sign_extended_type = (!TYPE_UNSIGNED (type) - || (TREE_CODE (type) == INTEGER_TYPE - && TYPE_IS_SIZETYPE (type))); - - /* First clear all bits that are beyond the type's precision. */ - if (prec >= 2 * HOST_BITS_PER_WIDE_INT) - ; - else if (prec > HOST_BITS_PER_WIDE_INT) - h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT)); - else - { - h1 = 0; - if (prec < HOST_BITS_PER_WIDE_INT) - l1 &= ~((HOST_WIDE_INT) (-1) << prec); - } - - /* Then do sign extension if necessary. */ - if (!sign_extended_type) - /* No sign extension */; - else if (prec >= 2 * HOST_BITS_PER_WIDE_INT) - /* Correct width already. */; - else if (prec > HOST_BITS_PER_WIDE_INT) - { - /* Sign extend top half? */ - if (h1 & ((unsigned HOST_WIDE_INT)1 - << (prec - HOST_BITS_PER_WIDE_INT - 1))) - h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT); - } - else if (prec == HOST_BITS_PER_WIDE_INT) - { - if ((HOST_WIDE_INT)l1 < 0) - h1 = -1; - } - else - { - /* Sign extend bottom half? */ - if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1))) - { - h1 = -1; - l1 |= (HOST_WIDE_INT)(-1) << prec; - } - } - - *lv = l1; - *hv = h1; - - /* If the value didn't fit, signal overflow. 
*/ - return l1 != low0 || h1 != high0; -} - -/* We force the double-int HIGH:LOW to the range of the type TYPE by - sign or zero extending it. - OVERFLOWABLE indicates if we are interested - in overflow of the value, when >0 we are only interested in signed - overflow, for <0 we are interested in any overflow. OVERFLOWED - indicates whether overflow has already occurred. CONST_OVERFLOWED - indicates whether constant overflow has already occurred. We force - T's value to be within range of T's type (by setting to 0 or 1 all - the bits outside the type's range). We set TREE_OVERFLOWED if, - OVERFLOWED is nonzero, - or OVERFLOWABLE is >0 and signed overflow occurs - or OVERFLOWABLE is <0 and any overflow occurs - We return a new tree node for the extended double-int. The node - is shared if no overflow flags are set. */ - -tree -force_fit_type_double (tree type, unsigned HOST_WIDE_INT low, - HOST_WIDE_INT high, int overflowable, - bool overflowed) -{ - int sign_extended_type; - bool overflow; - - /* Size types *are* sign extended. */ - sign_extended_type = (!TYPE_UNSIGNED (type) - || (TREE_CODE (type) == INTEGER_TYPE - && TYPE_IS_SIZETYPE (type))); - - overflow = fit_double_type (low, high, &low, &high, type); - - /* If we need to set overflow flags, return a new unshared node. */ - if (overflowed || overflow) - { - if (overflowed - || overflowable < 0 - || (overflowable > 0 && sign_extended_type)) - { - tree t = make_node (INTEGER_CST); - TREE_INT_CST_LOW (t) = low; - TREE_INT_CST_HIGH (t) = high; - TREE_TYPE (t) = type; - TREE_OVERFLOW (t) = 1; - return t; - } - } - - /* Else build a shared node. */ - return build_int_cst_wide (type, low, high); -} - -/* Add two doubleword integers with doubleword result. - Return nonzero if the operation overflows according to UNSIGNED_P. - Each argument is given as two `HOST_WIDE_INT' pieces. - One argument is L1 and H1; the other, L2 and H2. - The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. 
*/ - -int -add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, - bool unsigned_p) -{ - unsigned HOST_WIDE_INT l; - HOST_WIDE_INT h; - - l = l1 + l2; - h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1 - + (unsigned HOST_WIDE_INT) h2 - + (l < l1)); - - *lv = l; - *hv = h; - - if (unsigned_p) - return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1 - || (h == h1 - && l < l1)); - else - return OVERFLOW_SUM_SIGN (h1, h2, h); -} - -/* Negate a doubleword integer with doubleword result. - Return nonzero if the operation overflows, assuming it's signed. - The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1. - The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */ - -int -neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv) -{ - if (l1 == 0) - { - *lv = 0; - *hv = - h1; - return (*hv & h1) < 0; - } - else - { - *lv = -l1; - *hv = ~h1; - return 0; - } -} - -/* Multiply two doubleword integers with doubleword result. - Return nonzero if the operation overflows according to UNSIGNED_P. - Each argument is given as two `HOST_WIDE_INT' pieces. - One argument is L1 and H1; the other, L2 and H2. - The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */ - -int -mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, - bool unsigned_p) -{ - HOST_WIDE_INT arg1[4]; - HOST_WIDE_INT arg2[4]; - HOST_WIDE_INT prod[4 * 2]; - unsigned HOST_WIDE_INT carry; - int i, j, k; - unsigned HOST_WIDE_INT toplow, neglow; - HOST_WIDE_INT tophigh, neghigh; - - encode (arg1, l1, h1); - encode (arg2, l2, h2); - - memset (prod, 0, sizeof prod); - - for (i = 0; i < 4; i++) - { - carry = 0; - for (j = 0; j < 4; j++) - { - k = i + j; - /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. 
*/ - carry += arg1[i] * arg2[j]; - /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */ - carry += prod[k]; - prod[k] = LOWPART (carry); - carry = HIGHPART (carry); - } - prod[i + 4] = carry; - } - - decode (prod, lv, hv); - decode (prod + 4, &toplow, &tophigh); - - /* Unsigned overflow is immediate. */ - if (unsigned_p) - return (toplow | tophigh) != 0; - - /* Check for signed overflow by calculating the signed representation of the - top half of the result; it should agree with the low half's sign bit. */ - if (h1 < 0) - { - neg_double (l2, h2, &neglow, &neghigh); - add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh); - } - if (h2 < 0) - { - neg_double (l1, h1, &neglow, &neghigh); - add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh); - } - return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0; -} - -/* Shift the doubleword integer in L1, H1 left by COUNT places - keeping only PREC bits of result. - Shift right if COUNT is negative. - ARITH nonzero specifies arithmetic shifting; otherwise use logical shift. - Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */ - -void -lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - HOST_WIDE_INT count, unsigned int prec, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, bool arith) -{ - unsigned HOST_WIDE_INT signmask; - - if (count < 0) - { - rshift_double (l1, h1, -count, prec, lv, hv, arith); - return; - } - - if (SHIFT_COUNT_TRUNCATED) - count %= prec; - - if (count >= 2 * HOST_BITS_PER_WIDE_INT) - { - /* Shifting by the host word size is undefined according to the - ANSI standard, so we must handle this as a special case. */ - *hv = 0; - *lv = 0; - } - else if (count >= HOST_BITS_PER_WIDE_INT) - { - *hv = l1 << (count - HOST_BITS_PER_WIDE_INT); - *lv = 0; - } - else - { - *hv = (((unsigned HOST_WIDE_INT) h1 << count) - | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1)); - *lv = l1 << count; - } - - /* Sign extend all bits that are beyond the precision. 
*/ - - signmask = -((prec > HOST_BITS_PER_WIDE_INT - ? ((unsigned HOST_WIDE_INT) *hv - >> (prec - HOST_BITS_PER_WIDE_INT - 1)) - : (*lv >> (prec - 1))) & 1); - - if (prec >= 2 * HOST_BITS_PER_WIDE_INT) - ; - else if (prec >= HOST_BITS_PER_WIDE_INT) - { - *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT)); - *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT); - } - else - { - *hv = signmask; - *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec); - *lv |= signmask << prec; - } -} - -/* Shift the doubleword integer in L1, H1 right by COUNT places - keeping only PREC bits of result. Shift left if COUNT is negative. - ARITH nonzero specifies arithmetic shifting; otherwise use logical shift. - Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */ - -void -rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - HOST_WIDE_INT count, unsigned int prec, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, - bool arith) -{ - unsigned HOST_WIDE_INT signmask; - - signmask = (arith - ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1)) - : 0); - - if (SHIFT_COUNT_TRUNCATED) - count %= prec; - - if (count >= 2 * HOST_BITS_PER_WIDE_INT) - { - /* Shifting by the host word size is undefined according to the - ANSI standard, so we must handle this as a special case. */ - *hv = 0; - *lv = 0; - } - else if (count >= HOST_BITS_PER_WIDE_INT) - { - *hv = 0; - *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT); - } - else - { - *hv = (unsigned HOST_WIDE_INT) h1 >> count; - *lv = ((l1 >> count) - | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1)); - } - - /* Zero / sign extend all bits that are beyond the precision. 
*/ - - if (count >= (HOST_WIDE_INT)prec) - { - *hv = signmask; - *lv = signmask; - } - else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT) - ; - else if ((prec - count) >= HOST_BITS_PER_WIDE_INT) - { - *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT)); - *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT); - } - else - { - *hv = signmask; - *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count)); - *lv |= signmask << (prec - count); - } -} - -/* Rotate the doubleword integer in L1, H1 left by COUNT places - keeping only PREC bits of result. - Rotate right if COUNT is negative. - Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */ - -void -lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - HOST_WIDE_INT count, unsigned int prec, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv) -{ - unsigned HOST_WIDE_INT s1l, s2l; - HOST_WIDE_INT s1h, s2h; - - count %= prec; - if (count < 0) - count += prec; - - lshift_double (l1, h1, count, prec, &s1l, &s1h, 0); - rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0); - *lv = s1l | s2l; - *hv = s1h | s2h; -} - -/* Rotate the doubleword integer in L1, H1 left by COUNT places - keeping only PREC bits of result. COUNT must be positive. - Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */ - -void -rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1, - HOST_WIDE_INT count, unsigned int prec, - unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv) -{ - unsigned HOST_WIDE_INT s1l, s2l; - HOST_WIDE_INT s1h, s2h; - - count %= prec; - if (count < 0) - count += prec; - - rshift_double (l1, h1, count, prec, &s1l, &s1h, 0); - lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0); - *lv = s1l | s2l; - *hv = s1h | s2h; -} - -/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN - for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM). 
- CODE is a tree code for a kind of division, one of - TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR - or EXACT_DIV_EXPR - It controls how the quotient is rounded to an integer. - Return nonzero if the operation overflows. - UNS nonzero says do unsigned division. */ - -int -div_and_round_double (enum tree_code code, int uns, - unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */ - HOST_WIDE_INT hnum_orig, - unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */ - HOST_WIDE_INT hden_orig, - unsigned HOST_WIDE_INT *lquo, - HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem, - HOST_WIDE_INT *hrem) -{ - int quo_neg = 0; - HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */ - HOST_WIDE_INT den[4], quo[4]; - int i, j; - unsigned HOST_WIDE_INT work; - unsigned HOST_WIDE_INT carry = 0; - unsigned HOST_WIDE_INT lnum = lnum_orig; - HOST_WIDE_INT hnum = hnum_orig; - unsigned HOST_WIDE_INT lden = lden_orig; - HOST_WIDE_INT hden = hden_orig; - int overflow = 0; - - if (hden == 0 && lden == 0) - overflow = 1, lden = 1; - - /* Calculate quotient sign and convert operands to unsigned. */ - if (!uns) - { - if (hnum < 0) - { - quo_neg = ~ quo_neg; - /* (minimum integer) / (-1) is the only overflow case. */ - if (neg_double (lnum, hnum, &lnum, &hnum) - && ((HOST_WIDE_INT) lden & hden) == -1) - overflow = 1; - } - if (hden < 0) - { - quo_neg = ~ quo_neg; - neg_double (lden, hden, &lden, &hden); - } - } - - if (hnum == 0 && hden == 0) - { /* single precision */ - *hquo = *hrem = 0; - /* This unsigned division rounds toward zero. */ - *lquo = lnum / lden; - goto finish_up; - } - - if (hnum == 0) - { /* trivial case: dividend < divisor */ - /* hden != 0 already checked. 
*/ - *hquo = *lquo = 0; - *hrem = hnum; - *lrem = lnum; - goto finish_up; - } - - memset (quo, 0, sizeof quo); - - memset (num, 0, sizeof num); /* to zero 9th element */ - memset (den, 0, sizeof den); - - encode (num, lnum, hnum); - encode (den, lden, hden); - - /* Special code for when the divisor < BASE. */ - if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE) - { - /* hnum != 0 already checked. */ - for (i = 4 - 1; i >= 0; i--) - { - work = num[i] + carry * BASE; - quo[i] = work / lden; - carry = work % lden; - } - } - else - { - /* Full double precision division, - with thanks to Don Knuth's "Seminumerical Algorithms". */ - int num_hi_sig, den_hi_sig; - unsigned HOST_WIDE_INT quo_est, scale; - - /* Find the highest nonzero divisor digit. */ - for (i = 4 - 1;; i--) - if (den[i] != 0) - { - den_hi_sig = i; - break; - } - - /* Insure that the first digit of the divisor is at least BASE/2. - This is required by the quotient digit estimation algorithm. */ - - scale = BASE / (den[den_hi_sig] + 1); - if (scale > 1) - { /* scale divisor and dividend */ - carry = 0; - for (i = 0; i <= 4 - 1; i++) - { - work = (num[i] * scale) + carry; - num[i] = LOWPART (work); - carry = HIGHPART (work); - } - - num[4] = carry; - carry = 0; - for (i = 0; i <= 4 - 1; i++) - { - work = (den[i] * scale) + carry; - den[i] = LOWPART (work); - carry = HIGHPART (work); - if (den[i] != 0) den_hi_sig = i; - } - } - - num_hi_sig = 4; - - /* Main loop */ - for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) - { - /* Guess the next quotient digit, quo_est, by dividing the first - two remaining dividend digits by the high order quotient digit. - quo_est is never low and is at most 2 high. */ - unsigned HOST_WIDE_INT tmp; - - num_hi_sig = i + den_hi_sig + 1; - work = num[num_hi_sig] * BASE + num[num_hi_sig - 1]; - if (num[num_hi_sig] != den[den_hi_sig]) - quo_est = work / den[den_hi_sig]; - else - quo_est = BASE - 1; - - /* Refine quo_est so it's usually correct, and at most one high. 
*/ - tmp = work - quo_est * den[den_hi_sig]; - if (tmp < BASE - && (den[den_hi_sig - 1] * quo_est - > (tmp * BASE + num[num_hi_sig - 2]))) - quo_est--; - - /* Try QUO_EST as the quotient digit, by multiplying the - divisor by QUO_EST and subtracting from the remaining dividend. - Keep in mind that QUO_EST is the I - 1st digit. */ - - carry = 0; - for (j = 0; j <= den_hi_sig; j++) - { - work = quo_est * den[j] + carry; - carry = HIGHPART (work); - work = num[i + j] - LOWPART (work); - num[i + j] = LOWPART (work); - carry += HIGHPART (work) != 0; - } - - /* If quo_est was high by one, then num[i] went negative and - we need to correct things. */ - if (num[num_hi_sig] < (HOST_WIDE_INT) carry) - { - quo_est--; - carry = 0; /* add divisor back in */ - for (j = 0; j <= den_hi_sig; j++) - { - work = num[i + j] + den[j] + carry; - carry = HIGHPART (work); - num[i + j] = LOWPART (work); - } - - num [num_hi_sig] += carry; - } - - /* Store the quotient digit. */ - quo[i] = quo_est; - } - } - - decode (quo, lquo, hquo); - - finish_up: - /* If result is negative, make it so. 
*/ - if (quo_neg) - neg_double (*lquo, *hquo, lquo, hquo); - - /* Compute trial remainder: rem = num - (quo * den) */ - mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem); - neg_double (*lrem, *hrem, lrem, hrem); - add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem); - - switch (code) - { - case TRUNC_DIV_EXPR: - case TRUNC_MOD_EXPR: /* round toward zero */ - case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */ - return overflow; - - case FLOOR_DIV_EXPR: - case FLOOR_MOD_EXPR: /* round toward negative infinity */ - if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */ - { - /* quo = quo - 1; */ - add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, - lquo, hquo); - } - else - return overflow; - break; - - case CEIL_DIV_EXPR: - case CEIL_MOD_EXPR: /* round toward positive infinity */ - if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */ - { - add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0, - lquo, hquo); - } - else - return overflow; - break; - - case ROUND_DIV_EXPR: - case ROUND_MOD_EXPR: /* round to closest integer */ - { - unsigned HOST_WIDE_INT labs_rem = *lrem; - HOST_WIDE_INT habs_rem = *hrem; - unsigned HOST_WIDE_INT labs_den = lden, ltwice; - HOST_WIDE_INT habs_den = hden, htwice; - - /* Get absolute values. */ - if (*hrem < 0) - neg_double (*lrem, *hrem, &labs_rem, &habs_rem); - if (hden < 0) - neg_double (lden, hden, &labs_den, &habs_den); - - /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. 
*/ - mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0, - labs_rem, habs_rem, <wice, &htwice); - - if (((unsigned HOST_WIDE_INT) habs_den - < (unsigned HOST_WIDE_INT) htwice) - || (((unsigned HOST_WIDE_INT) habs_den - == (unsigned HOST_WIDE_INT) htwice) - && (labs_den <= ltwice))) - { - if (*hquo < 0) - /* quo = quo - 1; */ - add_double (*lquo, *hquo, - (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo); - else - /* quo = quo + 1; */ - add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0, - lquo, hquo); - } - else - return overflow; - } - break; - - default: - gcc_unreachable (); - } - - /* Compute true remainder: rem = num - (quo * den) */ - mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem); - neg_double (*lrem, *hrem, lrem, hrem); - add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem); - return overflow; -} - /* If ARG2 divides ARG1 with zero remainder, carries out the division of type CODE and returns the quotient. Otherwise returns NULL_TREE. */ -- cgit v1.2.1 From ad086ed471d513856fff3cf8325b8d5e0ee571a4 Mon Sep 17 00:00:00 2001 From: ebotcazou Date: Sun, 18 Apr 2010 21:49:29 +0000 Subject: =?UTF-8?q?2010-04-18=20=C2=A0Eric=20Botcazou=20=C2=A0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fold-const.c (fold_comparison): Use ssizetype. * gimple-fold.c (maybe_fold_offset_to_array_ref): Likewise. * ipa-prop.c (ipa_modify_call_arguments): Use sizetype. * tree-loop-distribution.c (build_size_arg_loc): Likewise. * tree-object-size.c (compute_object_sizes): Use size_type_node. * tree.h (initialize_sizetypes): Remove parameter. (build_common_tree_nodes): Remove second parameter. * stor-layout.c (initialize_sizetypes): Remove parameter. Always create an unsigned type. (set_sizetype): Assert that the passed type is unsigned and simplify. * tree.c (build_common_tree_nodes): Remove second parameter. Adjust call to initialize_sizetypes. 
* c-decl.c (c_init_decl_processing): Remove second argument in call to build_common_tree_nodes. cp/ * decl.c (cxx_init_decl_processing): Remove second argument in call to build_common_tree_nodes. java/ * decl.c (java_init_decl_processing): Remove argument in call to initialize_sizetypes fortran/ * f95-lang.c (gfc_init_decl_processing): Remove second argument in call to build_common_tree_nodes. ada/ * gcc-interface/misc.c (gnat_init): Remove second argument in call to build_common_tree_nodes. lto/ * lto-lang.c (lto_init): Remove second argument in call to build_common_tree_nodes. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158496 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index c1af8248a39..85f9cd1fd91 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -8695,24 +8695,19 @@ fold_comparison (location_t loc, enum tree_code code, tree type, && ((code == EQ_EXPR || code == NE_EXPR) || POINTER_TYPE_OVERFLOW_UNDEFINED)) { - tree signed_size_type_node; - signed_size_type_node = signed_type_for (size_type_node); - /* By converting to signed size type we cover middle-end pointer arithmetic which operates on unsigned pointer types of size type size and ARRAY_REF offsets which are properly sign or zero extended from their type in case it is narrower than size type. 
*/ if (offset0 == NULL_TREE) - offset0 = build_int_cst (signed_size_type_node, 0); + offset0 = build_int_cst (ssizetype, 0); else - offset0 = fold_convert_loc (loc, signed_size_type_node, - offset0); + offset0 = fold_convert_loc (loc, ssizetype, offset0); if (offset1 == NULL_TREE) - offset1 = build_int_cst (signed_size_type_node, 0); + offset1 = build_int_cst (ssizetype, 0); else - offset1 = fold_convert_loc (loc, signed_size_type_node, - offset1); + offset1 = fold_convert_loc (loc, ssizetype, offset1); if (code != EQ_EXPR && code != NE_EXPR -- cgit v1.2.1 From 2b6cd5e4e8b388b09895c5ee0d44e4562ce7a311 Mon Sep 17 00:00:00 2001 From: davidxl Date: Tue, 20 Apr 2010 17:00:37 +0000 Subject: new folding rule git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158567 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 28 ++++++++++++++++++++++++++-- 1 file changed, 26 insertions(+), 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 85f9cd1fd91..cdae661733c 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -8643,9 +8643,33 @@ fold_comparison (location_t loc, enum tree_code code, tree type, offset1 = TREE_OPERAND (arg1, 1); } + /* A local variable can never be pointed to by + the default SSA name of an incoming parameter. 
*/ + if ((TREE_CODE (arg0) == ADDR_EXPR + && indirect_base0 + && TREE_CODE (base0) == VAR_DECL + && auto_var_in_fn_p (base0, current_function_decl) + && !indirect_base1 + && TREE_CODE (base1) == SSA_NAME + && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL + && SSA_NAME_IS_DEFAULT_DEF (base1)) + || (TREE_CODE (arg1) == ADDR_EXPR + && indirect_base1 + && TREE_CODE (base1) == VAR_DECL + && auto_var_in_fn_p (base1, current_function_decl) + && !indirect_base0 + && TREE_CODE (base0) == SSA_NAME + && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL + && SSA_NAME_IS_DEFAULT_DEF (base0))) + { + if (code == NE_EXPR) + return constant_boolean_node (1, type); + else if (code == EQ_EXPR) + return constant_boolean_node (0, type); + } /* If we have equivalent bases we might be able to simplify. */ - if (indirect_base0 == indirect_base1 - && operand_equal_p (base0, base1, 0)) + else if (indirect_base0 == indirect_base1 + && operand_equal_p (base0, base1, 0)) { /* We can fold this expression to a constant if the non-constant offset parts are equal. */ -- cgit v1.2.1 From e230978bae4ce492c8aed7d5c2535b634ccea064 Mon Sep 17 00:00:00 2001 From: bernds Date: Sat, 24 Apr 2010 18:53:47 +0000 Subject: gcc/ PR tree-optimization/41442 * fold-const.c (merge_truthop_with_opposite_arm): New function. (fold_binary_loc): Call it. gcc/testsuite/ PR tree-optimization/41442 * gcc.target/i386/pr41442.c: New test. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@158689 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 86 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 86 insertions(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index cdae661733c..ffd9d30ef2e 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -4997,6 +4997,76 @@ unextend (tree c, int p, int unsignedp, tree mask) const_binop (BIT_XOR_EXPR, c, temp, 0)); } +/* For an expression that has the form + (A && B) || ~B + or + (A || B) && ~B, + we can drop one of the inner expressions and simplify to + A || ~B + or + A && ~B + LOC is the location of the resulting expression. OP is the inner + logical operation; the left-hand side in the examples above, while CMPOP + is the right-hand side. RHS_ONLY is used to prevent us from accidentally + removing a condition that guards another, as in + (A != NULL && A->...) || A == NULL + which we must not transform. If RHS_ONLY is true, only eliminate the + right-most operand of the inner logical operation. 
*/ + +static tree +merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop, + bool rhs_only) +{ + tree type = TREE_TYPE (cmpop); + enum tree_code code = TREE_CODE (cmpop); + enum tree_code truthop_code = TREE_CODE (op); + tree lhs = TREE_OPERAND (op, 0); + tree rhs = TREE_OPERAND (op, 1); + tree orig_lhs = lhs, orig_rhs = rhs; + enum tree_code rhs_code = TREE_CODE (rhs); + enum tree_code lhs_code = TREE_CODE (lhs); + enum tree_code inv_code; + + if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop)) + return NULL_TREE; + + if (TREE_CODE_CLASS (code) != tcc_comparison) + return NULL_TREE; + + if (rhs_code == truthop_code) + { + tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only); + if (newrhs != NULL_TREE) + { + rhs = newrhs; + rhs_code = TREE_CODE (rhs); + } + } + if (lhs_code == truthop_code && !rhs_only) + { + tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false); + if (newlhs != NULL_TREE) + { + lhs = newlhs; + lhs_code = TREE_CODE (lhs); + } + } + + inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type))); + if (inv_code == rhs_code + && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0) + && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0)) + return lhs; + if (!rhs_only && inv_code == lhs_code + && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0) + && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0)) + return rhs; + if (rhs != orig_rhs || lhs != orig_lhs) + return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop), + lhs, rhs); + return NULL_TREE; +} + /* Find ways of folding logical expressions of LHS and RHS: Try to merge two comparisons to the same innermost item. Look for range tests like "ch >= '0' && ch <= '9'". 
@@ -11833,6 +11903,22 @@ fold_binary_loc (location_t loc, if (0 != (tem = fold_range_test (loc, code, type, op0, op1))) return tem; + if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR) + || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR)) + { + tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true); + if (tem) + return fold_build2_loc (loc, code, type, tem, arg1); + } + + if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR) + || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR)) + { + tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false); + if (tem) + return fold_build2_loc (loc, code, type, arg0, tem); + } + /* Check for the possibility of merging component references. If our lhs is another similar operation, try to merge its rhs with our rhs. Then try to merge our lhs and rhs. */ -- cgit v1.2.1 From 3760428fe7d598ecb20f6439eb0da84fbcea3830 Mon Sep 17 00:00:00 2001 From: rguenth Date: Sun, 9 May 2010 18:17:33 +0000 Subject: 2010-05-09 Richard Guenther PR middle-end/44024 * fold-const.c (tree_single_nonzero_warnv_p): Properly handle &FUNCTION_DECL. * gcc.dg/pr44024.c: New testcase. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@159205 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index ffd9d30ef2e..17a753692d5 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -14917,7 +14917,9 @@ tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p) case ADDR_EXPR: { - tree base = get_base_address (TREE_OPERAND (t, 0)); + tree base = TREE_OPERAND (t, 0); + if (!DECL_P (base)) + base = get_base_address (base); if (!base) return false; @@ -14927,7 +14929,9 @@ tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p) allocated on the stack. 
*/ if (DECL_P (base) && (flag_delete_null_pointer_checks - || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base)))) + || (DECL_CONTEXT (base) + && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL + && auto_var_in_fn_p (base, DECL_CONTEXT (base))))) return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base); /* Constants are never weak. */ -- cgit v1.2.1 From c5083e8b59f188c155a36a3335d803220428f7a9 Mon Sep 17 00:00:00 2001 From: aesok Date: Wed, 19 May 2010 20:09:57 +0000 Subject: * double-int.h (double_int_ior): New function. * tree.h (build_int_cst_wide_type): Remove. * tree.c (build_int_cst_wide_type): Remove. * fold-const.c (native_interpret_int): Use double_int_to_tree instead of build_int_cst_wide_type. * stor-layout.c (set_sizetype): (Ditto.). * dojump.c (do_jump): Use build_int_cstu instead of build_int_cst_wide_type. /java * jcf-parse.c (get_constant): Use double_int_to_tree instead of build_int_cst_wide_type. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@159595 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 17a753692d5..e92a674277a 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -7408,13 +7408,14 @@ native_interpret_int (tree type, const unsigned char *ptr, int len) int total_bytes = GET_MODE_SIZE (TYPE_MODE (type)); int byte, offset, word, words; unsigned char value; - unsigned int HOST_WIDE_INT lo = 0; - HOST_WIDE_INT hi = 0; + double_int result; if (total_bytes > len) return NULL_TREE; if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT) return NULL_TREE; + + result = double_int_zero; words = total_bytes / UNITS_PER_WORD; for (byte = 0; byte < total_bytes; byte++) @@ -7436,13 +7437,13 @@ native_interpret_int (tree type, const unsigned char *ptr, int len) value = ptr[offset]; if (bitpos < HOST_BITS_PER_WIDE_INT) - lo |= (unsigned HOST_WIDE_INT) value << bitpos; + 
result.low |= (unsigned HOST_WIDE_INT) value << bitpos; else - hi |= (unsigned HOST_WIDE_INT) value - << (bitpos - HOST_BITS_PER_WIDE_INT); + result.high |= (unsigned HOST_WIDE_INT) value + << (bitpos - HOST_BITS_PER_WIDE_INT); } - return build_int_cst_wide_type (type, lo, hi); + return double_int_to_tree (type, result); } -- cgit v1.2.1 From a7a4626828090600459358ca745c4482cf9551a1 Mon Sep 17 00:00:00 2001 From: steven Date: Fri, 21 May 2010 13:53:22 +0000 Subject: gcc/ChangeLog: * tree.h: Include real.h and fixed-value.h as basic datatypes. * dfp.c, convert.c, reload1.c, reginfo.c, tree-flow.h, tree-ssa-threadedge.c, tree-ssanames.c, tree-loop-linear.c, tree-into-ssa.c, tree-vect-generic.c, tree-ssa-structalias.c, tree-ssa-loop-im.c, tree-dump.c, tree-complex.c, tree-ssa-uninit.c, genrecog.c, tree-ssa-threadupdate.c, tree-ssa-loop-niter.c, tree-pretty-print.c, tree-loop-distribution.c, tree-ssa-loop-unswitch.c, c-lex.c, optabs.c, postreload-gcse.c, tree-ssa-loop-manip.c, postreload.c, tree-ssa-loop-ch.c, tree-tailcall.c, tree.c, reload.c, tree-scalar-evolution.c, rtlanal.c, tree-phinodes.c, builtins.c, final.c, genoutput.c, fold-const.c, tree-ssa-dse.c, genautomata.c, tree-ssa-uncprop.c, toplev.c, tree-chrec.c, genemit.c, c-cppbuiltin.c, tree-ssa-sccvn.c, tree-ssa-ccp.c, tree-ssa-loop-ivopts.c, mode-switching.c, tree-call-cdce.c, cse.c, genpeep.c, tree-ssa-math-opts.c, tree-ssa-dom.c, tree-nrv.c, tree-ssa-propagate.c, tree-ssa-alias.c, tree-ssa-sink.c, jump.c, ifcvt.c, dwarf2out.c, expr.c, genattrtab.c, genconditions.c, tree-ssa-loop-ivcanon.c, tree-ssa-loop.c, tree-parloops.c, recog.c, tree-ssa-address.c, lcm.c, tree-eh.c, gimple-pretty-print.c, c-pretty-print.c, print-rtl.c, gcse.c, tree-if-conv.c, tree-data-ref.c, tree-affine.c, gimplify.c, tree-ssa-phiopt.c, implicit-zee.c, expmed.c, tree-dfa.c, emit-rtl.c, store-motion.c, cselib.c, tree-cfgcleanup.c, simplify-rtx.c, tree-ssa-pre.c, genpreds.c, tree-mudflap.c, print-tree.c, tree-ssa-copy.c, 
tree-ssa-forwprop.c, tree-ssa-dce.c, varasm.c, tree-nested.c, tree-ssa.c, tree-ssa-loop-prefetch.c, rtl.c, tree-inline.c, integrate.c, tree-optimize.c, tree-ssa-phiprop.c, fixed-value.c, combine.c, tree-profile.c, c-common.c, sched-vis.c, tree-cfg.c, passes.c, tree-ssa-reassoc.c, config/alpha/alpha.c, config/frv/frv.c, config/s390/s390.c, config/m32c/m32c.c, config/spu/spu.c, config/sparc/sparc.c, config/mep/mep.c, config/m32r/m32r.c, config/rx/rx.c, config/i386/i386.c, config/sh/sh.c, config/pdp11/pdp11.c, config/avr/avr.c, config/crx/crx.c, config/xtensa/xtensa.c, config/stormy16/stormy16.c, config/fr30/fr30.c, config/lm32/lm32.c, config/moxie/moxie.c, config/m68hc11/m68hc11.c, config/cris/cris.c, config/iq2000/iq2000.c, config/mn10300/mn10300.c, config/ia64/ia64.c, config/m68k/m68k.c, config/rs6000/rs6000.c, config/picochip/picochip.c, config/darwin.c, config/arc/arc.c, config/mcore/mcore.c, config/score/score3.c, config/score/score7.c, config/score/score.c, config/arm/arm.c, config/pa/pa.c, config/mips/mips.c, config/vax/vax.c, config/h8300/h8300.c, config/v850/v850.c, config/mmix/mmix.c, config/bfin/bfin.c: Clean up redundant includes. * Makefile.in: Update accordingly. java/ChangeLog: * typeck.c, decl.c, jcf-parse.c, except.c, expr.c: cp/Changelog: * error.c, tree.c, typeck2.c, cxx-pretty-print.c, mangle.c: Clean up redundant includes. fortran/ChangeLog: * trans-const.c, trans-types.c, trans-intrinsic.c: Clean up redundant includes. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@159663 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2 -- 1 file changed, 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index e92a674277a..65ded14e95f 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -53,8 +53,6 @@ along with GCC; see the file COPYING3. 
If not see #include "tm.h" #include "flags.h" #include "tree.h" -#include "real.h" -#include "fixed-value.h" #include "rtl.h" #include "expr.h" #include "tm_p.h" -- cgit v1.2.1 From dae0b5cb50db9ca6cef431ff04cc3dcb322eb943 Mon Sep 17 00:00:00 2001 From: steven Date: Fri, 21 May 2010 18:13:54 +0000 Subject: gcc/ChangeLog: * real: Do not include gmp.h, mpfr.h, and mpc.h. (REAL_VALUE_NEGATE, REAL_VALUE_ABS, real_arithmetic2): Remove. (real_value_negate, real_value_abs): New prototypes. (do_mpc_arg2, real_from_mpfr, mpfr_from_real): Move from here... * realmpfr.h (do_mpc_arg2, real_from_mpfr, mpfr_from_real): ...to here, new include file for interface between MPFR and REAL_VALUE_TYPE. * real.c: Include realmpfr.h. (real_arithmetic2): Remove legacy function. (real_value_negate): New. (real_value_abs): New. (mfpr_from_real, real_from_mpfr): Move from here... * realmpfr.c (mpfr_from_real, real_from_mpfr): ...to here, new file. * builtins.c: Include realmpfr.h. * fold-const.c: Include realmpfr.h. (fold_comparison): Use real_value_negate instead of REAL_VALUE_NEGATE. (fold_negate_const): Likewise. (fold_abs_const): Use real_value_abs instead of REAL_VALUE_ABS. * toplev.c: Include realmpfr.h. * simplify-rtx.c (simplify_const_unary_operation): Use real_value_abs and real_value_negate. * fixed-value.c (check_real_for_fixed_mode): Likewise. * config/arm/arm.c (neg_const_double_rtx_ok_for_fpa): Likewise. (vfp3_const_double_index): Likewise. (arm_print_operand): Likewise. * Makefile.in: Update dependencies. fortran/ChangeLog: * trans-const.c: Include realmpfr.h. * Make-lang.in: Update dependencies. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@159679 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 65ded14e95f..17672ad6c43 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -53,6 +53,7 @@ along with GCC; see the file COPYING3. If not see #include "tm.h" #include "flags.h" #include "tree.h" +#include "realmpfr.h" #include "rtl.h" #include "expr.h" #include "tm_p.h" @@ -8977,7 +8978,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type, return fold_build2_loc (loc, swap_tree_comparison (code), type, TREE_OPERAND (arg0, 0), build_real (TREE_TYPE (arg1), - REAL_VALUE_NEGATE (cst))); + real_value_negate (&cst))); /* IEEE doesn't distinguish +0 and -0 in comparisons. */ /* a CMP (-0) -> a CMP 0 */ @@ -15166,7 +15167,7 @@ fold_negate_const (tree arg0, tree type) } case REAL_CST: - t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0))); + t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); break; case FIXED_CST: @@ -15225,7 +15226,7 @@ fold_abs_const (tree arg0, tree type) case REAL_CST: if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0))) - t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0))); + t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0))); else t = arg0; break; -- cgit v1.2.1 From 4b987facd8ba658d00c277a7e9c46548b492854f Mon Sep 17 00:00:00 2001 From: hubicka Date: Sat, 29 May 2010 20:31:45 +0000 Subject: * tree-vrp.c (debug_value_range, debug_all_value_ranges, debug_asserts_for, debug_all_asserts): Annotate with DEBUG_FUNCTION. * tree-into-ssa.c (debug_decl_set, debug_defs_stack, debug_currdefs, debug_tree_ssa, debug_tree_ssa_stats, debug_def_blocks, debug_names_replaced_by, debug_update_ssa): Likewise. * sbitmap.c (debug_sbitmap): Likewise. * genrecog.c (debug_decision, debug_decision_list): Likewise. 
* tree-pretty-print.c (debug_generic_expr, debug_generic_stmt, debug_tree_chain): Likewise. * tree-loop-distribution.c (debug_rdg_partitions): Likewise. * cgraph.c (debug_cgraph_node, debug_cgraph): Likewise. * optabs.c (debug_optab_libfuncs): Likewise. (verify_loop_closed_ssa): Likewise. * value-prof.c (verify_histograms): Likewise. * reload.c (debug_reload_to_stream, debug_reload): Likewise. * bitmap.c (debug_bitmap_file, debug_bitmap, bitmap_print): Likewise. * cfghooks.c (verify_flow_info): Likewise. * fold-const.c (debug_fold_checksum): Likewise. * omp-low.c (debug_omp_region, debug_all_omp_regions): Likewise. * cfg.c (debug_regset, debug_flow_info, debug_bb, debug_bb_n): Likewise. * omega.c (debug_omega_problem): Likewise. * cgraphunit.c (verify_cgraph_node, verify_cgraph): Likewise. * tree-ssa-ccp.c (debug_lattice_value): Likewise. * dominance.c (verify_dominators, debug_dominance_info, debug_dominance_tree): Likewise. * df-core.c (df_insn_uid_debug, df_insn_debug, df_insn_debug_regno, * df_regno_debug, df_ref_debug, debug_df_insn, debug_df_reg, debug_df_regno, debug_df_ref, debug_df_defno, debug_df_useno, debug_df_chain): Likewise. * tree-ssa-dom.c (debug_dominator_optimization_stats): Likewise. * sel-sched.c (debug_state): Likewise. * tree-ssa-alias.c (debug_alias_info, debug_points_to_info_for): Likewise. * cfganal.c (print_edge_list, verify_edge_list): Likewise. * dwarf2out.c (debug_dwarf_die, debug_dwarf): Likewise. * tree-eh.c (verify_eh_edges, verify_eh_dispatch_edge): Likewise. * gimple-pretty-print.c (debug_gimple_stmt, debug_gimple_seq): Likewise. * c-pretty-print.c (debug_c_tree): Likewise. * sel-sched-dump.c (debug_insn_rtx, debug_vinsn, debug_expr, debug_insn debug_av_set, debug_lv_set, debug_ilist, debug_blist, debug_insn_vector, debug_hard_reg_set, debug_mem_addr_value): Likewise. * ebitmap.c (debug_ebitmap): Likewise. * function.c (debug_find_var_in_block_tree): Likewise. * print-rtl.c (debug_rtx): Likewise. (debug_rtx_count): Likewise. 
(debug_rtx_list, debug_rtx_range, debug_rtx_find): Likewise. * stor-layout.c (debug_rli): Likewise. * ipa.c (debug_cgraph_node_set, debug_varpool_node_set): Likewise. * tree-data-ref.c (debug_data_references, debug_data_dependence_relations, debug_data_reference, debug_data_dependence_relation, debug_rdg_vertex, debug_rdg_component, debug_rdg): Likewise. * tree-affine.c (debug_aff): Likewise. * tree-dfa.c (debug_referenced_vars, debug_variable, debug_dfa_stats): Likewise. * except.c (debug_eh_tree, verify_eh_tree): Likewise. * emit-rtl.c (verify_rtl_sharing): Likewise. * tree-ssa-pre.c (debug_pre_expr, debug_bitmap_set, debug_value_expressions): Likewise. * tree-ssa-live.c (debug_scope_block, debug_scope_blocks): Likewise. * sese.c (debug_rename_map, debug_ivtype_map): Likewise. * print-tree.c (debug_tree, debug_vec_tree): Likewise. * cfglayout.c (verify_insn_chain): Likewise. * graphite-clast-to-gimple.c (debug_clast_name_indexes, debug_clast_stmt, debug_generated_program): Likewise. * ggc-page.c (debug_print_page_list): Likewise. * tree-ssa-ter.c (debug_ter): Likewise. * graphite-dependences.c (debug_pddr): Likewise. * sched-deps.c (debug_ds): Likewise. * tree-ssa.c (verify_ssa): Likewise. * graphite-poly.c (debug_scattering_function, debug_iteration_domain, debug_scattering_functions, debug_iteration_domains, debug_pdr, debug_pdrs, debug_pbb_domain, debug_pbb, debug_scop_context, debug_scop, debug_cloog, debug_scop_params, debug_lst): Likewise. * tree-inline.c (debug_find_tree): Likewise. * graphite-ppl.c (debug_ppl_linear_expr, debug_ppl_polyhedron_matrix, debug_ppl_powerset_matrix): Likewise. * var-tracking.c (debug_dv): Likewise. * system.h (DEBUG_FUNCTION, DEBUG_VARIABLE): Define. * cfgloop.c (verify_loop_structure): Likewise. * plugin.c (dump_active_plugins, debug_active_plugins): Likewise. * c-common.c (verify_sequence_points): Likewise. * sched-rgn.c (debug_regions, debug_region, debug_candidate, debug_candidates, debug_rgn_dependencies): Likewise. 
* tree-ssa-structalias.c (debug_constraint, debug_constraints, * debug_constraint_graph, debug_solution_for_var, debug_sa_points_to_info): Likewise. * sched-vis.c (debug_insn_slim, debug_bb_slim, debug_bb_n_slim): Likewie. * tree-cfg.c (debug_cfg_stats, verify_stmts, debug_function, debug_loops, debug_loop, debug_loop_num): Likewise. * passes.c (debug_pass): Likewise. (dump_properties): Likewise; add cfglayout property. (debug_properties): Likewise. * tree-ssa-reassoc.c (debug_ops_vector): Likewise. * varpool.c (debug_varpool): Likewise. * regcprop.c (debug_value_data): Likewise. * tree-ssa-operands.c (verify_imm_links, debug_immediate_uses, debug_immediate_uses_for): Likewise. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@160036 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 17672ad6c43..b6e8ed68783 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -13768,7 +13768,7 @@ recursive_label: by "call debug_fold_checksum (op0)", then just trace down till the outputs differ. */ -void +DEBUG_FUNCTION void debug_fold_checksum (const_tree t) { int i; -- cgit v1.2.1 From 836045d7d14b773d7c2dcca5386c1a350766d173 Mon Sep 17 00:00:00 2001 From: pzhao Date: Tue, 8 Jun 2010 04:07:55 +0000 Subject: 2010-06-08 Shujing Zhao * fold-const.c (fold_comparison): Remove redundant parenthesis. * tree-inline.c (expand_call_inline): Pass translated return value of cgraph_inline_failed_string to diagnostic function. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@160419 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index b6e8ed68783..9f2c250e235 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -8651,9 +8651,9 @@ fold_comparison (location_t loc, enum tree_code code, tree type, && (TREE_CODE (lhs) != INTEGER_CST || !TREE_OVERFLOW (lhs))) { - fold_overflow_warning (("assuming signed overflow does not occur " + fold_overflow_warning ("assuming signed overflow does not occur " "when changing X +- C1 cmp C2 to " - "X cmp C1 +- C2"), + "X cmp C1 +- C2", WARN_STRICT_OVERFLOW_COMPARISON); return fold_build2_loc (loc, code, type, variable, lhs); } -- cgit v1.2.1 From a420d927846c59b8b089ccdd66361ee2d0fe0962 Mon Sep 17 00:00:00 2001 From: dj Date: Wed, 16 Jun 2010 22:52:25 +0000 Subject: * common.opt (-fstrict-volatile-bitfields): new. * doc/invoke.texi: Document it. * fold-const.c (optimize_bit_field_compare): For volatile bitfields, use the field's type to determine the mode, not the field's size. * expr.c (expand_assignment): Likewise. (get_inner_reference): Likewise. (expand_expr_real_1): Likewise. * expmed.c (store_fixed_bit_field): Likewise. (extract_bit_field_1): Likewise. (extract_fixed_bit_field): Likewise. * gcc.target/i386/volatile-bitfields-1.c: New. * gcc.target/i386/volatile-bitfields-2.c: New. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@160865 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 9f2c250e235..9abc94e8457 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -3463,11 +3463,16 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, /* See if we can find a mode to refer to this field. We should be able to, but fail if we can't. 
*/ - nmode = get_best_mode (lbitsize, lbitpos, - const_p ? TYPE_ALIGN (TREE_TYPE (linner)) - : MIN (TYPE_ALIGN (TREE_TYPE (linner)), - TYPE_ALIGN (TREE_TYPE (rinner))), - word_mode, lvolatilep || rvolatilep); + if (lvolatilep + && GET_MODE_BITSIZE (lmode) > 0 + && flag_strict_volatile_bitfields > 0) + nmode = lmode; + else + nmode = get_best_mode (lbitsize, lbitpos, + const_p ? TYPE_ALIGN (TREE_TYPE (linner)) + : MIN (TYPE_ALIGN (TREE_TYPE (linner)), + TYPE_ALIGN (TREE_TYPE (rinner))), + word_mode, lvolatilep || rvolatilep); if (nmode == VOIDmode) return 0; -- cgit v1.2.1 From 90739616821228b8d42607fdd1fcddcb23919574 Mon Sep 17 00:00:00 2001 From: aesok Date: Fri, 18 Jun 2010 15:58:48 +0000 Subject: * double-int.h (double_int_to_shwi, double_int_to_uhwi, double_int_fits_in_uhwi_p): Implement as static inline. (double_int_xor): New inline function. (double_int_lrotate, double_int_rrotate, double_int_max, double_int_umax, double_int_smax, double_int_min, double_int_umin, double_int_smin): Declare. (lrotate_double, rrotate_double): Remove declaration. * double-int.c (double_int_fits_in_uhwi_p, double_int_to_shwi, double_int_to_uhwi, lrotate_double, rrotate_double): Remove function. (double_int_lrotate, double_int_rrotate, double_int_max, double_int_umax, double_int_smax, double_int_min, double_int_umin, double_int_smin): New function. * fold-const.c (int_const_binop): Clean up, use double_int_* functions. * simplify-rtx.c (simplify_const_binary_operation): Clean up, use double_int_* and immed_double_int_const functions. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@161002 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 119 ++++++++++++++++++++++++++----------------------------- 1 file changed, 57 insertions(+), 62 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 9abc94e8457..a1cd552014c 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -924,145 +924,140 @@ int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2 tree int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc) { - unsigned HOST_WIDE_INT int1l, int2l; - HOST_WIDE_INT int1h, int2h; - unsigned HOST_WIDE_INT low; - HOST_WIDE_INT hi; - unsigned HOST_WIDE_INT garbagel; - HOST_WIDE_INT garbageh; + double_int op1, op2, res, tmp; tree t; tree type = TREE_TYPE (arg1); - int uns = TYPE_UNSIGNED (type); - int is_sizetype + bool uns = TYPE_UNSIGNED (type); + bool is_sizetype = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)); - int overflow = 0; + bool overflow = false; - int1l = TREE_INT_CST_LOW (arg1); - int1h = TREE_INT_CST_HIGH (arg1); - int2l = TREE_INT_CST_LOW (arg2); - int2h = TREE_INT_CST_HIGH (arg2); + op1 = tree_to_double_int (arg1); + op2 = tree_to_double_int (arg2); switch (code) { case BIT_IOR_EXPR: - low = int1l | int2l, hi = int1h | int2h; + res = double_int_ior (op1, op2); break; case BIT_XOR_EXPR: - low = int1l ^ int2l, hi = int1h ^ int2h; + res = double_int_xor (op1, op2); break; case BIT_AND_EXPR: - low = int1l & int2l, hi = int1h & int2h; + res = double_int_and (op1, op2); break; case RSHIFT_EXPR: - int2l = -int2l; + res = double_int_rshift (op1, double_int_to_shwi (op2), + TYPE_PRECISION (type), !uns); + break; + case LSHIFT_EXPR: /* It's unclear from the C standard whether shifts can overflow. The following code ignores overflow; perhaps a C standard interpretation ruling is needed. 
*/ - lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type), - &low, &hi, !uns); + res = double_int_lshift (op1, double_int_to_shwi (op2), + TYPE_PRECISION (type), !uns); break; case RROTATE_EXPR: - int2l = - int2l; + res = double_int_rrotate (op1, double_int_to_shwi (op2), + TYPE_PRECISION (type)); + break; + case LROTATE_EXPR: - lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type), - &low, &hi); + res = double_int_lrotate (op1, double_int_to_shwi (op2), + TYPE_PRECISION (type)); break; case PLUS_EXPR: - overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi); + overflow = add_double (op1.low, op1.high, op2.low, op2.high, + &res.low, &res.high); break; case MINUS_EXPR: - neg_double (int2l, int2h, &low, &hi); - add_double (int1l, int1h, low, hi, &low, &hi); - overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h); + neg_double (op2.low, op2.high, &res.low, &res.high); + add_double (op1.low, op1.high, res.low, res.high, + &res.low, &res.high); + overflow = OVERFLOW_SUM_SIGN (res.high, op2.high, op1.high); break; case MULT_EXPR: - overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi); + overflow = mul_double (op1.low, op1.high, op2.low, op2.high, + &res.low, &res.high); break; case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR: case EXACT_DIV_EXPR: /* This is a shortcut for a common special case. */ - if (int2h == 0 && (HOST_WIDE_INT) int2l > 0 + if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0 && !TREE_OVERFLOW (arg1) && !TREE_OVERFLOW (arg2) - && int1h == 0 && (HOST_WIDE_INT) int1l >= 0) + && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0) { if (code == CEIL_DIV_EXPR) - int1l += int2l - 1; + op1.low += op2.low - 1; - low = int1l / int2l, hi = 0; + res.low = op1.low / op2.low, res.high = 0; break; } /* ... fall through ... 
*/ case ROUND_DIV_EXPR: - if (int2h == 0 && int2l == 0) + if (double_int_zero_p (op2)) return NULL_TREE; - if (int2h == 0 && int2l == 1) + if (double_int_one_p (op2)) { - low = int1l, hi = int1h; + res = op1; break; } - if (int1l == int2l && int1h == int2h - && ! (int1l == 0 && int1h == 0)) + if (double_int_equal_p (op1, op2) + && ! double_int_zero_p (op1)) { - low = 1, hi = 0; + res = double_int_one; break; } - overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h, - &low, &hi, &garbagel, &garbageh); + overflow = div_and_round_double (code, uns, + op1.low, op1.high, op2.low, op2.high, + &res.low, &res.high, + &tmp.low, &tmp.high); break; case TRUNC_MOD_EXPR: case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR: /* This is a shortcut for a common special case. */ - if (int2h == 0 && (HOST_WIDE_INT) int2l > 0 + if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0 && !TREE_OVERFLOW (arg1) && !TREE_OVERFLOW (arg2) - && int1h == 0 && (HOST_WIDE_INT) int1l >= 0) + && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0) { if (code == CEIL_MOD_EXPR) - int1l += int2l - 1; - low = int1l % int2l, hi = 0; + op1.low += op2.low - 1; + res.low = op1.low % op2.low, res.high = 0; break; } /* ... fall through ... 
*/ case ROUND_MOD_EXPR: - if (int2h == 0 && int2l == 0) + if (double_int_zero_p (op2)) return NULL_TREE; overflow = div_and_round_double (code, uns, - int1l, int1h, int2l, int2h, - &garbagel, &garbageh, &low, &hi); + op1.low, op1.high, op2.low, op2.high, + &tmp.low, &tmp.high, + &res.low, &res.high); break; case MIN_EXPR: - case MAX_EXPR: - if (uns) - low = (((unsigned HOST_WIDE_INT) int1h - < (unsigned HOST_WIDE_INT) int2h) - || (((unsigned HOST_WIDE_INT) int1h - == (unsigned HOST_WIDE_INT) int2h) - && int1l < int2l)); - else - low = (int1h < int2h - || (int1h == int2h && int1l < int2l)); + res = double_int_min (op1, op2, uns); + break; - if (low == (code == MIN_EXPR)) - low = int1l, hi = int1h; - else - low = int2l, hi = int2h; + case MAX_EXPR: + res = double_int_max (op1, op2, uns); break; default: @@ -1071,7 +1066,7 @@ int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notr if (notrunc) { - t = build_int_cst_wide (TREE_TYPE (arg1), low, hi); + t = build_int_cst_wide (TREE_TYPE (arg1), res.low, res.high); /* Propagate overflow flags ourselves. */ if (((!uns || is_sizetype) && overflow) @@ -1082,7 +1077,7 @@ int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notr } } else - t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1, + t = force_fit_type_double (TREE_TYPE (arg1), res.low, res.high, 1, ((!uns || is_sizetype) && overflow) | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)); -- cgit v1.2.1 From d6973489e43d103d0a3669cc2112c122ec083fd1 Mon Sep 17 00:00:00 2001 From: aesok Date: Thu, 24 Jun 2010 19:11:19 +0000 Subject: * fold-const.c (const_binop): Remove 'notrunc' argement. Adjust recursive call and call to 'int_const_binop'. (build_range_check, fold_cond_expr_with_comparison, unextend, fold_truthop, extract_muldiv_1, fold_comparison, fold_binary_loc, multiple_of_p): Adjust call to const_binop. 
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@161336 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 214 +++++++++++++++++++++++++------------------------------ 1 file changed, 99 insertions(+), 115 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index a1cd552014c..7e7c0a20251 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -97,7 +97,7 @@ static bool negate_expr_p (tree); static tree negate_expr (tree); static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int); static tree associate_trees (location_t, tree, tree, enum tree_code, tree); -static tree const_binop (enum tree_code, tree, tree, int); +static tree const_binop (enum tree_code, tree, tree); static enum comparison_code comparison_to_compcode (enum tree_code); static enum tree_code compcode_to_comparison (enum comparison_code); static int operand_equal_for_comparison_p (tree, tree, tree); @@ -1087,12 +1087,10 @@ int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notr /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new constant. We assume ARG1 and ARG2 have the same data type, or at least are the same kind of constant and the same machine mode. Return zero if - combining the constants is not allowed in the current operating mode. - - If NOTRUNC is nonzero, do not truncate the result to fit the data type. */ + combining the constants is not allowed in the current operating mode. */ static tree -const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) +const_binop (enum tree_code code, tree arg1, tree arg2) { /* Sanity check for the recursive cases. 
*/ if (!arg1 || !arg2) @@ -1102,7 +1100,7 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) STRIP_NOPS (arg2); if (TREE_CODE (arg1) == INTEGER_CST) - return int_const_binop (code, arg1, arg2, notrunc); + return int_const_binop (code, arg1, arg2, 0); if (TREE_CODE (arg1) == REAL_CST) { @@ -1236,8 +1234,8 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) { case PLUS_EXPR: case MINUS_EXPR: - real = const_binop (code, r1, r2, notrunc); - imag = const_binop (code, i1, i2, notrunc); + real = const_binop (code, r1, r2); + imag = const_binop (code, i1, i2); break; case MULT_EXPR: @@ -1247,13 +1245,11 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) mpc_mul); real = const_binop (MINUS_EXPR, - const_binop (MULT_EXPR, r1, r2, notrunc), - const_binop (MULT_EXPR, i1, i2, notrunc), - notrunc); + const_binop (MULT_EXPR, r1, r2), + const_binop (MULT_EXPR, i1, i2)); imag = const_binop (PLUS_EXPR, - const_binop (MULT_EXPR, r1, i2, notrunc), - const_binop (MULT_EXPR, i1, r2, notrunc), - notrunc); + const_binop (MULT_EXPR, r1, i2), + const_binop (MULT_EXPR, i1, r2)); break; case RDIV_EXPR: @@ -1277,22 +1273,19 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) */ tree magsquared = const_binop (PLUS_EXPR, - const_binop (MULT_EXPR, r2, r2, notrunc), - const_binop (MULT_EXPR, i2, i2, notrunc), - notrunc); + const_binop (MULT_EXPR, r2, r2), + const_binop (MULT_EXPR, i2, i2)); tree t1 = const_binop (PLUS_EXPR, - const_binop (MULT_EXPR, r1, r2, notrunc), - const_binop (MULT_EXPR, i1, i2, notrunc), - notrunc); + const_binop (MULT_EXPR, r1, r2), + const_binop (MULT_EXPR, i1, i2)); tree t2 = const_binop (MINUS_EXPR, - const_binop (MULT_EXPR, i1, r2, notrunc), - const_binop (MULT_EXPR, r1, i2, notrunc), - notrunc); + const_binop (MULT_EXPR, i1, r2), + const_binop (MULT_EXPR, r1, i2)); - real = const_binop (code, t1, magsquared, notrunc); - imag = const_binop (code, t2, magsquared, notrunc); + real = 
const_binop (code, t1, magsquared); + imag = const_binop (code, t2, magsquared); } else { @@ -1314,18 +1307,16 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) ti = (ai * ratio) - ar; tr = tr / div; ti = ti / div; */ - tree ratio = const_binop (code, r2, i2, notrunc); + tree ratio = const_binop (code, r2, i2); tree div = const_binop (PLUS_EXPR, i2, - const_binop (MULT_EXPR, r2, ratio, - notrunc), - notrunc); - real = const_binop (MULT_EXPR, r1, ratio, notrunc); - real = const_binop (PLUS_EXPR, real, i1, notrunc); - real = const_binop (code, real, div, notrunc); - - imag = const_binop (MULT_EXPR, i1, ratio, notrunc); - imag = const_binop (MINUS_EXPR, imag, r1, notrunc); - imag = const_binop (code, imag, div, notrunc); + const_binop (MULT_EXPR, r2, ratio)); + real = const_binop (MULT_EXPR, r1, ratio); + real = const_binop (PLUS_EXPR, real, i1); + real = const_binop (code, real, div); + + imag = const_binop (MULT_EXPR, i1, ratio); + imag = const_binop (MINUS_EXPR, imag, r1); + imag = const_binop (code, imag, div); } else { @@ -1336,19 +1327,17 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) ti = b - (a * ratio); tr = tr / div; ti = ti / div; */ - tree ratio = const_binop (code, i2, r2, notrunc); + tree ratio = const_binop (code, i2, r2); tree div = const_binop (PLUS_EXPR, r2, - const_binop (MULT_EXPR, i2, ratio, - notrunc), - notrunc); + const_binop (MULT_EXPR, i2, ratio)); - real = const_binop (MULT_EXPR, i1, ratio, notrunc); - real = const_binop (PLUS_EXPR, real, r1, notrunc); - real = const_binop (code, real, div, notrunc); + real = const_binop (MULT_EXPR, i1, ratio); + real = const_binop (PLUS_EXPR, real, r1); + real = const_binop (code, real, div); - imag = const_binop (MULT_EXPR, r1, ratio, notrunc); - imag = const_binop (MINUS_EXPR, i1, imag, notrunc); - imag = const_binop (code, imag, div, notrunc); + imag = const_binop (MULT_EXPR, r1, ratio); + imag = const_binop (MINUS_EXPR, i1, imag); + imag = const_binop 
(code, imag, div); } } break; @@ -1394,7 +1383,7 @@ const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc) elements2 = TREE_CHAIN (elements2); } - elem = const_binop (code, elem1, elem2, notrunc); + elem = const_binop (code, elem1, elem2); /* It is possible that const_binop cannot handle the given code and return NULL_TREE */ @@ -3490,9 +3479,9 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, /* Make the mask to be used against the extracted field. */ mask = build_int_cst_type (unsigned_type, -1); - mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0); + mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize)); mask = const_binop (RSHIFT_EXPR, mask, - size_int (nbitsize - lbitsize - lbitpos), 0); + size_int (nbitsize - lbitsize - lbitpos)); if (! const_p) /* If not comparing with constant, just rework the comparison @@ -3525,7 +3514,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, if (! integer_zerop (const_binop (RSHIFT_EXPR, fold_convert_loc (loc, unsigned_type, rhs), - size_int (lbitsize), 0))) + size_int (lbitsize)))) { warning (0, "comparison is always %d due to width of bit-field", code == NE_EXPR); @@ -3536,7 +3525,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, { tree tem = const_binop (RSHIFT_EXPR, fold_convert_loc (loc, signed_type, rhs), - size_int (lbitsize - 1), 0); + size_int (lbitsize - 1)); if (! integer_zerop (tem) && ! 
integer_all_onesp (tem)) { warning (0, "comparison is always %d due to width of bit-field", @@ -3565,8 +3554,8 @@ optimize_bit_field_compare (location_t loc, enum tree_code code, rhs = const_binop (BIT_AND_EXPR, const_binop (LSHIFT_EXPR, fold_convert_loc (loc, unsigned_type, rhs), - size_int (lbitpos), 0), - mask, 0); + size_int (lbitpos)), + mask); lhs = build2 (code, compare_type, build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), @@ -3652,8 +3641,8 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize, mask = build_int_cst_type (unsigned_type, -1); - mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); - mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0); + mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize)); + mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize)); /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */ if (and_mask != 0) @@ -3681,9 +3670,8 @@ all_ones_mask_p (const_tree mask, int size) tree_int_cst_equal (mask, const_binop (RSHIFT_EXPR, const_binop (LSHIFT_EXPR, tmask, - size_int (precision - size), - 0), - size_int (precision - size), 0)); + size_int (precision - size)), + size_int (precision - size))); } /* Subroutine for fold: determine if VAL is the INTEGER_CONST that @@ -4283,7 +4271,7 @@ build_range_check (location_t loc, tree type, tree exp, int in_p, low = fold_convert_loc (loc, etype, low); exp = fold_convert_loc (loc, etype, exp); - value = const_binop (MINUS_EXPR, high, low, 0); + value = const_binop (MINUS_EXPR, high, low); if (POINTER_TYPE_P (etype)) @@ -4786,7 +4774,7 @@ fold_cond_expr_with_comparison (location_t loc, tree type, OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (PLUS_EXPR, arg2, - build_int_cst (type, 1), 0), + build_int_cst (type, 1)), OEP_ONLY_CONST)) { tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, @@ -4804,7 +4792,7 @@ fold_cond_expr_with_comparison (location_t loc, 
tree type, OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (MINUS_EXPR, arg2, - build_int_cst (type, 1), 0), + build_int_cst (type, 1)), OEP_ONLY_CONST)) { tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00, @@ -4822,7 +4810,7 @@ fold_cond_expr_with_comparison (location_t loc, tree type, OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (MINUS_EXPR, arg2, - build_int_cst (type, 1), 0), + build_int_cst (type, 1)), OEP_ONLY_CONST)) { tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, @@ -4838,7 +4826,7 @@ fold_cond_expr_with_comparison (location_t loc, tree type, OEP_ONLY_CONST) && operand_equal_p (arg01, const_binop (PLUS_EXPR, arg2, - build_int_cst (type, 1), 0), + build_int_cst (type, 1)), OEP_ONLY_CONST)) { tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00, @@ -4970,8 +4958,8 @@ unextend (tree c, int p, int unsignedp, tree mask) /* We work by getting just the sign bit into the low-order bit, then into the high-order bit, then sign-extend. We then XOR that value with C. */ - temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0); - temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0); + temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1)); + temp = const_binop (BIT_AND_EXPR, temp, size_int (1)); /* We must use a signed type in order to get an arithmetic right shift. 
However, we must also avoid introducing accidental overflows, so that @@ -4982,18 +4970,16 @@ unextend (tree c, int p, int unsignedp, tree mask) if (TYPE_UNSIGNED (type)) temp = fold_convert (signed_type_for (type), temp); - temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0); - temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0); + temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1)); + temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1)); if (mask != 0) temp = const_binop (BIT_AND_EXPR, temp, - fold_convert (TREE_TYPE (c), mask), - 0); + fold_convert (TREE_TYPE (c), mask)); /* If necessary, convert the type back to match the type of C. */ if (TYPE_UNSIGNED (type)) temp = fold_convert (type, temp); - return fold_convert (type, - const_binop (BIT_XOR_EXPR, c, temp, 0)); + return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp)); } /* For an expression that has the form @@ -5330,19 +5316,18 @@ fold_truthop (location_t loc, enum tree_code code, tree truth_type, } ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask), - size_int (xll_bitpos), 0); + size_int (xll_bitpos)); rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask), - size_int (xrl_bitpos), 0); + size_int (xrl_bitpos)); if (l_const) { l_const = fold_convert_loc (loc, lntype, l_const); l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask); - l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0); + l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos)); if (! 
integer_zerop (const_binop (BIT_AND_EXPR, l_const, fold_build1_loc (loc, BIT_NOT_EXPR, - lntype, ll_mask), - 0))) + lntype, ll_mask)))) { warning (0, "comparison is always %d", wanted_code == NE_EXPR); @@ -5353,11 +5338,10 @@ fold_truthop (location_t loc, enum tree_code code, tree truth_type, { r_const = fold_convert_loc (loc, lntype, r_const); r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask); - r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0); + r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos)); if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const, fold_build1_loc (loc, BIT_NOT_EXPR, - lntype, rl_mask), - 0))) + lntype, rl_mask)))) { warning (0, "comparison is always %d", wanted_code == NE_EXPR); @@ -5398,18 +5382,18 @@ fold_truthop (location_t loc, enum tree_code code, tree truth_type, lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, rntype, lr_mask), - size_int (xlr_bitpos), 0); + size_int (xlr_bitpos)); rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, rntype, rr_mask), - size_int (xrr_bitpos), 0); + size_int (xrr_bitpos)); /* Make a mask that corresponds to both fields being compared. Do this for both items being compared. If the operands are the same size and the bits being compared are in the same position then we can do this by masking both and comparing the masked results. 
*/ - ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0); - lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0); + ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); + lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask); if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos) { lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, @@ -5448,9 +5432,9 @@ fold_truthop (location_t loc, enum tree_code code, tree truth_type, MIN (lr_bitpos, rr_bitpos), lr_unsignedp); ll_mask = const_binop (RSHIFT_EXPR, ll_mask, - size_int (MIN (xll_bitpos, xrl_bitpos)), 0); + size_int (MIN (xll_bitpos, xrl_bitpos))); lr_mask = const_binop (RSHIFT_EXPR, lr_mask, - size_int (MIN (xlr_bitpos, xrr_bitpos)), 0); + size_int (MIN (xlr_bitpos, xrr_bitpos))); /* Convert to the smaller type before masking out unwanted bits. */ type = lntype; @@ -5487,10 +5471,10 @@ fold_truthop (location_t loc, enum tree_code code, tree truth_type, common between the masks, those bits of the constants must be the same. If not, the condition is always false. Test for this to avoid generating incorrect code below. */ - result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0); + result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask); if (! integer_zerop (result) - && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0), - const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1) + && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const), + const_binop (BIT_AND_EXPR, result, r_const)) != 1) { if (wanted_code == NE_EXPR) { @@ -5511,7 +5495,7 @@ fold_truthop (location_t loc, enum tree_code code, tree truth_type, result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos, ll_unsignedp || rl_unsignedp); - ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0); + ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask); if (! 
all_ones_mask_p (ll_mask, lnbitsize)) { result = build2 (BIT_AND_EXPR, lntype, result, ll_mask); @@ -5519,7 +5503,7 @@ fold_truthop (location_t loc, enum tree_code code, tree truth_type, } result = build2 (wanted_code, truth_type, result, - const_binop (BIT_IOR_EXPR, l_const, r_const, 0)); + const_binop (BIT_IOR_EXPR, l_const, r_const)); fold_truthop_exit: SET_EXPR_LOCATION (result, loc); @@ -5702,9 +5686,9 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, /* For a constant, we can always simplify if we are a multiply or (for divide and modulus) if it is a multiple of our constant. */ if (code == MULT_EXPR - || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0))) + || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c))) return const_binop (code, fold_convert (ctype, t), - fold_convert (ctype, c), 0); + fold_convert (ctype, c)); break; CASE_CONVERT: case NON_LVALUE_EXPR: @@ -5812,7 +5796,7 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, && 0 != (t1 = fold_convert (ctype, const_binop (LSHIFT_EXPR, size_one_node, - op1, 0))) + op1))) && !TREE_OVERFLOW (t1)) return extract_muldiv (build2 (tcode == LSHIFT_EXPR ? MULT_EXPR : FLOOR_DIV_EXPR, @@ -5880,10 +5864,10 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, /* If it's a multiply or a division/modulus operation of a multiple of our constant, do the operation and verify it doesn't overflow. */ if (code == MULT_EXPR - || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0))) + || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) { op1 = const_binop (code, fold_convert (ctype, op1), - fold_convert (ctype, c), 0); + fold_convert (ctype, c)); /* We allow the constant to overflow with wrapping semantics. 
*/ if (op1 == 0 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype))) @@ -5931,7 +5915,7 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE && TYPE_IS_SIZETYPE (TREE_TYPE (t)))) && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST - && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0))) + && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) { *strict_overflow_p = true; return omit_one_operand (type, integer_zero_node, op0); @@ -5987,23 +5971,23 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR && code != MULT_EXPR))) { - if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0))) + if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c))) { if (TYPE_OVERFLOW_UNDEFINED (ctype)) *strict_overflow_p = true; return fold_build2 (tcode, ctype, fold_convert (ctype, op0), fold_convert (ctype, const_binop (TRUNC_DIV_EXPR, - op1, c, 0))); + op1, c))); } - else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0))) + else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1))) { if (TYPE_OVERFLOW_UNDEFINED (ctype)) *strict_overflow_p = true; return fold_build2 (code, ctype, fold_convert (ctype, op0), fold_convert (ctype, const_binop (TRUNC_DIV_EXPR, - c, op1, 0))); + c, op1))); } } break; @@ -9015,7 +8999,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type, && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR ? 
MINUS_EXPR : PLUS_EXPR, - arg1, TREE_OPERAND (arg0, 1), 0)) + arg1, TREE_OPERAND (arg0, 1))) && !TREE_OVERFLOW (tem)) return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); @@ -9028,7 +9012,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type, && TREE_CODE (arg0) == MINUS_EXPR && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0), - arg1, 0)) + arg1)) && !TREE_OVERFLOW (tem)) return fold_build2_loc (loc, swap_tree_comparison (code), type, TREE_OPERAND (arg0, 1), tem); @@ -9449,7 +9433,7 @@ fold_binary_loc (location_t loc, /* Make sure type and arg0 have the same saturating flag. */ gcc_assert (TYPE_SATURATING (type) == TYPE_SATURATING (TREE_TYPE (arg0))); - tem = const_binop (code, arg0, arg1, 0); + tem = const_binop (code, arg0, arg1); } else if (kind == tcc_comparison) tem = fold_relational_const (code, type, arg0, arg1); @@ -9708,7 +9692,7 @@ fold_binary_loc (location_t loc, && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST && integer_zerop (const_binop (BIT_AND_EXPR, TREE_OPERAND (arg0, 1), - TREE_OPERAND (arg1, 1), 0))) + TREE_OPERAND (arg1, 1)))) { code = BIT_IOR_EXPR; goto bit_ior; @@ -10430,7 +10414,7 @@ fold_binary_loc (location_t loc, && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST) { tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0), - arg1, 0); + arg1); if (tem) return fold_build2_loc (loc, RDIV_EXPR, type, tem, TREE_OPERAND (arg0, 1)); @@ -10799,7 +10783,7 @@ fold_binary_loc (location_t loc, && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST && integer_zerop (const_binop (BIT_AND_EXPR, TREE_OPERAND (arg0, 1), - TREE_OPERAND (arg1, 1), 0))) + TREE_OPERAND (arg1, 1)))) { code = BIT_IOR_EXPR; goto bit_ior; @@ -11255,7 +11239,7 @@ fold_binary_loc (location_t loc, { if (flag_reciprocal_math && 0 != (tem = const_binop (code, build_real (type, dconst1), - arg1, 0))) + arg1))) return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem); /* Find the reciprocal 
if optimizing and the result is exact. */ if (optimize) @@ -11292,7 +11276,7 @@ fold_binary_loc (location_t loc, && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST) { tree tem = const_binop (RDIV_EXPR, arg0, - TREE_OPERAND (arg1, 1), 0); + TREE_OPERAND (arg1, 1)); if (tem) return fold_build2_loc (loc, RDIV_EXPR, type, tem, TREE_OPERAND (arg1, 0)); @@ -11727,7 +11711,7 @@ fold_binary_loc (location_t loc, { tree tem = build_int_cst (TREE_TYPE (arg1), TYPE_PRECISION (type)); - tem = const_binop (MINUS_EXPR, tem, arg1, 0); + tem = const_binop (MINUS_EXPR, tem, arg1); return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem); } @@ -12074,7 +12058,7 @@ fold_binary_loc (location_t loc, ? MINUS_EXPR : PLUS_EXPR, fold_convert_loc (loc, TREE_TYPE (arg0), arg1), - TREE_OPERAND (arg0, 1), 0)) + TREE_OPERAND (arg0, 1))) && !TREE_OVERFLOW (tem)) return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem); @@ -12810,14 +12794,14 @@ fold_binary_loc (location_t loc, { case GT_EXPR: arg1 = const_binop (PLUS_EXPR, arg1, - build_int_cst (TREE_TYPE (arg1), 1), 0); + build_int_cst (TREE_TYPE (arg1), 1)); return fold_build2_loc (loc, EQ_EXPR, type, fold_convert_loc (loc, TREE_TYPE (arg1), arg0), arg1); case LE_EXPR: arg1 = const_binop (PLUS_EXPR, arg1, - build_int_cst (TREE_TYPE (arg1), 1), 0); + build_int_cst (TREE_TYPE (arg1), 1)); return fold_build2_loc (loc, NE_EXPR, type, fold_convert_loc (loc, TREE_TYPE (arg1), arg0), @@ -12851,13 +12835,13 @@ fold_binary_loc (location_t loc, switch (code) { case GE_EXPR: - arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); + arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node); return fold_build2_loc (loc, NE_EXPR, type, fold_convert_loc (loc, TREE_TYPE (arg1), arg0), arg1); case LT_EXPR: - arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0); + arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node); return fold_build2_loc (loc, EQ_EXPR, type, fold_convert_loc (loc, TREE_TYPE (arg1), arg0), @@ -14176,7 +14160,7 @@ 
multiple_of_p (tree type, const_tree top, const_tree bottom) && 0 != (t1 = fold_convert (type, const_binop (LSHIFT_EXPR, size_one_node, - op1, 0))) + op1))) && !TREE_OVERFLOW (t1)) return multiple_of_p (type, t1, bottom); } -- cgit v1.2.1 From d3237426929c302bcab9d683353b041667b22488 Mon Sep 17 00:00:00 2001 From: aesok Date: Mon, 28 Jun 2010 18:23:16 +0000 Subject: * double-int.h (force_fit_type_double): Remove declaration. * double-int.c (force_fit_type_double): Move to tree.c. * tree.h (force_fit_type_double): Declare. * tree.h (force_fit_type_double): Moved from double-int.c. Use double_int type for 'cst' argument. Use double_int_fits_to_tree_p and double_int_to_tree instead of fit_double_type and build_int_cst_wide. * convert.c (convert_to_pointer): Adjust call to force_fit_type_double. * tree-vrp.c (extract_range_from_assert, extract_range_from_unary_expr): Adjust call to force_fit_type_double. * fold-const.c: Update comment. (int_const_binop, fold_convert_const_int_from_int, fold_convert_const_int_from_real, fold_convert_const_int_from_fixed, extract_muldiv_1, fold_div_compare, fold_sign_changed_comparison, fold_unary_loc, fold_negate_const, fold_abs_const, fold_not_const, round_up_loc): Adjust call to force_fit_type_double. /c-family * c-common.c (shorten_compare): Adjust call to force_fit_type_double. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@161509 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 122 ++++++++++++++++++++++++------------------------------- 1 file changed, 52 insertions(+), 70 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 7e7c0a20251..e2b30f90e8d 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -27,8 +27,7 @@ along with GCC; see the file COPYING3. If not see @@ This would also make life easier when this technology is used @@ for cross-compilers. */ -/* The entry points in this file are fold, size_int_wide, size_binop - and force_fit_type_double. 
+/* The entry points in this file are fold, size_int_wide and size_binop. fold takes a tree as argument and returns a simplified tree. @@ -39,10 +38,6 @@ along with GCC; see the file COPYING3. If not see size_int takes an integer value, and creates a tree constant with type from `sizetype'. - force_fit_type_double takes a constant, an overflowable flag and a - prior overflow indicator. It forces the value to fit the type and - sets TREE_OVERFLOW. - Note: Since the folders get called on non-gimple code as well as gimple code, we need to handle GIMPLE tuples as well as their corresponding tree equivalents. */ @@ -141,7 +136,7 @@ static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree); static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree); static bool reorder_operands_p (const_tree, const_tree); static tree fold_negate_const (tree, tree); -static tree fold_not_const (tree, tree); +static tree fold_not_const (const_tree, tree); static tree fold_relational_const (enum tree_code, tree, tree, tree); static tree fold_convert_const (enum tree_code, tree, tree); @@ -1077,7 +1072,7 @@ int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notr } } else - t = force_fit_type_double (TREE_TYPE (arg1), res.low, res.high, 1, + t = force_fit_type_double (TREE_TYPE (arg1), res, 1, ((!uns || is_sizetype) && overflow) | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)); @@ -1510,8 +1505,7 @@ fold_convert_const_int_from_int (tree type, const_tree arg1) /* Given an integer constant, make new constant with new type, appropriately sign-extended or truncated. */ - t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1), - TREE_INT_CST_HIGH (arg1), + t = force_fit_type_double (type, tree_to_double_int (arg1), !POINTER_TYPE_P (TREE_TYPE (arg1)), (TREE_INT_CST_HIGH (arg1) < 0 && (TYPE_UNSIGNED (type) @@ -1591,8 +1585,7 @@ fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg if (! 
overflow) real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r); - t = force_fit_type_double (type, val.low, val.high, -1, - overflow | TREE_OVERFLOW (arg1)); + t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1)); return t; } @@ -1635,7 +1628,7 @@ fold_convert_const_int_from_fixed (tree type, const_tree arg1) /* Given a fixed-point constant, make new constant with new type, appropriately sign-extended or truncated. */ - t = force_fit_type_double (type, temp.low, temp.high, -1, + t = force_fit_type_double (type, temp, -1, (double_int_negative_p (temp) && (TYPE_UNSIGNED (type) < TYPE_UNSIGNED (TREE_TYPE (arg1)))) @@ -5947,8 +5940,7 @@ extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type, && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1), fold_convert (ctype, c), 1)) - && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1), - TREE_INT_CST_HIGH (t1), + && 0 != (t1 = force_fit_type_double (ctype, tree_to_double_int (t1), (TYPE_UNSIGNED (ctype) && tcode != MULT_EXPR) ? 
-1 : 1, TREE_OVERFLOW (t1))) @@ -6352,8 +6344,7 @@ fold_div_compare (location_t loc, tree prod, tmp, hi, lo; tree arg00 = TREE_OPERAND (arg0, 0); tree arg01 = TREE_OPERAND (arg0, 1); - unsigned HOST_WIDE_INT lpart; - HOST_WIDE_INT hpart; + double_int val; bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0)); bool neg_overflow; int overflow; @@ -6364,9 +6355,8 @@ fold_div_compare (location_t loc, TREE_INT_CST_HIGH (arg01), TREE_INT_CST_LOW (arg1), TREE_INT_CST_HIGH (arg1), - &lpart, &hpart, unsigned_p); - prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart, - -1, overflow); + &val.low, &val.high, unsigned_p); + prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow); neg_overflow = false; if (unsigned_p) @@ -6380,8 +6370,8 @@ fold_div_compare (location_t loc, TREE_INT_CST_HIGH (prod), TREE_INT_CST_LOW (tmp), TREE_INT_CST_HIGH (tmp), - &lpart, &hpart, unsigned_p); - hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart, + &val.low, &val.high, unsigned_p); + hi = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow | TREE_OVERFLOW (prod)); } else if (tree_int_cst_sgn (arg01) >= 0) @@ -6834,9 +6824,8 @@ fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type, return NULL_TREE; if (TREE_CODE (arg1) == INTEGER_CST) - arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1), - TREE_INT_CST_HIGH (arg1), 0, - TREE_OVERFLOW (arg1)); + arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1), + 0, TREE_OVERFLOW (arg1)); else arg1 = fold_convert_loc (loc, inner_type, arg1); @@ -7941,9 +7930,8 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) } if (change) { - tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1), - TREE_INT_CST_HIGH (and1), 0, - TREE_OVERFLOW (and1)); + tem = force_fit_type_double (type, tree_to_double_int (and1), + 0, TREE_OVERFLOW (and1)); return fold_build2_loc (loc, BIT_AND_EXPR, type, fold_convert_loc (loc, type, and0), tem); } @@ -15139,12 +15127,10 
@@ fold_negate_const (tree arg0, tree type) { case INTEGER_CST: { - unsigned HOST_WIDE_INT low; - HOST_WIDE_INT high; - int overflow = neg_double (TREE_INT_CST_LOW (arg0), - TREE_INT_CST_HIGH (arg0), - &low, &high); - t = force_fit_type_double (type, low, high, 1, + double_int val = tree_to_double_int (arg0); + int overflow = neg_double (val.low, val.high, &val.low, &val.high); + + t = force_fit_type_double (type, val, 1, (overflow | TREE_OVERFLOW (arg0)) && !TYPE_UNSIGNED (type)); break; @@ -15187,25 +15173,26 @@ fold_abs_const (tree arg0, tree type) switch (TREE_CODE (arg0)) { case INTEGER_CST: - /* If the value is unsigned, then the absolute value is - the same as the ordinary value. */ - if (TYPE_UNSIGNED (type)) - t = arg0; - /* Similarly, if the value is non-negative. */ - else if (INT_CST_LT (integer_minus_one_node, arg0)) - t = arg0; - /* If the value is negative, then the absolute value is - its negation. */ - else - { - unsigned HOST_WIDE_INT low; - HOST_WIDE_INT high; - int overflow = neg_double (TREE_INT_CST_LOW (arg0), - TREE_INT_CST_HIGH (arg0), - &low, &high); - t = force_fit_type_double (type, low, high, -1, - overflow | TREE_OVERFLOW (arg0)); - } + { + double_int val = tree_to_double_int (arg0); + + /* If the value is unsigned or non-negative, then the absolute value + is the same as the ordinary value. */ + if (TYPE_UNSIGNED (type) + || !double_int_negative_p (val)) + t = arg0; + + /* If the value is negative, then the absolute value is + its negation. */ + else + { + int overflow; + + overflow = neg_double (val.low, val.high, &val.low, &val.high); + t = force_fit_type_double (type, val, -1, + overflow | TREE_OVERFLOW (arg0)); + } + } break; case REAL_CST: @@ -15226,17 +15213,14 @@ fold_abs_const (tree arg0, tree type) constant. TYPE is the type of the result. 
*/ static tree -fold_not_const (tree arg0, tree type) +fold_not_const (const_tree arg0, tree type) { - tree t = NULL_TREE; + double_int val; gcc_assert (TREE_CODE (arg0) == INTEGER_CST); - t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0), - ~TREE_INT_CST_HIGH (arg0), 0, - TREE_OVERFLOW (arg0)); - - return t; + val = double_int_not (tree_to_double_int (arg0)); + return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0)); } /* Given CODE, a relational operator, the target type, TYPE and two @@ -15634,25 +15618,23 @@ round_up_loc (location_t loc, tree value, int divisor) { if (TREE_CODE (value) == INTEGER_CST) { - unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value); - unsigned HOST_WIDE_INT high; + double_int val = tree_to_double_int (value); bool overflow_p; - if ((low & (divisor - 1)) == 0) + if ((val.low & (divisor - 1)) == 0) return value; overflow_p = TREE_OVERFLOW (value); - high = TREE_INT_CST_HIGH (value); - low &= ~(divisor - 1); - low += divisor; - if (low == 0) + val.low &= ~(divisor - 1); + val.low += divisor; + if (val.low == 0) { - high++; - if (high == 0) + val.high++; + if (val.high == 0) overflow_p = true; } - return force_fit_type_double (TREE_TYPE (value), low, high, + return force_fit_type_double (TREE_TYPE (value), val, -1, overflow_p); } else -- cgit v1.2.1 From 182cf5a9a415f31df0f9a10e46faed1221484a35 Mon Sep 17 00:00:00 2001 From: rguenth Date: Thu, 1 Jul 2010 08:49:19 +0000 Subject: 2010-07-01 Richard Guenther PR middle-end/42834 PR middle-end/44468 * doc/gimple.texi (is_gimple_mem_ref_addr): Document. * doc/generic.texi (References to storage): Document MEM_REF. * tree-pretty-print.c (dump_generic_node): Handle MEM_REF. (print_call_name): Likewise. * tree.c (recompute_tree_invariant_for_addr_expr): Handle MEM_REF. (build_simple_mem_ref_loc): New function. (mem_ref_offset): Likewise. * tree.h (build_simple_mem_ref_loc): Declare. (build_simple_mem_ref): Define. (mem_ref_offset): Declare. * fold-const.c: Include tree-flow.h. 
(operand_equal_p): Handle MEM_REF. (build_fold_addr_expr_with_type_loc): Likewise. (fold_comparison): Likewise. (fold_unary_loc): Fold VIEW_CONVERT_EXPR > to MEM_REF . (fold_binary_loc): Fold MEM[&MEM[p, CST1], CST2] to MEM[p, CST1 + CST2], fold MEM[&a.b, CST2] to MEM[&a, offsetof (a, b) + CST2]. * tree-ssa-alias.c (ptr_deref_may_alias_decl_p): Handle MEM_REF. (ptr_deref_may_alias_ref_p_1): Likewise. (ao_ref_base_alias_set): Properly differentiate base object for offset and TBAA. (ao_ref_init_from_ptr_and_size): Use MEM_REF. (indirect_ref_may_alias_decl_p): Handle MEM_REFs properly. (indirect_refs_may_alias_p): Likewise. (refs_may_alias_p_1): Likewise. Remove pointer SSA name def chasing code. (ref_maybe_used_by_call_p_1): Handle MEM_REF. (call_may_clobber_ref_p_1): Likewise. * dwarf2out.c (loc_list_from_tree): Handle MEM_REF. * expr.c (expand_assignment): Handle MEM_REF. (store_expr): Handle MEM_REFs from STRING_CSTs. (store_field): If expanding a MEM_REF of a non-addressable decl use bitfield operations. (get_inner_reference): Handle MEM_REF. (expand_expr_addr_expr_1): Likewise. (expand_expr_real_1): Likewise. * tree-eh.c (tree_could_trap_p): Handle MEM_REF. * alias.c (ao_ref_from_mem): Handle MEM_REF. (get_alias_set): Likewise. Properly handle VIEW_CONVERT_EXPRs. * tree-data-ref.c (dr_analyze_innermost): Handle MEM_REF. (dr_analyze_indices): Likewise. (dr_analyze_alias): Likewise. (object_address_invariant_in_loop_p): Likewise. * gimplify.c (mark_addressable): Handle MEM_REF. (gimplify_cond_expr): Build MEM_REFs. (gimplify_modify_expr_to_memcpy): Likewise. (gimplify_init_ctor_preeval_1): Handle MEM_REF. (gimple_fold_indirect_ref): Adjust. (gimplify_expr): Handle MEM_REF. Gimplify INDIRECT_REF to MEM_REF. * tree.def (MEM_REF): New tree code. * tree-dfa.c: Include toplev.h. (get_ref_base_and_extent): Handle MEM_REF. (get_addr_base_and_unit_offset): New function. * emit-rtl.c (set_mem_attributes_minus_bitpos): Handle MEM_REF. 
* gimple-fold.c (may_propagate_address_into_dereference): Handle MEM_REF. (maybe_fold_offset_to_array_ref): Allow possibly out-of bounds accesses if the array has just one dimension. Remove always true parameter. Do not require type compatibility here. (maybe_fold_offset_to_component_ref): Remove. (maybe_fold_stmt_indirect): Remove. (maybe_fold_reference): Remove INDIRECT_REF handling. Fold back to non-MEM_REF. (maybe_fold_offset_to_address): Simplify. Deal with type mismatches here. (maybe_fold_reference): Likewise. (maybe_fold_stmt_addition): Likewise. Also handle &ARRAY + I in addition to &ARRAY[0] + I. (fold_gimple_assign): Handle ADDR_EXPR of MEM_REFs. (gimple_get_relevant_ref_binfo): Handle MEM_REF. * cfgexpand.c (expand_debug_expr): Handle MEM_REF. * tree-ssa.c (useless_type_conversion_p): Make most pointer conversions useless. (warn_uninitialized_var): Handle MEM_REF. (maybe_rewrite_mem_ref_base): New function. (execute_update_addresses_taken): Implement re-writing of MEM_REFs to SSA form. * tree-inline.c (remap_gimple_op_r): Handle MEM_REF, remove INDIRECT_REF handling. (copy_tree_body_r): Handle MEM_REF. * gimple.c (is_gimple_addressable): Adjust. (is_gimple_address): Likewise. (is_gimple_invariant_address): ADDR_EXPRs of MEM_REFs with invariant base are invariant. (is_gimple_min_lval): Adjust. (is_gimple_mem_ref_addr): New function. (get_base_address): Handle MEM_REF. (count_ptr_derefs): Likewise. (get_base_loadstore): Likewise. * gimple.h (is_gimple_mem_ref_addr): Declare. (gimple_call_fndecl): Handle invariant MEM_REF addresses. * tree-cfg.c (verify_address): New function, split out from ... (verify_expr): ... here. Use for verifying ADDR_EXPRs and the address operand of MEM_REFs. Verify MEM_REFs. Reject INDIRECT_REFs. (verify_types_in_gimple_min_lval): Handle MEM_REF. Disallow INDIRECT_REF. Allow conversions. (verify_types_in_gimple_reference): Verify VIEW_CONVERT_EXPR of a register does not change its size. 
(verify_types_in_gimple_reference): Verify MEM_REF. (verify_gimple_assign_single): Disallow INDIRECT_REF. Handle MEM_REF. * tree-ssa-operands.c (opf_non_addressable, opf_not_non_addressable): New. (mark_address_taken): Handle MEM_REF. (get_indirect_ref_operands): Pass through opf_not_non_addressable. (get_asm_expr_operands): Pass opf_not_non_addressable. (get_expr_operands): Handle opf_[not_]non_addressable. Handle MEM_REF. Remove INDIRECT_REF handling. * tree-vrp.c: (check_array_ref): Handle MEM_REF. (search_for_addr_array): Likewise. (check_array_bounds): Likewise. (vrp_stmt_computes_nonzero): Adjust for MEM_REF. * tree-ssa-loop-im.c (for_each_index): Handle MEM_REF. (ref_always_accessed_p): Likewise. (gen_lsm_tmp_name): Likewise. Handle ADDR_EXPR. * tree-complex.c (extract_component): Do not handle INDIRECT_REF. Handle MEM_REF. * cgraphbuild.c (mark_load): Properly check for NULL result from get_base_address. (mark_store): Likewise. * tree-ssa-loop-niter.c (array_at_struct_end_p): Handle MEM_REF. * tree-loop-distribution.c (generate_builtin): Exchange INDIRECT_REF handling for MEM_REF. * tree-scalar-evolution.c (follow_ssa_edge_expr): Handle &MEM[ptr + CST] similar to POINTER_PLUS_EXPR. * builtins.c (stabilize_va_list_loc): Use the function ABI valist type if we couldn't canonicalize the argument type. Always dereference with the canonical va-list type. (maybe_emit_free_warning): Handle MEM_REF. (fold_builtin_memory_op): Simplify and handle MEM_REFs in folding memmove to memcpy. * builtins.c (fold_builtin_memory_op): Use ref-all types for all memcpy foldings. * omp-low.c (build_receiver_ref): Adjust for MEM_REF. (build_outer_var_ref): Likewise. (scan_omp_1_op): Likewise. (lower_rec_input_clauses): Likewise. (lower_lastprivate_clauses): Likewise. (lower_reduction_clauses): Likewise. (lower_copyprivate_clauses): Likewise. (expand_omp_atomic_pipeline): Likewise. (expand_omp_atomic_mutex): Likewise. (create_task_copyfn): Likewise. 
* tree-ssa-sccvn.c (copy_reference_ops_from_ref): Handle MEM_REF. Remove old union trick. Initialize constant offsets. (ao_ref_init_from_vn_reference): Likewise. Do not handle INDIRECT_REF. Init base_alias_set properly. (vn_reference_lookup_3): Replace INDIRECT_REF handling with MEM_REF. (vn_reference_fold_indirect): Adjust for MEM_REFs. (valueize_refs): Fold MEM_REFs. Re-evaluate constant offset for ARRAY_REFs. (may_insert): Remove. (visit_reference_op_load): Do not test may_insert. (run_scc_vn): Remove parameter, do not fiddle with may_insert. * tree-ssa-sccvn.h (struct vn_reference_op_struct): Add a field to store the constant offset this op applies. (run_scc_vn): Adjust prototype. * cgraphunit.c (thunk_adjust): Adjust for MEM_REF. * tree-ssa-ccp.c (ccp_fold): Replace INDIRECT_REF folding with MEM_REF. Propagate &foo + CST as &MEM[&foo, CST]. Do not bother about volatile qualifiers on pointers. (fold_const_aggregate_ref): Handle MEM_REF, do not handle INDIRECT_REF. * tree-ssa-loop-ivopts.c (determine_base_object): Adjust for MEM_REF. (strip_offset_1): Likewise. (find_interesting_uses_address): Replace INDIRECT_REF handling with MEM_REF handling. (get_computation_cost_at): Likewise. * ipa-pure-const.c (check_op): Handle MEM_REF. * tree-stdarg.c (check_all_va_list_escapes): Adjust for MEM_REF. * tree-ssa-sink.c (is_hidden_global_store): Handle MEM_REF and constants. * ipa-inline.c (likely_eliminated_by_inlining_p): Handle MEM_REF. * tree-parloops.c (take_address_of): Adjust for MEM_REF. (eliminate_local_variables_1): Likewise. (create_call_for_reduction_1): Likewise. (create_loads_for_reductions): Likewise. (create_loads_and_stores_for_name): Likewise. * matrix-reorg.c (may_flatten_matrices_1): Sanitize. (ssa_accessed_in_tree): Handle MEM_REF. (ssa_accessed_in_assign_rhs): Likewise. (update_type_size): Likewise. (analyze_accesses_for_call_stmt): Likewise. (analyze_accesses_for_assign_stmt): Likewise. (transform_access_sites): Likewise. 
(transform_allocation_sites): Likewise. * tree-affine.c (tree_to_aff_combination): Handle MEM_REF. * tree-vect-data-refs.c (vect_create_addr_base_for_vector_ref): Do not handle INDIRECT_REF. * tree-ssa-phiopt.c (add_or_mark_expr): Handle MEM_REF. (cond_store_replacement): Likewise. * tree-ssa-pre.c (create_component_ref_by_pieces_1): Handle MEM_REF, do not handle INDIRECT_REFs. (insert_into_preds_of_block): Properly initialize avail. (phi_translate_1): Fold MEM_REFs. Re-evaluate constant offset for ARRAY_REFs. Properly handle reference lookups that require a bit re-interpretation. (can_PRE_operation): Do not handle INDIRECT_REF. Handle MEM_REF. * tree-sra.c (build_access_from_expr_1): Handle MEM_REF. (build_ref_for_offset_1): Remove. (build_ref_for_offset): Build MEM_REFs. (gate_intra_sra): Disable for now. (sra_ipa_modify_expr): Handle MEM_REF. (ipa_early_sra_gate): Disable for now. * tree-sra.c (create_access): Swap INDIRECT_REF handling for MEM_REF handling. (disqualify_base_of_expr): Likewise. (ptr_parm_has_direct_uses): Swap INDIRECT_REF handling for MEM_REF handling. (sra_ipa_modify_expr): Remove INDIRECT_REF handling. Use mem_ref_offset. Remove bogus folding. (build_access_from_expr_1): Properly handle MEM_REF for non IPA-SRA. (make_fancy_name_1): Add support for MEM_REF. * tree-predcom.c (ref_at_iteration): Handle MEM_REFs. * tree-mudflap.c (mf_xform_derefs_1): Adjust for MEM_REF. * ipa-prop.c (compute_complex_assign_jump_func): Handle MEM_REF. (compute_complex_ancestor_jump_func): Likewise. (ipa_analyze_virtual_call_uses): Likewise. * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Replace INDIRECT_REF folding with more generalized MEM_REF folding. (tree_ssa_forward_propagate_single_use_vars): Adjust accordingly. (forward_propagate_addr_into_variable_array_index): Also handle &ARRAY + I in addition to &ARRAY[0] + I. * tree-ssa-dce.c (ref_may_be_aliased): Handle MEM_REF. 
* tree-ssa-ter.c (find_replaceable_in_bb): Avoid TER if that creates assignments with overlap. * tree-nested.c (get_static_chain): Adjust for MEM_REF. (get_frame_field): Likewise. (get_nonlocal_debug_decl): Likewise. (convert_nonlocal_reference_op): Likewise. (struct nesting_info): Add mem_refs pointer-set. (create_nesting_tree): Allocate it. (convert_local_reference_op): Insert to be folded mem-refs. (fold_mem_refs): New function. (finalize_nesting_tree_1): Perform deferred folding of mem-refs. (free_nesting_tree): Free the pointer-set. * tree-vect-stmts.c (vectorizable_store): Adjust for MEM_REF. (vectorizable_load): Likewise. * tree-ssa-phiprop.c (phiprop_insert_phi): Adjust for MEM_REF. (propagate_with_phi): Likewise. * tree-object-size.c (addr_object_size): Handle MEM_REFs instead of INDIRECT_REFs. (compute_object_offset): Handle MEM_REF. (plus_stmt_object_size): Handle MEM_REF. (collect_object_sizes_for): Dispatch to plus_stmt_object_size for &MEM_REF. * tree-flow.h (get_addr_base_and_unit_offset): Declare. (symbol_marked_for_renaming): Likewise. * Makefile.in (tree-dfa.o): Add $(TOPLEV_H). (fold-const.o): Add $(TREE_FLOW_H). * tree-ssa-structalias.c (get_constraint_for_1): Handle MEM_REF. (find_func_clobbers): Likewise. * ipa-struct-reorg.c (decompose_indirect_ref_acc): Handle MEM_REF. (decompose_access): Likewise. (replace_field_acc): Likewise. (replace_field_access_stmt): Likewise. (insert_new_var_in_stmt): Likewise. (get_stmt_accesses): Likewise. (reorg_structs_drive): Disable. * config/i386/i386.c (ix86_va_start): Adjust for MEM_REF. (ix86_canonical_va_list_type): Likewise. cp/ * cp-gimplify.c (cp_gimplify_expr): Open-code the rhs predicate we are looking for, allow non-gimplified INDIRECT_REFs. testsuite/ * gcc.c-torture/execute/20100316-1.c: New testcase. * gcc.c-torture/execute/pr44468.c: Likewise. * gcc.c-torture/compile/20100609-1.c: Likewise. * gcc.dg/volatile2.c: Adjust. * gcc.dg/plugin/selfassign.c: Likewise. * gcc.dg/pr36902.c: Likewise. 
* gcc.dg/tree-ssa/foldaddr-2.c: Remove. * gcc.dg/tree-ssa/foldaddr-3.c: Likewise. * gcc.dg/tree-ssa/forwprop-8.c: Adjust. * gcc.dg/tree-ssa/pr17141-1.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-13.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-14.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-21.c: Likewise. * gcc.dg/tree-ssa/pta-ptrarith-1.c: Likewise. * gcc.dg/tree-ssa/20030807-7.c: Likewise. * gcc.dg/tree-ssa/forwprop-10.c: Likewise. * gcc.dg/tree-ssa/ssa-fre-1.c: Likewise. * gcc.dg/tree-ssa/pta-ptrarith-2.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-23.c: Likewise. * gcc.dg/tree-ssa/forwprop-1.c: Likewise. * gcc.dg/tree-ssa/forwprop-2.c: Likewise. * gcc.dg/tree-ssa/struct-aliasing-1.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-25.c: Likewise. * gcc.dg/tree-ssa/ssa-pre-26.c: Likewise. * gcc.dg/tree-ssa/struct-aliasing-2.c: Likewise. * gcc.dg/tree-ssa/ssa-ccp-26.c: Likewise. * gcc.dg/tree-ssa/ssa-sccvn-4.c: Likewise. * gcc.dg/tree-ssa/ssa-pre-7.c: Likewise. * gcc.dg/tree-ssa/forwprop-5.c: Likewise. * gcc.dg/struct/w_prof_two_strs.c: XFAIL. * gcc.dg/struct/wo_prof_escape_arg_to_local.c: Likewise. * gcc.dg/struct/wo_prof_global_var.c: Likewise. * gcc.dg/struct/wo_prof_malloc_size_var.c: Likewise. * gcc.dg/struct/w_prof_local_array.c: Likewise. * gcc.dg/struct/w_prof_single_str_global.c: Likewise. * gcc.dg/struct/wo_prof_escape_str_init.c: Likewise. * gcc.dg/struct/wo_prof_array_through_pointer.c: Likewise. * gcc.dg/struct/w_prof_global_array.c: Likewise. * gcc.dg/struct/wo_prof_array_field.c: Likewise. * gcc.dg/struct/wo_prof_single_str_local.c: Likewise. * gcc.dg/struct/w_prof_local_var.c: Likewise. * gcc.dg/struct/wo_prof_two_strs.c: Likewise. * gcc.dg/struct/wo_prof_empty_str.c: Likewise. * gcc.dg/struct/wo_prof_local_array.c: Likewise. * gcc.dg/struct/w_prof_global_var.c: Likewise. * gcc.dg/struct/wo_prof_single_str_global.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_value.c: Likewise. * gcc.dg/struct/wo_prof_global_array.c: Likewise. * gcc.dg/struct/wo_prof_escape_return.c: Likewise. 
* gcc.dg/struct/wo_prof_escape_substr_array.c: Likewise. * gcc.dg/struct/wo_prof_double_malloc.c: Likewise. * gcc.dg/struct/w_ratio_cold_str.c: Likewise. * gcc.dg/struct/wo_prof_escape_substr_pointer.c: Likewise. * gcc.dg/struct/wo_prof_local_var.c: Likewise. * gcc.dg/tree-prof/stringop-1.c: Adjust. * g++.dg/tree-ssa/pr31146.C: Likewise. * g++.dg/tree-ssa/copyprop-1.C: Likewise. * g++.dg/tree-ssa/pr33604.C: Likewise. * g++.dg/plugin/selfassign.c: Likewise. * gfortran.dg/array_memcpy_3.f90: Likewise. * gfortran.dg/array_memcpy_4.f90: Likewise. * c-c++-common/torture/pr42834.c: New testcase. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@161655 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 58 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index e2b30f90e8d..1e3bae65405 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -60,6 +60,7 @@ along with GCC; see the file COPYING3. If not see #include "langhooks.h" #include "md5.h" #include "gimple.h" +#include "tree-flow.h" /* Nonzero if we are folding constants inside an initializer; zero otherwise. */ @@ -2591,6 +2592,17 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) case IMAGPART_EXPR: return OP_SAME (0); + case MEM_REF: + /* Require equal access sizes. We can have incomplete types + for array references of variable-sized arrays from the + Fortran frontent though. */ + return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1)) + || (TYPE_SIZE (TREE_TYPE (arg0)) + && TYPE_SIZE (TREE_TYPE (arg1)) + && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)), + TYPE_SIZE (TREE_TYPE (arg1)), flags))) + && OP_SAME (0) && OP_SAME (1)); + case ARRAY_REF: case ARRAY_RANGE_REF: /* Operands 2 and 3 may be null. 
@@ -7596,6 +7608,9 @@ build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype) SET_EXPR_LOCATION (t, loc); } } + else if (TREE_CODE (t) == MEM_REF + && integer_zerop (TREE_OPERAND (t, 1))) + return TREE_OPERAND (t, 0); else if (TREE_CODE (t) == VIEW_CONVERT_EXPR) { t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0)); @@ -8014,6 +8029,9 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0) if (TREE_CODE (op0) == VIEW_CONVERT_EXPR) return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0)); + if (TREE_CODE (op0) == MEM_REF) + return fold_build2_loc (loc, MEM_REF, type, + TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1)); /* For integral conversions with the same precision or pointer conversions use a NOP_EXPR instead. */ @@ -8665,6 +8683,11 @@ fold_comparison (location_t loc, enum tree_code code, tree type, else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR) { base0 = TREE_OPERAND (arg0, 0); + if (TREE_CODE (base0) == ADDR_EXPR) + { + base0 = TREE_OPERAND (base0, 0); + indirect_base0 = true; + } offset0 = TREE_OPERAND (arg0, 1); } @@ -8682,6 +8705,11 @@ fold_comparison (location_t loc, enum tree_code code, tree type, else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR) { base1 = TREE_OPERAND (arg1, 0); + if (TREE_CODE (base1) == ADDR_EXPR) + { + base1 = TREE_OPERAND (base1, 0); + indirect_base1 = true; + } offset1 = TREE_OPERAND (arg1, 1); } @@ -9524,6 +9552,36 @@ fold_binary_loc (location_t loc, switch (code) { + case MEM_REF: + /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */ + if (TREE_CODE (arg0) == ADDR_EXPR + && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF) + { + tree iref = TREE_OPERAND (arg0, 0); + return fold_build2 (MEM_REF, type, + TREE_OPERAND (iref, 0), + int_const_binop (PLUS_EXPR, arg1, + TREE_OPERAND (iref, 1), 0)); + } + + /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. 
*/ + if (TREE_CODE (arg0) == ADDR_EXPR + && handled_component_p (TREE_OPERAND (arg0, 0))) + { + tree base; + HOST_WIDE_INT coffset; + base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0), + &coffset); + if (!base) + return NULL_TREE; + return fold_build2 (MEM_REF, type, + build_fold_addr_expr (base), + int_const_binop (PLUS_EXPR, arg1, + size_int (coffset), 0)); + } + + return NULL_TREE; + case POINTER_PLUS_EXPR: /* 0 +p index -> (type)index */ if (integer_zerop (arg0)) -- cgit v1.2.1 From 86638c2ef3b5ed40e2c8f19e5ce0cdbf86593413 Mon Sep 17 00:00:00 2001 From: rguenth Date: Mon, 5 Jul 2010 12:25:20 +0000 Subject: 2010-07-05 Richard Guenther * tree-ssa-loop-im.c (for_each_index): Do not handle ALIGN_INDIRECT_REF. (gen_lsm_tmp_name): Likewise. * tree-dump.c (dequeue_and_dump): Likewise. * tree-pretty-print.c (dump_generic_node): Likewise. (op_code_prio): Likewise. (op_symbol_code): Likewise. * tree.c (staticp): Likewise. (build1_stat): Likewise. * tree.h (INDIRECT_REF_P): Likewise. * fold-const.c (maybe_lvalue_p): Likewise. (operand_equal_p): Likewise. * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise. (ao_ref_init_from_vn_reference): Likewise. * tree-ssa-loop-ivopts.c (idx_find_step): Likewise. (find_interesting_uses_address): Likewise. * dwarf2out.c (loc_list_from_tree): Likewise. * gimplify.c (gimplify_expr): Likewise. * tree-eh.c (tree_could_trap_p): Likewise. * emit-rtl.c (set_mem_attributes_minus_bitpos): Likewise. * cfgexpand.c (expand_debug_expr): Likewise. * tree-ssa-pre.c (create_component_ref_by_pieces_1): Likewise. * tree-ssa-loop-prefetch.c (idx_analyze_ref): Likewise. * tree-cfg.c (verify_types_in_gimple_min_lval): Likewise. * config/rs6000/rs6000 (rs6000_check_sdmode): Likewise. * tree-ssa-operands.c (get_expr_operands): Likewise. * expr.c (safe_from_p): Likewise. (expand_expr_real_1): Likewise. TER BIT_AND_EXPRs into MEM_REFs. 
* tree-vect-data-refs.c (vect_setup_realignment): Build BIT_AND_EXPR and MEM_REF instead of ALIGN_INDIRECT_REF. * tree-vect-stmts.c (vectorizable_load): Likewise. * tree.def (ALIGN_INDIRECT_REF): Remove. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@161830 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 1e3bae65405..8be8f3ee4a7 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -2041,8 +2041,8 @@ maybe_lvalue_p (const_tree x) case SSA_NAME: case COMPONENT_REF: + case MEM_REF: case INDIRECT_REF: - case ALIGN_INDIRECT_REF: case MISALIGNED_INDIRECT_REF: case ARRAY_REF: case ARRAY_RANGE_REF: @@ -2586,7 +2586,6 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) switch (TREE_CODE (arg0)) { case INDIRECT_REF: - case ALIGN_INDIRECT_REF: case MISALIGNED_INDIRECT_REF: case REALPART_EXPR: case IMAGPART_EXPR: @@ -7596,7 +7595,6 @@ build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype) if (TREE_CODE (t) == WITH_SIZE_EXPR) t = TREE_OPERAND (t, 0); - /* Note: doesn't apply to ALIGN_INDIRECT_REF */ if (TREE_CODE (t) == INDIRECT_REF || TREE_CODE (t) == MISALIGNED_INDIRECT_REF) { -- cgit v1.2.1 From 0b205f4ca112a643f4f1b9c9886648b569e0b380 Mon Sep 17 00:00:00 2001 From: manu Date: Thu, 8 Jul 2010 04:22:54 +0000 Subject: =?UTF-8?q?2010-07-08=20=20Manuel=20L=C3=B3pez-Ib=C3=A1=C3=B1ez=20?= =?UTF-8?q?=20?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * toplev.h: Do not include diagnostic-core.h. Include diagnostic-core.h in every file that includes toplev.h. * c-tree.h: Do not include toplev.h. * pretty-print.h: Update comment. * Makefile.in: Update dependencies. * alias.c: Include diagnostic-core.h in every file that includes toplev.h. * attribs.c: Likewise. * auto-inc-dec.c: Likewise. * bb-reorder.c: Likewise. * bt-load.c: Likewise. 
* caller-save.c: Likewise. * calls.c: Likewise. * cfg.c: Likewise. * cfganal.c: Likewise. * cfgbuild.c: Likewise. * cfgcleanup.c: Likewise. * cfghooks.c: Likewise. * cfgloop.c: Likewise. * combine.c: Likewise. * config/alpha/alpha.c: Likewise. * config/arc/arc.c: Likewise. * config/arm/arm.c: Likewise. * config/arm/pe.c: Likewise. * config/avr/avr.c: Likewise. * config/bfin/bfin.c: Likewise. * config/cris/cris.c: Likewise. * config/crx/crx.c: Likewise. * config/darwin-c.c: Likewise. * config/darwin.c: Likewise. * config/fr30/fr30.c: Likewise. * config/frv/frv.c: Likewise. * config/h8300/h8300.c: Likewise. * config/host-darwin.c: Likewise. * config/i386/i386.c: Likewise. * config/i386/netware.c: Likewise. * config/i386/nwld.c: Likewise. * config/i386/winnt-cxx.c: Likewise. * config/i386/winnt-stubs.c: Likewise. * config/i386/winnt.c: Likewise. * config/ia64/ia64-c.c: Likewise. * config/ia64/ia64.c: Likewise. * config/iq2000/iq2000.c: Likewise. * config/lm32/lm32.c: Likewise. * config/m32c/m32c-pragma.c: Likewise. * config/m32c/m32c.c: Likewise. * config/m32r/m32r.c: Likewise. * config/m68hc11/m68hc11.c: Likewise. * config/m68k/m68k.c: Likewise. * config/mcore/mcore.c: Likewise. * config/mep/mep-pragma.c: Likewise. * config/mep/mep.c: Likewise. * config/mmix/mmix.c: Likewise. * config/mn10300/mn10300.c: Likewise. * config/moxie/moxie.c: Likewise. * config/pa/pa.c: Likewise. * config/pdp11/pdp11.c: Likewise. * config/picochip/picochip.c: Likewise. * config/rs6000/rs6000-c.c: Likewise. * config/rs6000/rs6000.c: Likewise. * config/rx/rx.c: Likewise. * config/s390/s390.c: Likewise. * config/score/score.c: Likewise. * config/score/score3.c: Likewise. * config/score/score7.c: Likewise. * config/sh/sh.c: Likewise. * config/sh/symbian-base.c: Likewise. * config/sh/symbian-c.c: Likewise. * config/sh/symbian-cxx.c: Likewise. * config/sol2-c.c: Likewise. * config/sol2.c: Likewise. * config/sparc/sparc.c: Likewise. * config/spu/spu.c: Likewise. 
* config/stormy16/stormy16.c: Likewise. * config/v850/v850-c.c: Likewise. * config/v850/v850.c: Likewise. * config/vax/vax.c: Likewise. * config/vxworks.c: Likewise. * config/xtensa/xtensa.c: Likewise. * convert.c: Likewise. * cse.c: Likewise. * cselib.c: Likewise. * dbgcnt.c: Likewise. * dbxout.c: Likewise. * ddg.c: Likewise. * dominance.c: Likewise. * emit-rtl.c: Likewise. * explow.c: Likewise. * expmed.c: Likewise. * fixed-value.c: Likewise. * fold-const.c: Likewise. * fwprop.c: Likewise. * gcse.c: Likewise. * ggc-common.c: Likewise. * ggc-page.c: Likewise. * ggc-zone.c: Likewise. * gimple-low.c: Likewise. * gimplify.c: Likewise. * graph.c: Likewise. * haifa-sched.c: Likewise. * ifcvt.c: Likewise. * implicit-zee.c: Likewise. * integrate.c: Likewise. * ira-build.c: Likewise. * ira-color.c: Likewise. * ira-conflicts.c: Likewise. * ira-costs.c: Likewise. * ira-lives.c: Likewise. * ira.c: Likewise. * lists.c: Likewise. * loop-doloop.c: Likewise. * loop-iv.c: Likewise. * lto-opts.c: Likewise. * lto-symtab.c: Likewise. * main.c: Likewise. * modulo-sched.c: Likewise. * optabs.c: Likewise. * params.c: Likewise. * plugin.c: Likewise. * postreload-gcse.c: Likewise. * postreload.c: Likewise. * predict.c: Likewise. * profile.c: Likewise. * real.c: Likewise. * regcprop.c: Likewise. * reginfo.c: Likewise. * regmove.c: Likewise. * reorg.c: Likewise. * resource.c: Likewise. * rtl.c: Likewise. * rtlanal.c: Likewise. * sched-deps.c: Likewise. * sched-ebb.c: Likewise. * sched-rgn.c: Likewise. * sdbout.c: Likewise. * sel-sched-dump.c: Likewise. * sel-sched-ir.c: Likewise. * simplify-rtx.c: Likewise. * stmt.c: Likewise. * stor-layout.c: Likewise. * store-motion.c: Likewise. * targhooks.c: Likewise. * tree-cfg.c: Likewise. * tree-cfgcleanup.c: Likewise. * tree-dump.c: Likewise. * tree-eh.c: Likewise. * tree-inline.c: Likewise. * tree-nomudflap.c: Likewise. * tree-object-size.c: Likewise. * tree-optimize.c: Likewise. * tree-outof-ssa.c: Likewise. * tree-phinodes.c: Likewise. 
* tree-profile.c: Likewise. * tree-ssa-ccp.c: Likewise. * tree-ssa-coalesce.c: Likewise. * tree-ssa-live.c: Likewise. * tree-ssa-loop-niter.c: Likewise. * tree-ssa-loop-prefetch.c: Likewise. * tree-ssa-loop.c: Likewise. * tree-ssa-structalias.c: Likewise. * tree-ssa-uninit.c: Likewise. * tree-ssa.c: Likewise. * tree-vect-data-refs.c: Likewise. * tree-vect-loop-manip.c: Likewise. * tree-vect-loop.c: Likewise. * tree-vect-patterns.c: Likewise. * tree-vect-stmts.c: Likewise. * tree-vrp.c: Likewise. * varasm.c: Likewise. * vec.c: Likewise. * web.c: Likewise. * xcoffout.c: Likewise. c-family/ * c-common.h: Include diagnostic-core.h. Error if already included. * c-semantics.c: Do not define GCC_DIAG_STYLE here. cp/ * cp-tree.h: Do not include toplev.h. java/ * boehm.c: Include diagnostic-core.h in every file that includes toplev.h. * class.c: Likewise. * constants.c: Likewise. * decl.c: Likewise. * except.c: Likewise. * expr.c: Likewise. * jcf-parse.c: Likewise. * mangle.c: Likewise. * mangle_name.c: Likewise. * resource.c: Likewise. * typeck.c: Likewise. * verify-glue.c: Likewise. ada/ * gcc-interface/utils.c: Include diagnostic-core.h in every file that includes toplev.h. lto/ * lto-coff.c: Include diagnostic-core.h in every file that includes toplev.h. * lto-elf.c: Likewise. * lto-lang.c: Likewise. * lto-macho.c: Likewise. git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@161943 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/fold-const.c | 1 + 1 file changed, 1 insertion(+) (limited to 'gcc/fold-const.c') diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 8be8f3ee4a7..9ca5eff554e 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -53,6 +53,7 @@ along with GCC; see the file COPYING3. If not see #include "expr.h" #include "tm_p.h" #include "target.h" +#include "diagnostic-core.h" #include "toplev.h" #include "intl.h" #include "ggc.h" -- cgit v1.2.1