author    rsandifo <rsandifo@138bc75d-0d04-0410-961f-82ee72b054a4>    2017-12-20 12:51:50 +0000
committer rsandifo <rsandifo@138bc75d-0d04-0410-961f-82ee72b054a4>    2017-12-20 12:51:50 +0000
commit    8672ee56bb5745263da8b6412dfb4647edef0e56 (patch)
tree      8ecb64d56e437c9912d8f6db4a70c9aec93c5019 /gcc/fold-const.c
parent    bbad7cd0bcb8102f4211c9b012a5d1ca886217a0 (diff)
download  gcc-8672ee56bb5745263da8b6412dfb4647edef0e56.tar.gz
poly_int: tree constants
This patch adds a tree representation for poly_ints.  Unlike the rtx
version, the coefficients are INTEGER_CSTs rather than plain integers,
so that we can easily access them as poly_widest_ints and
poly_offset_ints.

The patch also adjusts some places that previously relied on "constant"
meaning "INTEGER_CST".  It also makes sure that the TYPE_SIZE agrees
with the TYPE_SIZE_UNIT for vector booleans, given the existing:

	/* Several boolean vector elements may fit in a single unit.  */
	if (VECTOR_BOOLEAN_TYPE_P (type)
	    && type->type_common.mode != BLKmode)
	  TYPE_SIZE_UNIT (type)
	    = size_int (GET_MODE_SIZE (type->type_common.mode));
	else
	  TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
						   TYPE_SIZE_UNIT (innertype),
						   size_int (nunits));

2017-12-20  Richard Sandiford  <richard.sandiford@linaro.org>
	    Alan Hayward  <alan.hayward@arm.com>
	    David Sherwood  <david.sherwood@arm.com>

gcc/
	* doc/generic.texi (POLY_INT_CST): Document.
	* tree.def (POLY_INT_CST): New tree code.
	* treestruct.def (TS_POLY_INT_CST): New tree layout.
	* tree-core.h (tree_poly_int_cst): New struct.
	(tree_node): Add a poly_int_cst field.
	* tree.h (POLY_INT_CST_P, POLY_INT_CST_COEFF): New macros.
	(wide_int_to_tree, force_fit_type): Take a poly_wide_int_ref
	instead of a wide_int_ref.
	(build_int_cst, build_int_cst_type): Take a poly_int64 instead
	of a HOST_WIDE_INT.
	(build_int_cstu, build_array_type_nelts): Take a poly_uint64
	instead of an unsigned HOST_WIDE_INT.
	(build_poly_int_cst, tree_fits_poly_int64_p, tree_fits_poly_uint64_p)
	(ptrdiff_tree_p): Declare.
	(tree_to_poly_int64, tree_to_poly_uint64): Likewise.  Provide
	extern inline implementations if the target doesn't use POLY_INT_CST.
	(poly_int_tree_p): New function.
	(wi::unextended_tree): New class.
	(wi::int_traits <unextended_tree>): New override.
	(wi::extended_tree): Add a default constructor.
	(wi::extended_tree::get_tree): New function.
	(wi::widest_extended_tree, wi::offset_extended_tree): New typedefs.
	(wi::tree_to_widest_ref, wi::tree_to_offset_ref): Use them.
	(wi::tree_to_poly_widest_ref, wi::tree_to_poly_offset_ref)
	(wi::tree_to_poly_wide_ref): New typedefs.
	(wi::ints_for): Provide overloads for extended_tree and
	unextended_tree.
	(poly_int_cst_value, wi::to_poly_widest, wi::to_poly_offset)
	(wi::to_wide): New functions.
	(wi::fits_to_boolean_p, wi::fits_to_tree_p): Handle poly_ints.
	* tree.c (poly_int_cst_hasher): New struct.
	(poly_int_cst_hash_table): New variable.
	(tree_node_structure_for_code, tree_code_size, simple_cst_equal)
	(valid_constant_size_p, add_expr, drop_tree_overflow): Handle
	POLY_INT_CST.
	(initialize_tree_contains_struct): Handle TS_POLY_INT_CST.
	(init_ttree): Initialize poly_int_cst_hash_table.
	(build_int_cst, build_int_cst_type, build_invariant_address): Take
	a poly_int64 instead of a HOST_WIDE_INT.
	(build_int_cstu, build_array_type_nelts): Take a poly_uint64
	instead of an unsigned HOST_WIDE_INT.
	(wide_int_to_tree): Rename to...
	(wide_int_to_tree_1): ...this.
	(build_new_poly_int_cst, build_poly_int_cst): New functions.
	(force_fit_type): Take a poly_wide_int_ref instead of a
	wide_int_ref.
	(wide_int_to_tree): New function that takes a poly_wide_int_ref.
	(ptrdiff_tree_p, tree_to_poly_int64, tree_to_poly_uint64)
	(tree_fits_poly_int64_p, tree_fits_poly_uint64_p): New functions.
	* lto-streamer-out.c (DFS::DFS_write_tree_body, hash_tree): Handle
	TS_POLY_INT_CST.
	* tree-streamer-in.c (lto_input_ts_poly_tree_pointers): Likewise.
	(streamer_read_tree_body): Likewise.
	* tree-streamer-out.c (write_ts_poly_tree_pointers): Likewise.
	(streamer_write_tree_body): Likewise.
	* tree-streamer.c (streamer_check_handled_ts_structures): Likewise.
	* asan.c (asan_protect_global): Require the size to be an INTEGER_CST.
	* cfgexpand.c (expand_debug_expr): Handle POLY_INT_CST.
	* expr.c (expand_expr_real_1, const_vector_from_tree): Likewise.
	* gimple-expr.h (is_gimple_constant): Likewise.
	* gimplify.c (maybe_with_size_expr): Likewise.
	* print-tree.c (print_node): Likewise.
	* tree-data-ref.c (data_ref_compare_tree): Likewise.
	* tree-pretty-print.c (dump_generic_node): Likewise.
	* tree-ssa-address.c (addr_for_mem_ref): Likewise.
	* tree-vect-data-refs.c (dr_group_sort_cmp): Likewise.
	* tree-vrp.c (compare_values_warnv): Likewise.
	* tree-ssa-loop-ivopts.c (determine_base_object, constant_multiple_of)
	(get_loop_invariant_expr, add_candidate_1, get_computation_aff_1)
	(force_expr_to_var_cost): Likewise.
	* tree-ssa-loop.c (for_each_index): Likewise.
	* fold-const.h (build_invariant_address, size_int_kind): Take a
	poly_int64 instead of a HOST_WIDE_INT.
	* fold-const.c (fold_negate_expr_1, const_binop, const_unop)
	(fold_convert_const, multiple_of_p, fold_negate_const): Handle
	POLY_INT_CST.
	(size_binop_loc): Likewise.  Allow int_const_binop_1 to fail.
	(int_const_binop_2): New function, split out from...
	(int_const_binop_1): ...here.  Handle POLY_INT_CST.
	(size_int_kind): Take a poly_int64 instead of a HOST_WIDE_INT.
	* expmed.c (make_tree): Handle CONST_POLY_INT_P.
	* gimple-ssa-strength-reduction.c (slsr_process_add)
	(slsr_process_mul): Check for INTEGER_CSTs before using them as
	candidates.
	* stor-layout.c (bits_from_bytes): New function.
	(bit_from_pos): Use it.
	(layout_type): Likewise.  For vectors, multiply the TYPE_SIZE_UNIT
	by BITS_PER_UNIT to get the TYPE_SIZE.
	* tree-cfg.c (verify_expr, verify_types_in_gimple_reference): Allow
	MEM_REF and TARGET_MEM_REF offsets to be a POLY_INT_CST.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@255863 138bc75d-0d04-0410-961f-82ee72b054a4
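Note on the folding rules the patch introduces: the arithmetic cases in the new
int_const_binop_1 are easiest to see on a toy model of a two-coefficient
poly_int.  The standalone C++ sketch below is not GCC code; the names Poly2,
add, sub and try_mul are made up for illustration.  It only shows, under that
assumption, why addition and subtraction always fold, while multiplication
(and, analogously, left shifts) folds only when one operand is an ordinary
compile-time constant, mirroring the MULT_EXPR case, which requires one
INTEGER_CST operand.

// Standalone model of a degree-1 poly_int: value = c0 + c1 * X, where X is
// a runtime quantity (e.g. the number of elements in a scalable vector).
// Hypothetical names; not part of the GCC sources.
#include <cstdio>
#include <optional>

struct Poly2
{
  long c0;  // constant term
  long c1;  // coefficient of the runtime parameter X
};

// Addition and subtraction fold coefficient-wise, so they always succeed.
static Poly2 add (Poly2 a, Poly2 b) { return { a.c0 + b.c0, a.c1 + b.c1 }; }
static Poly2 sub (Poly2 a, Poly2 b) { return { a.c0 - b.c0, a.c1 - b.c1 }; }

// Multiplying two genuinely polynomial values would introduce an X*X term,
// which the representation cannot hold, so the product folds only when one
// operand is a plain constant (c1 == 0) -- the analogue of the MULT_EXPR
// case in int_const_binop_1 requiring an INTEGER_CST operand.
static std::optional<Poly2> try_mul (Poly2 a, Poly2 b)
{
  if (b.c1 == 0)
    return Poly2 { a.c0 * b.c0, a.c1 * b.c0 };
  if (a.c1 == 0)
    return Poly2 { b.c0 * a.c0, b.c1 * a.c0 };
  return std::nullopt;  // caller must keep the expression symbolic
}

int main ()
{
  Poly2 a { 4, 2 };  // 4 + 2X
  Poly2 b { 8, 0 };  // plain constant 8
  Poly2 s = add (a, b);
  if (auto p = try_mul (a, b))
    std::printf ("sum = %ld + %ldX, product = %ld + %ldX\n",
                 s.c0, s.c1, p->c0, p->c1);
  return 0;
}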
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--  gcc/fold-const.c  130
1 file changed, 106 insertions, 24 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 89a9f400729..c219ed8a6af 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -547,10 +547,8 @@ fold_negate_expr_1 (location_t loc, tree t)
return tem;
break;
+ case POLY_INT_CST:
case REAL_CST:
- tem = fold_negate_const (t, type);
- return tem;
-
case FIXED_CST:
tem = fold_negate_const (t, type);
return tem;
@@ -961,13 +959,10 @@ int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2
&& TYPE_MODE (type1) == TYPE_MODE (type2);
}
-
-/* Combine two integer constants PARG1 and PARG2 under operation CODE
- to produce a new constant. Return NULL_TREE if we don't know how
- to evaluate CODE at compile-time. */
+/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs. */
static tree
-int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
+int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
int overflowable)
{
wide_int res;
@@ -1115,6 +1110,74 @@ int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
return t;
}
+/* Combine two integer constants PARG1 and PARG2 under operation CODE
+ to produce a new constant. Return NULL_TREE if we don't know how
+ to evaluate CODE at compile-time. */
+
+static tree
+int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
+ int overflowable)
+{
+ if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ return int_const_binop_2 (code, arg1, arg2, overflowable);
+
+ gcc_assert (NUM_POLY_INT_COEFFS != 1);
+
+ if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
+ {
+ poly_wide_int res;
+ bool overflow;
+ tree type = TREE_TYPE (arg1);
+ signop sign = TYPE_SIGN (type);
+ switch (code)
+ {
+ case PLUS_EXPR:
+ res = wi::add (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, &overflow);
+ break;
+
+ case MINUS_EXPR:
+ res = wi::sub (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, &overflow);
+ break;
+
+ case MULT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg1),
+ wi::to_wide (arg2), sign, &overflow);
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg2),
+ wi::to_wide (arg1), sign, &overflow);
+ else
+ return NULL_TREE;
+ break;
+
+ case LSHIFT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
+ else
+ return NULL_TREE;
+ break;
+
+ case BIT_IOR_EXPR:
+ if (TREE_CODE (arg2) != INTEGER_CST
+ || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
+ &res))
+ return NULL_TREE;
+ break;
+
+ default:
+ return NULL_TREE;
+ }
+ return force_fit_type (type, res, overflowable,
+ (((sign == SIGNED || overflowable == -1)
+ && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
+ }
+
+ return NULL_TREE;
+}
+
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
@@ -1157,7 +1220,7 @@ const_binop (enum tree_code code, tree arg1, tree arg2)
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
- if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
{
if (code == POINTER_PLUS_EXPR)
return int_const_binop (PLUS_EXPR,
@@ -1701,6 +1764,8 @@ const_unop (enum tree_code code, tree type, tree arg0)
case BIT_NOT_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST)
return fold_not_const (arg0, type);
+ else if (POLY_INT_CST_P (arg0))
+ return wide_int_to_tree (type, -poly_int_cst_value (arg0));
/* Perform BIT_NOT_EXPR on each element individually. */
else if (TREE_CODE (arg0) == VECTOR_CST)
{
@@ -1792,7 +1857,7 @@ const_unop (enum tree_code code, tree type, tree arg0)
indicates which particular sizetype to create. */
tree
-size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
+size_int_kind (poly_int64 number, enum size_type_kind kind)
{
return build_int_cst (sizetype_tab[(int) kind], number);
}
@@ -1813,8 +1878,8 @@ size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
TREE_TYPE (arg1)));
- /* Handle the special case of two integer constants faster. */
- if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ /* Handle the special case of two poly_int constants faster. */
+ if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
{
/* And some specific cases even faster than that. */
if (code == PLUS_EXPR)
@@ -1838,7 +1903,9 @@ size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
/* Handle general case of two integer constants. For sizetype
constant calculations we always want to know about overflow,
even in the unsigned case. */
- return int_const_binop_1 (code, arg0, arg1, -1);
+ tree res = int_const_binop_1 (code, arg0, arg1, -1);
+ if (res != NULL_TREE)
+ return res;
}
return fold_build2_loc (loc, code, type, arg0, arg1);
@@ -2162,9 +2229,20 @@ fold_convert_const_fixed_from_real (tree type, const_tree arg1)
static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
- if (TREE_TYPE (arg1) == type)
+ tree arg_type = TREE_TYPE (arg1);
+ if (arg_type == type)
return arg1;
+ /* We can't widen types, since the runtime value could overflow the
+ original type before being extended to the new type. */
+ if (POLY_INT_CST_P (arg1)
+ && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
+ return build_poly_int_cst (type,
+ poly_wide_int::from (poly_int_cst_value (arg1),
+ TYPE_PRECISION (type),
+ TYPE_SIGN (arg_type)));
+
if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
|| TREE_CODE (type) == OFFSET_TYPE)
{
@@ -12562,6 +12640,10 @@ multiple_of_p (tree type, const_tree top, const_tree bottom)
/* fall through */
default:
+ if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
+ return multiple_p (wi::to_poly_widest (top),
+ wi::to_poly_widest (bottom));
+
return 0;
}
}
@@ -13624,16 +13706,6 @@ fold_negate_const (tree arg0, tree type)
switch (TREE_CODE (arg0))
{
- case INTEGER_CST:
- {
- bool overflow;
- wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
- t = force_fit_type (type, val, 1,
- (overflow && ! TYPE_UNSIGNED (type))
- || TREE_OVERFLOW (arg0));
- break;
- }
-
case REAL_CST:
t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
break;
@@ -13652,6 +13724,16 @@ fold_negate_const (tree arg0, tree type)
}
default:
+ if (poly_int_tree_p (arg0))
+ {
+ bool overflow;
+ poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
+ t = force_fit_type (type, res, 1,
+ (overflow && ! TYPE_UNSIGNED (type))
+ || TREE_OVERFLOW (arg0));
+ break;
+ }
+
gcc_unreachable ();
}