author     rsandifo <rsandifo@138bc75d-0d04-0410-961f-82ee72b054a4>  2017-12-20 12:51:50 +0000
committer  rsandifo <rsandifo@138bc75d-0d04-0410-961f-82ee72b054a4>  2017-12-20 12:51:50 +0000
commit     8672ee56bb5745263da8b6412dfb4647edef0e56 (patch)
tree       8ecb64d56e437c9912d8f6db4a70c9aec93c5019
parent     bbad7cd0bcb8102f4211c9b012a5d1ca886217a0 (diff)
download   gcc-8672ee56bb5745263da8b6412dfb4647edef0e56.tar.gz
poly_int: tree constants
This patch adds a tree representation for poly_ints.  Unlike the rtx
version, the coefficients are INTEGER_CSTs rather than plain integers,
so that we can easily access them as poly_widest_ints and
poly_offset_ints.

The patch also adjusts some places that previously relied on "constant"
meaning "INTEGER_CST".  It also makes sure that the TYPE_SIZE agrees
with the TYPE_SIZE_UNIT for vector booleans, given the existing:

        /* Several boolean vector elements may fit in a single unit.  */
        if (VECTOR_BOOLEAN_TYPE_P (type)
            && type->type_common.mode != BLKmode)
          TYPE_SIZE_UNIT (type)
            = size_int (GET_MODE_SIZE (type->type_common.mode));
        else
          TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
                                                   TYPE_SIZE_UNIT (innertype),
                                                   size_int (nunits));

2017-12-20  Richard Sandiford  <richard.sandiford@linaro.org>
            Alan Hayward  <alan.hayward@arm.com>
            David Sherwood  <david.sherwood@arm.com>

gcc/
        * doc/generic.texi (POLY_INT_CST): Document.
        * tree.def (POLY_INT_CST): New tree code.
        * treestruct.def (TS_POLY_INT_CST): New tree layout.
        * tree-core.h (tree_poly_int_cst): New struct.
        (tree_node): Add a poly_int_cst field.
        * tree.h (POLY_INT_CST_P, POLY_INT_CST_COEFF): New macros.
        (wide_int_to_tree, force_fit_type): Take a poly_wide_int_ref
        instead of a wide_int_ref.
        (build_int_cst, build_int_cst_type): Take a poly_int64 instead
        of a HOST_WIDE_INT.
        (build_int_cstu, build_array_type_nelts): Take a poly_uint64
        instead of an unsigned HOST_WIDE_INT.
        (build_poly_int_cst, tree_fits_poly_int64_p, tree_fits_poly_uint64_p)
        (ptrdiff_tree_p): Declare.
        (tree_to_poly_int64, tree_to_poly_uint64): Likewise.  Provide
        extern inline implementations if the target doesn't use POLY_INT_CST.
        (poly_int_tree_p): New function.
        (wi::unextended_tree): New class.
        (wi::int_traits <unextended_tree>): New override.
        (wi::extended_tree): Add a default constructor.
        (wi::extended_tree::get_tree): New function.
        (wi::widest_extended_tree, wi::offset_extended_tree): New typedefs.
        (wi::tree_to_widest_ref, wi::tree_to_offset_ref): Use them.
        (wi::tree_to_poly_widest_ref, wi::tree_to_poly_offset_ref)
        (wi::tree_to_poly_wide_ref): New typedefs.
        (wi::ints_for): Provide overloads for extended_tree and
        unextended_tree.
        (poly_int_cst_value, wi::to_poly_widest, wi::to_poly_offset)
        (wi::to_wide): New functions.
        (wi::fits_to_boolean_p, wi::fits_to_tree_p): Handle poly_ints.
        * tree.c (poly_int_cst_hasher): New struct.
        (poly_int_cst_hash_table): New variable.
        (tree_node_structure_for_code, tree_code_size, simple_cst_equal)
        (valid_constant_size_p, add_expr, drop_tree_overflow): Handle
        POLY_INT_CST.
        (initialize_tree_contains_struct): Handle TS_POLY_INT_CST.
        (init_ttree): Initialize poly_int_cst_hash_table.
        (build_int_cst, build_int_cst_type, build_invariant_address): Take
        a poly_int64 instead of a HOST_WIDE_INT.
        (build_int_cstu, build_array_type_nelts): Take a poly_uint64
        instead of an unsigned HOST_WIDE_INT.
        (wide_int_to_tree): Rename to...
        (wide_int_to_tree_1): ...this.
        (build_new_poly_int_cst, build_poly_int_cst): New functions.
        (force_fit_type): Take a poly_wide_int_ref instead of a wide_int_ref.
        (wide_int_to_tree): New function that takes a poly_wide_int_ref.
        (ptrdiff_tree_p, tree_to_poly_int64, tree_to_poly_uint64)
        (tree_fits_poly_int64_p, tree_fits_poly_uint64_p): New functions.
        * lto-streamer-out.c (DFS::DFS_write_tree_body, hash_tree): Handle
        TS_POLY_INT_CST.
        * tree-streamer-in.c (lto_input_ts_poly_tree_pointers): Likewise.
        (streamer_read_tree_body): Likewise.
        * tree-streamer-out.c (write_ts_poly_tree_pointers): Likewise.
        (streamer_write_tree_body): Likewise.
        * tree-streamer.c (streamer_check_handled_ts_structures): Likewise.
        * asan.c (asan_protect_global): Require the size to be an INTEGER_CST.
        * cfgexpand.c (expand_debug_expr): Handle POLY_INT_CST.
        * expr.c (expand_expr_real_1, const_vector_from_tree): Likewise.
        * gimple-expr.h (is_gimple_constant): Likewise.
        * gimplify.c (maybe_with_size_expr): Likewise.
        * print-tree.c (print_node): Likewise.
        * tree-data-ref.c (data_ref_compare_tree): Likewise.
        * tree-pretty-print.c (dump_generic_node): Likewise.
        * tree-ssa-address.c (addr_for_mem_ref): Likewise.
        * tree-vect-data-refs.c (dr_group_sort_cmp): Likewise.
        * tree-vrp.c (compare_values_warnv): Likewise.
        * tree-ssa-loop-ivopts.c (determine_base_object, constant_multiple_of)
        (get_loop_invariant_expr, add_candidate_1, get_computation_aff_1)
        (force_expr_to_var_cost): Likewise.
        * tree-ssa-loop.c (for_each_index): Likewise.
        * fold-const.h (build_invariant_address, size_int_kind): Take a
        poly_int64 instead of a HOST_WIDE_INT.
        * fold-const.c (fold_negate_expr_1, const_binop, const_unop)
        (fold_convert_const, multiple_of_p, fold_negate_const): Handle
        POLY_INT_CST.
        (size_binop_loc): Likewise.  Allow int_const_binop_1 to fail.
        (int_const_binop_2): New function, split out from...
        (int_const_binop_1): ...here.  Handle POLY_INT_CST.
        (size_int_kind): Take a poly_int64 instead of a HOST_WIDE_INT.
        * expmed.c (make_tree): Handle CONST_POLY_INT_P.
        * gimple-ssa-strength-reduction.c (slsr_process_add)
        (slsr_process_mul): Check for INTEGER_CSTs before using them
        as candidates.
        * stor-layout.c (bits_from_bytes): New function.
        (bit_from_pos): Use it.
        (layout_type): Likewise.  For vectors, multiply the TYPE_SIZE_UNIT
        by BITS_PER_UNIT to get the TYPE_SIZE.
        * tree-cfg.c (verify_expr, verify_types_in_gimple_reference): Allow
        MEM_REF and TARGET_MEM_REF offsets to be a POLY_INT_CST.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@255863 138bc75d-0d04-0410-961f-82ee72b054a4
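[Editorial note, not part of the commit: the sketch below is a standalone model of what a two-coefficient poly_int denotes, written to make the patch easier to follow.  It is not GCC source; the struct and names are illustrative, and NUM_POLY_INT_COEFFS is fixed at 2 here to mimic an SVE-like target where each value is a constant term plus a multiple of one runtime parameter.]

    // Standalone C++ model (illustrative only) of a two-coefficient poly_int:
    //   value = coeffs[0] + coeffs[1] * x
    // where x is a runtime parameter, e.g. the number of extra vector chunks
    // on a variable-length vector target.
    #include <cstdint>
    #include <cstdio>

    constexpr unsigned NUM_POLY_INT_COEFFS = 2;  // illustrative; GCC targets define this

    struct poly_int64_model
    {
      int64_t coeffs[NUM_POLY_INT_COEFFS];  // coeffs[0] is the constant term

      // Evaluate the polynomial once the runtime parameter is known.
      int64_t eval (int64_t x) const { return coeffs[0] + coeffs[1] * x; }

      // The value is "constant" in the INTEGER_CST sense iff every
      // higher-order coefficient is zero.
      bool is_constant () const { return coeffs[1] == 0; }
    };

    int main ()
    {
      // E.g. a byte size of 16 + 16x for a scalable vector type.
      poly_int64_model size_unit = { { 16, 16 } };

      // Keeping TYPE_SIZE consistent with TYPE_SIZE_UNIT means scaling the
      // whole polynomial by BITS_PER_UNIT, coefficient by coefficient.
      poly_int64_model size_bits = { { size_unit.coeffs[0] * 8,
                                       size_unit.coeffs[1] * 8 } };

      printf ("bytes at x=1: %lld, bits at x=1: %lld\n",
              (long long) size_unit.eval (1), (long long) size_bits.eval (1));
      return 0;
    }

In the patch itself, the "all higher coefficients are zero" test is what wide_int_to_tree and force_fit_type use (via is_constant ()) to decide between a shared INTEGER_CST and a hashed POLY_INT_CST, and the coefficient-wise byte-to-bit scaling is done by the new bits_from_bytes helper in stor-layout.c.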
-rw-r--r--  gcc/ChangeLog | 94
-rw-r--r--  gcc/asan.c | 1
-rw-r--r--  gcc/cfgexpand.c | 3
-rw-r--r--  gcc/doc/generic.texi | 11
-rw-r--r--  gcc/expmed.c | 3
-rw-r--r--  gcc/expr.c | 6
-rw-r--r--  gcc/fold-const.c | 130
-rw-r--r--  gcc/fold-const.h | 4
-rw-r--r--  gcc/gimple-expr.h | 1
-rw-r--r--  gcc/gimple-ssa-strength-reduction.c | 4
-rw-r--r--  gcc/gimplify.c | 2
-rw-r--r--  gcc/lto-streamer-out.c | 8
-rw-r--r--  gcc/print-tree.c | 12
-rw-r--r--  gcc/stor-layout.c | 35
-rw-r--r--  gcc/tree-cfg.c | 6
-rw-r--r--  gcc/tree-core.h | 6
-rw-r--r--  gcc/tree-data-ref.c | 4
-rw-r--r--  gcc/tree-pretty-print.c | 12
-rw-r--r--  gcc/tree-ssa-address.c | 3
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c | 17
-rw-r--r--  gcc/tree-ssa-loop.c | 1
-rw-r--r--  gcc/tree-streamer-in.c | 16
-rw-r--r--  gcc/tree-streamer-out.c | 15
-rw-r--r--  gcc/tree-streamer.c | 1
-rw-r--r--  gcc/tree-vect-data-refs.c | 2
-rw-r--r--  gcc/tree-vrp.c | 19
-rw-r--r--  gcc/tree.c | 249
-rw-r--r--  gcc/tree.def | 3
-rw-r--r--  gcc/tree.h | 242
-rw-r--r--  gcc/treestruct.def | 1
30 files changed, 831 insertions, 80 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 082f098e212..2b57ea6c98d 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -2,6 +2,100 @@
Alan Hayward <alan.hayward@arm.com>
David Sherwood <david.sherwood@arm.com>
+ * doc/generic.texi (POLY_INT_CST): Document.
+ * tree.def (POLY_INT_CST): New tree code.
+ * treestruct.def (TS_POLY_INT_CST): New tree layout.
+ * tree-core.h (tree_poly_int_cst): New struct.
+ (tree_node): Add a poly_int_cst field.
+ * tree.h (POLY_INT_CST_P, POLY_INT_CST_COEFF): New macros.
+ (wide_int_to_tree, force_fit_type): Take a poly_wide_int_ref
+ instead of a wide_int_ref.
+ (build_int_cst, build_int_cst_type): Take a poly_int64 instead
+ of a HOST_WIDE_INT.
+ (build_int_cstu, build_array_type_nelts): Take a poly_uint64
+ instead of an unsigned HOST_WIDE_INT.
+ (build_poly_int_cst, tree_fits_poly_int64_p, tree_fits_poly_uint64_p)
+ (ptrdiff_tree_p): Declare.
+ (tree_to_poly_int64, tree_to_poly_uint64): Likewise. Provide
+ extern inline implementations if the target doesn't use POLY_INT_CST.
+ (poly_int_tree_p): New function.
+ (wi::unextended_tree): New class.
+ (wi::int_traits <unextended_tree>): New override.
+ (wi::extended_tree): Add a default constructor.
+ (wi::extended_tree::get_tree): New function.
+ (wi::widest_extended_tree, wi::offset_extended_tree): New typedefs.
+ (wi::tree_to_widest_ref, wi::tree_to_offset_ref): Use them.
+ (wi::tree_to_poly_widest_ref, wi::tree_to_poly_offset_ref)
+ (wi::tree_to_poly_wide_ref): New typedefs.
+ (wi::ints_for): Provide overloads for extended_tree and
+ unextended_tree.
+ (poly_int_cst_value, wi::to_poly_widest, wi::to_poly_offset)
+ (wi::to_wide): New functions.
+ (wi::fits_to_boolean_p, wi::fits_to_tree_p): Handle poly_ints.
+ * tree.c (poly_int_cst_hasher): New struct.
+ (poly_int_cst_hash_table): New variable.
+ (tree_node_structure_for_code, tree_code_size, simple_cst_equal)
+ (valid_constant_size_p, add_expr, drop_tree_overflow): Handle
+ POLY_INT_CST.
+ (initialize_tree_contains_struct): Handle TS_POLY_INT_CST.
+ (init_ttree): Initialize poly_int_cst_hash_table.
+ (build_int_cst, build_int_cst_type, build_invariant_address): Take
+ a poly_int64 instead of a HOST_WIDE_INT.
+ (build_int_cstu, build_array_type_nelts): Take a poly_uint64
+ instead of an unsigned HOST_WIDE_INT.
+ (wide_int_to_tree): Rename to...
+ (wide_int_to_tree_1): ...this.
+ (build_new_poly_int_cst, build_poly_int_cst): New functions.
+ (force_fit_type): Take a poly_wide_int_ref instead of a wide_int_ref.
+ (wide_int_to_tree): New function that takes a poly_wide_int_ref.
+ (ptrdiff_tree_p, tree_to_poly_int64, tree_to_poly_uint64)
+ (tree_fits_poly_int64_p, tree_fits_poly_uint64_p): New functions.
+ * lto-streamer-out.c (DFS::DFS_write_tree_body, hash_tree): Handle
+ TS_POLY_INT_CST.
+ * tree-streamer-in.c (lto_input_ts_poly_tree_pointers): Likewise.
+ (streamer_read_tree_body): Likewise.
+ * tree-streamer-out.c (write_ts_poly_tree_pointers): Likewise.
+ (streamer_write_tree_body): Likewise.
+ * tree-streamer.c (streamer_check_handled_ts_structures): Likewise.
+ * asan.c (asan_protect_global): Require the size to be an INTEGER_CST.
+ * cfgexpand.c (expand_debug_expr): Handle POLY_INT_CST.
+ * expr.c (expand_expr_real_1, const_vector_from_tree): Likewise.
+ * gimple-expr.h (is_gimple_constant): Likewise.
+ * gimplify.c (maybe_with_size_expr): Likewise.
+ * print-tree.c (print_node): Likewise.
+ * tree-data-ref.c (data_ref_compare_tree): Likewise.
+ * tree-pretty-print.c (dump_generic_node): Likewise.
+ * tree-ssa-address.c (addr_for_mem_ref): Likewise.
+ * tree-vect-data-refs.c (dr_group_sort_cmp): Likewise.
+ * tree-vrp.c (compare_values_warnv): Likewise.
+ * tree-ssa-loop-ivopts.c (determine_base_object, constant_multiple_of)
+ (get_loop_invariant_expr, add_candidate_1, get_computation_aff_1)
+ (force_expr_to_var_cost): Likewise.
+ * tree-ssa-loop.c (for_each_index): Likewise.
+ * fold-const.h (build_invariant_address, size_int_kind): Take a
+ poly_int64 instead of a HOST_WIDE_INT.
+ * fold-const.c (fold_negate_expr_1, const_binop, const_unop)
+ (fold_convert_const, multiple_of_p, fold_negate_const): Handle
+ POLY_INT_CST.
+ (size_binop_loc): Likewise. Allow int_const_binop_1 to fail.
+ (int_const_binop_2): New function, split out from...
+ (int_const_binop_1): ...here. Handle POLY_INT_CST.
+ (size_int_kind): Take a poly_int64 instead of a HOST_WIDE_INT.
+ * expmed.c (make_tree): Handle CONST_POLY_INT_P.
+ * gimple-ssa-strength-reduction.c (slsr_process_add)
+ (slsr_process_mul): Check for INTEGER_CSTs before using them
+ as candidates.
+ * stor-layout.c (bits_from_bytes): New function.
+ (bit_from_pos): Use it.
+ (layout_type): Likewise. For vectors, multiply the TYPE_SIZE_UNIT
+ by BITS_PER_UNIT to get the TYPE_SIZE.
+ * tree-cfg.c (verify_expr, verify_types_in_gimple_reference): Allow
+ MEM_REF and TARGET_MEM_REF offsets to be a POLY_INT_CST.
+
+2017-12-20 Richard Sandiford <richard.sandiford@linaro.org>
+ Alan Hayward <alan.hayward@arm.com>
+ David Sherwood <david.sherwood@arm.com>
+
* doc/rtl.texi (const_poly_int): Document. Also document the
rtl sharing behavior.
* gengenrtl.c (excluded_rtx): Return true for CONST_POLY_INT.
diff --git a/gcc/asan.c b/gcc/asan.c
index c1135af7773..e27338fda77 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -1651,6 +1651,7 @@ asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
&& !section_sanitized_p (DECL_SECTION_NAME (decl)))
|| DECL_SIZE (decl) == 0
|| ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
+ || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
|| !valid_constant_size_p (DECL_SIZE_UNIT (decl))
|| DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
|| TREE_TYPE (decl) == ubsan_get_source_location_type ()
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index d1616e192cb..3bd36ebba2e 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -4264,6 +4264,9 @@ expand_debug_expr (tree exp)
op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
return op0;
+ case POLY_INT_CST:
+ return immed_wide_int_const (poly_int_cst_value (exp), mode);
+
case COMPLEX_CST:
gcc_assert (COMPLEX_MODE_P (mode));
op0 = expand_debug_expr (TREE_REALPART (exp));
diff --git a/gcc/doc/generic.texi b/gcc/doc/generic.texi
index 29d85316877..33fef20b817 100644
--- a/gcc/doc/generic.texi
+++ b/gcc/doc/generic.texi
@@ -1037,6 +1037,7 @@ As this example indicates, the operands are zero-indexed.
@tindex COMPLEX_CST
@tindex VECTOR_CST
@tindex STRING_CST
+@tindex POLY_INT_CST
@findex TREE_STRING_LENGTH
@findex TREE_STRING_POINTER
@@ -1175,6 +1176,16 @@ of the @code{STRING_CST}.
FIXME: The formats of string constants are not well-defined when the
target system bytes are not the same width as host system bytes.
+@item POLY_INT_CST
+These nodes represent invariants that depend on some target-specific
+runtime parameters. They consist of @code{NUM_POLY_INT_COEFFS}
+coefficients, with the first coefficient being the constant term and
+the others being multipliers that are applied to the runtime parameters.
+
+@code{POLY_INT_CST_ELT (@var{x}, @var{i})} references coefficient number
+@var{i} of @code{POLY_INT_CST} node @var{x}. Each coefficient is an
+@code{INTEGER_CST}.
+
@end table
@node Storage References
diff --git a/gcc/expmed.c b/gcc/expmed.c
index 36aabc11c25..cc57d3cf12f 100644
--- a/gcc/expmed.c
+++ b/gcc/expmed.c
@@ -5278,6 +5278,9 @@ make_tree (tree type, rtx x)
/* fall through. */
default:
+ if (CONST_POLY_INT_P (x))
+ return wide_int_to_tree (t, const_poly_int_value (x));
+
t = build_decl (RTL_LOCATION (x), VAR_DECL, NULL_TREE, type);
/* If TYPE is a POINTER_TYPE, we might need to convert X from
diff --git a/gcc/expr.c b/gcc/expr.c
index 63e45cfc501..a6b236507fa 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -10101,6 +10101,9 @@ expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
copy_rtx (XEXP (temp, 0)));
return temp;
+ case POLY_INT_CST:
+ return immed_wide_int_const (poly_int_cst_value (exp), mode);
+
case SAVE_EXPR:
{
tree val = treeop0;
@@ -11816,7 +11819,8 @@ const_vector_from_tree (tree exp)
RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
inner);
else
- RTVEC_ELT (v, i) = immed_wide_int_const (wi::to_wide (elt), inner);
+ RTVEC_ELT (v, i) = immed_wide_int_const (wi::to_poly_wide (elt),
+ inner);
}
return gen_rtx_CONST_VECTOR (mode, v);
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 89a9f400729..c219ed8a6af 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -547,10 +547,8 @@ fold_negate_expr_1 (location_t loc, tree t)
return tem;
break;
+ case POLY_INT_CST:
case REAL_CST:
- tem = fold_negate_const (t, type);
- return tem;
-
case FIXED_CST:
tem = fold_negate_const (t, type);
return tem;
@@ -961,13 +959,10 @@ int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2
&& TYPE_MODE (type1) == TYPE_MODE (type2);
}
-
-/* Combine two integer constants PARG1 and PARG2 under operation CODE
- to produce a new constant. Return NULL_TREE if we don't know how
- to evaluate CODE at compile-time. */
+/* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs. */
static tree
-int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
+int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
int overflowable)
{
wide_int res;
@@ -1115,6 +1110,74 @@ int_const_binop_1 (enum tree_code code, const_tree parg1, const_tree parg2,
return t;
}
+/* Combine two integer constants PARG1 and PARG2 under operation CODE
+ to produce a new constant. Return NULL_TREE if we don't know how
+ to evaluate CODE at compile-time. */
+
+static tree
+int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
+ int overflowable)
+{
+ if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ return int_const_binop_2 (code, arg1, arg2, overflowable);
+
+ gcc_assert (NUM_POLY_INT_COEFFS != 1);
+
+ if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
+ {
+ poly_wide_int res;
+ bool overflow;
+ tree type = TREE_TYPE (arg1);
+ signop sign = TYPE_SIGN (type);
+ switch (code)
+ {
+ case PLUS_EXPR:
+ res = wi::add (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, &overflow);
+ break;
+
+ case MINUS_EXPR:
+ res = wi::sub (wi::to_poly_wide (arg1),
+ wi::to_poly_wide (arg2), sign, &overflow);
+ break;
+
+ case MULT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg1),
+ wi::to_wide (arg2), sign, &overflow);
+ else if (TREE_CODE (arg1) == INTEGER_CST)
+ res = wi::mul (wi::to_poly_wide (arg2),
+ wi::to_wide (arg1), sign, &overflow);
+ else
+ return NULL_TREE;
+ break;
+
+ case LSHIFT_EXPR:
+ if (TREE_CODE (arg2) == INTEGER_CST)
+ res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
+ else
+ return NULL_TREE;
+ break;
+
+ case BIT_IOR_EXPR:
+ if (TREE_CODE (arg2) != INTEGER_CST
+ || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
+ &res))
+ return NULL_TREE;
+ break;
+
+ default:
+ return NULL_TREE;
+ }
+ return force_fit_type (type, res, overflowable,
+ (((sign == SIGNED || overflowable == -1)
+ && overflow)
+ | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
+ }
+
+ return NULL_TREE;
+}
+
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
@@ -1157,7 +1220,7 @@ const_binop (enum tree_code code, tree arg1, tree arg2)
STRIP_NOPS (arg1);
STRIP_NOPS (arg2);
- if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
+ if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
{
if (code == POINTER_PLUS_EXPR)
return int_const_binop (PLUS_EXPR,
@@ -1701,6 +1764,8 @@ const_unop (enum tree_code code, tree type, tree arg0)
case BIT_NOT_EXPR:
if (TREE_CODE (arg0) == INTEGER_CST)
return fold_not_const (arg0, type);
+ else if (POLY_INT_CST_P (arg0))
+ return wide_int_to_tree (type, -poly_int_cst_value (arg0));
/* Perform BIT_NOT_EXPR on each element individually. */
else if (TREE_CODE (arg0) == VECTOR_CST)
{
@@ -1792,7 +1857,7 @@ const_unop (enum tree_code code, tree type, tree arg0)
indicates which particular sizetype to create. */
tree
-size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
+size_int_kind (poly_int64 number, enum size_type_kind kind)
{
return build_int_cst (sizetype_tab[(int) kind], number);
}
@@ -1813,8 +1878,8 @@ size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
TREE_TYPE (arg1)));
- /* Handle the special case of two integer constants faster. */
- if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ /* Handle the special case of two poly_int constants faster. */
+ if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
{
/* And some specific cases even faster than that. */
if (code == PLUS_EXPR)
@@ -1838,7 +1903,9 @@ size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
/* Handle general case of two integer constants. For sizetype
constant calculations we always want to know about overflow,
even in the unsigned case. */
- return int_const_binop_1 (code, arg0, arg1, -1);
+ tree res = int_const_binop_1 (code, arg0, arg1, -1);
+ if (res != NULL_TREE)
+ return res;
}
return fold_build2_loc (loc, code, type, arg0, arg1);
@@ -2162,9 +2229,20 @@ fold_convert_const_fixed_from_real (tree type, const_tree arg1)
static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
- if (TREE_TYPE (arg1) == type)
+ tree arg_type = TREE_TYPE (arg1);
+ if (arg_type == type)
return arg1;
+ /* We can't widen types, since the runtime value could overflow the
+ original type before being extended to the new type. */
+ if (POLY_INT_CST_P (arg1)
+ && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
+ && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
+ return build_poly_int_cst (type,
+ poly_wide_int::from (poly_int_cst_value (arg1),
+ TYPE_PRECISION (type),
+ TYPE_SIGN (arg_type)));
+
if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
|| TREE_CODE (type) == OFFSET_TYPE)
{
@@ -12562,6 +12640,10 @@ multiple_of_p (tree type, const_tree top, const_tree bottom)
/* fall through */
default:
+ if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
+ return multiple_p (wi::to_poly_widest (top),
+ wi::to_poly_widest (bottom));
+
return 0;
}
}
@@ -13624,16 +13706,6 @@ fold_negate_const (tree arg0, tree type)
switch (TREE_CODE (arg0))
{
- case INTEGER_CST:
- {
- bool overflow;
- wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
- t = force_fit_type (type, val, 1,
- (overflow && ! TYPE_UNSIGNED (type))
- || TREE_OVERFLOW (arg0));
- break;
- }
-
case REAL_CST:
t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
break;
@@ -13652,6 +13724,16 @@ fold_negate_const (tree arg0, tree type)
}
default:
+ if (poly_int_tree_p (arg0))
+ {
+ bool overflow;
+ poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
+ t = force_fit_type (type, res, 1,
+ (overflow && ! TYPE_UNSIGNED (type))
+ || TREE_OVERFLOW (arg0));
+ break;
+ }
+
gcc_unreachable ();
}
diff --git a/gcc/fold-const.h b/gcc/fold-const.h
index 0684ae76235..73a8764d7ce 100644
--- a/gcc/fold-const.h
+++ b/gcc/fold-const.h
@@ -115,7 +115,7 @@ extern tree build_simple_mem_ref_loc (location_t, tree);
#define build_simple_mem_ref(T)\
build_simple_mem_ref_loc (UNKNOWN_LOCATION, T)
extern offset_int mem_ref_offset (const_tree);
-extern tree build_invariant_address (tree, tree, HOST_WIDE_INT);
+extern tree build_invariant_address (tree, tree, poly_int64);
extern tree constant_boolean_node (bool, tree);
extern tree div_if_zero_remainder (const_tree, const_tree);
@@ -152,7 +152,7 @@ extern bool may_negate_without_overflow_p (const_tree);
extern tree round_up_loc (location_t, tree, unsigned int);
#define round_down(T,N) round_down_loc (UNKNOWN_LOCATION, T, N)
extern tree round_down_loc (location_t, tree, int);
-extern tree size_int_kind (HOST_WIDE_INT, enum size_type_kind);
+extern tree size_int_kind (poly_int64, enum size_type_kind);
#define size_binop(CODE,T1,T2)\
size_binop_loc (UNKNOWN_LOCATION, CODE, T1, T2)
extern tree size_binop_loc (location_t, enum tree_code, tree, tree);
diff --git a/gcc/gimple-expr.h b/gcc/gimple-expr.h
index 6e969164a37..05d99894bf2 100644
--- a/gcc/gimple-expr.h
+++ b/gcc/gimple-expr.h
@@ -130,6 +130,7 @@ is_gimple_constant (const_tree t)
switch (TREE_CODE (t))
{
case INTEGER_CST:
+ case POLY_INT_CST:
case REAL_CST:
case FIXED_CST:
case COMPLEX_CST:
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c
index 0f90232cca3..45ddfc4d763 100644
--- a/gcc/gimple-ssa-strength-reduction.c
+++ b/gcc/gimple-ssa-strength-reduction.c
@@ -1258,7 +1258,7 @@ slsr_process_mul (gimple *gs, tree rhs1, tree rhs2, bool speed)
c2 = create_mul_ssa_cand (gs, rhs2, rhs1, speed);
c->next_interp = c2->cand_num;
}
- else
+ else if (TREE_CODE (rhs2) == INTEGER_CST)
{
/* Record an interpretation for the multiply-immediate. */
c = create_mul_imm_cand (gs, rhs1, rhs2, speed);
@@ -1499,7 +1499,7 @@ slsr_process_add (gimple *gs, tree rhs1, tree rhs2, bool speed)
add_cand_for_stmt (gs, c2);
}
}
- else
+ else if (TREE_CODE (rhs2) == INTEGER_CST)
{
/* Record an interpretation for the add-immediate. */
widest_int index = wi::to_widest (rhs2);
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index 3f2aa1e395f..9da462569f6 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -3124,7 +3124,7 @@ maybe_with_size_expr (tree *expr_p)
/* If the size isn't known or is a constant, we have nothing to do. */
size = TYPE_SIZE_UNIT (type);
- if (!size || TREE_CODE (size) == INTEGER_CST)
+ if (!size || poly_int_tree_p (size))
return;
/* Otherwise, make a WITH_SIZE_EXPR. */
diff --git a/gcc/lto-streamer-out.c b/gcc/lto-streamer-out.c
index 4efa9c9c2fc..ba29bd088e6 100644
--- a/gcc/lto-streamer-out.c
+++ b/gcc/lto-streamer-out.c
@@ -752,6 +752,10 @@ DFS::DFS_write_tree_body (struct output_block *ob,
DFS_follow_tree_edge (VECTOR_CST_ENCODED_ELT (expr, i));
}
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ DFS_follow_tree_edge (POLY_INT_CST_COEFF (expr, i));
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
{
DFS_follow_tree_edge (TREE_REALPART (expr));
@@ -1202,6 +1206,10 @@ hash_tree (struct streamer_tree_cache_d *cache, hash_map<tree, hashval_t> *map,
visit (VECTOR_CST_ENCODED_ELT (t, i));
}
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ visit (POLY_INT_CST_COEFF (t, i));
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
{
visit (TREE_REALPART (t));
diff --git a/gcc/print-tree.c b/gcc/print-tree.c
index 2139a7278ea..4b698dfa633 100644
--- a/gcc/print-tree.c
+++ b/gcc/print-tree.c
@@ -799,6 +799,18 @@ print_node (FILE *file, const char *prefix, tree node, int indent,
}
break;
+ case POLY_INT_CST:
+ {
+ char buf[10];
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ {
+ snprintf (buf, sizeof (buf), "elt%u: ", i);
+ print_node (file, buf, POLY_INT_CST_COEFF (node, i),
+ indent + 4);
+ }
+ }
+ break;
+
case IDENTIFIER_NODE:
lang_hooks.print_identifier (file, node, indent);
break;
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index 67062194d2c..0056d75153d 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -841,6 +841,28 @@ start_record_layout (tree t)
return rli;
}
+/* Fold sizetype value X to bitsizetype, given that X represents a type
+ size or offset. */
+
+static tree
+bits_from_bytes (tree x)
+{
+ if (POLY_INT_CST_P (x))
+ /* The runtime calculation isn't allowed to overflow sizetype;
+ increasing the runtime values must always increase the size
+ or offset of the object. This means that the object imposes
+ a maximum value on the runtime parameters, but we don't record
+ what that is. */
+ return build_poly_int_cst
+ (bitsizetype,
+ poly_wide_int::from (poly_int_cst_value (x),
+ TYPE_PRECISION (bitsizetype),
+ TYPE_SIGN (TREE_TYPE (x))));
+ x = fold_convert (bitsizetype, x);
+ gcc_checking_assert (x);
+ return x;
+}
+
/* Return the combined bit position for the byte offset OFFSET and the
bit position BITPOS.
@@ -854,8 +876,7 @@ tree
bit_from_pos (tree offset, tree bitpos)
{
return size_binop (PLUS_EXPR, bitpos,
- size_binop (MULT_EXPR,
- fold_convert (bitsizetype, offset),
+ size_binop (MULT_EXPR, bits_from_bytes (offset),
bitsize_unit_node));
}
@@ -2272,9 +2293,10 @@ layout_type (tree type)
TYPE_SIZE_UNIT (type) = int_const_binop (MULT_EXPR,
TYPE_SIZE_UNIT (innertype),
size_int (nunits));
- TYPE_SIZE (type) = int_const_binop (MULT_EXPR,
- TYPE_SIZE (innertype),
- bitsize_int (nunits));
+ TYPE_SIZE (type) = int_const_binop
+ (MULT_EXPR,
+ bits_from_bytes (TYPE_SIZE_UNIT (type)),
+ bitsize_int (BITS_PER_UNIT));
/* For vector types, we do not default to the mode's alignment.
Instead, query a target hook, defaulting to natural alignment.
@@ -2387,8 +2409,7 @@ layout_type (tree type)
length = size_zero_node;
TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
- fold_convert (bitsizetype,
- length));
+ bits_from_bytes (length));
/* If we know the size of the element, calculate the total size
directly, rather than do some division thing below. This
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index 4bf621895cd..48fbe52edd8 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -3022,7 +3022,7 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
error ("invalid first operand of MEM_REF");
return x;
}
- if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
+ if (!poly_int_tree_p (TREE_OPERAND (t, 1))
|| !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
{
error ("invalid offset operand of MEM_REF");
@@ -3447,7 +3447,7 @@ verify_types_in_gimple_reference (tree expr, bool require_lvalue)
debug_generic_stmt (expr);
return true;
}
- if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
+ if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
|| !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
{
error ("invalid offset operand in MEM_REF");
@@ -3464,7 +3464,7 @@ verify_types_in_gimple_reference (tree expr, bool require_lvalue)
return true;
}
if (!TMR_OFFSET (expr)
- || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
+ || !poly_int_tree_p (TMR_OFFSET (expr))
|| !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
{
error ("invalid offset operand in TARGET_MEM_REF");
diff --git a/gcc/tree-core.h b/gcc/tree-core.h
index e25500def95..b08d2151f34 100644
--- a/gcc/tree-core.h
+++ b/gcc/tree-core.h
@@ -1337,6 +1337,11 @@ struct GTY(()) tree_vector {
tree GTY ((length ("vector_cst_encoded_nelts ((tree) &%h)"))) elts[1];
};
+struct GTY(()) tree_poly_int_cst {
+ struct tree_typed typed;
+ tree coeffs[NUM_POLY_INT_COEFFS];
+};
+
struct GTY(()) tree_identifier {
struct tree_common common;
struct ht_identifier id;
@@ -1863,6 +1868,7 @@ union GTY ((ptr_alias (union lang_tree_node),
struct tree_typed GTY ((tag ("TS_TYPED"))) typed;
struct tree_common GTY ((tag ("TS_COMMON"))) common;
struct tree_int_cst GTY ((tag ("TS_INT_CST"))) int_cst;
+ struct tree_poly_int_cst GTY ((tag ("TS_POLY_INT_CST"))) poly_int_cst;
struct tree_real_cst GTY ((tag ("TS_REAL_CST"))) real_cst;
struct tree_fixed_cst GTY ((tag ("TS_FIXED_CST"))) fixed_cst;
struct tree_vector GTY ((tag ("TS_VECTOR"))) vector;
diff --git a/gcc/tree-data-ref.c b/gcc/tree-data-ref.c
index a8cbea9d634..50e11f5765a 100644
--- a/gcc/tree-data-ref.c
+++ b/gcc/tree-data-ref.c
@@ -1235,6 +1235,10 @@ data_ref_compare_tree (tree t1, tree t2)
break;
default:
+ if (POLY_INT_CST_P (t1))
+ return compare_sizes_for_sort (wi::to_poly_widest (t1),
+ wi::to_poly_widest (t2));
+
tclass = TREE_CODE_CLASS (code);
/* For decls, compare their UIDs. */
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index 681ff944487..ae0107e78e3 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -1734,6 +1734,18 @@ dump_generic_node (pretty_printer *pp, tree node, int spc, dump_flags_t flags,
pp_string (pp, "(OVF)");
break;
+ case POLY_INT_CST:
+ pp_string (pp, "POLY_INT_CST [");
+ dump_generic_node (pp, POLY_INT_CST_COEFF (node, 0), spc, flags, false);
+ for (unsigned int i = 1; i < NUM_POLY_INT_COEFFS; ++i)
+ {
+ pp_string (pp, ", ");
+ dump_generic_node (pp, POLY_INT_CST_COEFF (node, i),
+ spc, flags, false);
+ }
+ pp_string (pp, "]");
+ break;
+
case REAL_CST:
/* Code copied from print_node. */
{
diff --git a/gcc/tree-ssa-address.c b/gcc/tree-ssa-address.c
index 14c743414df..d1c15d6a703 100644
--- a/gcc/tree-ssa-address.c
+++ b/gcc/tree-ssa-address.c
@@ -203,7 +203,8 @@ addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
if (addr->offset && !integer_zerop (addr->offset))
{
- offset_int dc = offset_int::from (wi::to_wide (addr->offset), SIGNED);
+ poly_offset_int dc
+ = poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
off = immed_wide_int_const (dc, pointer_mode);
}
else
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 350f94dfc33..d5743c5935e 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -1127,6 +1127,8 @@ determine_base_object (tree expr)
gcc_unreachable ();
default:
+ if (POLY_INT_CST_P (expr))
+ return NULL_TREE;
return fold_convert (ptr_type_node, expr);
}
}
@@ -2165,6 +2167,12 @@ constant_multiple_of (tree top, tree bot, widest_int *mul)
return res == 0;
default:
+ if (POLY_INT_CST_P (top)
+ && POLY_INT_CST_P (bot)
+ && constant_multiple_p (wi::to_poly_widest (top),
+ wi::to_poly_widest (bot), mul))
+ return true;
+
return false;
}
}
@@ -2964,7 +2972,8 @@ get_loop_invariant_expr (struct ivopts_data *data, tree inv_expr)
{
STRIP_NOPS (inv_expr);
- if (TREE_CODE (inv_expr) == INTEGER_CST || TREE_CODE (inv_expr) == SSA_NAME)
+ if (poly_int_tree_p (inv_expr)
+ || TREE_CODE (inv_expr) == SSA_NAME)
return NULL;
/* Don't strip constant part away as we used to. */
@@ -3061,7 +3070,7 @@ add_candidate_1 (struct ivopts_data *data,
cand->incremented_at = incremented_at;
data->vcands.safe_push (cand);
- if (TREE_CODE (step) != INTEGER_CST)
+ if (!poly_int_tree_p (step))
{
find_inv_vars (data, &step, &cand->inv_vars);
@@ -3797,7 +3806,7 @@ get_computation_aff_1 (struct loop *loop, gimple *at, struct iv_use *use,
if (TYPE_PRECISION (utype) < TYPE_PRECISION (ctype))
{
if (cand->orig_iv != NULL && CONVERT_EXPR_P (cbase)
- && (CONVERT_EXPR_P (cstep) || TREE_CODE (cstep) == INTEGER_CST))
+ && (CONVERT_EXPR_P (cstep) || poly_int_tree_p (cstep)))
{
tree inner_base, inner_step, inner_type;
inner_base = TREE_OPERAND (cbase, 0);
@@ -4055,7 +4064,7 @@ force_expr_to_var_cost (tree expr, bool speed)
if (is_gimple_min_invariant (expr))
{
- if (TREE_CODE (expr) == INTEGER_CST)
+ if (poly_int_tree_p (expr))
return comp_cost (integer_cost [speed], 0);
if (TREE_CODE (expr) == ADDR_EXPR)
diff --git a/gcc/tree-ssa-loop.c b/gcc/tree-ssa-loop.c
index 1e8491757a6..a809552fa75 100644
--- a/gcc/tree-ssa-loop.c
+++ b/gcc/tree-ssa-loop.c
@@ -618,6 +618,7 @@ for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
case VECTOR_CST:
case COMPLEX_CST:
case INTEGER_CST:
+ case POLY_INT_CST:
case REAL_CST:
case FIXED_CST:
case CONSTRUCTOR:
diff --git a/gcc/tree-streamer-in.c b/gcc/tree-streamer-in.c
index 09201393cd3..ea999a7431c 100644
--- a/gcc/tree-streamer-in.c
+++ b/gcc/tree-streamer-in.c
@@ -658,6 +658,19 @@ lto_input_ts_vector_tree_pointers (struct lto_input_block *ib,
}
+/* Read all pointer fields in the TS_POLY_INT_CST structure of EXPR from
+ input block IB. DATA_IN contains tables and descriptors for the
+ file being read. */
+
+static void
+lto_input_ts_poly_tree_pointers (struct lto_input_block *ib,
+ struct data_in *data_in, tree expr)
+{
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ POLY_INT_CST_COEFF (expr, i) = stream_read_tree (ib, data_in);
+}
+
+
/* Read all pointer fields in the TS_COMPLEX structure of EXPR from input
block IB. DATA_IN contains tables and descriptors for the
file being read. */
@@ -1041,6 +1054,9 @@ streamer_read_tree_body (struct lto_input_block *ib, struct data_in *data_in,
if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
lto_input_ts_vector_tree_pointers (ib, data_in, expr);
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ lto_input_ts_poly_tree_pointers (ib, data_in, expr);
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
lto_input_ts_complex_tree_pointers (ib, data_in, expr);
diff --git a/gcc/tree-streamer-out.c b/gcc/tree-streamer-out.c
index 921cb874dcc..30dc78c6b68 100644
--- a/gcc/tree-streamer-out.c
+++ b/gcc/tree-streamer-out.c
@@ -541,6 +541,18 @@ write_ts_vector_tree_pointers (struct output_block *ob, tree expr, bool ref_p)
}
+/* Write all pointer fields in the TS_POLY_INT_CST structure of EXPR to
+ output block OB. If REF_P is true, write a reference to EXPR's pointer
+ fields. */
+
+static void
+write_ts_poly_tree_pointers (struct output_block *ob, tree expr, bool ref_p)
+{
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ stream_write_tree (ob, POLY_INT_CST_COEFF (expr, i), ref_p);
+}
+
+
/* Write all pointer fields in the TS_COMPLEX structure of EXPR to output
block OB. If REF_P is true, write a reference to EXPR's pointer
fields. */
@@ -882,6 +894,9 @@ streamer_write_tree_body (struct output_block *ob, tree expr, bool ref_p)
if (CODE_CONTAINS_STRUCT (code, TS_VECTOR))
write_ts_vector_tree_pointers (ob, expr, ref_p);
+ if (CODE_CONTAINS_STRUCT (code, TS_POLY_INT_CST))
+ write_ts_poly_tree_pointers (ob, expr, ref_p);
+
if (CODE_CONTAINS_STRUCT (code, TS_COMPLEX))
write_ts_complex_tree_pointers (ob, expr, ref_p);
diff --git a/gcc/tree-streamer.c b/gcc/tree-streamer.c
index 5fc4efdb1a0..fd34cffd742 100644
--- a/gcc/tree-streamer.c
+++ b/gcc/tree-streamer.c
@@ -55,6 +55,7 @@ streamer_check_handled_ts_structures (void)
handled_p[TS_TYPED] = true;
handled_p[TS_COMMON] = true;
handled_p[TS_INT_CST] = true;
+ handled_p[TS_POLY_INT_CST] = true;
handled_p[TS_REAL_CST] = true;
handled_p[TS_FIXED_CST] = true;
handled_p[TS_VECTOR] = true;
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 996d156a239..78ee673a9d3 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -2753,7 +2753,7 @@ dr_group_sort_cmp (const void *dra_, const void *drb_)
return cmp;
/* Then sort after DR_INIT. In case of identical DRs sort after stmt UID. */
- cmp = tree_int_cst_compare (DR_INIT (dra), DR_INIT (drb));
+ cmp = data_ref_compare_tree (DR_INIT (dra), DR_INIT (drb));
if (cmp == 0)
return gimple_uid (DR_STMT (dra)) < gimple_uid (DR_STMT (drb)) ? -1 : 1;
return cmp;
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 8fa8616c795..0adf9600330 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -731,7 +731,24 @@ compare_values_warnv (tree val1, tree val2, bool *strict_overflow_p)
if (TREE_OVERFLOW (val1) || TREE_OVERFLOW (val2))
return -2;
- return tree_int_cst_compare (val1, val2);
+ if (TREE_CODE (val1) == INTEGER_CST
+ && TREE_CODE (val2) == INTEGER_CST)
+ return tree_int_cst_compare (val1, val2);
+
+ if (poly_int_tree_p (val1) && poly_int_tree_p (val2))
+ {
+ if (known_eq (wi::to_poly_widest (val1),
+ wi::to_poly_widest (val2)))
+ return 0;
+ if (known_lt (wi::to_poly_widest (val1),
+ wi::to_poly_widest (val2)))
+ return -1;
+ if (known_gt (wi::to_poly_widest (val1),
+ wi::to_poly_widest (val2)))
+ return 1;
+ }
+
+ return -2;
}
else
{
diff --git a/gcc/tree.c b/gcc/tree.c
index 65e945afe06..93a05496692 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -204,6 +204,17 @@ struct int_cst_hasher : ggc_cache_ptr_hash<tree_node>
static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;
+/* Class and variable for making sure that there is a single POLY_INT_CST
+ for a given value. */
+struct poly_int_cst_hasher : ggc_cache_ptr_hash<tree_node>
+{
+ typedef std::pair<tree, const poly_wide_int *> compare_type;
+ static hashval_t hash (tree t);
+ static bool equal (tree x, const compare_type &y);
+};
+
+static GTY ((cache)) hash_table<poly_int_cst_hasher> *poly_int_cst_hash_table;
+
/* Hash table for optimization flags and target option flags. Use the same
hash table for both sets of options. Nodes for building the current
optimization and target option nodes. The assumption is most of the time
@@ -459,6 +470,7 @@ tree_node_structure_for_code (enum tree_code code)
/* tcc_constant cases. */
case VOID_CST: return TS_TYPED;
case INTEGER_CST: return TS_INT_CST;
+ case POLY_INT_CST: return TS_POLY_INT_CST;
case REAL_CST: return TS_REAL_CST;
case FIXED_CST: return TS_FIXED_CST;
case COMPLEX_CST: return TS_COMPLEX;
@@ -516,6 +528,7 @@ initialize_tree_contains_struct (void)
case TS_COMMON:
case TS_INT_CST:
+ case TS_POLY_INT_CST:
case TS_REAL_CST:
case TS_FIXED_CST:
case TS_VECTOR:
@@ -649,6 +662,8 @@ init_ttree (void)
int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);
+ poly_int_cst_hash_table = hash_table<poly_int_cst_hasher>::create_ggc (64);
+
int_cst_node = make_int_cst (1, 1);
cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);
@@ -835,6 +850,7 @@ tree_code_size (enum tree_code code)
{
case VOID_CST: return sizeof (tree_typed);
case INTEGER_CST: gcc_unreachable ();
+ case POLY_INT_CST: return sizeof (tree_poly_int_cst);
case REAL_CST: return sizeof (tree_real_cst);
case FIXED_CST: return sizeof (tree_fixed_cst);
case COMPLEX_CST: return sizeof (tree_complex);
@@ -1313,31 +1329,51 @@ build_new_int_cst (tree type, const wide_int &cst)
return nt;
}
-/* Create an INT_CST node with a LOW value sign extended to TYPE. */
+/* Return a new POLY_INT_CST with coefficients COEFFS and type TYPE. */
+
+static tree
+build_new_poly_int_cst (tree type, tree (&coeffs)[NUM_POLY_INT_COEFFS])
+{
+ size_t length = sizeof (struct tree_poly_int_cst);
+ record_node_allocation_statistics (POLY_INT_CST, length);
+
+ tree t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
+
+ TREE_SET_CODE (t, POLY_INT_CST);
+ TREE_CONSTANT (t) = 1;
+ TREE_TYPE (t) = type;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ POLY_INT_CST_COEFF (t, i) = coeffs[i];
+ return t;
+}
+
+/* Create a constant tree that contains CST sign-extended to TYPE. */
tree
-build_int_cst (tree type, HOST_WIDE_INT low)
+build_int_cst (tree type, poly_int64 cst)
{
/* Support legacy code. */
if (!type)
type = integer_type_node;
- return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
+ return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
+/* Create a constant tree that contains CST zero-extended to TYPE. */
+
tree
-build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
+build_int_cstu (tree type, poly_uint64 cst)
{
return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}
-/* Create an INT_CST node with a LOW value sign extended to TYPE. */
+/* Create a constant tree that contains CST sign-extended to TYPE. */
tree
-build_int_cst_type (tree type, HOST_WIDE_INT low)
+build_int_cst_type (tree type, poly_int64 cst)
{
gcc_assert (type);
- return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
+ return wide_int_to_tree (type, wi::shwi (cst, TYPE_PRECISION (type)));
}
/* Constructs tree in type TYPE from with value given by CST. Signedness
@@ -1365,7 +1401,7 @@ double_int_to_tree (tree type, double_int cst)
tree
-force_fit_type (tree type, const wide_int_ref &cst,
+force_fit_type (tree type, const poly_wide_int_ref &cst,
int overflowable, bool overflowed)
{
signop sign = TYPE_SIGN (type);
@@ -1377,8 +1413,21 @@ force_fit_type (tree type, const wide_int_ref &cst,
|| overflowable < 0
|| (overflowable > 0 && sign == SIGNED))
{
- wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign);
- tree t = build_new_int_cst (type, tmp);
+ poly_wide_int tmp = poly_wide_int::from (cst, TYPE_PRECISION (type),
+ sign);
+ tree t;
+ if (tmp.is_constant ())
+ t = build_new_int_cst (type, tmp.coeffs[0]);
+ else
+ {
+ tree coeffs[NUM_POLY_INT_COEFFS];
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ {
+ coeffs[i] = build_new_int_cst (type, tmp.coeffs[i]);
+ TREE_OVERFLOW (coeffs[i]) = 1;
+ }
+ t = build_new_poly_int_cst (type, coeffs);
+ }
TREE_OVERFLOW (t) = 1;
return t;
}
@@ -1435,8 +1484,8 @@ int_cst_hasher::equal (tree x, tree y)
the upper bits and ensures that hashing and value equality based
upon the underlying HOST_WIDE_INTs works without masking. */
-tree
-wide_int_to_tree (tree type, const wide_int_ref &pcst)
+static tree
+wide_int_to_tree_1 (tree type, const wide_int_ref &pcst)
{
tree t;
int ix = -1;
@@ -1583,6 +1632,66 @@ wide_int_to_tree (tree type, const wide_int_ref &pcst)
return t;
}
+hashval_t
+poly_int_cst_hasher::hash (tree t)
+{
+ inchash::hash hstate;
+
+ hstate.add_int (TYPE_UID (TREE_TYPE (t)));
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
+
+ return hstate.end ();
+}
+
+bool
+poly_int_cst_hasher::equal (tree x, const compare_type &y)
+{
+ if (TREE_TYPE (x) != y.first)
+ return false;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ if (wi::to_wide (POLY_INT_CST_COEFF (x, i)) != y.second->coeffs[i])
+ return false;
+ return true;
+}
+
+/* Build a POLY_INT_CST node with type TYPE and with the elements in VALUES.
+ The elements must also have type TYPE. */
+
+tree
+build_poly_int_cst (tree type, const poly_wide_int_ref &values)
+{
+ unsigned int prec = TYPE_PRECISION (type);
+ gcc_assert (prec <= values.coeffs[0].get_precision ());
+ poly_wide_int c = poly_wide_int::from (values, prec, SIGNED);
+
+ inchash::hash h;
+ h.add_int (TYPE_UID (type));
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ h.add_wide_int (c.coeffs[i]);
+ poly_int_cst_hasher::compare_type comp (type, &c);
+ tree *slot = poly_int_cst_hash_table->find_slot_with_hash (comp, h.end (),
+ INSERT);
+ if (*slot == NULL_TREE)
+ {
+ tree coeffs[NUM_POLY_INT_COEFFS];
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ coeffs[i] = wide_int_to_tree_1 (type, c.coeffs[i]);
+ *slot = build_new_poly_int_cst (type, coeffs);
+ }
+ return *slot;
+}
+
+/* Create a constant tree with value VALUE in type TYPE. */
+
+tree
+wide_int_to_tree (tree type, const poly_wide_int_ref &value)
+{
+ if (value.is_constant ())
+ return wide_int_to_tree_1 (type, value.coeffs[0]);
+ return build_poly_int_cst (type, value);
+}
+
void
cache_integer_cst (tree t)
{
@@ -2716,6 +2825,55 @@ really_constant_p (const_tree exp)
exp = TREE_OPERAND (exp, 0);
return TREE_CONSTANT (exp);
}
+
+/* Return true if T holds a polynomial pointer difference, storing it in
+ *VALUE if so. A true return means that T's precision is no greater
+ than 64 bits, which is the largest address space we support, so *VALUE
+ never loses precision. However, the signedness of the result does
+ not necessarily match the signedness of T: sometimes an unsigned type
+ like sizetype is used to encode a value that is actually negative. */
+
+bool
+ptrdiff_tree_p (const_tree t, poly_int64_pod *value)
+{
+ if (!t)
+ return false;
+ if (TREE_CODE (t) == INTEGER_CST)
+ {
+ if (!cst_and_fits_in_hwi (t))
+ return false;
+ *value = int_cst_value (t);
+ return true;
+ }
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ if (!cst_and_fits_in_hwi (POLY_INT_CST_COEFF (t, i)))
+ return false;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ value->coeffs[i] = int_cst_value (POLY_INT_CST_COEFF (t, i));
+ return true;
+ }
+ return false;
+}
+
+poly_int64
+tree_to_poly_int64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_int64_p (t));
+ if (POLY_INT_CST_P (t))
+ return poly_int_cst_value (t).force_shwi ();
+ return TREE_INT_CST_LOW (t);
+}
+
+poly_uint64
+tree_to_poly_uint64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_uint64_p (t));
+ if (POLY_INT_CST_P (t))
+ return poly_int_cst_value (t).force_uhwi ();
+ return TREE_INT_CST_LOW (t);
+}
/* Return first list element whose TREE_VALUE is ELEM.
Return 0 if ELEM is not in LIST. */
@@ -4707,7 +4865,7 @@ mem_ref_offset (const_tree t)
offsetted by OFFSET units. */
tree
-build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
+build_invariant_address (tree type, tree base, poly_int64 offset)
{
tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
build_fold_addr_expr (base),
@@ -6603,6 +6761,25 @@ tree_fits_shwi_p (const_tree t)
&& wi::fits_shwi_p (wi::to_widest (t)));
}
+/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
+ value (extended according to TYPE_UNSIGNED) fits in a poly_int64. */
+
+bool
+tree_fits_poly_int64_p (const_tree t)
+{
+ if (t == NULL_TREE)
+ return false;
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
+ if (!wi::fits_shwi_p (wi::to_wide (POLY_INT_CST_COEFF (t, i))))
+ return false;
+ return true;
+ }
+ return (TREE_CODE (t) == INTEGER_CST
+ && wi::fits_shwi_p (wi::to_widest (t)));
+}
+
/* Return true if T is an INTEGER_CST whose numerical value (extended
according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. */
@@ -6614,6 +6791,25 @@ tree_fits_uhwi_p (const_tree t)
&& wi::fits_uhwi_p (wi::to_widest (t)));
}
+/* Return true if T is an INTEGER_CST or POLY_INT_CST whose numerical
+ value (extended according to TYPE_UNSIGNED) fits in a poly_uint64. */
+
+bool
+tree_fits_poly_uint64_p (const_tree t)
+{
+ if (t == NULL_TREE)
+ return false;
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; i++)
+ if (!wi::fits_uhwi_p (wi::to_widest (POLY_INT_CST_COEFF (t, i))))
+ return false;
+ return true;
+ }
+ return (TREE_CODE (t) == INTEGER_CST
+ && wi::fits_uhwi_p (wi::to_widest (t)));
+}
+
/* T is an INTEGER_CST whose numerical value (extended according to
TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT. Return that
HOST_WIDE_INT. */
@@ -6822,6 +7018,12 @@ simple_cst_equal (const_tree t1, const_tree t2)
return 0;
default:
+ if (POLY_INT_CST_P (t1))
+ /* A false return means maybe_ne rather than known_ne. */
+ return known_eq (poly_widest_int::from (poly_int_cst_value (t1),
+ TYPE_SIGN (TREE_TYPE (t1))),
+ poly_widest_int::from (poly_int_cst_value (t2),
+ TYPE_SIGN (TREE_TYPE (t2))));
break;
}
@@ -6881,6 +7083,15 @@ compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
bool
valid_constant_size_p (const_tree size)
{
+ if (POLY_INT_CST_P (size))
+ {
+ if (TREE_OVERFLOW (size))
+ return false;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ if (!valid_constant_size_p (POLY_INT_CST_COEFF (size, i)))
+ return false;
+ return true;
+ }
if (! tree_fits_uhwi_p (size)
|| TREE_OVERFLOW (size)
|| tree_int_cst_sign_bit (size) != 0)
@@ -7176,6 +7387,12 @@ add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
}
/* FALL THROUGH */
default:
+ if (POLY_INT_CST_P (t))
+ {
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
+ return;
+ }
tclass = TREE_CODE_CLASS (code);
if (tclass == tcc_declaration)
@@ -7715,7 +7932,7 @@ build_nonshared_array_type (tree elt_type, tree index_type)
sizetype. */
tree
-build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts)
+build_array_type_nelts (tree elt_type, poly_uint64 nelts)
{
return build_array_type (elt_type, build_index_type (size_int (nelts - 1)));
}
@@ -12487,8 +12704,8 @@ drop_tree_overflow (tree t)
gcc_checking_assert (TREE_OVERFLOW (t));
/* For tree codes with a sharing machinery re-build the result. */
- if (TREE_CODE (t) == INTEGER_CST)
- return wide_int_to_tree (TREE_TYPE (t), wi::to_wide (t));
+ if (poly_int_tree_p (t))
+ return wide_int_to_tree (TREE_TYPE (t), wi::to_poly_wide (t));
/* For VECTOR_CST, remove the overflow bits from the encoded elements
and canonicalize the result. */
diff --git a/gcc/tree.def b/gcc/tree.def
index 48a53b7aa4b..a4a7f1a995c 100644
--- a/gcc/tree.def
+++ b/gcc/tree.def
@@ -291,6 +291,9 @@ DEFTREECODE (VOID_CST, "void_cst", tcc_constant, 0)
some circumstances. */
DEFTREECODE (INTEGER_CST, "integer_cst", tcc_constant, 0)
+/* Contents are given by POLY_INT_CST_COEFF. */
+DEFTREECODE (POLY_INT_CST, "poly_int_cst", tcc_constant, 0)
+
/* Contents are in TREE_REAL_CST field. */
DEFTREECODE (REAL_CST, "real_cst", tcc_constant, 0)
diff --git a/gcc/tree.h b/gcc/tree.h
index ae40e903e4c..a7b8c2f400c 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -991,6 +991,15 @@ extern void omp_clause_range_check_failed (const_tree, const char *, int,
#define TREE_INT_CST_LOW(NODE) \
((unsigned HOST_WIDE_INT) TREE_INT_CST_ELT (NODE, 0))
+/* Return true if NODE is a POLY_INT_CST. This is only ever true on
+ targets with variable-sized modes. */
+#define POLY_INT_CST_P(NODE) \
+ (NUM_POLY_INT_COEFFS > 1 && TREE_CODE (NODE) == POLY_INT_CST)
+
+/* In a POLY_INT_CST node. */
+#define POLY_INT_CST_COEFF(NODE, I) \
+ (POLY_INT_CST_CHECK (NODE)->poly_int_cst.coeffs[I])
+
#define TREE_REAL_CST_PTR(NODE) (REAL_CST_CHECK (NODE)->real_cst.real_cst_ptr)
#define TREE_REAL_CST(NODE) (*TREE_REAL_CST_PTR (NODE))
@@ -4040,15 +4049,15 @@ build5_loc (location_t loc, enum tree_code code, tree type, tree arg0,
extern tree double_int_to_tree (tree, double_int);
-extern tree wide_int_to_tree (tree type, const wide_int_ref &cst);
-extern tree force_fit_type (tree, const wide_int_ref &, int, bool);
+extern tree wide_int_to_tree (tree type, const poly_wide_int_ref &cst);
+extern tree force_fit_type (tree, const poly_wide_int_ref &, int, bool);
/* Create an INT_CST node with a CST value zero extended. */
/* static inline */
-extern tree build_int_cst (tree, HOST_WIDE_INT);
-extern tree build_int_cstu (tree type, unsigned HOST_WIDE_INT cst);
-extern tree build_int_cst_type (tree, HOST_WIDE_INT);
+extern tree build_int_cst (tree, poly_int64);
+extern tree build_int_cstu (tree type, poly_uint64);
+extern tree build_int_cst_type (tree, poly_int64);
extern tree make_vector (unsigned, unsigned CXX_MEM_STAT_INFO);
extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
extern tree build_vector_from_val (tree, tree);
@@ -4068,6 +4077,7 @@ extern tree build_minus_one_cst (tree);
extern tree build_all_ones_cst (tree);
extern tree build_zero_cst (tree);
extern tree build_string (int, const char *);
+extern tree build_poly_int_cst (tree, const poly_wide_int_ref &);
extern tree build_tree_list (tree, tree CXX_MEM_STAT_INFO);
extern tree build_tree_list_vec (const vec<tree, va_gc> * CXX_MEM_STAT_INFO);
extern tree build_decl (location_t, enum tree_code,
@@ -4117,7 +4127,7 @@ extern tree build_opaque_vector_type (tree innertype, int nunits);
extern tree build_index_type (tree);
extern tree build_array_type (tree, tree, bool = false);
extern tree build_nonshared_array_type (tree, tree);
-extern tree build_array_type_nelts (tree, unsigned HOST_WIDE_INT);
+extern tree build_array_type_nelts (tree, poly_uint64);
extern tree build_function_type (tree, tree);
extern tree build_function_type_list (tree, ...);
extern tree build_varargs_function_type_list (tree, ...);
@@ -4141,12 +4151,14 @@ extern tree chain_index (int, tree);
extern int tree_int_cst_equal (const_tree, const_tree);
-extern bool tree_fits_shwi_p (const_tree)
- ATTRIBUTE_PURE;
-extern bool tree_fits_uhwi_p (const_tree)
- ATTRIBUTE_PURE;
+extern bool tree_fits_shwi_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_poly_int64_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_uhwi_p (const_tree) ATTRIBUTE_PURE;
+extern bool tree_fits_poly_uint64_p (const_tree) ATTRIBUTE_PURE;
extern HOST_WIDE_INT tree_to_shwi (const_tree);
+extern poly_int64 tree_to_poly_int64 (const_tree);
extern unsigned HOST_WIDE_INT tree_to_uhwi (const_tree);
+extern poly_uint64 tree_to_poly_uint64 (const_tree);
#if !defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 4003)
extern inline __attribute__ ((__gnu_inline__)) HOST_WIDE_INT
tree_to_shwi (const_tree t)
@@ -4161,6 +4173,21 @@ tree_to_uhwi (const_tree t)
gcc_assert (tree_fits_uhwi_p (t));
return TREE_INT_CST_LOW (t);
}
+#if NUM_POLY_INT_COEFFS == 1
+extern inline __attribute__ ((__gnu_inline__)) poly_int64
+tree_to_poly_int64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_int64_p (t));
+ return TREE_INT_CST_LOW (t);
+}
+
+extern inline __attribute__ ((__gnu_inline__)) poly_uint64
+tree_to_poly_uint64 (const_tree t)
+{
+ gcc_assert (tree_fits_poly_uint64_p (t));
+ return TREE_INT_CST_LOW (t);
+}
+#endif
#endif
extern int tree_int_cst_sgn (const_tree);
extern int tree_int_cst_sign_bit (const_tree);
@@ -4169,6 +4196,33 @@ extern tree strip_array_types (tree);
extern tree excess_precision_type (tree);
extern bool valid_constant_size_p (const_tree);
+/* Return true if T holds a value that can be represented as a poly_int64
+ without loss of precision. Store the value in *VALUE if so. */
+
+inline bool
+poly_int_tree_p (const_tree t, poly_int64_pod *value)
+{
+ if (tree_fits_poly_int64_p (t))
+ {
+ *value = tree_to_poly_int64 (t);
+ return true;
+ }
+ return false;
+}
+
+/* Return true if T holds a value that can be represented as a poly_uint64
+ without loss of precision. Store the value in *VALUE if so. */
+
+inline bool
+poly_int_tree_p (const_tree t, poly_uint64_pod *value)
+{
+ if (tree_fits_poly_uint64_p (t))
+ {
+ *value = tree_to_poly_uint64 (t);
+ return true;
+ }
+ return false;
+}
/* From expmed.c. Since rtl.h is included after tree.h, we can't
put the prototype here. Rtl.h does declare the prototype if
@@ -4722,8 +4776,17 @@ complete_or_array_type_p (const_tree type)
&& COMPLETE_TYPE_P (TREE_TYPE (type)));
}
+/* Return true if the value of T could be represented as a poly_widest_int. */
+
+inline bool
+poly_int_tree_p (const_tree t)
+{
+ return (TREE_CODE (t) == INTEGER_CST || POLY_INT_CST_P (t));
+}
+
extern tree strip_float_extensions (tree);
extern int really_constant_p (const_tree);
+extern bool ptrdiff_tree_p (const_tree, poly_int64_pod *);
extern bool decl_address_invariant_p (const_tree);
extern bool decl_address_ip_invariant_p (const_tree);
extern bool int_fits_type_p (const_tree, const_tree);
@@ -5159,6 +5222,29 @@ extern bool anon_aggrname_p (const_tree);
/* The tree and const_tree overload templates. */
namespace wi
{
+ class unextended_tree
+ {
+ private:
+ const_tree m_t;
+
+ public:
+ unextended_tree () {}
+ unextended_tree (const_tree t) : m_t (t) {}
+
+ unsigned int get_precision () const;
+ const HOST_WIDE_INT *get_val () const;
+ unsigned int get_len () const;
+ const_tree get_tree () const { return m_t; }
+ };
+
+ template <>
+ struct int_traits <unextended_tree>
+ {
+ static const enum precision_type precision_type = VAR_PRECISION;
+ static const bool host_dependent_precision = false;
+ static const bool is_sign_extended = false;
+ };
+
template <int N>
class extended_tree
{
@@ -5166,11 +5252,13 @@ namespace wi
const_tree m_t;
public:
+ extended_tree () {}
extended_tree (const_tree);
unsigned int get_precision () const;
const HOST_WIDE_INT *get_val () const;
unsigned int get_len () const;
+ const_tree get_tree () const { return m_t; }
};
template <int N>
@@ -5182,10 +5270,11 @@ namespace wi
static const unsigned int precision = N;
};
- typedef const generic_wide_int <extended_tree <WIDE_INT_MAX_PRECISION> >
- tree_to_widest_ref;
- typedef const generic_wide_int <extended_tree <ADDR_MAX_PRECISION> >
- tree_to_offset_ref;
+ typedef extended_tree <WIDE_INT_MAX_PRECISION> widest_extended_tree;
+ typedef extended_tree <ADDR_MAX_PRECISION> offset_extended_tree;
+
+ typedef const generic_wide_int <widest_extended_tree> tree_to_widest_ref;
+ typedef const generic_wide_int <offset_extended_tree> tree_to_offset_ref;
typedef const generic_wide_int<wide_int_ref_storage<false, false> >
tree_to_wide_ref;
@@ -5193,6 +5282,34 @@ namespace wi
tree_to_offset_ref to_offset (const_tree);
tree_to_wide_ref to_wide (const_tree);
wide_int to_wide (const_tree, unsigned int);
+
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <widest_extended_tree> >
+ tree_to_poly_widest_ref;
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <offset_extended_tree> >
+ tree_to_poly_offset_ref;
+ typedef const poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <unextended_tree> >
+ tree_to_poly_wide_ref;
+
+ tree_to_poly_widest_ref to_poly_widest (const_tree);
+ tree_to_poly_offset_ref to_poly_offset (const_tree);
+ tree_to_poly_wide_ref to_poly_wide (const_tree);
+
+ template <int N>
+ struct ints_for <generic_wide_int <extended_tree <N> >, CONST_PRECISION>
+ {
+ typedef generic_wide_int <extended_tree <N> > extended;
+ static extended zero (const extended &);
+ };
+
+ template <>
+ struct ints_for <generic_wide_int <unextended_tree>, VAR_PRECISION>
+ {
+ typedef generic_wide_int <unextended_tree> unextended;
+ static unextended zero (const unextended &);
+ };
}
/* Refer to INTEGER_CST T as though it were a widest_int.
@@ -5337,6 +5454,95 @@ wi::extended_tree <N>::get_len () const
gcc_unreachable ();
}
+inline unsigned int
+wi::unextended_tree::get_precision () const
+{
+ return TYPE_PRECISION (TREE_TYPE (m_t));
+}
+
+inline const HOST_WIDE_INT *
+wi::unextended_tree::get_val () const
+{
+ return &TREE_INT_CST_ELT (m_t, 0);
+}
+
+inline unsigned int
+wi::unextended_tree::get_len () const
+{
+ return TREE_INT_CST_NUNITS (m_t);
+}
+
+/* Return the value of a POLY_INT_CST in its native precision. */
+
+inline wi::tree_to_poly_wide_ref
+poly_int_cst_value (const_tree x)
+{
+ poly_int <NUM_POLY_INT_COEFFS, generic_wide_int <wi::unextended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (x, i);
+ return res;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_widest_int. See wi::to_widest for more details. */
+
+inline wi::tree_to_poly_widest_ref
+wi::to_poly_widest (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ {
+ poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <widest_extended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (t, i);
+ return res;
+ }
+ return t;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_offset_int. See wi::to_offset for more details. */
+
+inline wi::tree_to_poly_offset_ref
+wi::to_poly_offset (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ {
+ poly_int <NUM_POLY_INT_COEFFS,
+ generic_wide_int <offset_extended_tree> > res;
+ for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
+ res.coeffs[i] = POLY_INT_CST_COEFF (t, i);
+ return res;
+ }
+ return t;
+}
+
+/* Access INTEGER_CST or POLY_INT_CST tree T as if it were a
+ poly_wide_int. See wi::to_wide for more details. */
+
+inline wi::tree_to_poly_wide_ref
+wi::to_poly_wide (const_tree t)
+{
+ if (POLY_INT_CST_P (t))
+ return poly_int_cst_value (t);
+ return t;
+}
+
+template <int N>
+inline generic_wide_int <wi::extended_tree <N> >
+wi::ints_for <generic_wide_int <wi::extended_tree <N> >,
+ wi::CONST_PRECISION>::zero (const extended &x)
+{
+ return build_zero_cst (TREE_TYPE (x.get_tree ()));
+}
+
+inline generic_wide_int <wi::unextended_tree>
+wi::ints_for <generic_wide_int <wi::unextended_tree>,
+ wi::VAR_PRECISION>::zero (const unextended &x)
+{
+ return build_zero_cst (TREE_TYPE (x.get_tree ()));
+}
+
namespace wi
{
template <typename T>
@@ -5354,7 +5560,9 @@ template <typename T>
bool
wi::fits_to_boolean_p (const T &x, const_tree type)
{
- return eq_p (x, 0) || eq_p (x, TYPE_UNSIGNED (type) ? 1 : -1);
+ typedef typename poly_int_traits<T>::int_type int_type;
+ return (known_eq (x, int_type (0))
+ || known_eq (x, int_type (TYPE_UNSIGNED (type) ? 1 : -1)));
}
template <typename T>
@@ -5367,9 +5575,9 @@ wi::fits_to_tree_p (const T &x, const_tree type)
return fits_to_boolean_p (x, type);
if (TYPE_UNSIGNED (type))
- return eq_p (x, zext (x, TYPE_PRECISION (type)));
+ return known_eq (x, zext (x, TYPE_PRECISION (type)));
else
- return eq_p (x, sext (x, TYPE_PRECISION (type)));
+ return known_eq (x, sext (x, TYPE_PRECISION (type)));
}
/* Produce the smallest number that is represented in TYPE. The precision
diff --git a/gcc/treestruct.def b/gcc/treestruct.def
index 63c93360c0b..491d2ddb8c2 100644
--- a/gcc/treestruct.def
+++ b/gcc/treestruct.def
@@ -34,6 +34,7 @@ DEFTREESTRUCT(TS_BASE, "base")
DEFTREESTRUCT(TS_TYPED, "typed")
DEFTREESTRUCT(TS_COMMON, "common")
DEFTREESTRUCT(TS_INT_CST, "integer cst")
+DEFTREESTRUCT(TS_POLY_INT_CST, "poly_int_cst")
DEFTREESTRUCT(TS_REAL_CST, "real cst")
DEFTREESTRUCT(TS_FIXED_CST, "fixed cst")
DEFTREESTRUCT(TS_VECTOR, "vector")