author     ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>   2006-07-03 20:14:18 +0000
committer  ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>   2006-07-03 20:14:18 +0000
commit     2100c228d42604b982361a4309871b6e1f2c56d7 (patch)
tree       730ad49fb04e92cd040cc6234ab11f52bd71851f /gcc/tree-sra.c
parent     9c65be62147b4e0c07b8cf52a382f01d95613e6e (diff)
* tree.c (range_in_array_bounds_p): New predicate.
* tree.h (range_in_array_bounds_p): Declare it.
* tree-eh.c (tree_could_trap_p) <ARRAY_RANGE_REF>: Use it to
return a less conservative answer.
* tree-sra.c (struct sra_elt): Add new pointer field 'groups'
and flag 'is_group'.
(IS_ELEMENT_FOR_GROUP): New macro.
(FOR_EACH_ACTUAL_CHILD): Likewise.
(next_child_for_group): New helper function.
(can_completely_scalarize_p): Take into account groups.
(sra_hash_tree): Handle RANGE_EXPR.
(sra_elt_eq): Likewise.
(lookup_element): Be prepared for handling groups.
(is_valid_const_index): Delete.
(maybe_lookup_element_for_expr) <ARRAY_REF>: Use in_array_bounds_p
instead of is_valid_const_index.
<ARRAY_RANGE_REF>: New case.
(sra_walk_expr) <ARRAY_REF>: Use in_array_bounds_p instead of
is_valid_const_index.
<ARRAY_RANGE_REF>: Do not unconditionally punt.
(scan_dump): Dump info for groups too.
(decide_instantiation_1): Likewise.
(decide_block_copy): Assert that the element is not a group.
Propagate decision to groups.
(generate_one_element_ref): Handle RANGE_EXPR.
	(mark_no_warning): Iterate over actual children.
(generate_copy_inout): Likewise.
(generate_element_copy): Likewise.
(generate_element_zero): Likewise.
(generate_element_init_1): Likewise.
(dump_sra_elt_name): Handle RANGE_EXPR.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@115160 138bc75d-0d04-0410-961f-82ee72b054a4
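The body of the new range_in_array_bounds_p predicate lives in tree.c and is not part of the tree-sra.c diff below.  As a rough illustration only, here is a minimal standalone C sketch of the question it answers for an ARRAY_RANGE_REF; the names and plain-integer bounds are made up for the example, whereas the real predicate compares INTEGER_CST bounds taken from the TYPE_DOMAIN of the reference and of the array being referenced.

/* Illustrative sketch, not GCC code: is the constant subrange
   [first, last] fully contained in the array domain [min, max]?  */

#include <stdbool.h>
#include <stdio.h>

struct array_domain
{
  long min;   /* lowest valid index  */
  long max;   /* highest valid index */
};

static bool
sketch_range_in_bounds_p (struct array_domain dom, long first, long last)
{
  return first >= dom.min && last <= dom.max;
}

int
main (void)
{
  struct array_domain dom = { 0, 9 };   /* an array indexed 0 .. 9 */

  printf ("%d\n", sketch_range_in_bounds_p (dom, 2, 5));    /* 1: in bounds     */
  printf ("%d\n", sketch_range_in_bounds_p (dom, 7, 12));   /* 0: runs past end */
  return 0;
}

When such a check succeeds, tree_could_trap_p no longer has to assume the range access may trap, which is the "less conservative answer" the tree-eh.c entry above refers to.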
Diffstat (limited to 'gcc/tree-sra.c')
-rw-r--r--   gcc/tree-sra.c   212
1 file changed, 153 insertions(+), 59 deletions(-)
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index 1faff832799..80c4ca744e0 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -89,20 +89,22 @@ static bitmap needs_copy_in;
 static bitmap sra_type_decomp_cache;
 static bitmap sra_type_inst_cache;
 
-/* One of these structures is created for each candidate aggregate
-   and each (accessed) member of such an aggregate.  */
+/* One of these structures is created for each candidate aggregate and
+   each (accessed) member or group of members of such an aggregate.  */
 struct sra_elt
 {
   /* A tree of the elements.  Used when we want to traverse everything.  */
   struct sra_elt *parent;
+  struct sra_elt *groups;
   struct sra_elt *children;
   struct sra_elt *sibling;
 
   /* If this element is a root, then this is the VAR_DECL.  If this is a
      sub-element, this is some token used to identify the reference.  In the
      case of COMPONENT_REF, this is the FIELD_DECL.  In the case
-     of an ARRAY_REF, this is the (constant) index.  In the case of a
-     complex number, this is a zero or one.  */
+     of an ARRAY_REF, this is the (constant) index.  In the case of an
+     ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR.  In the case
+     of a complex number, this is a zero or one.  */
   tree element;
 
   /* The type of the element.  */
@@ -122,6 +124,9 @@ struct sra_elt
   /* True if TYPE is scalar.  */
   bool is_scalar;
 
+  /* True if this element is a group of members of its parent.  */
+  bool is_group;
+
   /* True if we saw something about this element that prevents
      scalarization, such as non-constant indexing.  */
   bool cannot_scalarize;
@@ -137,6 +142,48 @@ struct sra_elt
   bool visited;
 };
 
+#define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
+
+#define FOR_EACH_ACTUAL_CHILD(CHILD, ELT)			\
+  for ((CHILD) = (ELT)->is_group				\
+		 ? next_child_for_group (NULL, (ELT))		\
+		 : (ELT)->children;				\
+       (CHILD);							\
+       (CHILD) = (ELT)->is_group				\
+		 ? next_child_for_group ((CHILD), (ELT))	\
+		 : (CHILD)->sibling)
+
+/* Helper function for above macro.  Return next child in group.  */
+static struct sra_elt *
+next_child_for_group (struct sra_elt *child, struct sra_elt *group)
+{
+  gcc_assert (group->is_group);
+
+  /* Find the next child in the parent.  */
+  if (child)
+    child = child->sibling;
+  else
+    child = group->parent->children;
+
+  /* Skip siblings that do not belong to the group.  */
+  while (child)
+    {
+      tree g_elt = group->element;
+      if (TREE_CODE (g_elt) == RANGE_EXPR)
+	{
+	  if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
+	      && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
+	    break;
+	}
+      else
+	gcc_unreachable ();
+
+      child = child->sibling;
+    }
+
+  return child;
+}
+
 /* Random access to the child of a parent is performed by hashing.
    This prevents quadratic behavior, and allows SRA to function
    reasonably on larger records.  */
@@ -352,7 +399,11 @@ can_completely_scalarize_p (struct sra_elt *elt)
   if (elt->cannot_scalarize)
     return false;
 
-  for (c = elt->children; c ; c = c->sibling)
+  for (c = elt->children; c; c = c->sibling)
+    if (!can_completely_scalarize_p (c))
+      return false;
+
+  for (c = elt->groups; c; c = c->sibling)
     if (!can_completely_scalarize_p (c))
       return false;
 
@@ -380,6 +431,11 @@ sra_hash_tree (tree t)
       h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
       break;
 
+    case RANGE_EXPR:
+      h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
+      h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
+      break;
+
     case FIELD_DECL:
       /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
@@ -447,6 +503,11 @@ sra_elt_eq (const void *x, const void *y)
       /* Integers are not pointer unique, so compare their values.  */
       return tree_int_cst_equal (ae, be);
 
+    case RANGE_EXPR:
+      return
+	tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
+	&& tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));
+
     case FIELD_DECL:
       /* Fields are unique within a record, but not between
	 compatible records.  */
@@ -470,7 +531,10 @@ lookup_element (struct sra_elt *parent, tree child, tree type,
   struct sra_elt **slot;
   struct sra_elt *elt;
 
-  dummy.parent = parent;
+  if (parent)
+    dummy.parent = parent->is_group ? parent->parent : parent;
+  else
+    dummy.parent = NULL;
   dummy.element = child;
 
   slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
@@ -490,8 +554,17 @@ lookup_element (struct sra_elt *parent, tree child, tree type,
 
   if (parent)
     {
-      elt->sibling = parent->children;
-      parent->children = elt;
+      if (IS_ELEMENT_FOR_GROUP (elt->element))
+	{
+	  elt->is_group = true;
+	  elt->sibling = parent->groups;
+	  parent->groups = elt;
+	}
+      else
+	{
+	  elt->sibling = parent->children;
+	  parent->children = elt;
+	}
     }
 
   /* If this is a parameter, then if we want to scalarize, we have
@@ -506,42 +579,6 @@ lookup_element (struct sra_elt *parent, tree child, tree type,
   return elt;
 }
 
-/* Return true if the ARRAY_REF in EXPR is a constant, in bounds access.  */
-
-static bool
-is_valid_const_index (tree expr)
-{
-  tree dom, t, index = TREE_OPERAND (expr, 1);
-
-  if (TREE_CODE (index) != INTEGER_CST)
-    return false;
-
-  /* Watch out for stupid user tricks, indexing outside the array.
-
-     Careful, we're not called only on scalarizable types, so do not
-     assume constant array bounds.  We needn't do anything with such
-     cases, since they'll be referring to objects that we should have
-     already rejected for scalarization, so returning false is fine.  */
-
-  dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
-  if (dom == NULL)
-    return false;
-
-  t = TYPE_MIN_VALUE (dom);
-  if (!t || TREE_CODE (t) != INTEGER_CST)
-    return false;
-  if (tree_int_cst_lt (index, t))
-    return false;
-
-  t = TYPE_MAX_VALUE (dom);
-  if (!t || TREE_CODE (t) != INTEGER_CST)
-    return false;
-  if (tree_int_cst_lt (t, index))
-    return false;
-
-  return true;
-}
-
 /* Create or return the SRA_ELT structure for EXPR if the expression
    refers to a scalarizable variable.  */
 
@@ -561,13 +598,25 @@ maybe_lookup_element_for_expr (tree expr)
       return NULL;
 
     case ARRAY_REF:
-      /* We can't scalarize variable array indicies.  */
-      if (is_valid_const_index (expr))
+      /* We can't scalarize variable array indices.  */
+      if (in_array_bounds_p (expr))
        child = TREE_OPERAND (expr, 1);
       else
	return NULL;
       break;
 
+    case ARRAY_RANGE_REF:
+      /* We can't scalarize variable array indices.  */
+      if (range_in_array_bounds_p (expr))
+	{
+	  tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
+	  child = build2 (RANGE_EXPR, integer_type_node,
+			  TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
+	}
+      else
+	return NULL;
+      break;
+
     case COMPONENT_REF:
       /* Don't look through unions.  */
       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
@@ -697,7 +746,7 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	 the effort.  */
       /* ??? Hack.  Figure out how to push this into the scan routines
	 without duplicating too much code.  */
-      if (!is_valid_const_index (inner))
+      if (!in_array_bounds_p (inner))
	{
	  disable_scalarization = true;
	  goto use_all;
@@ -709,6 +758,18 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
       inner = TREE_OPERAND (inner, 0);
       break;
 
+    case ARRAY_RANGE_REF:
+      if (!range_in_array_bounds_p (inner))
+	{
+	  disable_scalarization = true;
+	  goto use_all;
+	}
+      /* ??? See above non-constant bounds and stride.  */
+      if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
+	goto use_all;
+      inner = TREE_OPERAND (inner, 0);
+      break;
+
     case COMPONENT_REF:
       /* A reference to a union member constitutes a reference to the
	 entire union.  */
@@ -731,11 +792,6 @@ sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	 complete outer element, to which walk_tree will bring us next.  */
       goto use_all;
 
-    case ARRAY_RANGE_REF:
-      /* Similarly, a subrange reference is used to modify indexing.  Which
-	 means that the canonical element names that we have won't work.  */
-      goto use_all;
-
     case VIEW_CONVERT_EXPR:
     case NOP_EXPR:
       /* Similarly, a view/nop explicitly wants to look at an object in a
@@ -1016,6 +1072,9 @@ scan_dump (struct sra_elt *elt)
 
   for (c = elt->children; c ; c = c->sibling)
     scan_dump (c);
+
+  for (c = elt->groups; c ; c = c->sibling)
+    scan_dump (c);
 }
 
 /* Entry point to phase 2.  Scan the entire function, building up
@@ -1186,10 +1245,19 @@ decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
     }
   else
     {
-      struct sra_elt *c;
+      struct sra_elt *c, *group;
       unsigned int this_uses = elt->n_uses + parent_uses;
       unsigned int this_copies = elt->n_copies + parent_copies;
 
+      /* Consider groups of sub-elements as weighing in favour of
+	 instantiation whatever their size.  */
+      for (group = elt->groups; group ; group = group->sibling)
+	FOR_EACH_ACTUAL_CHILD (c, group)
+	  {
+	    c->n_uses += group->n_uses;
+	    c->n_copies += group->n_copies;
+	  }
+
       for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
     }
@@ -1293,6 +1361,10 @@ decide_block_copy (struct sra_elt *elt)
   struct sra_elt *c;
   bool any_inst;
 
+  /* We shouldn't be invoked on groups of sub-elements as they must
+     behave like their parent as far as block copy is concerned.  */
+  gcc_assert (!elt->is_group);
+
   /* If scalarization is disabled, respect it.  */
   if (elt->cannot_scalarize)
     {
@@ -1311,6 +1383,14 @@ decide_block_copy (struct sra_elt *elt)
	  c->cannot_scalarize = 1;
	  decide_block_copy (c);
	}
+
+      /* Groups behave like their parent.  */
+      for (c = elt->groups; c; c = c->sibling)
+	{
+	  c->cannot_scalarize = 1;
+	  c->use_block_copy = 1;
+	}
+
       return false;
     }
 
@@ -1372,8 +1452,13 @@ decide_block_copy (struct sra_elt *elt)
	      || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}
+
       elt->use_block_copy = use_block_copy;
 
+      /* Groups behave like their parent.  */
+      for (c = elt->groups; c; c = c->sibling)
+	c->use_block_copy = use_block_copy;
+
       if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
@@ -1496,9 +1581,10 @@ mark_no_warning (struct sra_elt *elt)
       else
	{
	  struct sra_elt *c;
-	  for (c = elt->children; c ; c = c->sibling)
+	  FOR_EACH_ACTUAL_CHILD (c, elt)
	    mark_no_warning (c);
	}
+      elt->all_no_warning = true;
     }
 }
 
@@ -1522,7 +1608,11 @@ generate_one_element_ref (struct sra_elt *elt, tree base)
 
     case ARRAY_TYPE:
       todoflags |= TODO_update_smt_usage;
-      return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
+      if (TREE_CODE (elt->element) == RANGE_EXPR)
+	return build4 (ARRAY_RANGE_REF, elt->type, base,
+		       TREE_OPERAND (elt->element, 0), NULL, NULL);
+      else
+	return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
 
     case COMPLEX_TYPE:
       if (elt->element == integer_zero_node)
@@ -1583,7 +1673,7 @@ generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
     }
   else
     {
-      for (c = elt->children; c ; c = c->sibling)
+      FOR_EACH_ACTUAL_CHILD (c, elt)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
@@ -1600,7 +1690,7 @@ generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
 {
   struct sra_elt *dc, *sc;
 
-  for (dc = dst->children; dc ; dc = dc->sibling)
+  FOR_EACH_ACTUAL_CHILD (dc, dst)
     {
       sc = lookup_element (src, dc->element, NULL, NO_INSERT);
       gcc_assert (sc);
@@ -1635,7 +1725,7 @@ generate_element_zero (struct sra_elt *elt, tree *list_p)
       return;
     }
 
-  for (c = elt->children; c ; c = c->sibling)
+  FOR_EACH_ACTUAL_CHILD (c, elt)
     generate_element_zero (c, list_p);
 
   if (elt->replacement)
@@ -1696,7 +1786,7 @@ generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
     {
     case COMPLEX_CST:
     case COMPLEX_EXPR:
-      for (sub = elt->children; sub ; sub = sub->sibling)
+      FOR_EACH_ACTUAL_CHILD (sub, elt)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
@@ -2158,6 +2248,10 @@ dump_sra_elt_name (FILE *f, struct sra_elt *elt)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
+      else if (TREE_CODE (elt->element) == RANGE_EXPR)
+	fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]",
+		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
+		 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
       else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
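To make the group machinery above easier to follow, here is a small self-contained model of the iteration performed by FOR_EACH_ACTUAL_CHILD via next_child_for_group.  The struct and field names are invented for the example and are not the GCC data structures: the point is only that a group keyed by a [low, high] range walks the parent's single child list and visits just the children whose index falls inside that range.

#include <stddef.h>
#include <stdio.h>

struct elt
{
  long index;           /* child index within the parent */
  struct elt *sibling;  /* next child of the same parent */
};

struct group
{
  long low, high;       /* bounds of the RANGE_EXPR-like key */
  struct elt *children; /* the parent's full child list      */
};

/* Return the next child after CHILD (or the first child if CHILD is
   NULL) whose index lies within the group's range.  */
static struct elt *
next_child_for_group (struct elt *child, struct group *g)
{
  child = child ? child->sibling : g->children;
  while (child && (child->index < g->low || child->index > g->high))
    child = child->sibling;
  return child;
}

int
main (void)
{
  struct elt c3 = { 3, NULL };
  struct elt c2 = { 2, &c3 };
  struct elt c1 = { 1, &c2 };
  struct elt c0 = { 0, &c1 };
  struct group g = { 1, 2, &c0 };   /* group covering indices 1 .. 2 */

  for (struct elt *c = next_child_for_group (NULL, &g); c;
       c = next_child_for_group (c, &g))
    printf ("visiting child %ld\n", c->index);   /* prints 1 then 2 */

  return 0;
}

In the patch itself the same skip-over-siblings walk is done with tree_int_cst_lt on the RANGE_EXPR operands, and group elements are kept on the parent's separate 'groups' list so that decisions such as block copy can be propagated from the parent to its groups.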