path: root/gcc/stor-layout.c
author     kenner <kenner@138bc75d-0d04-0410-961f-82ee72b054a4>   2000-03-25 18:34:13 +0000
committer  kenner <kenner@138bc75d-0d04-0410-961f-82ee72b054a4>   2000-03-25 18:34:13 +0000
commit     02e7a332077569bfa13ac57067d817db087a189b (patch)
tree       2aa8734829bb9352ea3ee4958179c54a164bfc53 /gcc/stor-layout.c
parent     3a8f9e39243612fd41979753bdd1f60464967b58 (diff)
* Rework fields used to describe positions of bitfields and
modify sizes to be unsigned and use HOST_WIDE_INT.
* alias.c (reg_known_value_size): Now unsigned.
* c-typeck.c (build_unary_op, case ADDR_EXPR): Use byte_position.
(really_start_incremental_init): Use bitsize_zero_node.
(push_init_level, pop_init_level, output_init_element): Likewise.
Use bitsize_unit_node and bitsize_one_node.
(output_pending_init_elements, process_init_element): Likewise.
* combine.c (combine_max_regno, reg_sign_bit_copies): Now unsigned.
(make_extraction): Position and length HOST_WIDE_INT and unsigned
HOST_WIDE_INT, respectively.
(get_pos_from_mask): Passed in value is unsigned HOST_WIDE_INT.
(num_sign_bit_copies): Returns unsigned.
BITWIDTH now unsigned; rework arithmetic.
Remove recursive call from arg to MAX.
(combine_instructions, init_reg_last_arrays): NREGS now unsigned.
(setup_incoming_promotions, can_combine_p, try_combine, simplify_set):
REGNO now unsigned.
(set_nonzero_bit_and_sign_copies): NUM now unsigned.
(find_split_point, expand_compound_operation, make_extraction): LEN
now unsigned HOST_WIDE_INT, POS now HOST_WIDE_INT.
(make_field_assignment): Likewise.
(combine_simplify_rtx): Add cast.
(expand_compound_operation): MODEWIDTH now unsigned; rework arithmetic.
(force_to_mode): WIDTH now unsigned; add cast.
(if_then_else_cond): SIZE now unsigned.
(nonzero_bits): MODE_WIDTH, RESULT_WIDTH, and WIDTH now unsigned.
(extended_count): Now returns unsigned.
(simplify_shift_const): COUNT unsigned; arg is now INPUT_COUNT.
Add SIGNED_COUNT variable; MODE_WORDS and FIRST_COUNT now unsigned.
(simplify_comparison): MODE_WIDTH now unsigned.
(update_table_tick): REGNO and ENDREGNO now unsigned; new var R.
(mark_used_regs_combine): Likewise; rework arithmetic.
(record_value_for_reg): REGNO, ENDREGNO, and I now unsigned.
(record_dead_and_set_regs, reg_dead_at_p, distribute_notes): Likewise.
(record_promoted_value): REGNO now unsigned.
(get_last_value_validate): REGNO, ENDREGNO, and J now unsigned.
(get_last_value): REGNO now unsigned.
(use_crosses_set_p): REGNO and ENDREGNO now unsigned.
(reg_dead_regno, reg_dead_endregno): Now unsigned.
(remove_death): Arg REGNO now unsigned.
(move_deaths): REGNO, DEADREGNO, DEADEND, OUREND, and I now unsigned.
(reg_bitfield_target_p): REGNO, TREGNO, ENDREGNO, and ENDTREGNO
now unsigned.
* convert.c (convert_to_integer): INPREC and OUTPREC now unsigned.
* cse.c (struct qty_table_elem): FIRST_REG and LAST_REG now unsigned.
(struct cse_reg_info): REGNO now unsigned.
(cached_regno): Now unsigned.
(REGNO_QTY_VALID_P): Add cast.
(make_new_qty, make_regs_eqv, delete_reg_equiv): Regno args unsigned.
(remove_invalid_refs): Likewise.
(remove_invalid_subreg_refs): Likewise; arg WORD also unsigned as
are variables END and I.
(get_cse_reg_info, insert): Likewise.
(mention_regs, invalidate_for_call): REGNO, ENDREGNO, and I unsigned.
(canon_hash): Likewise.
(insert_regs, lookup_for_remove): REGNO now unsigned.
(invalidate): REGNO, ENDREGNO, TREGNO, and TENDREGNO now unsigned.
New variable RN.
* dbxout.c (dbxout_parms, dbxout_reg_parms): Don't check for REGNO < 0.
* dwarf2out.c (dwarf2out_frame_debug_expr): Remove cast.
* emit-rtl.c (subreg_realpart_p): Add cast.
(operand_subword): Arg I is now unsigned as is var PARTWORDS.
(operand_subword_force): Arg I is now unsigned.
* except.c (eh_regs): Variable I is now unsigned.
* explow.c (hard_function_value): BYTES is unsigned HOST_WIDE_INT.
* expmed.c (store_fixed_bit_field): Position is HOST_WIDE_INT; length
is unsigned HOST_WIDE_INT; likewise for internal variables.
(store_split_bit_field, extract_fixed_bit_field): Likewise.
(extract_split_bit_field, store_bit_field, extract_bit_field):
Likewise.
* expr.c (store_constructor_fields, store_constructor, store_field):
Positions are HOST_WIDE_INT and lengths are unsigned HOST_WIDE_INT.
(expand_assignment, expand_expr, expand_expr_unaligned): Likewise.
(do_jump): Likewise.
(move_by_pieces, move_by_pieces_ninsns, clear_by_pieces): MAX_SIZE
is now unsigned.
(emit_group_load): BYTEPOS is HOST_WIDE_INT; BYTELEN is unsigned.
(emit_group_store): Likewise.
(emit_move_insn): I now unsigned.
(store_constructor): Use host_integerp, tree_low_cst, and
bitsize_unit_node.
(get_inner_reference): Return bitpos and bitsize as HOST_WIDE_INT.
Rework all calculations to use trees and new fields.
* expr.h (promoted_input_arg): Regno now unsigned.
(store_bit_field, extract_bit_field): Adjust types of pos and size.
(mark_seen_cases): Arg is HOST_WIDE_INT.
* flow.c (verify_wide_reg_1): REGNO now unsigned.
* fold-const.c (decode_field_reference): Size and pos HOST_WIDE_INT;
precisions and alignments are unsigned.
(optimize_bit_field_compare, fold_truthop): Likewise.
(int_const_binop): Adjust threshold for size_int_type_wide call.
(fold_convert): Likewise.
(size_int_type_wide): Make table larger and fix thinko that only
had half of table used.
(all_ones_mask_p, fold): Precisions are unsigned.
* function.c (put_reg_info_stack): REGNO is unsigned.
(instantiate_decl): Size is HOST_WIDE_INT.
(instantiate_virtual_regs): I is unsigned.
(assign_parms): REGNO, REGNOI, and REGNOR are unsigned.
(promoted_input_arg): REGNO is unsigned.
* function.h (struct function): x_max_parm_reg is now unsigned.
* gcse.c (max_gcse_regno): Now unsigned.
(struct null_pointer_info): min_reg and max_reg now unsigned.
(lookup_set, next_set): REGNO arg now unsigned.
(compute_hash_table): REGNO and I now unsigned.
(handle_avail_expr): regnum_for_replacing now unsigned.
(cprop_insn): REGNO now unsigned.
(delete_null_pointer_checks_1): BLOCK_REG now pointer to unsigned.
* ggc-common.c (ggc_mark_tree_children, case FIELD_DECL): New case.
* global.c (set_preference): SRC_REGNO, DEST_REGNO, and I now unsigned.
* hard-reg-set.h (reg_class_size): Now unsigned.
* integrate.c (mark_stores): LAST_REG and I now unsigned; new UREGNO.
* jump.c (mark_modified_reg): I now unsigned; add cast.
(rtx_equal_for_thread_p): Add cast.
* loop.c (max_reg_before_loop): Now unsigned.
(struct movable): REGNO now unsigned.
(try_copy_prop): REGNO arg unsigned.
(regs_match_p): XN and YN now unsigned.
(consec_sets_invariant_p, maybe_eliminate_biv): REGNO now unsigned.
(strength_reduce): Likewise; NREGS also unsigned.
(first_increment_giv, last_increment_giv): Now unsigned.
* loop.h (struct iv_class): REGNO now unsigned.
(max_reg_before_loop, first_increment_giv, last_increment_giv):
Now unsigned.
* machmode.h (mode_size, mode_unit_size): Now unsigned.
(mode_for_size, smallest_mode_for_size): Pass size as unsigned.
* optabs.c (expand_binop): I and NWORDS now unsigned.
(expand_unop): I now unsigned.
* print-tree.c (print_node): Don't print DECL_FIELD_BITPOS, but do
print DECL_FIELD_OFFSET and DECL_FIELD_BIT_OFFSET.
* real.c (significand_size): Now returns unsigned.
* real.h (significand_size): Likewise.
* regclass.c (reg_class_size): Now unsigned.
(choose_hard_reg_mode): Both operands now unsigned.
(record_reg_classes): REGNO and NR now unsigned.
(reg_scan): NREGS now unsigned.
(reg_scan_update): old_max_regno now unsigned.
(reg_scan_mark_refs): Arg MIN_REGNO and var REGNO now unsigned.
* reload.c (find_valid_class): BEST_SIZE now unsigned.
(find_dummy_reload): REGNO, NWORDS, and I now unsigned.
(hard_reg_set_here_p): Args BEG_REGNO and END_REGNO now unsigned.
Likewise for variable R.
(refers_to_regno_for_reload_p): Args REGNO and END_REGNO now
unsigned, as are variables INNER_REGNO and INNER_ENDREGNO; add new
variable R.
(find_equiv_reg): Add casts.
(regno_clobbered_p): Arg REGNO now unsigned.
* reload.h (struct reload): NREGS now unsigned.
(refers_to_regno_for_reload_p): Regno args are unsigned.
(regno_clobbered_p): Likewise.
* reload1.c (reg_max_ref_width, spill_stack_slot_width): Now unsigned.
(compute_use_by_pseudos): REGNO now unsigned.
(find_reg): I and J now unsigned, new variable K, and change loop
variables accordingly; THIS_NREGS now unsigned.
(alter_reg): INHERENT_SIZE and TOTAL_SIZE now unsigned.
(spill_hard_reg): REGNO arg now unsigned; add casts.
(forget_old_reloads_1): REGNO, NR, and I now unsigned.
(mark_reload_reg_in_use): Arg REGNO and vars NREGS and I now unsigned.
(clear_reload_reg_in_use): Arg REGNO and vars NREGS, START_REGNO,
END_REGNO, CONFLICT_START, and CONFLICT_END now unsigned.
(reload_reg_free_p, reload_reg_reaches_end_p): Arg REGNO now unsigned.
(choose_reload_regs): MAX_GROUP_SIZE now unsigned.
(emit_reload_insns): REGNO now unsigned.
(reload_cse_move2add): Add cast.
(move2add_note_store): REGNO and I now unsigned; new variable
ENDREGNO and rework loop.
* resource.c (mark_referenced_resources, mark_set_resources): New
variable R; REGNO and LAST_REGNO now unsigned.
(mark_target_live_regs): J and REGNO now unsigned.
* rtl.c (mode_size, mode_unit_size): Now unsigned.
* rtl.h (union rtunion_def): New field rtuint.
(XCUINT): New macro.
(ADDRESSOF_REGNO, REGNO, SUBREG_WORD): Now XCUINT.
(operand_subword, operand_subword_force): Word number is unsigned.
(choose_hard_reg_mode): Operands are unsigned.
(refers_to_regno_p, dead_or_set_regno_p): Regno arg is unsigned.
(find_regno_note, find_regno_fusage, replace_regs): Likewise.
(regno_use_in, combine_instructions, remove_death): Likewise.
(reg_scan, reg_scan_update): Likewise.
(extended_count): Return is unsigned.
* rtlanal.c (refers_to_regno_p): Args REGNO and ENDREGNO and vars I,
INNER_REGNO, and INNER_ENDREGNO now unsigned; new variable X_REGNO.
(reg_overlap_mentioned_p): REGNO and ENDREGNO now unsigned.
(reg_set_last_first_regno, reg_set_last_last_regno): Now unsigned.
(reg_set_last_1): FIRST and LAST now unsigned.
(dead_or_set_p): REGNO, LAST_REGNO, and I now unsigned.
(dead_or_set_regno_p): Arg TEST_REGNO and vars REGNO and ENDREGNO
now unsigned.
(find_regno_note, regno_use_in): Arg REGNO now unsigned.
(find_regno_fusage): Likewise; also var REGNOTE now unsigned.
(find_reg_fusage): Variables REGNO, END_REGNO, and I now unsigned.
(replace_regs): Arg NREGS now unsigned.
* sdbout.c (sdbout_parms, sdbout_reg_parms): Don't check REGNO < 0.
* simplify-rtx.c (simplify_unary_operation): WIDTH now unsigned.
(simplify_binary_operation): Likewise.
(cselib_invalidate_regno): Arg REGNO and variables ENDREGNO, I, and
THIS_LAST now unsigned.
(cselib_record_set): Add cast.
* ssa.c (ssa_max_reg_num): Now unsigned.
(rename_block): REGNO now unsigned.
* stmt.c (expand_return): Bit positions unsigned HOST_WIDE_INT;
sizes now unsigned.
(all_cases_count): Just return -1 not -2.
COUNT, MINVAL, and LASTVAL now HOST_WIDE_INT.
Rework tests to use trees whenever possible.
Use host_integerp and tree_low_cst.
(mark_seen_cases): COUNT arg now HOST_WIDE_INT; likewise variable
NEXT_NODE_OFFSET; XLO now unsigned.
(check_for_full_enumeration_handling): BYTES_NEEDED, I to
HOST_WIDE_INT.
* stor-layout.c (mode_for_size): SIZE arg now unsigned.
(smallest_mode_for_size): Likewise.
(layout_decl): Simplify handling of a specified DECL_SIZE_UNIT.
KNOWN_ALIGN is now an alignment, so simplify code.
Don't turn off DECL_BIT_FIELD if field is BLKmode, but not type.
(start_record_layout): Renamed from new_record_layout_info.
Update to new fields.
(debug_rli, normalize_rli, rli_size_unit_so_far, rli_size_so_far):
New functions.
(place_union_field): Renamed from layout_union_field.
Update to use new fields in rli.
(place_field): Renamed from layout_field.
Major rewrite to use new fields in rli; pass alignment to layout_decl.
(finalize_record_size): Rework to use new fields in rli and handle
union.
(compute_record_mode): Rework to simplify and to use new DECL fields.
(finalize_type_size): Make rounding more consistent.
(finish_union_layout): Deleted.
(layout_type, case VOID_TYPE): Don't set TYPE_SIZE_UNIT either.
(layout_type, case RECORD_TYPE): Call new function names.
(initialize_sizetypes): Set TYPE_IS_SIZETYPE.
(set_sizetype): Set TYPE_IS_SIZETYPE earlier.
(get_best_mode): UNIT is now unsigned; remove casts.
* tree.c (bit_position): Compute from new fields.
(byte_position, int_byte_position): New functions.
(print_type_hash_statistics): Cast to remove warning.
(build_range_type): Use host_integerp and tree_low_cst to try to hash.
(build_index_type): Likewise; make subtype of sizetype.
(build_index_2_type): Pass sizetype to build_range_type.
(build_common_tree_nodes): Use size_int and bitsize_int to initialize
nodes; add bitsize_{zero,one,unit}_node.
* tree.h (DECL_FIELD_CONTEXT): Use FIELD_DECL_CHECK.
(DECL_BIT_FIELD_TYPE, DECL_QUALIFIER, DECL_FCONTEXT): Likewise.
(DECL_PACKED, DECL_BIT_FIELD): Likewise.
(DECL_FIELD_BITPOS): Deleted.
(DECL_FIELD_OFFSET, DECL_FIELD_BIT_OFFSET): New fields.
(DECL_RESULT, DECL_SAVED_INSNS): Use FUNCTION_DECL_CHECK.
(DECL_FRAME_SIZE, DECL_FUNCTION_CODE, DECL_NO_STATIC_CHAIN): Likewise.
(DECL_INLINE, DECL_BUILT_IN_NONANSI, DECL_IS_MALLOC): Likewise.
(DECL_BUILT_IN_CLASS, DECL_STATIC_CONSTRUCTOR): Likewise.
(DECL_STATIC_DESTRUCTOR, DECL_NO_CHECK_MEMORY_USAGE): Likewise.
(DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT, DECL_NO_LIMIT_STACK):
Likewise.
(DECL_ORIGINAL_TYPE, TYPE_DECL_SUPPRESS_DEBUG): Use TYPE_DECL_CHECK.
(DECL_ARG_TYPE_AS_WRITTEN, DECL_ARG_TYPE): Use PARM_DECL_CHECK.
(DECL_INCOMING_RTL, DECL_TRANSPARENT_UNION): Likewise.
(DECL_ALIGN): Adjust to new field in union.
(DECL_OFFSET_ALIGN): New field.
(DECL_ERROR_ISSUED, DECL_TOO_LATE): Use LABEL_DECL_CHECK.
(DECL_IN_TEXT_SECTION): Use VAR_DECL_CHECK.
(union tree_decl): Add struct for both aligns.
(enum tree_index): Add TI_BITSIZE_{ZERO,ONE,UNIT}.
(bitsize_zero_node, bitsize_one_node, bitsize_unit_node): Added.
(struct record_layout_info): Rework fields to have offset alignment
and byte and bit position.
(start_record_layout, place_field): Renamed from old names.
(rli_size_so_far, rli_size_unit_so_far, normalize_rli): New decls.
(byte_position, int_byte_position): Likewise.
(get_inner_reference): Change types of position and length.
* unroll.c (unroll_loop): New variable R; use for some loops.
MAX_LOCAL_REGNUM and MAXREGNUM now unsigned.
(calculate_giv_inc): Arg REGNO now unsigned.
(copy_loop_body): REGNO and SRC_REGNO now unsigned.
* varasm.c (assemble_variable): Clean up handling of size using
host_integerp and tree_low_cst.
(decode_addr_const): Use byte, not bit, position.
(output_constructor): bitpos and offsets are HOST_WIDE_INT; use
tree_low_cst and int_bit_position.
* objc/objc-act.c (build_ivar_list_initializer): Use byte_position.
* ch/actions.c (check_missing_cases): BYTES_NEEDED is HOST_WIDE_INT.
* ch/typeck.c (expand_constant_to_buffer): Use int_byte_position.
(extract_constant_from_buffer): Likewise.
* cp/class.c (build_vbase_pointer_fields): layout_field now
place_field.
(get_vfield_offset): Use byte_position.
(set_rtti_entry): Set OFFSET to ssizetype zero.
(get_binfo_offset_as_int): Deleted.
(dfs_record_base_offsets): Use tree_low_cst.
(dfs_search_base_offsets): Likewise.
(layout_nonempty_base_or_field): Reflect changes in RLI format
and call byte_position.
(layout_empty_base): Convert offset to ssizetype.
(build_base_field): Use rli_size_unit_so_far.
(dfs_propagate_binfo_offsets): Do computation in proper type.
(layout_virtual_bases): Pass ssizetype to propagate_binfo_offsets.
(layout_class_type): Reflect changes in RLI names and fields.
(finish_struct_1): Set DECL_FIELD_OFFSET.
* cp/dump.c (dequeue_and_dump): Call bit_position.
* cp/expr.c (cplus_expand_constant): Use byte_position.
* cp/rtti.c (expand_class_desc): Use bitsize_one_node.
* cp/typeck.c (build_component_addr): Use byte_position and don't
special case for zero offset.
* f/com.c (ffecom_tree_canonize_ptr_): Use bitsize_zero_node.
(ffecom_tree_canonize_ref_): Likewise.
* java/class.c (make_field_value): Use byte_position.
* java/expr.c (JAVA_ARRAY_LENGTH_OFFSET): Use byte_position.
(java_array_data_offset): Likewise.
* java/java-tree.h (MAYBE_CREATE_TYPE_TYPE_LANG_SPECIFIC): Add cast
to bzero call.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@32742 138bc75d-0d04-0410-961f-82ee72b054a4
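[Editor's note] The core of this patch replaces the single DECL_FIELD_BITPOS
with a byte offset (DECL_FIELD_OFFSET) plus a bit offset kept below
DECL_OFFSET_ALIGN (DECL_FIELD_BIT_OFFSET). A minimal sketch of how the two
fields recombine into the old-style bit position, using host integers rather
than the tree arithmetic the patch actually performs (the helper name here is
hypothetical; the real tree-based computation is the new bit_position in
tree.c):

    /* Illustrative sketch only: recombine the two new position fields
       into a single bit position.  The real bit_position works on trees
       so that variable offsets remain representable.  */
    static HOST_WIDE_INT
    example_bit_position (HOST_WIDE_INT byte_offset, HOST_WIDE_INT bit_offset)
    {
      return byte_offset * BITS_PER_UNIT + bit_offset;
    }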
Diffstat (limited to 'gcc/stor-layout.c')
-rw-r--r--  gcc/stor-layout.c  741
1 file changed, 388 insertions, 353 deletions
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index 8773fa9a8a8..ada43a0ca84 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -50,11 +50,10 @@ unsigned int maximum_field_alignment;
May be overridden by front-ends. */
unsigned int set_alignment = 0;
-static void finalize_record_size PARAMS ((record_layout_info));
-static void compute_record_mode PARAMS ((tree));
-static void finalize_type_size PARAMS ((tree));
-static void layout_union_field PARAMS ((record_layout_info, tree));
-static void finish_union_layout PARAMS ((record_layout_info));
+static void finalize_record_size PARAMS ((record_layout_info));
+static void compute_record_mode PARAMS ((tree));
+static void finalize_type_size PARAMS ((tree));
+static void place_union_field PARAMS ((record_layout_info, tree));
/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded. */
@@ -65,6 +64,8 @@ static tree pending_sizes;
int immediate_size_expand;
+/* Get a list of all the objects put on the pending sizes list. */
+
tree
get_pending_sizes ()
{
@@ -79,6 +80,9 @@ get_pending_sizes ()
return chain;
}
+/* Put a chain of objects into the pending sizes list, which must be
+ empty. */
+
void
put_pending_sizes (chain)
tree chain;
@@ -131,8 +135,7 @@ variable_size (size)
Also, we would like to pass const0_rtx here, but don't have it. */
expand_expr (size, expand_expr (integer_zero_node, NULL_PTR, VOIDmode, 0),
VOIDmode, 0);
- else if (cfun != 0
- && cfun->x_dont_save_pending_sizes_p)
+ else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
/* The front-end doesn't want us to keep a list of the expressions
that determine sizes for variable size objects. */
;
@@ -153,7 +156,7 @@ variable_size (size)
enum machine_mode
mode_for_size (size, class, limit)
- int size;
+ unsigned int size;
enum mode_class class;
int limit;
{
@@ -194,7 +197,7 @@ mode_for_size_tree (size, class, limit)
enum machine_mode
smallest_mode_for_size (size, class)
- int size;
+ unsigned int size;
enum mode_class class;
{
register enum machine_mode mode;
@@ -296,37 +299,37 @@ layout_decl (decl, known_align)
if (type == error_mark_node)
type = void_type_node;
- /* Usually the size and mode come from the data type without change. */
+ /* Usually the size and mode come from the data type without change,
+ however, the front-end may set the explicit width of the field, so its
+ size may not be the same as the size of its type. This happens with
+ bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
+ also happens with other fields. For example, the C++ front-end creates
+ zero-sized fields corresponding to empty base classes, and depends on
+ layout_type setting DECL_FIELD_BITPOS correctly for the field. Set the
+ size in bytes from the size in bits. */
+
DECL_MODE (decl) = TYPE_MODE (type);
TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
+
if (DECL_SIZE (decl) == 0)
{
DECL_SIZE (decl) = TYPE_SIZE (type);
DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
}
- else if (code == FIELD_DECL)
- {
- HOST_WIDE_INT spec_size;
-
- /* Size is specified in number of bits. */
- spec_size = TREE_INT_CST_LOW (DECL_SIZE (decl));
- if (spec_size % BITS_PER_UNIT == 0)
- DECL_SIZE_UNIT (decl) = size_int (spec_size / BITS_PER_UNIT);
- else
- DECL_SIZE_UNIT (decl) = 0;
- }
+ else
+ DECL_SIZE_UNIT (decl)
+ = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
+ bitsize_unit_node));
/* Force alignment required for the data type.
But if the decl itself wants greater alignment, don't override that.
Likewise, if the decl is packed, don't override it. */
if (!(code == FIELD_DECL && DECL_BIT_FIELD (decl))
&& (DECL_ALIGN (decl) == 0
- || (! DECL_PACKED (decl) && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
+ || (! DECL_PACKED (decl) && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
DECL_ALIGN (decl) = TYPE_ALIGN (type);
- /* See if we can use an ordinary integer mode for a bit-field.
- Conditions are: a fixed size that is correct for another mode
- and occupying a complete byte or bytes on proper boundary. */
+ /* For fields, set the bit field type and update the alignment. */
if (code == FIELD_DECL)
{
DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
@@ -336,6 +339,9 @@ layout_decl (decl, known_align)
DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
}
+ /* See if we can use an ordinary integer mode for a bit-field.
+ Conditions are: a fixed size that is correct for another mode
+ and occupying a complete byte or bytes on proper boundary. */
if (DECL_BIT_FIELD (decl)
&& TYPE_SIZE (type) != 0
&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
@@ -344,24 +350,21 @@ layout_decl (decl, known_align)
register enum machine_mode xmode
= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
- if (xmode != BLKmode
- && known_align % GET_MODE_ALIGNMENT (xmode) == 0)
+ if (xmode != BLKmode && known_align > GET_MODE_ALIGNMENT (xmode))
{
DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
DECL_ALIGN (decl));
DECL_MODE (decl) = xmode;
- DECL_SIZE (decl) = bitsize_int (GET_MODE_BITSIZE (xmode));
- DECL_SIZE_UNIT (decl) = size_int (GET_MODE_SIZE (xmode));
- /* This no longer needs to be accessed as a bit field. */
DECL_BIT_FIELD (decl) = 0;
}
}
/* Turn off DECL_BIT_FIELD if we won't need it set. */
- if (DECL_BIT_FIELD (decl) && TYPE_MODE (type) == BLKmode
- && known_align % TYPE_ALIGN (type) == 0
- && DECL_SIZE_UNIT (decl) != 0
- && DECL_ALIGN (decl) >= TYPE_ALIGN (type))
+ if (DECL_BIT_FIELD (decl)
+ && TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
+ && known_align > TYPE_ALIGN (type)
+ && DECL_ALIGN (decl) >= TYPE_ALIGN (type)
+ && DECL_SIZE_UNIT (decl) != 0)
DECL_BIT_FIELD (decl) = 0;
/* Evaluate nonconstant size only once, either now or as soon as safe. */
@@ -392,23 +395,28 @@ layout_decl (decl, known_align)
}
}
-/* Create a new record_layout_info for T, which may be a RECORD_TYPE,
- UNION_TYPE, or QUAL_UNION_TYPE. It is the responsibility of the
- caller to call `free' for the storage the returned. */
+/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
+ QUAL_UNION_TYPE. Return a pointer to a struct record_layout_info which
+ is to be passed to all other layout functions for this record. It is the
+ responsibility of the caller to call `free' for the storage returned.
+ Note that garbage collection is not permitted until we finish laying
+ out the record. */
record_layout_info
-new_record_layout_info (t)
+start_record_layout (t)
tree t;
{
record_layout_info rli
- = (record_layout_info) xcalloc (1, sizeof (struct record_layout_info_s));
+ = (record_layout_info) xmalloc (sizeof (struct record_layout_info));
rli->t = t;
+
/* If the type has a minimum specified alignment (via an attribute
declaration, for example) use it -- otherwise, start with a
one-byte alignment. */
rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
rli->unpacked_align = rli->record_align;
+ rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
#ifdef STRUCTURE_SIZE_BOUNDARY
/* Packed structures don't need to have minimum size. */
@@ -416,30 +424,96 @@ new_record_layout_info (t)
rli->record_align = MAX (rli->record_align, STRUCTURE_SIZE_BOUNDARY);
#endif
+ rli->offset = size_zero_node;
+ rli->bitpos = bitsize_zero_node;
+ rli->pending_statics = 0;
+ rli->packed_maybe_necessary = 0;
+
return rli;
}
-/* Like layout_field, but for unions. */
+/* Print debugging information about the information in RLI. */
-static void
-layout_union_field (rli, field)
+void
+debug_rli (rli)
record_layout_info rli;
- tree field;
{
- tree dsize;
-
- /* This function should only be used for unions; use layout_field
- for RECORD_TYPEs. */
- if (TREE_CODE (rli->t) != UNION_TYPE
- && TREE_CODE (rli->t) != QUAL_UNION_TYPE)
- abort ();
+ print_node_brief (stderr, "type", rli->t, 0);
+ print_node_brief (stderr, "\noffset", rli->offset, 0);
+ print_node_brief (stderr, " bitpos", rli->bitpos, 0);
- /* By now, we should only be seeing FIELD_DECLs. */
- if (TREE_CODE (field) != FIELD_DECL)
- abort ();
+ fprintf (stderr, "\nrec_align = %u, unpack_align = %u, off_align = %u\n",
+ rli->record_align, rli->unpacked_align, rli->offset_align);
+ if (rli->packed_maybe_necessary)
+ fprintf (stderr, "packed may be necessary\n");
+
+ if (rli->pending_statics)
+ {
+ fprintf (stderr, "pending statics:\n");
+ debug_tree (rli->pending_statics);
+ }
+}
+
+/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
+ BITPOS if necessary to keep BITPOS below OFFSET_ALIGN. */
+
+void
+normalize_rli (rli)
+ record_layout_info rli;
+{
+ /* If the bit position is now larger than it should be, adjust it
+ downwards. */
+ if (compare_tree_int (rli->bitpos, rli->offset_align) >= 0)
+ {
+ tree extra_aligns = size_binop (FLOOR_DIV_EXPR, rli->bitpos,
+ bitsize_int (rli->offset_align));
+
+ rli->offset
+ = size_binop (PLUS_EXPR, rli->offset,
+ size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
+ size_int (rli->offset_align
+ / BITS_PER_UNIT)));
+
+ rli->bitpos = size_binop (FLOOR_MOD_EXPR, rli->bitpos,
+ bitsize_int (rli->offset_align));
+ }
+}
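/* Editorial example, not part of the patch: with offset_align == 32,
   offset == 4 and bitpos == 70, normalize_rli computes
   extra_aligns == 70 / 32 == 2, so offset becomes
   4 + 2 * (32 / 8) == 12 and bitpos becomes 70 % 32 == 6.  */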
+/* Returns the size in bytes allocated so far. */
+
+tree
+rli_size_unit_so_far (rli)
+ record_layout_info rli;
+{
+ return size_binop (PLUS_EXPR, rli->offset,
+ convert (sizetype,
+ size_binop (CEIL_DIV_EXPR, rli->bitpos,
+ bitsize_unit_node)));
+}
+
+/* Returns the size in bits allocated so far. */
+
+tree
+rli_size_so_far (rli)
+ record_layout_info rli;
+{
+ return size_binop (PLUS_EXPR, rli->bitpos,
+ size_binop (MULT_EXPR, convert (bitsizetype, rli->offset),
+ bitsize_unit_node));
+}
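/* Editorial example, not part of the patch: with offset == 5 (bytes)
   and bitpos == 3 (bits), rli_size_so_far returns 5 * 8 + 3 == 43 bits,
   while rli_size_unit_so_far rounds up to 5 + CEIL (3, 8) == 6 bytes.  */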
+
+/* Called from place_field to handle unions. */
+
+static void
+place_union_field (rli, field)
+ record_layout_info rli;
+ tree field;
+{
layout_decl (field, 0);
- DECL_FIELD_BITPOS (field) = bitsize_int (0);
+
+ DECL_FIELD_OFFSET (field) = size_zero_node;
+ DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
+ DECL_OFFSET_ALIGN (field) = BIGGEST_ALIGNMENT;
/* Union must be at least as aligned as any field requires. */
rli->record_align = MAX (rli->record_align, DECL_ALIGN (field));
@@ -452,30 +526,14 @@ layout_union_field (rli, field)
TYPE_ALIGN (TREE_TYPE (field)));
#endif
- dsize = DECL_SIZE (field);
+ /* We assume the union's size will be a multiple of a byte so we don't
+ bother with BITPOS. */
if (TREE_CODE (rli->t) == UNION_TYPE)
- {
- /* Set union_size to max (decl_size, union_size). There are
- more and less general ways to do this. Use only CONST_SIZE
- unless forced to use VAR_SIZE. */
-
- if (TREE_CODE (dsize) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (dsize)
- && TREE_INT_CST_HIGH (dsize) == 0)
- rli->const_size
- = MAX (rli->const_size, TREE_INT_CST_LOW (dsize));
- else if (rli->var_size == 0)
- rli->var_size = dsize;
- else
- rli->var_size = size_binop (MAX_EXPR, rli->var_size, dsize);
- }
+ rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
- rli->var_size = fold (build (COND_EXPR, bitsizetype,
- DECL_QUALIFIER (field),
- DECL_SIZE (field),
- (rli->var_size
- ? rli->var_size
- : bitsize_int (0))));
+ rli->offset = fold (build (COND_EXPR, sizetype,
+ DECL_QUALIFIER (field),
+ DECL_SIZE_UNIT (field), rli->offset));
}
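/* Editorial example, not part of the patch: for a plain UNION_TYPE,
   successive calls of place_union_field for members of 4 and 8 bytes
   leave rli->offset == MAX (4, 8) == 8.  For a QUAL_UNION_TYPE the
   member sizes are instead folded into a COND_EXPR chain keyed on
   each member's DECL_QUALIFIER.  */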
/* RLI contains information about the layout of a RECORD_TYPE. FIELD
@@ -484,7 +542,7 @@ layout_union_field (rli, field)
callers that desire that behavior must manually perform that step.) */
void
-layout_field (rli, field)
+place_field (rli, field)
record_layout_info rli;
tree field;
{
@@ -493,11 +551,10 @@ layout_field (rli, field)
/* The alignment FIELD would have if we just dropped it into the
record as it presently stands. */
unsigned int known_align;
+ unsigned int actual_align;
/* The type of this field. */
tree type = TREE_TYPE (field);
- /* The size of this field, in bits. */
- tree dsize;
-
+
/* If FIELD is static, then treat it like a separate variable, not
really like a structure field. If it is a FUNCTION_DECL, it's a
method. In both cases, all we do is lay out the decl, and we do
@@ -508,29 +565,40 @@ layout_field (rli, field)
rli->pending_statics);
return;
}
+
/* Enumerators and enum types which are local to this class need not
be laid out. Likewise for initialized constant fields. */
else if (TREE_CODE (field) != FIELD_DECL)
return;
- /* This function should only be used for records; use
- layout_union_field for unions. */
+
+ /* Unions are laid out very differently than records, so split
+ that code off to another function. */
else if (TREE_CODE (rli->t) != RECORD_TYPE)
{
- layout_union_field (rli, field);
+ place_union_field (rli, field);
return;
}
- /* Work out the known alignment so far. */
- known_align = rli->var_size ? rli->var_align : rli->const_size;
+ /* Work out the known alignment so far. Note that A & (-A) is the
+ value of the least-significant bit in A that is one. */
+ if (! integer_zerop (rli->bitpos) && TREE_CONSTANT (rli->offset))
+ known_align = (tree_low_cst (rli->bitpos, 1)
+ & - tree_low_cst (rli->bitpos, 1));
+ else if (host_integerp (rli->offset, 1))
+ known_align = (BITS_PER_UNIT
+ * (tree_low_cst (rli->offset, 1)
+ & - tree_low_cst (rli->offset, 1)));
+ else
+ known_align = rli->offset_align;
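/* Editorial example, not part of the patch: A & -A isolates the
   least-significant set bit of A, i.e. the largest power of two that
   provably divides A.  E.g. bitpos == 24 (binary 11000) gives
   24 & -24 == 8, so the next field is known to be aligned only to an
   8-bit boundary.  */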
/* Lay out the field so we know what alignment it needs. For a
packed field, use the alignment as specified, disregarding what
the type would want. */
- if (DECL_PACKED (field))
- desired_align = DECL_ALIGN (field);
+ desired_align = DECL_ALIGN (field);
layout_decl (field, known_align);
if (! DECL_PACKED (field))
desired_align = DECL_ALIGN (field);
+
/* Some targets (i.e. VMS) limit struct field alignment
to a lower boundary than alignment of variables. */
#ifdef BIGGEST_FIELD_ALIGNMENT
@@ -556,6 +624,7 @@ layout_field (rli, field)
rli->record_align = MAX (rli->record_align, desired_align);
else if (! DECL_PACKED (field))
desired_align = TYPE_ALIGN (type);
+
/* A named bit field of declared type `int'
forces the entire structure to have `int' alignment. */
if (DECL_NAME (field) != 0)
@@ -577,15 +646,12 @@ layout_field (rli, field)
#endif
{
rli->record_align = MAX (rli->record_align, desired_align);
- if (warn_packed)
- rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
+ rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
}
if (warn_packed && DECL_PACKED (field))
{
- if (rli->const_size % TYPE_ALIGN (type) == 0
- || (rli->var_align % TYPE_ALIGN (type) == 0
- && rli->var_size != NULL_TREE))
+ if (known_align > TYPE_ALIGN (type))
{
if (TYPE_ALIGN (type) > desired_align)
{
@@ -601,9 +667,7 @@ layout_field (rli, field)
/* Does this field automatically have alignment it needs by virtue
of the fields that precede it and the record's own alignment? */
- if (rli->const_size % desired_align != 0
- || (rli->var_align % desired_align != 0
- && rli->var_size != NULL_TREE))
+ if (known_align < desired_align)
{
/* No, we need to skip space before this field.
Bump the cumulative size to multiple of field alignment. */
@@ -611,55 +675,72 @@ layout_field (rli, field)
if (warn_padded)
warning_with_decl (field, "padding struct to align `%s'");
- if (rli->var_size == NULL_TREE || rli->var_align % desired_align == 0)
- rli->const_size
- = CEIL (rli->const_size, desired_align) * desired_align;
+ /* If the alignment is still within offset_align, just align
+ the bit position. */
+ if (desired_align < rli->offset_align)
+ rli->bitpos = round_up (rli->bitpos, desired_align);
else
{
- if (rli->const_size > 0)
- rli->var_size = size_binop (PLUS_EXPR, rli->var_size,
- bitsize_int (rli->const_size));
- rli->const_size = 0;
- rli->var_size = round_up (rli->var_size, desired_align);
- rli->var_align = MIN (rli->var_align, desired_align);
+ /* First adjust OFFSET by the partial bits, then align. */
+ rli->offset
+ = size_binop (PLUS_EXPR, rli->offset,
+ convert (sizetype,
+ size_binop (CEIL_DIV_EXPR, rli->bitpos,
+ bitsize_unit_node)));
+ rli->bitpos = bitsize_zero_node;
+
+ rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
}
+
}
+ /* Handle compatibility with PCC. Note that if the record has any
+ variable-sized fields, we need not worry about compatibility. */
#ifdef PCC_BITFIELD_TYPE_MATTERS
if (PCC_BITFIELD_TYPE_MATTERS
&& TREE_CODE (field) == FIELD_DECL
&& type != error_mark_node
- && DECL_BIT_FIELD_TYPE (field)
- && !DECL_PACKED (field)
+ && DECL_BIT_FIELD (field)
+ && ! DECL_PACKED (field)
&& maximum_field_alignment == 0
- && !integer_zerop (DECL_SIZE (field)))
+ && ! integer_zerop (DECL_SIZE (field))
+ && host_integerp (DECL_SIZE (field), 1)
+ && host_integerp (rli->offset, 1)
+ && host_integerp (TYPE_SIZE (type), 1))
{
unsigned int type_align = TYPE_ALIGN (type);
- register tree dsize = DECL_SIZE (field);
- unsigned int field_size = TREE_INT_CST_LOW (dsize);
+ tree dsize = DECL_SIZE (field);
+ HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
+ HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
+ HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
/* A bit field may not span more units of alignment of its type
than its type itself. Advance to next boundary if necessary. */
- if (((rli->const_size + field_size + type_align - 1) / type_align
- - rli->const_size / type_align)
- > TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (field))) / type_align)
- rli->const_size = CEIL (rli->const_size, type_align) * type_align;
+ if ((((offset * BITS_PER_UNIT + bit_offset + field_size +
+ type_align - 1)
+ / type_align)
+ - (offset * BITS_PER_UNIT + bit_offset) / type_align)
+ > tree_low_cst (TYPE_SIZE (type), 1) / type_align)
+ rli->bitpos = round_up (rli->bitpos, type_align);
}
#endif
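/* Editorial example, not part of the patch: as a worked instance of
   the PCC rule above, take a 3-bit field of type `int' (type_align ==
   32, TYPE_SIZE == 32) with offset == 4 and bit_offset == 30.  The
   field would occupy bits 62..64 of the record, spanning two `int'
   alignment units, so rli->bitpos is rounded up from 30 to 32 and the
   field starts at bit 64.  */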
- /* No existing machine description uses this parameter. So I have
- made it in this aspect identical to PCC_BITFIELD_TYPE_MATTERS. */
#ifdef BITFIELD_NBYTES_LIMITED
if (BITFIELD_NBYTES_LIMITED
&& TREE_CODE (field) == FIELD_DECL
&& type != error_mark_node
&& DECL_BIT_FIELD_TYPE (field)
- && !DECL_PACKED (field)
- && !integer_zerop (DECL_SIZE (field)))
+ && ! DECL_PACKED (field)
+ && ! integer_zerop (DECL_SIZE (field))
+ && host_integerp (DECL_SIZE (field), 1)
+ && host_integerp (rli->offset, 1)
+ && host_integerp (TYPE_SIZE (type), 1))
{
unsigned int type_align = TYPE_ALIGN (type);
- register tree dsize = DECL_SIZE (field);
- int field_size = TREE_INT_CST_LOW (dsize);
+ tree dsize = DECL_SIZE (field);
+ HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
+ HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
+ HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
if (maximum_field_alignment != 0)
type_align = MIN (type_align, maximum_field_alignment);
@@ -672,51 +753,63 @@ layout_field (rli, field)
Advance to next boundary if necessary. */
/* ??? This code should match the code above for the
PCC_BITFIELD_TYPE_MATTERS case. */
- if (rli->const_size / type_align
- != (rli->const_size + field_size - 1) / type_align)
- rli->const_size = CEIL (rli->const_size, type_align) * type_align;
+ if ((offset * BITS_PER_UNIT + bit_offset) / type_align
+ != ((offset * BITS_PER_UNIT + bit_offset + field_size - 1)
+ / type_align))
+ rli->bitpos = round_up (rli->bitpos, type_align);
}
#endif
- /* Size so far becomes the position of this field. */
-
- if (rli->var_size && rli->const_size)
- DECL_FIELD_BITPOS (field)
- = size_binop (PLUS_EXPR, rli->var_size, bitsize_int (rli->const_size));
- else if (rli->var_size)
- DECL_FIELD_BITPOS (field) = rli->var_size;
+ if (! TREE_CONSTANT (rli->offset))
+ rli->offset_align = DECL_ALIGN (field);
+
+ /* Offset so far becomes the position of this field after normalizing. */
+ normalize_rli (rli);
+ DECL_FIELD_OFFSET (field) = rli->offset;
+ DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
+ DECL_OFFSET_ALIGN (field) = rli->offset_align;
+
+ /* If this field ended up more aligned than we thought it would be (we
+ approximate this by seeing if its position changed), lay out the field
+ again; perhaps we can use an integral mode for it now. */
+ if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field))
+ && TREE_CONSTANT (DECL_FIELD_OFFSET (field)))
+ actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
+ & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
+ else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
+ actual_align = (BITS_PER_UNIT
+ * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
+ & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
else
+ actual_align = DECL_OFFSET_ALIGN (field);
+
+ if (known_align != actual_align)
+ layout_decl (field, actual_align);
+
+ /* Now add size of this field to the size of the record. If the size is
+ not constant, treat the field as being a multiple of bytes and just
+ adjust the offset, resetting the bit position. Otherwise, apportion the
+ size amongst the bit position and offset. First handle the case of an
+ unspecified size, which can happen when we have an invalid nested struct
+ definition, such as struct j { struct j { int i; } }. The error message
+ is printed in finish_struct. */
+ if (DECL_SIZE (field) == 0)
+ /* Do nothing. */;
+ else if (! TREE_CONSTANT (DECL_SIZE_UNIT (field)))
{
- DECL_FIELD_BITPOS (field) = bitsize_int (rli->const_size);
-
- /* If this field ended up more aligned than we thought it
- would be (we approximate this by seeing if its position
- changed), lay out the field again; perhaps we can use an
- integral mode for it now. */
- if (known_align != rli->const_size)
- layout_decl (field, rli->const_size);
+ rli->offset
+ = size_binop (PLUS_EXPR, rli->offset,
+ convert (sizetype,
+ size_binop (CEIL_DIV_EXPR, rli->bitpos,
+ bitsize_unit_node)));
+ rli->offset
+ = size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
+ rli->bitpos = bitsize_zero_node;
}
-
- /* Now add size of this field to the size of the record. */
- dsize = DECL_SIZE (field);
-
- /* This can happen when we have an invalid nested struct definition,
- such as struct j { struct j { int i; } }. The error message is
- printed in finish_struct. */
- if (dsize == 0)
- /* Do nothing. */;
- else if (TREE_CODE (dsize) == INTEGER_CST
- && ! TREE_CONSTANT_OVERFLOW (dsize)
- && TREE_INT_CST_HIGH (dsize) == 0
- && TREE_INT_CST_LOW (dsize) + rli->const_size >= rli->const_size)
- /* Use const_size if there's no overflow. */
- rli->const_size += TREE_INT_CST_LOW (dsize);
else
{
- if (rli->var_size == NULL_TREE)
- rli->var_size = dsize;
- else
- rli->var_size = size_binop (PLUS_EXPR, rli->var_size, dsize);
+ rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
+ normalize_rli (rli);
}
}
@@ -728,18 +821,15 @@ static void
finalize_record_size (rli)
record_layout_info rli;
{
- /* Work out the total size and alignment of the record as one
- expression and store in the record type. Round it up to a
- multiple of the record's alignment. */
- if (rli->var_size == NULL_TREE)
- TYPE_SIZE (rli->t) = bitsize_int (rli->const_size);
- else
- {
- if (rli->const_size)
- rli->var_size = size_binop (PLUS_EXPR, rli->var_size,
- bitsize_int (rli->const_size));
- TYPE_SIZE (rli->t) = rli->var_size;
- }
+ tree unpadded_size, unpadded_size_unit;
+
+ /* Next move any full bytes of bits into the byte size. */
+ rli->offset
+ = size_binop (PLUS_EXPR, rli->offset,
+ convert (sizetype,
+ size_binop (TRUNC_DIV_EXPR, rli->bitpos,
+ bitsize_unit_node)));
+ rli->bitpos = size_binop (TRUNC_MOD_EXPR, rli->bitpos, bitsize_unit_node);
/* Determine the desired alignment. */
#ifdef ROUND_TYPE_ALIGN
@@ -749,45 +839,55 @@ finalize_record_size (rli)
TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif
+ unpadded_size
+ = size_binop (PLUS_EXPR, rli->bitpos,
+ size_binop (MULT_EXPR, convert (bitsizetype, rli->offset),
+ bitsize_unit_node));
+
+ unpadded_size_unit
+ = size_binop (PLUS_EXPR, rli->offset,
+ convert (sizetype,
+ size_binop (CEIL_DIV_EXPR, rli->bitpos,
+ bitsize_unit_node)));
+
/* Record the un-rounded size in the binfo node. But first we check
the size of TYPE_BINFO to make sure that BINFO_SIZE is available. */
if (TYPE_BINFO (rli->t) && TREE_VEC_LENGTH (TYPE_BINFO (rli->t)) > 6)
{
- TYPE_BINFO_SIZE (rli->t) = TYPE_SIZE (rli->t);
- TYPE_BINFO_SIZE_UNIT (rli->t)
- = convert (sizetype,
- size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (rli->t),
- bitsize_int (BITS_PER_UNIT)));
+ TYPE_BINFO_SIZE (rli->t) = unpadded_size;
+ TYPE_BINFO_SIZE_UNIT (rli->t) = unpadded_size_unit;
}
-
- {
- tree unpadded_size = TYPE_SIZE (rli->t);
+ /* Round the size up to be a multiple of the required alignment */
#ifdef ROUND_TYPE_SIZE
- TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
- TYPE_ALIGN (rli->t));
+ TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
+ TYPE_ALIGN (rli->t));
+ TYPE_SIZE_UNIT (rli->t)
+ = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
+ TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#else
- /* Round the size up to be a multiple of the required alignment */
- TYPE_SIZE (rli->t) = round_up (TYPE_SIZE (rli->t), TYPE_ALIGN (rli->t));
+ TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
+ TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
+ TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#endif
- if (warn_padded && rli->var_size == NULL_TREE
- && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
- warning ("padding struct size to alignment boundary");
- }
+ if (warn_padded && TREE_CONSTANT (unpadded_size)
+ && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
+ warning ("padding struct size to alignment boundary");
- if (warn_packed && TYPE_PACKED (rli->t) && !rli->packed_maybe_necessary
- && rli->var_size == NULL_TREE)
+ if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
+ && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
+ && TREE_CONSTANT (unpadded_size))
{
tree unpacked_size;
- TYPE_PACKED (rli->t) = 0;
#ifdef ROUND_TYPE_ALIGN
rli->unpacked_align
= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif
+
#ifdef ROUND_TYPE_SIZE
unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
rli->unpacked_align);
@@ -797,6 +897,8 @@ finalize_record_size (rli)
if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
{
+ TYPE_PACKED (rli->t) = 0;
+
if (TYPE_NAME (rli->t))
{
char *name;
@@ -805,6 +907,7 @@ finalize_record_size (rli)
name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
else
name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
+
if (STRICT_ALIGNMENT)
warning ("packed attribute causes inefficient alignment for `%s'", name);
else
@@ -818,7 +921,6 @@ finalize_record_size (rli)
warning ("packed attribute is unnecessary");
}
}
- TYPE_PACKED (rli->t) = 1;
}
}
@@ -828,79 +930,77 @@ static void
compute_record_mode (type)
tree type;
{
+ tree field;
+ enum machine_mode mode = VOIDmode;
+
/* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
However, if possible, we use a mode that fits in a register
instead, in order to allow for better optimization down the
line. */
TYPE_MODE (type) = BLKmode;
- if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
- {
- tree field;
- enum machine_mode mode = VOIDmode;
-
- /* A record which has any BLKmode members must itself be
- BLKmode; it can't go in a register. Unless the member is
- BLKmode only because it isn't aligned. */
- for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
- {
- unsigned HOST_WIDE_INT bitpos;
-
- if (TREE_CODE (field) != FIELD_DECL
- || TREE_CODE (TREE_TYPE (field)) == ERROR_MARK)
- continue;
- if (TYPE_MODE (TREE_TYPE (field)) == BLKmode
- && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
- return;
-
- if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
- return;
+ if (! host_integerp (TYPE_SIZE (type), 1))
+ return;
- bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
+ /* A record which has any BLKmode members must itself be
+ BLKmode; it can't go in a register. Unless the member is
+ BLKmode only because it isn't aligned. */
+ for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
+ {
+ unsigned HOST_WIDE_INT bitpos;
- /* Must be BLKmode if any field crosses a word boundary,
- since extract_bit_field can't handle that in registers. */
- if (bitpos / BITS_PER_WORD
- != ((TREE_INT_CST_LOW (DECL_SIZE (field)) + bitpos - 1)
- / BITS_PER_WORD)
- /* But there is no problem if the field is entire words. */
- && TREE_INT_CST_LOW (DECL_SIZE (field)) % BITS_PER_WORD != 0)
- return;
+ if (TREE_CODE (field) != FIELD_DECL)
+ continue;
- /* If this field is the whole struct, remember its mode so
- that, say, we can put a double in a class into a DF
- register instead of forcing it to live in the stack. */
- if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
- mode = DECL_MODE (field);
+ if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
+ || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
+ && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
+ || ! host_integerp (bit_position (field), 1)
+ || ! host_integerp (DECL_SIZE (field), 1))
+ return;
+
+ bitpos = int_bit_position (field);
+
+ /* Must be BLKmode if any field crosses a word boundary,
+ since extract_bit_field can't handle that in registers. */
+ if (bitpos / BITS_PER_WORD
+ != ((TREE_INT_CST_LOW (DECL_SIZE (field)) + bitpos - 1)
+ / BITS_PER_WORD)
+ /* But there is no problem if the field is entire words. */
+ && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
+ return;
+
+ /* If this field is the whole struct, remember its mode so
+ that, say, we can put a double in a class into a DF
+ register instead of forcing it to live in the stack. */
+ if (field == TYPE_FIELDS (type) && TREE_CHAIN (field) == 0)
+ mode = DECL_MODE (field);
#ifdef STRUCT_FORCE_BLK
- /* With some targets, eg. c4x, it is sub-optimal
- to access an aligned BLKmode structure as a scalar. */
- if (mode == VOIDmode && STRUCT_FORCE_BLK (field))
- return;
+ /* With some targets, eg. c4x, it is sub-optimal
+ to access an aligned BLKmode structure as a scalar. */
+ if (mode == VOIDmode && STRUCT_FORCE_BLK (field))
+ return;
#endif /* STRUCT_FORCE_BLK */
- }
+ }
- if (mode != VOIDmode)
- /* We only have one real field; use its mode. */
- TYPE_MODE (type) = mode;
- else
- TYPE_MODE (type)
- = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
-
- /* If structure's known alignment is less than what the scalar
- mode would need, and it matters, then stick with BLKmode. */
- if (TYPE_MODE (type) != BLKmode
- && STRICT_ALIGNMENT
- && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
- || (TYPE_ALIGN (type) >=
- GET_MODE_ALIGNMENT (TYPE_MODE (type)))))
- {
- /* If this is the only reason this type is BLKmode, then
- don't force containing types to be BLKmode. */
- TYPE_NO_FORCE_BLK (type) = 1;
- TYPE_MODE (type) = BLKmode;
- }
+ if (mode != VOIDmode)
+ /* We only have one real field; use its mode. */
+ TYPE_MODE (type) = mode;
+ else
+ TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
+
+ /* If structure's known alignment is less than what the scalar
+ mode would need, and it matters, then stick with BLKmode. */
+ if (TYPE_MODE (type) != BLKmode
+ && STRICT_ALIGNMENT
+ && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
+ || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
+ {
+ /* If this is the only reason this type is BLKmode, then
+ don't force containing types to be BLKmode. */
+ TYPE_NO_FORCE_BLK (type) = 1;
+ TYPE_MODE (type) = BLKmode;
}
}
@@ -929,18 +1029,8 @@ finalize_type_size (type)
= ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
#endif
-#ifdef ROUND_TYPE_SIZE
- if (TYPE_SIZE (type) != 0)
- TYPE_SIZE (type)
- = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
-#endif
-
- /* Evaluate nonconstant size only once, either now or as soon as safe. */
- if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
-
/* If we failed to find a simple way to calculate the unit size
- of the type above, find it by division. */
+ of the type, find it by division. */
if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
/* TYPE_SIZE (type) is computed in bitsizetype. After the division, the
result will fit in sizetype. We will get more efficient code using
@@ -948,9 +1038,26 @@ finalize_type_size (type)
TYPE_SIZE_UNIT (type)
= convert (sizetype,
size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
- bitsize_int (BITS_PER_UNIT)));
+ bitsize_unit_node));
- /* Once again evaluate only once, either now or as soon as safe. */
+ if (TYPE_SIZE (type) != 0)
+ {
+#ifdef ROUND_TYPE_SIZE
+ TYPE_SIZE (type)
+ = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
+ TYPE_SIZE_UNIT (type)
+ = ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
+ TYPE_ALIGN (type) / BITS_PER_UNIT);
+#else
+ TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
+ TYPE_SIZE_UNIT (type)
+ = round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
+#endif
+ }
+
+ /* Evaluate nonconstant sizes only once, either now or as soon as safe. */
+ if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
+ TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
if (TYPE_SIZE_UNIT (type) != 0
&& TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
@@ -987,16 +1094,11 @@ void
finish_record_layout (rli)
record_layout_info rli;
{
- /* Use finish_union_layout for unions. */
- if (TREE_CODE (rli->t) != RECORD_TYPE)
- finish_union_layout (rli);
- else
- {
- /* Compute the final size. */
- finalize_record_size (rli);
- /* Compute the TYPE_MODE for the record. */
- compute_record_mode (rli->t);
- }
+ /* Compute the final size. */
+ finalize_record_size (rli);
+
+ /* Compute the TYPE_MODE for the record. */
+ compute_record_mode (rli->t);
/* Lay out any static members. This is done now because their type
may use the record's type. */
@@ -1008,83 +1110,10 @@ finish_record_layout (rli)
/* Perform any last tweaks to the TYPE_SIZE, etc. */
finalize_type_size (rli->t);
+
/* Clean up. */
free (rli);
}
-
-/* Like finish_record_layout, but for unions. */
-
-static void
-finish_union_layout (rli)
- record_layout_info rli;
-{
- /* This function should only be used for unions; use
- finish_record_layout for RECORD_TYPEs. */
- if (TREE_CODE (rli->t) != UNION_TYPE
- && TREE_CODE (rli->t) != QUAL_UNION_TYPE)
- abort ();
-
- /* Determine the ultimate size of the union (in bytes). */
- if (NULL == rli->var_size)
- TYPE_SIZE (rli->t)
- = bitsize_int (CEIL (rli->const_size, BITS_PER_UNIT) * BITS_PER_UNIT);
-
- else if (rli->const_size == 0)
- TYPE_SIZE (rli->t) = rli->var_size;
- else
- TYPE_SIZE (rli->t) = size_binop (MAX_EXPR, rli->var_size,
- round_up (bitsize_int (rli->const_size),
- BITS_PER_UNIT));
-
- /* Determine the desired alignment. */
-#ifdef ROUND_TYPE_ALIGN
- TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
- rli->record_align);
-#else
- TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
-#endif
-
-#ifdef ROUND_TYPE_SIZE
- TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
- TYPE_ALIGN (rli->t));
-#else
- /* Round the size up to be a multiple of the required alignment */
- TYPE_SIZE (rli->t) = round_up (TYPE_SIZE (rli->t),
- TYPE_ALIGN (rli->t));
-#endif
-
- TYPE_MODE (rli->t) = BLKmode;
- if (TREE_CODE (TYPE_SIZE (rli->t)) == INTEGER_CST
- /* If structure's known alignment is less than
- what the scalar mode would need, and it matters,
- then stick with BLKmode. */
- && (! STRICT_ALIGNMENT
- || TYPE_ALIGN (rli->t) >= BIGGEST_ALIGNMENT
- || compare_tree_int (TYPE_SIZE (rli->t),
- TYPE_ALIGN (rli->t)) <= 0))
- {
- tree field;
-
- /* A union which has any BLKmode members must itself be BLKmode;
- it can't go in a register.
- Unless the member is BLKmode only because it isn't aligned. */
- for (field = TYPE_FIELDS (rli->t);
- field;
- field = TREE_CHAIN (field))
- {
- if (TREE_CODE (field) != FIELD_DECL)
- continue;
-
- if (TYPE_MODE (TREE_TYPE (field)) == BLKmode
- && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
- return;
- }
-
- TYPE_MODE (rli->t)
- = mode_for_size_tree (TYPE_SIZE (rli->t), MODE_INT, 1);
- }
-}
-
/* Calculate the mode, size, and alignment for TYPE.
For an array type, calculate the element separation as well.
@@ -1163,8 +1192,7 @@ layout_type (type)
break;
case VOID_TYPE:
- /* VOID_TYPE is an incompletable type, it has no size */
- TYPE_SIZE_UNIT (type) = size_zero_node;
+ /* This is an incomplete type and so doesn't have a size. */
TYPE_ALIGN (type) = 1;
TYPE_MODE (type) = VOIDmode;
break;
@@ -1325,17 +1353,21 @@ layout_type (type)
record_layout_info rli;
/* Initialize the layout information. */
- rli = new_record_layout_info (type);
+ rli = start_record_layout (type);
+
/* If this is a QUAL_UNION_TYPE, we want to process the fields
in the reverse order in building the COND_EXPR that denotes
its size. We reverse them again later. */
if (TREE_CODE (type) == QUAL_UNION_TYPE)
TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
- /* Layout all the fields. */
+
+ /* Place all the fields. */
for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
- layout_field (rli, field);
+ place_field (rli, field);
+
if (TREE_CODE (type) == QUAL_UNION_TYPE)
TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
+
/* Finish laying out the record. */
finish_record_layout (rli);
}
@@ -1448,6 +1480,7 @@ initialize_sizetypes ()
TREE_UNSIGNED (t) = 1;
TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
+ TYPE_IS_SIZETYPE (t) = 1;
/* 1000 avoids problems with possible overflow and is certainly
larger than any size value we'd want to be storing. */
@@ -1483,9 +1516,11 @@ set_sizetype (type)
/* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE. */
sizetype = copy_node (type);
TYPE_DOMAIN (sizetype) = type;
+ TYPE_IS_SIZETYPE (sizetype) = 1;
bitsizetype = make_node (INTEGER_TYPE);
TYPE_NAME (bitsizetype) = TYPE_NAME (type);
TYPE_PRECISION (bitsizetype) = precision;
+ TYPE_IS_SIZETYPE (bitsizetype) = 1;
if (TREE_UNSIGNED (type))
fixup_unsigned_type (bitsizetype);
@@ -1624,7 +1659,7 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
int volatilep;
{
enum machine_mode mode;
- int unit = 0;
+ unsigned int unit = 0;
/* Find the narrowest integer mode that contains the bit field. */
for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
@@ -1643,7 +1678,7 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
if the extra 4th byte is past the end of memory.
(Though at least one Unix compiler ignores this problem:
that on the Sequent 386 machine. */
- || MIN (unit, BIGGEST_ALIGNMENT) > (int) align
+ || MIN (unit, BIGGEST_ALIGNMENT) > align
|| (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
return VOIDmode;
@@ -1657,7 +1692,7 @@ get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
unit = GET_MODE_BITSIZE (tmode);
if (bitpos / unit == (bitpos + bitsize - 1) / unit
&& unit <= BITS_PER_WORD
- && unit <= (int) MIN (align, BIGGEST_ALIGNMENT)
+ && unit <= MIN (align, BIGGEST_ALIGNMENT)
&& (largest_mode == VOIDmode
|| unit <= GET_MODE_BITSIZE (largest_mode)))
wide_mode = tmode;
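/* Editorial example, not part of the patch: as a worked instance of
   the mode search in get_best_mode, take bitpos == 30 and bitsize == 3.
   The field occupies bits 30..32, so for unit == 8, 16 or 32 we get
   bitpos / unit != (bitpos + bitsize - 1) / unit and the mode is
   rejected; DImode (unit == 64) is the narrowest mode whose unit
   contains the whole field, provided the alignment permits it.  */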