author     ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>  2008-06-12 11:36:34 +0000
committer  ebotcazou <ebotcazou@138bc75d-0d04-0410-961f-82ee72b054a4>  2008-06-12 11:36:34 +0000
commit     3bfa8adadb3a43aa297bebe84370f9ae3922ad34 (patch)
tree       8b62385d4dccde65201b9a345733145b0344648d
parent     a01549829b47470029aa87c71ba1afacffe3bbfa (diff)
download   gcc-3bfa8adadb3a43aa297bebe84370f9ae3922ad34.tar.gz
* expr.c (store_field): Do a block copy from BLKmode to BLKmode-like.
(get_inner_reference): Use BLKmode for byte-aligned BLKmode bitfields.

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@136699 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/ChangeLog   5
-rw-r--r--  gcc/expr.c     54
2 files changed, 41 insertions, 18 deletions
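
For context, the kind of source-level construct this patch targets is an aggregate-typed field pushed off its natural alignment, so that the middle end sees it as a byte-aligned bit-field whose mode is BLKmode. A minimal GNU C sketch (type and field names are illustrative only; whether the member is actually classified as a BLKmode bit-field depends on the front end and target):

/* A packed struct forces the aggregate member `payload' to byte
   offset 1.  The middle end may then treat the member as a
   byte-aligned BLKmode bit-field; with this patch, store_field
   can copy such a field with a block move.  */

#include <string.h>

struct inner { char buf[5]; };

struct outer {
  char tag;                       /* shifts `payload' to offset 1 */
  struct inner payload;
} __attribute__((packed));

int main (void)
{
  struct outer o;
  struct inner i = { "abcd" };
  o.payload = i;                  /* whole-aggregate store into the field */
  return memcmp (o.payload.buf, i.buf, sizeof i.buf) != 0;
}
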
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index f2c51854bcd..58260bbaff5 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,8 @@
+2008-06-12 Eric Botcazou <ebotcazou@adacore.com>
+
+ * expr.c (store_field): Do a block copy from BLKmode to BLKmode-like.
+ (get_inner_reference): Use BLKmode for byte-aligned BLKmode bitfields.
+
2008-06-12 Jakub Jelinek <jakub@redhat.com>

	PR middle-end/36506
diff --git a/gcc/expr.c b/gcc/expr.c
index d55a2797fb5..2357170dd16 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -5795,13 +5795,19 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
&& mode != TYPE_MODE (TREE_TYPE (exp)))
temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
- /* If the modes of TARGET and TEMP are both BLKmode, both
+ /* If the modes of TEMP and TARGET are both BLKmode, both
must be in memory and BITPOS must be aligned on a byte
- boundary. If so, we simply do a block copy. */
- if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
+ boundary. If so, we simply do a block copy. Likewise
+ for a BLKmode-like TARGET. */
+ if (GET_MODE (temp) == BLKmode
+ && (GET_MODE (target) == BLKmode
+ || (MEM_P (target)
+ && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
+ && (bitpos % BITS_PER_UNIT) == 0
+ && (bitsize % BITS_PER_UNIT) == 0)))
{
gcc_assert (MEM_P (target) && MEM_P (temp)
- && !(bitpos % BITS_PER_UNIT));
+ && (bitpos % BITS_PER_UNIT) == 0);
target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
emit_block_move (target, temp,
@@ -5847,12 +5853,11 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
If any of the extraction expressions is volatile,
we store 1 in *PVOLATILEP. Otherwise we don't change that.
- If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
- is a mode that can be used to access the field. In that case, *PBITSIZE
- is redundant.
+ If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
+ Otherwise, it is a mode that can be used to access the field.
If the field describes a variable-sized object, *PMODE is set to
- VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
+ BLKmode and *PBITSIZE is set to -1. An access cannot be made in
this case, but the address of the object can be found.
If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
@@ -5877,6 +5882,7 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
{
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
+ bool blkmode_bitfield = false;
tree offset = size_zero_node;
tree bit_offset = bitsize_zero_node;
@@ -5884,11 +5890,14 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
outermost expression. */
if (TREE_CODE (exp) == COMPONENT_REF)
{
- size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
- if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
- mode = DECL_MODE (TREE_OPERAND (exp, 1));
+ tree field = TREE_OPERAND (exp, 1);
+ size_tree = DECL_SIZE (field);
+ if (!DECL_BIT_FIELD (field))
+ mode = DECL_MODE (field);
+ else if (DECL_MODE (field) == BLKmode)
+ blkmode_bitfield = true;
- *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
+ *punsignedp = DECL_UNSIGNED (field);
}
else if (TREE_CODE (exp) == BIT_FIELD_REF)
{
@@ -5922,8 +5931,6 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
*pbitsize = tree_low_cst (size_tree, 1);
}
- *pmode = mode;
-
/* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
while (1)
@@ -6018,14 +6025,25 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
if (double_int_fits_in_shwi_p (tem))
{
*pbitpos = double_int_to_shwi (tem);
- *poffset = NULL_TREE;
- return exp;
+ *poffset = offset = NULL_TREE;
}
}
/* Otherwise, split it up. */
- *pbitpos = tree_low_cst (bit_offset, 0);
- *poffset = offset;
+ if (offset)
+ {
+ *pbitpos = tree_low_cst (bit_offset, 0);
+ *poffset = offset;
+ }
+
+ /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
+ if (mode == VOIDmode
+ && blkmode_bitfield
+ && (*pbitpos % BITS_PER_UNIT) == 0
+ && (*pbitsize % BITS_PER_UNIT) == 0)
+ *pmode = BLKmode;
+ else
+ *pmode = mode;
return exp;
}
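
Both hunks gate the new path on the same byte-alignment test: the bit position and the bit size of the field must both be multiples of BITS_PER_UNIT. A standalone, runnable sketch of that predicate (byte_aligned_p is a hypothetical helper written for illustration, not a function in the patch; BITS_PER_UNIT is 8 on all common GCC targets):

#include <stdio.h>

#define BITS_PER_UNIT 8   /* as on all common GCC targets */

/* Hypothetical helper mirroring the conditions added to
   store_field and get_inner_reference: a block copy (or a
   BLKmode access) is only usable when the field both starts
   and ends on a byte boundary.  */
static int
byte_aligned_p (long bitpos, long bitsize)
{
  return (bitpos % BITS_PER_UNIT) == 0
         && (bitsize % BITS_PER_UNIT) == 0;
}

int main (void)
{
  /* A 5-byte field at byte offset 1: eligible for a block copy.  */
  printf ("%d\n", byte_aligned_p (1 * BITS_PER_UNIT, 5 * BITS_PER_UNIT));

  /* A 3-bit field at bit offset 2: must use the bit-field path.  */
  printf ("%d\n", byte_aligned_p (2, 3));
  return 0;
}
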