HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
enum machine_mode mode = GET_MODE (tmps[i]);
unsigned int bytelen = GET_MODE_SIZE (mode);
- unsigned int adj_bytelen = bytelen;
+ unsigned int adj_bytelen;
rtx dest = dst;
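+	  /* bytepos is this fragment's byte offset into the structure;
+	     tmps[i] holds the fragment's value.  */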
/* Handle trailing fragments that run over the size of the struct. */
if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
adj_bytelen = ssize - bytepos;
+ else
+ adj_bytelen = bytelen;
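+
+	  /* From here on, adj_bytelen is the number of bytes of this
+	     fragment that lie within the structure: bytelen, except for
+	     a trailing fragment that runs past ssize.  */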
if (GET_CODE (dst) == CONCAT)
{
}
}
+ /* Handle trailing fragments that run over the size of the struct. */
if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
{
	  /* store_bit_field always takes its value from the lsb.  Move
	     the fragment to the lsb if it's not already there; in the
	     full source this is guarded by a big-endian/padding check.  */
	  int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				  shift, tmps[i], 0);
}
- bytelen = adj_bytelen;
+
+ /* Make sure not to write past the end of the struct. */
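+	  /* The bitregion arguments (bytepos and ssize scaled to bits)
+	     bound every access the bit-field machinery may emit, so it
+	     cannot use a wider store that would touch memory beyond
+	     the structure.  */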
+ store_bit_field (dest,
+ adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
+ bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT,
+ VOIDmode, tmps[i]);
}
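+
+      /* Ordinary fragments: a suitably aligned MEM destination gets a
+	 plain full-width move; anything else goes through
+	 store_bit_field.  */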
/* Optimize the access just a bit. */
- if (MEM_P (dest)
- && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
- || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
- && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
- && bytelen == GET_MODE_SIZE (mode))
+ else if (MEM_P (dest)
+ && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
+ || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
+ && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
+ && bytelen == GET_MODE_SIZE (mode))
emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
+
else
store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
0, 0, mode, tmps[i]);
@@ ... @@ expand_assignment
	      expand_insn (icode, 2, ops);
}
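+	  /* No movmisalign<mode> pattern: fall back to a bit-field
+	     store of the whole value.  */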
else
- store_bit_field (mem, GET_MODE_BITSIZE (mode),
- 0, 0, 0, mode, reg);
+ store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
return;
}
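
For context, a minimal sketch of the case the emit_group_store hunks guard
against.  The types, the names, and the assumption that the ABI returns
`struct five' in two registers are illustrative, not taken from the patch:

/* Illustrative only.  A 5-byte struct returned in registers: the
   second register fragment covers just one byte of the struct, so
   storing the whole register would clobber bytes 5..7, i.e. the
   member `after' below.  The patched code clips that final store to
   adj_bytelen bytes and bounds it with the bitregion pair.  */

struct five { char c[5]; };

struct holder {
  struct five s;   /* bytes 0..4, filled from the return registers */
  char after[3];   /* bytes 5..7, must survive the store */
};

extern struct five make_five (void);

void
fill (struct holder *h)
{
  h->s = make_five ();  /* expanded via emit_group_store */
}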