/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
-#define SLOW_UNALIGNED_ACCESS 0
+#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
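With this change the default now tracks STRICT_ALIGNMENT, so strict-alignment targets keep the old behaviour automatically, while a target whose hardware tolerates unaligned accesses but performs them slowly (for instance by trapping to a fixup handler) can say so explicitly.  A minimal sketch of such an override in a target description header; the values are illustrative and not taken from any real port:

/* Illustration only, not part of this patch: a machine that permits
   unaligned loads and stores but executes them slowly would leave
   STRICT_ALIGNMENT at 0 and override the new default like this.  */
#define STRICT_ALIGNMENT 0
#define SLOW_UNALIGNED_ACCESS 1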
/* Register mappings for target machines without register windows. */
data.to_addr = copy_addr_to_reg (to_addr);
}
- if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
+ if (! SLOW_UNALIGNED_ACCESS
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
register int n_insns = 0;
int max_size = MOVE_MAX + 1;
- if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
+ if (! SLOW_UNALIGNED_ACCESS
|| align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
align = MOVE_MAX;
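Both hunks above make the same decision, once for the block mover itself and once for the routine that counts its insns.  A standalone sketch of that decision, with the target macros standing for themselves and the helper name invented for the example:

/* Sketch, not part of the patch: pick the alignment the block mover
   will actually assume.  If unaligned access is not slow, or the
   block is already aligned to the widest useful unit, move whole
   words (MOVE_MAX bytes) at a time; otherwise honor the known
   alignment.  */
static int
effective_move_align (int align)
{
  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX
      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    return MOVE_MAX;
  return align;
}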
/* Here we avoid the case of a structure whose weak alignment
forces many pushes of a small amount of data,
and such small pushes do rounding that causes trouble. */
- && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
+ && ((! SLOW_UNALIGNED_ACCESS)
|| align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
|| PUSH_ROUNDING (align) == align)
&& PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
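The condition above guards pushing a block piecemeal.  If unaligned pieces are slow, each piece is only `align' bytes wide, and a target whose PUSH_ROUNDING rounds every push up (say, to 4 bytes) would scatter padding through the pushed block.  In predicate form, as a sketch with an invented helper name:

/* Sketch, not part of the patch: push a block in small pieces only
   if unaligned pieces are cheap, or the pieces are maximally
   aligned, or a piece of `align' bytes is not rounded up when
   pushed, and the total size itself needs no rounding.  */
static int
push_block_by_pieces_ok (int align, int size)
{
  return (! SLOW_UNALIGNED_ACCESS
          || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
          || PUSH_ROUNDING (align) == align)
         && PUSH_ROUNDING (size) == size;
}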
|| (TREE_CODE (to) == ARRAY_REF
&& ((TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST)
- || (STRICT_ALIGNMENT && get_inner_unaligned_p (to)))))
+ || (SLOW_UNALIGNED_ACCESS && get_inner_unaligned_p (to)))))
{
enum machine_mode mode1;
int bitsize;
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
- || (STRICT_ALIGNMENT
+ || (SLOW_UNALIGNED_ACCESS
&& align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
- || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
+ || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
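On the store side, a field now goes through the bit-field path whenever unaligned stores are slow and either the destination's known byte alignment is weaker than the mode wants or the field does not start on a mode-aligned bit boundary.  The same test in isolation, as a sketch with an invented helper name:

/* Sketch, not part of the patch: must this field be stored as a bit
   field instead of through an ordinary memory reference?  `align' is
   in bytes, `bitpos' in bits.  */
static int
must_store_as_bitfield (int align, int bitpos, enum machine_mode mode)
{
  return SLOW_UNALIGNED_ACCESS
         && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
             || bitpos % GET_MODE_ALIGNMENT (mode) != 0);
}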
if ((TREE_CODE (index) != INTEGER_CST
|| TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
- && (! STRICT_ALIGNMENT || ! get_inner_unaligned_p (exp)))
+ && (! SLOW_UNALIGNED_ACCESS || ! get_inner_unaligned_p (exp)))
{
/* Nonconstant array index or nonconstant element size, and
not an array in an unaligned (packed) structure field.
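The packed case excluded from this path looks roughly like the source fragment below (illustration only; the type and field names are made up).  With a variable index into an array that sits inside a packed structure, the element address cannot be assumed to carry the element type's natural alignment, so the reference is handled by the unaligned machinery instead:

/* Illustration only: a non-constant index into an array whose
   enclosing structure is packed, so the elements may be misaligned.  */
struct wrapper
{
  char tag;
  int data[8];
} __attribute__ ((packed));

int
fetch (struct wrapper *w, int i)
{
  return w->data[i];
}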
|| GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
/* If the field isn't aligned enough to fetch as a memref,
fetch it as a bit field. */
- || (STRICT_ALIGNMENT
+ || (SLOW_UNALIGNED_ACCESS
&& TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
- || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
+ || (SLOW_UNALIGNED_ACCESS
+ && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
{
enum machine_mode ext_mode = mode;
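The load side mirrors the store-side check, except that the alignment comes from the type of the containing object (TYPE_ALIGN, in bits) rather than from a byte count handed in by the caller.  As a sketch, with an invented helper name:

/* Sketch, not part of the patch: the load-side twin of the store
   check, using the containing object's type alignment.  */
static int
must_fetch_as_bitfield (tree container, int bitpos, enum machine_mode mode)
{
  return SLOW_UNALIGNED_ACCESS
         && (TYPE_ALIGN (TREE_TYPE (container)) < GET_MODE_ALIGNMENT (mode)
             || bitpos % GET_MODE_ALIGNMENT (mode) != 0);
}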