+2016-02-18 Nick Clifton <nickc@redhat.com>
+
+ PR target/62554
+ PR target/69610
+ * config/arm/arm.c (arm_option_override_internal): Disable
+ interworking if the target does not support Thumb instructions.
+ (arm_reload_in_hi): Handle the case where a register-to-register
+ move needs reloading because there is no simple pattern to handle
+ it.
+ (arm_reload_out_hi): Likewise.
+
2016-02-18 Richard Biener <rguenther@suse.de>
PR middle-end/69854
{
arm_override_options_after_change_1 (opts);
+ if (TARGET_INTERWORK && !ARM_FSET_HAS_CPU1 (insn_flags, FL_THUMB))
+ {
+ /* The default is to enable interworking, so this warning message would
+ be confusing to users who have just compiled with, e.g., -march=armv3. */
+ /* warning (0, "ignoring -minterwork because target CPU does not support THUMB"); */
+ opts->x_target_flags &= ~MASK_INTERWORK;
+ }
+
if (TARGET_THUMB_P (opts->x_target_flags)
&& !(ARM_FSET_HAS_CPU1 (insn_flags, FL_THUMB)))
{
else
/* The slot is out of range, or was dressed up in a SUBREG. */
base = reg_equiv_address (REGNO (ref));
+
+ /* PR 62554: If there is no equivalent memory location then just move
+ the value as an SImode register move. This happens when the target
+ architecture variant does not have an HImode register move. */
+ if (base == NULL)
+ {
+ gcc_assert (REG_P (operands[0]));
+ emit_insn (gen_movsi (gen_rtx_SUBREG (SImode, operands[0], 0),
+ gen_rtx_SUBREG (SImode, ref, 0)));
+ return;
+ }
}
else
base = find_replacement (&XEXP (ref, 0));
else
/* The slot is out of range, or was dressed up in a SUBREG. */
base = reg_equiv_address (REGNO (ref));
+
+ /* PR 62554: If there is no equivalent memory location then just move
+ the value as an SImode register move. This happens when the target
+ architecture variant does not have an HImode register move. */
+ if (base == NULL)
+ {
+ gcc_assert (REG_P (outval));
+ emit_insn (gen_movsi (gen_rtx_SUBREG (SImode, ref, 0),
+ gen_rtx_SUBREG (SImode, outval, 0)));
+ return;
+ }
}
else
base = find_replacement (&XEXP (ref, 0));
break;
case ARM_FT_INTERWORKED:
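+      /* The BX instruction emitted here is only available from ARMv4T
+         onwards, so an interworked return cannot be generated for
+         earlier architectures.  */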
+ gcc_assert (arm_arch5 || arm_arch4t);
sprintf (instr, "bx%s\t%%|lr", conditional);
break;
--- /dev/null
+/* Check that pre-ARMv4 compilation still works. */
+/* { dg-do compile } */
+/* { dg-options "-marm -march=armv3 -ftree-ter" } */
+/* { dg-require-effective-target arm_arm_ok } */
+
+typedef unsigned short v16u16 __attribute__ ((vector_size (16)));
+typedef unsigned int v16u32 __attribute__ ((vector_size (16)));
+
+unsigned short
+foo (v16u16 v16u16_1, v16u32 v16u32_1)
+{
+ v16u16_1 += (v16u16) v16u32_1;
+ return v16u16_1[5] + v16u32_1[1];
+}
--- /dev/null
+/* Check that pre-ARMv4 compilation still works. */
+/* { dg-do compile } */
+/* { dg-options "-marm -march=armv3 -O2 -fno-forward-propagate" } */
+/* { dg-require-effective-target arm_arm_ok } */
+
+typedef short v16u16 __attribute__ ((vector_size (16)));
+typedef unsigned v16u32 __attribute__ ((vector_size (16)));
+typedef long long v16u64 __attribute__ ((vector_size (16)));
+
+unsigned
+foo (int u16_0, unsigned u32_0, int u64_0, int u16_1, unsigned u64_1,
+     v16u16 v16u16_0, v16u32 v16u32_0, v16u64 v16u64_0,
+     v16u16 v16u16_1, v16u32 v16u32_1, v16u64 v16u64_1)
+{
+ v16u16_1[3] -= v16u32_0[0];
+ v16u16_0 -= (v16u16) v16u32_0;
+  return u16_0 + u32_0 + u64_0 + u16_1
+    + v16u16_0[0] + v16u16_0[2] + v16u16_0[3] + v16u16_0[4] + v16u16_0[5]
+    + v16u32_0[0] + v16u32_0[1] + v16u32_0[3] + v16u64_0[1]
+    + v16u16_1[2] + v16u16_1[3] + v16u16_1[5] + v16u16_1[7]
+    + v16u32_1[0] + v16u32_1[3] + v16u64_1[0] + v16u64_1[1];
+}