if (GET_MODE_CLASS (mode1) == GET_MODE_CLASS (mode2))
return true;
+ /* Allow changes between scalar modes if both modes fit within 64 bits.
+ This is because:
+
+ - We allow all such modes for both FPRs and GPRs.
+ - They occupy a single register for both FPRs and GPRs.
+ - We can reinterpret one mode as another in both types of register. */
+ if (is_a<scalar_mode> (mode1)
+ && is_a<scalar_mode> (mode2)
+ && known_le (GET_MODE_SIZE (mode1), 8)
+ && known_le (GET_MODE_SIZE (mode2), 8))
+ return true;
+
/* We specifically want to allow elements of "structure" modes to
be tieable to the structure. This more general condition allows
other rarer situations too. The reason we don't extend this to
operands[3] = force_reg (<MODE>mode, value);
})
+;; Insert the low SUBDI_BITS bits of register operand 2 into register
+;; operand 0 at the constant bit position given by operand 1 (which must
+;; be a multiple of SUBDI_BITS and leave the field within the mode).
+;; Alternative 0 keeps everything in GPRs and uses BFI; alternatives 1
+;; and 2 put the destination in an FPR and use INS, rescaling the bit
+;; position into a vector element index.
+(define_insn "*insv_reg<mode>_<SUBDI_BITS>"
+ [(set (zero_extract:GPI (match_operand:GPI 0 "register_operand" "+r,w,?w")
+ (const_int SUBDI_BITS)
+ (match_operand 1 "const_int_operand"))
+ (match_operand:GPI 2 "register_operand" "r,w,r"))]
+ "multiple_p (UINTVAL (operands[1]), <SUBDI_BITS>)
+ && UINTVAL (operands[1]) + <SUBDI_BITS> <= <GPI:sizen>"
+ {
+ if (which_alternative == 0)
+ return "bfi\t%<w>0, %<w>2, %1, <SUBDI_BITS>";
+
+ operands[1] = gen_int_mode (UINTVAL (operands[1]) / <SUBDI_BITS>, SImode);
+ if (which_alternative == 1)
+ return "ins\t%0.<bits_etype>[%1], %2.<bits_etype>[0]";
+ return "ins\t%0.<bits_etype>[%1], %w2";
+ }
+ [(set_attr "type" "bfm,neon_ins_q,neon_ins_q")]
+)
+
(define_insn "*insv_reg<mode>"
[(set (zero_extract:GPI (match_operand:GPI 0 "register_operand" "+r")
(match_operand 1 "const_int_operand" "n")
[(set_attr "type" "bfm")]
)
+;; Variant in which the inserted value is a zero-extension of a narrower
+;; register.  Since we insert only SUBDI_BITS bits and the condition
+;; requires SUBDI_BITS <= the source width, the extension cannot affect
+;; the inserted field, so split to the plain insertion pattern on the
+;; lowpart of the (paradoxical) subreg of the source.
+(define_insn_and_split "*aarch64_bfi<GPI:mode><ALLX:mode>_<SUBDI_BITS>"
+ [(set (zero_extract:GPI (match_operand:GPI 0 "register_operand" "+r,w,?w")
+ (const_int SUBDI_BITS)
+ (match_operand 1 "const_int_operand"))
+ (zero_extend:GPI (match_operand:ALLX 2 "register_operand" "r,w,r")))]
+ "<SUBDI_BITS> <= <ALLX:sizen>
+ && multiple_p (UINTVAL (operands[1]), <SUBDI_BITS>)
+ && UINTVAL (operands[1]) + <SUBDI_BITS> <= <GPI:sizen>"
+ "#"
+ "&& 1"
+ [(set (zero_extract:GPI (match_dup 0)
+ (const_int SUBDI_BITS)
+ (match_dup 1))
+ (match_dup 2))]
+ {
+ operands[2] = lowpart_subreg (<GPI:MODE>mode, operands[2],
+ <ALLX:MODE>mode);
+ }
+ [(set_attr "type" "bfm,neon_ins_q,neon_ins_q")]
+)
+
(define_insn "*aarch64_bfi<GPI:mode><ALLX:mode>4"
[(set (zero_extract:GPI (match_operand:GPI 0 "register_operand" "+r")
(match_operand 1 "const_int_operand" "n")
[(set_attr "type" "bfm")]
)
+;; DImode insertion in which the source is a lowpart subreg of a 32-bit
+;; zero-extension.  As with the zero_extend form, the extension cannot
+;; affect the SUBDI_BITS inserted bits (SUBDI_BITS <= the unextended
+;; width), so split to the plain insertion pattern applied to the
+;; DImode lowpart of the unextended register.
+(define_insn_and_split "*aarch64_bfidi<ALLX:mode>_subreg_<SUBDI_BITS>"
+ [(set (zero_extract:DI (match_operand:DI 0 "register_operand" "+r,w,?w")
+ (const_int SUBDI_BITS)
+ (match_operand 1 "const_int_operand"))
+ (match_operator:DI 2 "subreg_lowpart_operator"
+ [(zero_extend:SI
+ (match_operand:ALLX 3 "register_operand" "r,w,r"))]))]
+ "<SUBDI_BITS> <= <ALLX:sizen>
+ && multiple_p (UINTVAL (operands[1]), <SUBDI_BITS>)
+ && UINTVAL (operands[1]) + <SUBDI_BITS> <= 64"
+ "#"
+ "&& 1"
+ [(set (zero_extract:DI (match_dup 0)
+ (const_int SUBDI_BITS)
+ (match_dup 1))
+ (match_dup 2))]
+ {
+ operands[2] = lowpart_subreg (DImode, operands[3], <ALLX:MODE>mode);
+ }
+ [(set_attr "type" "bfm,neon_ins_q,neon_ins_q")]
+)
+
;; Match a bfi instruction where the shift of OP3 means that we are
;; actually copying the least significant bits of OP3 into OP0 by way
;; of the AND masks and the IOR instruction. A similar instruction
[(UNSPECV_ATOMIC_LDOP_OR "ior") (UNSPECV_ATOMIC_LDOP_BIC "bic")
(UNSPECV_ATOMIC_LDOP_XOR "xor") (UNSPECV_ATOMIC_LDOP_PLUS "add")])
+;; Bit widths of QImode, HImode and SImode fields, for patterns that
+;; insert a single sub-DImode field into a wider register.
+(define_int_iterator SUBDI_BITS [8 16 32])
+
;; -------------------------------------------------------------------
;; Int Iterators Attributes.
;; -------------------------------------------------------------------
(UNSPECV_SET_FPSR "fpsr")
(UNSPECV_GET_FPCR "fpcr")
(UNSPECV_SET_FPCR "fpcr")])
+
+;; The Advanced SIMD element-type suffix ("b"/"h"/"s") corresponding to
+;; a field of the given width in bits.
+(define_int_attr bits_etype [(8 "b") (16 "h") (32 "s")])
--- /dev/null
+/* { dg-do assemble } */
+/* { dg-options "-O2 -mlittle-endian --save-temps" } */
+/* { dg-final { check-function-bodies "**" "" "" } } */
+
+/* Check that writing lane 0 of a vector into a field of a small
+   structure held in a vector register is done with a single INS
+   (vector-to-vector form), using little-endian lane numbering.
+   The di_* structures occupy 64 bits and the si_* structures 32 bits,
+   so both fit in a single register.  */
+
+typedef unsigned char v16qi __attribute__((vector_size(16)));
+typedef unsigned short v8hi __attribute__((vector_size(16)));
+typedef unsigned int v4si __attribute__((vector_size(16)));
+
+struct di_qi_1 { unsigned char c[4]; unsigned int x; };
+struct di_qi_2 { unsigned int x; unsigned char c[4]; };
+
+struct di_hi_1 { unsigned short s[2]; unsigned int x; };
+struct di_hi_2 { unsigned int x; unsigned short s[2]; };
+
+struct di_si { unsigned int i[2]; };
+
+struct si_qi_1 { unsigned char c[2]; unsigned short x; };
+struct si_qi_2 { unsigned short x; unsigned char c[2]; };
+
+struct si_hi { unsigned short s[2]; };
+
+/* Pin Y to v1, assign one of its fields from an element of X, and keep
+   Y live afterwards so the insertion cannot be optimized away.  */
+#define TEST(NAME, STYPE, VTYPE, LHS, RHS) \
+ void \
+ NAME (VTYPE x) \
+ { \
+ register struct STYPE y asm ("v1"); \
+ asm volatile ("" : "=w" (y)); \
+ LHS = RHS; \
+ asm volatile ("" :: "w" (y)); \
+ }
+
+/*
+** f_di_qi_0:
+** ins v1\.b\[0\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_0, di_qi_1, v16qi, y.c[0], x[0])
+
+/*
+** f_di_qi_1:
+** ins v1\.b\[3\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_1, di_qi_1, v16qi, y.c[3], x[0])
+
+/*
+** f_di_qi_2:
+** ins v1\.b\[4\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_2, di_qi_2, v16qi, y.c[0], x[0])
+
+/*
+** f_di_qi_3:
+** ins v1\.b\[7\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_3, di_qi_2, v16qi, y.c[3], x[0])
+
+/*
+** f_di_hi_0:
+** ins v1\.h\[0\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_0, di_hi_1, v8hi, y.s[0], x[0])
+
+/*
+** f_di_hi_1:
+** ins v1\.h\[1\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_1, di_hi_1, v8hi, y.s[1], x[0])
+
+/*
+** f_di_hi_2:
+** ins v1\.h\[2\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_2, di_hi_2, v8hi, y.s[0], x[0])
+
+/*
+** f_di_hi_3:
+** ins v1\.h\[3\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_3, di_hi_2, v8hi, y.s[1], x[0])
+
+/*
+** f_di_si_0:
+** ins v1\.s\[0\], v0\.s\[0\]
+** ret
+*/
+TEST (f_di_si_0, di_si, v4si, y.i[0], x[0])
+
+/*
+** f_di_si_1:
+** ins v1\.s\[1\], v0\.s\[0\]
+** ret
+*/
+TEST (f_di_si_1, di_si, v4si, y.i[1], x[0])
+
+/*
+** f_si_qi_0:
+** ins v1\.b\[0\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_0, si_qi_1, v16qi, y.c[0], x[0])
+
+/*
+** f_si_qi_1:
+** ins v1\.b\[1\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_1, si_qi_1, v16qi, y.c[1], x[0])
+
+/*
+** f_si_qi_2:
+** ins v1\.b\[2\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_2, si_qi_2, v16qi, y.c[0], x[0])
+
+/*
+** f_si_qi_3:
+** ins v1\.b\[3\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_3, si_qi_2, v16qi, y.c[1], x[0])
+
+/*
+** f_si_hi_0:
+** ins v1\.h\[0\], v0\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_0, si_hi, v8hi, y.s[0], x[0])
+
+/*
+** f_si_hi_1:
+** ins v1\.h\[1\], v0\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_1, si_hi, v8hi, y.s[1], x[0])
--- /dev/null
+/* { dg-do assemble } */
+/* { dg-options "-O2 -mbig-endian --save-temps" } */
+/* { dg-final { check-function-bodies "**" "" "" } } */
+
+/* Big-endian counterpart of the little-endian vector-to-vector INS
+   test: writing one element of a vector into a field of a small
+   (64-bit di_* or 32-bit si_*) structure held in a vector register
+   should use a single INS, with big-endian lane numbering.  */
+
+typedef unsigned char v16qi __attribute__((vector_size(16)));
+typedef unsigned short v8hi __attribute__((vector_size(16)));
+typedef unsigned int v4si __attribute__((vector_size(16)));
+
+struct di_qi_1 { unsigned char c[4]; unsigned int x; };
+struct di_qi_2 { unsigned int x; unsigned char c[4]; };
+
+struct di_hi_1 { unsigned short s[2]; unsigned int x; };
+struct di_hi_2 { unsigned int x; unsigned short s[2]; };
+
+struct di_si { unsigned int i[2]; };
+
+struct si_qi_1 { unsigned char c[2]; unsigned short x; };
+struct si_qi_2 { unsigned short x; unsigned char c[2]; };
+
+struct si_hi { unsigned short s[2]; };
+
+/* Pin Y to v1, assign one of its fields from an element of X, and keep
+   Y live afterwards so the insertion cannot be optimized away.  */
+#define TEST(NAME, STYPE, VTYPE, LHS, RHS) \
+ void \
+ NAME (VTYPE x) \
+ { \
+ register struct STYPE y asm ("v1"); \
+ asm volatile ("" : "=w" (y)); \
+ LHS = RHS; \
+ asm volatile ("" :: "w" (y)); \
+ }
+
+/*
+** f_di_qi_0:
+** ins v1\.b\[7\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_0, di_qi_1, v16qi, y.c[0], x[15])
+
+/*
+** f_di_qi_1:
+** ins v1\.b\[4\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_1, di_qi_1, v16qi, y.c[3], x[15])
+
+/*
+** f_di_qi_2:
+** ins v1\.b\[3\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_2, di_qi_2, v16qi, y.c[0], x[15])
+
+/*
+** f_di_qi_3:
+** ins v1\.b\[0\], v0\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_3, di_qi_2, v16qi, y.c[3], x[15])
+
+/*
+** f_di_hi_0:
+** ins v1\.h\[3\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_0, di_hi_1, v8hi, y.s[0], x[7])
+
+/*
+** f_di_hi_1:
+** ins v1\.h\[2\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_1, di_hi_1, v8hi, y.s[1], x[7])
+
+/*
+** f_di_hi_2:
+** ins v1\.h\[1\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_2, di_hi_2, v8hi, y.s[0], x[7])
+
+/*
+** f_di_hi_3:
+** ins v1\.h\[0\], v0\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_3, di_hi_2, v8hi, y.s[1], x[7])
+
+/*
+** f_di_si_0:
+** ins v1\.s\[1\], v0\.s\[0\]
+** ret
+*/
+TEST (f_di_si_0, di_si, v4si, y.i[0], x[3])
+
+/*
+** f_di_si_1:
+** ins v1\.s\[0\], v0\.s\[0\]
+** ret
+*/
+TEST (f_di_si_1, di_si, v4si, y.i[1], x[3])
+
+/*
+** f_si_qi_0:
+** ins v1\.b\[3\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_0, si_qi_1, v16qi, y.c[0], x[15])
+
+/*
+** f_si_qi_1:
+** ins v1\.b\[2\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_1, si_qi_1, v16qi, y.c[1], x[15])
+
+/*
+** f_si_qi_2:
+** ins v1\.b\[1\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_2, si_qi_2, v16qi, y.c[0], x[15])
+
+/*
+** f_si_qi_3:
+** ins v1\.b\[0\], v0\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_3, si_qi_2, v16qi, y.c[1], x[15])
+
+/*
+** f_si_hi_0:
+** ins v1\.h\[1\], v0\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_0, si_hi, v8hi, y.s[0], x[7])
+
+/*
+** f_si_hi_1:
+** ins v1\.h\[0\], v0\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_1, si_hi, v8hi, y.s[1], x[7])
--- /dev/null
+/* { dg-do assemble } */
+/* { dg-options "-O2 -mlittle-endian --save-temps" } */
+/* { dg-final { check-function-bodies "**" "" "" { target lp64 } } } */
+
+/* Check that inserting a zero-extended scalar loaded from memory into a
+   field of a small (64-bit di_* or 32-bit si_*) structure held in a
+   vector register is done as a scalar LDR into an FPR followed by a
+   single INS, using little-endian lane numbering.  */
+
+struct di_qi_1 { unsigned char c[4]; unsigned int x; };
+struct di_qi_2 { unsigned int x; unsigned char c[4]; };
+
+struct di_hi_1 { unsigned short s[2]; unsigned int x; };
+struct di_hi_2 { unsigned int x; unsigned short s[2]; };
+
+struct di_si { unsigned int i[2]; };
+
+struct si_qi_1 { unsigned char c[2]; unsigned short x; };
+struct si_qi_2 { unsigned short x; unsigned char c[2]; };
+
+struct si_hi { unsigned short s[2]; };
+
+/* Load *PTR (volatile, so the load cannot be removed), widen it to
+   64 bits, store it into a field of Y (pinned to v1), and keep Y live
+   so the insertion cannot be optimized away.  */
+#define TEST(NAME, STYPE, ETYPE, LHS) \
+ void \
+ NAME (volatile ETYPE *ptr) \
+ { \
+ register struct STYPE y asm ("v1"); \
+ asm volatile ("" : "=w" (y)); \
+ ETYPE x = *ptr; \
+ __UINT64_TYPE__ value = (ETYPE) x; \
+ LHS = value; \
+ asm volatile ("" :: "w" (y)); \
+ }
+
+/*
+** f_di_qi_0:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[0\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_0, di_qi_1, unsigned char, y.c[0])
+
+/*
+** f_di_qi_1:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[3\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_1, di_qi_1, unsigned char, y.c[3])
+
+/*
+** f_di_qi_2:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[4\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_2, di_qi_2, unsigned char, y.c[0])
+
+/*
+** f_di_qi_3:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[7\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_3, di_qi_2, unsigned char, y.c[3])
+
+/*
+** f_di_hi_0:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[0\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_0, di_hi_1, unsigned short, y.s[0])
+
+/*
+** f_di_hi_1:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[1\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_1, di_hi_1, unsigned short, y.s[1])
+
+/*
+** f_di_hi_2:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[2\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_2, di_hi_2, unsigned short, y.s[0])
+
+/*
+** f_di_hi_3:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[3\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_3, di_hi_2, unsigned short, y.s[1])
+
+/*
+** f_di_si_0:
+** ldr s([0-9]+), \[x0\]
+** ins v1\.s\[0\], v\1\.s\[0\]
+** ret
+*/
+TEST (f_di_si_0, di_si, unsigned int, y.i[0])
+
+/*
+** f_di_si_1:
+** ldr s([0-9]+), \[x0\]
+** ins v1\.s\[1\], v\1\.s\[0\]
+** ret
+*/
+TEST (f_di_si_1, di_si, unsigned int, y.i[1])
+
+/*
+** f_si_qi_0:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[0\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_0, si_qi_1, unsigned char, y.c[0])
+
+/*
+** f_si_qi_1:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[1\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_1, si_qi_1, unsigned char, y.c[1])
+
+/*
+** f_si_qi_2:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[2\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_2, si_qi_2, unsigned char, y.c[0])
+
+/*
+** f_si_qi_3:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[3\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_3, si_qi_2, unsigned char, y.c[1])
+
+/*
+** f_si_hi_0:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[0\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_0, si_hi, unsigned short, y.s[0])
+
+/*
+** f_si_hi_1:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[1\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_1, si_hi, unsigned short, y.s[1])
--- /dev/null
+/* { dg-do assemble } */
+/* { dg-options "-O2 -mbig-endian --save-temps" } */
+/* { dg-final { check-function-bodies "**" "" "" { target lp64 } } } */
+
+/* Big-endian counterpart of the little-endian load-and-insert test:
+   a zero-extended scalar loaded from memory should be inserted into a
+   small (64-bit di_* or 32-bit si_*) structure held in a vector
+   register via a scalar LDR plus a single INS, with big-endian lane
+   numbering.  */
+
+struct di_qi_1 { unsigned char c[4]; unsigned int x; };
+struct di_qi_2 { unsigned int x; unsigned char c[4]; };
+
+struct di_hi_1 { unsigned short s[2]; unsigned int x; };
+struct di_hi_2 { unsigned int x; unsigned short s[2]; };
+
+struct di_si { unsigned int i[2]; };
+
+struct si_qi_1 { unsigned char c[2]; unsigned short x; };
+struct si_qi_2 { unsigned short x; unsigned char c[2]; };
+
+struct si_hi { unsigned short s[2]; };
+
+/* Load *PTR (volatile, so the load cannot be removed), widen it to
+   64 bits, store it into a field of Y (pinned to v1), and keep Y live
+   so the insertion cannot be optimized away.  */
+#define TEST(NAME, STYPE, ETYPE, LHS) \
+ void \
+ NAME (volatile ETYPE *ptr) \
+ { \
+ register struct STYPE y asm ("v1"); \
+ asm volatile ("" : "=w" (y)); \
+ ETYPE x = *ptr; \
+ __UINT64_TYPE__ value = (ETYPE) x; \
+ LHS = value; \
+ asm volatile ("" :: "w" (y)); \
+ }
+
+/*
+** f_di_qi_0:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[7\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_0, di_qi_1, unsigned char, y.c[0])
+
+/*
+** f_di_qi_1:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[4\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_1, di_qi_1, unsigned char, y.c[3])
+
+/*
+** f_di_qi_2:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[3\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_2, di_qi_2, unsigned char, y.c[0])
+
+/*
+** f_di_qi_3:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[0\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_di_qi_3, di_qi_2, unsigned char, y.c[3])
+
+/*
+** f_di_hi_0:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[3\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_0, di_hi_1, unsigned short, y.s[0])
+
+/*
+** f_di_hi_1:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[2\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_1, di_hi_1, unsigned short, y.s[1])
+
+/*
+** f_di_hi_2:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[1\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_2, di_hi_2, unsigned short, y.s[0])
+
+/*
+** f_di_hi_3:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[0\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_di_hi_3, di_hi_2, unsigned short, y.s[1])
+
+/*
+** f_di_si_0:
+** ldr s([0-9]+), \[x0\]
+** ins v1\.s\[1\], v\1\.s\[0\]
+** ret
+*/
+TEST (f_di_si_0, di_si, unsigned int, y.i[0])
+
+/*
+** f_di_si_1:
+** ldr s([0-9]+), \[x0\]
+** ins v1\.s\[0\], v\1\.s\[0\]
+** ret
+*/
+TEST (f_di_si_1, di_si, unsigned int, y.i[1])
+
+/*
+** f_si_qi_0:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[3\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_0, si_qi_1, unsigned char, y.c[0])
+
+/*
+** f_si_qi_1:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[2\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_1, si_qi_1, unsigned char, y.c[1])
+
+/*
+** f_si_qi_2:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[1\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_2, si_qi_2, unsigned char, y.c[0])
+
+/*
+** f_si_qi_3:
+** ldr b([0-9]+), \[x0\]
+** ins v1\.b\[0\], v\1\.b\[0\]
+** ret
+*/
+TEST (f_si_qi_3, si_qi_2, unsigned char, y.c[1])
+
+/*
+** f_si_hi_0:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[1\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_0, si_hi, unsigned short, y.s[0])
+
+/*
+** f_si_hi_1:
+** ldr h([0-9]+), \[x0\]
+** ins v1\.h\[0\], v\1\.h\[0\]
+** ret
+*/
+TEST (f_si_hi_1, si_hi, unsigned short, y.s[1])
--- /dev/null
+/* { dg-do assemble } */
+/* { dg-options "-O2 -mlittle-endian --save-temps" } */
+/* { dg-final { check-function-bodies "**" "" "" } } */
+
+/* Check that inserting a scalar held in a general register into a field
+   of a small (64-bit di_* or 32-bit si_*) structure held in a vector
+   register is done with a single INS from the GPR, using little-endian
+   lane numbering.  */
+
+struct di_qi_1 { unsigned char c[4]; unsigned int x; };
+struct di_qi_2 { unsigned int x; unsigned char c[4]; };
+
+struct di_hi_1 { unsigned short s[2]; unsigned int x; };
+struct di_hi_2 { unsigned int x; unsigned short s[2]; };
+
+struct di_si { unsigned int i[2]; };
+
+struct si_qi_1 { unsigned char c[2]; unsigned short x; };
+struct si_qi_2 { unsigned short x; unsigned char c[2]; };
+
+struct si_hi { unsigned short s[2]; };
+
+/* Pin Y to v1 and X to x0, store X into a field of Y, and keep Y live
+   afterwards so the insertion cannot be optimized away.  */
+#define TEST(NAME, STYPE, ETYPE, LHS) \
+ void \
+ NAME (void) \
+ { \
+ register struct STYPE y asm ("v1"); \
+ register ETYPE x asm ("x0"); \
+ asm volatile ("" : "=w" (y), "=r" (x)); \
+ LHS = x; \
+ asm volatile ("" :: "w" (y)); \
+ }
+
+/*
+** f_di_qi_0:
+** ins v1\.b\[0\], w0
+** ret
+*/
+TEST (f_di_qi_0, di_qi_1, unsigned char, y.c[0])
+
+/*
+** f_di_qi_1:
+** ins v1\.b\[3\], w0
+** ret
+*/
+TEST (f_di_qi_1, di_qi_1, unsigned char, y.c[3])
+
+/*
+** f_di_qi_2:
+** ins v1\.b\[4\], w0
+** ret
+*/
+TEST (f_di_qi_2, di_qi_2, unsigned char, y.c[0])
+
+/*
+** f_di_qi_3:
+** ins v1\.b\[7\], w0
+** ret
+*/
+TEST (f_di_qi_3, di_qi_2, unsigned char, y.c[3])
+
+/*
+** f_di_hi_0:
+** ins v1\.h\[0\], w0
+** ret
+*/
+TEST (f_di_hi_0, di_hi_1, unsigned short, y.s[0])
+
+/*
+** f_di_hi_1:
+** ins v1\.h\[1\], w0
+** ret
+*/
+TEST (f_di_hi_1, di_hi_1, unsigned short, y.s[1])
+
+/*
+** f_di_hi_2:
+** ins v1\.h\[2\], w0
+** ret
+*/
+TEST (f_di_hi_2, di_hi_2, unsigned short, y.s[0])
+
+/*
+** f_di_hi_3:
+** ins v1\.h\[3\], w0
+** ret
+*/
+TEST (f_di_hi_3, di_hi_2, unsigned short, y.s[1])
+
+/*
+** f_di_si_0:
+** ins v1\.s\[0\], w0
+** ret
+*/
+TEST (f_di_si_0, di_si, unsigned int, y.i[0])
+
+/*
+** f_di_si_1:
+** ins v1\.s\[1\], w0
+** ret
+*/
+TEST (f_di_si_1, di_si, unsigned int, y.i[1])
+
+/*
+** f_si_qi_0:
+** ins v1\.b\[0\], w0
+** ret
+*/
+TEST (f_si_qi_0, si_qi_1, unsigned char, y.c[0])
+
+/*
+** f_si_qi_1:
+** ins v1\.b\[1\], w0
+** ret
+*/
+TEST (f_si_qi_1, si_qi_1, unsigned char, y.c[1])
+
+/*
+** f_si_qi_2:
+** ins v1\.b\[2\], w0
+** ret
+*/
+TEST (f_si_qi_2, si_qi_2, unsigned char, y.c[0])
+
+/*
+** f_si_qi_3:
+** ins v1\.b\[3\], w0
+** ret
+*/
+TEST (f_si_qi_3, si_qi_2, unsigned char, y.c[1])
+
+/*
+** f_si_hi_0:
+** ins v1\.h\[0\], w0
+** ret
+*/
+TEST (f_si_hi_0, si_hi, unsigned short, y.s[0])
+
+/*
+** f_si_hi_1:
+** ins v1\.h\[1\], w0
+** ret
+*/
+TEST (f_si_hi_1, si_hi, unsigned short, y.s[1])
--- /dev/null
+/* { dg-do assemble } */
+/* { dg-options "-O2 -mbig-endian --save-temps" } */
+/* { dg-final { check-function-bodies "**" "" "" } } */
+
+/* Big-endian counterpart of the little-endian GPR-to-vector test:
+   a scalar held in a general register should be inserted into a small
+   (64-bit di_* or 32-bit si_*) structure held in a vector register
+   with a single INS from the GPR, using big-endian lane numbering.  */
+
+struct di_qi_1 { unsigned char c[4]; unsigned int x; };
+struct di_qi_2 { unsigned int x; unsigned char c[4]; };
+
+struct di_hi_1 { unsigned short s[2]; unsigned int x; };
+struct di_hi_2 { unsigned int x; unsigned short s[2]; };
+
+struct di_si { unsigned int i[2]; };
+
+struct si_qi_1 { unsigned char c[2]; unsigned short x; };
+struct si_qi_2 { unsigned short x; unsigned char c[2]; };
+
+struct si_hi { unsigned short s[2]; };
+
+/* Pin Y to v1 and X to x0, store X into a field of Y, and keep Y live
+   afterwards so the insertion cannot be optimized away.  */
+#define TEST(NAME, STYPE, ETYPE, LHS) \
+ void \
+ NAME (void) \
+ { \
+ register struct STYPE y asm ("v1"); \
+ register ETYPE x asm ("x0"); \
+ asm volatile ("" : "=w" (y), "=r" (x)); \
+ LHS = x; \
+ asm volatile ("" :: "w" (y)); \
+ }
+
+/*
+** f_di_qi_0:
+** ins v1\.b\[7\], w0
+** ret
+*/
+TEST (f_di_qi_0, di_qi_1, unsigned char, y.c[0])
+
+/*
+** f_di_qi_1:
+** ins v1\.b\[4\], w0
+** ret
+*/
+TEST (f_di_qi_1, di_qi_1, unsigned char, y.c[3])
+
+/*
+** f_di_qi_2:
+** ins v1\.b\[3\], w0
+** ret
+*/
+TEST (f_di_qi_2, di_qi_2, unsigned char, y.c[0])
+
+/*
+** f_di_qi_3:
+** ins v1\.b\[0\], w0
+** ret
+*/
+TEST (f_di_qi_3, di_qi_2, unsigned char, y.c[3])
+
+/*
+** f_di_hi_0:
+** ins v1\.h\[3\], w0
+** ret
+*/
+TEST (f_di_hi_0, di_hi_1, unsigned short, y.s[0])
+
+/*
+** f_di_hi_1:
+** ins v1\.h\[2\], w0
+** ret
+*/
+TEST (f_di_hi_1, di_hi_1, unsigned short, y.s[1])
+
+/*
+** f_di_hi_2:
+** ins v1\.h\[1\], w0
+** ret
+*/
+TEST (f_di_hi_2, di_hi_2, unsigned short, y.s[0])
+
+/*
+** f_di_hi_3:
+** ins v1\.h\[0\], w0
+** ret
+*/
+TEST (f_di_hi_3, di_hi_2, unsigned short, y.s[1])
+
+/*
+** f_di_si_0:
+** ins v1\.s\[1\], w0
+** ret
+*/
+TEST (f_di_si_0, di_si, unsigned int, y.i[0])
+
+/*
+** f_di_si_1:
+** ins v1\.s\[0\], w0
+** ret
+*/
+TEST (f_di_si_1, di_si, unsigned int, y.i[1])
+
+/*
+** f_si_qi_0:
+** ins v1\.b\[3\], w0
+** ret
+*/
+TEST (f_si_qi_0, si_qi_1, unsigned char, y.c[0])
+
+/*
+** f_si_qi_1:
+** ins v1\.b\[2\], w0
+** ret
+*/
+TEST (f_si_qi_1, si_qi_1, unsigned char, y.c[1])
+
+/*
+** f_si_qi_2:
+** ins v1\.b\[1\], w0
+** ret
+*/
+TEST (f_si_qi_2, si_qi_2, unsigned char, y.c[0])
+
+/*
+** f_si_qi_3:
+** ins v1\.b\[0\], w0
+** ret
+*/
+TEST (f_si_qi_3, si_qi_2, unsigned char, y.c[1])
+
+/*
+** f_si_hi_0:
+** ins v1\.h\[1\], w0
+** ret
+*/
+TEST (f_si_hi_0, si_hi, unsigned short, y.s[0])
+
+/*
+** f_si_hi_1:
+** ins v1\.h\[0\], w0
+** ret
+*/
+TEST (f_si_hi_1, si_hi, unsigned short, y.s[1])