;; Unpredicated shift operations by a constant (post-RA only).
;; These are generated by splitting a predicated instruction whose
;; predicate is unused.
-(define_insn "*post_ra_v<optab><mode>3"
+(define_insn "*post_ra_v_ashl<mode>3"
+ [(set (match_operand:SVE_I 0 "register_operand")
+ (ashift:SVE_I
+ (match_operand:SVE_I 1 "register_operand")
+ (match_operand:SVE_I 2 "aarch64_simd_lshift_imm")))]
+ "TARGET_SVE && reload_completed"
+ {@ [ cons: =0 , 1 , 2 ]
+ [ w , w , vs1 ] add\t%0.<Vetype>, %1.<Vetype>, %1.<Vetype>
+ [ w , w , Dl ] lsl\t%0.<Vetype>, %1.<Vetype>, #%2
+ }
+)
+
+(define_insn "*post_ra_v_<optab><mode>3"
[(set (match_operand:SVE_I 0 "register_operand" "=w")
- (ASHIFT:SVE_I
+ (SHIFTRT:SVE_I
(match_operand:SVE_I 1 "register_operand" "w")
- (match_operand:SVE_I 2 "aarch64_simd_<lr>shift_imm")))]
+ (match_operand:SVE_I 2 "aarch64_simd_rshift_imm")))]
"TARGET_SVE && reload_completed"
"<shift>\t%0.<Vetype>, %1.<Vetype>, #%2"
)
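
The first alternative of the new left-shift pattern exploits the identity
x << 1 == x + x: when operand 2 is a vector of ones (the "vs1" constraint),
the insn emits an unpredicated ADD of the input with itself; every other
in-range shift amount keeps the immediate-LSL alternative ("Dl"), and the
right-shift pattern is left unchanged.  A minimal standalone sketch of the
intended effect (not part of the patch; the compiler flags and the exact
register allocation are assumptions), mirroring the updated lsl_1_*_x_*
ACLE test expectations further down:

/* Illustration only: compile with something like -O2 -march=armv8.2-a+sve.  */
#include <arm_sve.h>

svint32_t
shift_left_by_one (svbool_t pg, svint32_t x)
{
  /* Expected to assemble to:  add  z0.s, z0.s, z0.s
     (previously:              lsl  z0.s, z0.s, #1).  */
  return svlsl_n_s32_x (pg, x, 1);
}

svint32_t
shift_left_by_two (svbool_t pg, svint32_t x)
{
  /* Shift amounts other than 1 still use the immediate form:
     lsl  z0.s, z0.s, #2.  */
  return svlsl_n_s32_x (pg, x, 2);
}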
/*
** lsl_1_s16_x_tied1:
-** lsl z0\.h, z0\.h, #1
+** add z0\.h, z0\.h, z0\.h
** ret
*/
TEST_UNIFORM_Z (lsl_1_s16_x_tied1, svint16_t,
/*
** lsl_1_s16_x_untied:
-** lsl z0\.h, z1\.h, #1
+** add z0\.h, z1\.h, z1\.h
** ret
*/
TEST_UNIFORM_Z (lsl_1_s16_x_untied, svint16_t,
/*
** lsl_1_s32_x_tied1:
-** lsl z0\.s, z0\.s, #1
+** add z0\.s, z0\.s, z0\.s
** ret
*/
TEST_UNIFORM_Z (lsl_1_s32_x_tied1, svint32_t,
/*
** lsl_1_s32_x_untied:
-** lsl z0\.s, z1\.s, #1
+** add z0\.s, z1\.s, z1\.s
** ret
*/
TEST_UNIFORM_Z (lsl_1_s32_x_untied, svint32_t,
/*
** lsl_1_s64_x_tied1:
-** lsl z0\.d, z0\.d, #1
+** add z0\.d, z0\.d, z0\.d
** ret
*/
TEST_UNIFORM_Z (lsl_1_s64_x_tied1, svint64_t,
/*
** lsl_1_s64_x_untied:
-** lsl z0\.d, z1\.d, #1
+** add z0\.d, z1\.d, z1\.d
** ret
*/
TEST_UNIFORM_Z (lsl_1_s64_x_untied, svint64_t,
/*
** lsl_1_s8_x_tied1:
-** lsl z0\.b, z0\.b, #1
+** add z0\.b, z0\.b, z0\.b
** ret
*/
TEST_UNIFORM_Z (lsl_1_s8_x_tied1, svint8_t,
/*
** lsl_1_s8_x_untied:
-** lsl z0\.b, z1\.b, #1
+** add z0\.b, z1\.b, z1\.b
** ret
*/
TEST_UNIFORM_Z (lsl_1_s8_x_untied, svint8_t,
/*
** lsl_1_u16_x_tied1:
-** lsl z0\.h, z0\.h, #1
+** add z0\.h, z0\.h, z0\.h
** ret
*/
TEST_UNIFORM_Z (lsl_1_u16_x_tied1, svuint16_t,
/*
** lsl_1_u16_x_untied:
-** lsl z0\.h, z1\.h, #1
+** add z0\.h, z1\.h, z1\.h
** ret
*/
TEST_UNIFORM_Z (lsl_1_u16_x_untied, svuint16_t,
/*
** lsl_1_u32_x_tied1:
-** lsl z0\.s, z0\.s, #1
+** add z0\.s, z0\.s, z0\.s
** ret
*/
TEST_UNIFORM_Z (lsl_1_u32_x_tied1, svuint32_t,
/*
** lsl_1_u32_x_untied:
-** lsl z0\.s, z1\.s, #1
+** add z0\.s, z1\.s, z1\.s
** ret
*/
TEST_UNIFORM_Z (lsl_1_u32_x_untied, svuint32_t,
/*
** lsl_1_u64_x_tied1:
-** lsl z0\.d, z0\.d, #1
+** add z0\.d, z0\.d, z0\.d
** ret
*/
TEST_UNIFORM_Z (lsl_1_u64_x_tied1, svuint64_t,
/*
** lsl_1_u64_x_untied:
-** lsl z0\.d, z1\.d, #1
+** add z0\.d, z1\.d, z1\.d
** ret
*/
TEST_UNIFORM_Z (lsl_1_u64_x_untied, svuint64_t,
/*
** lsl_1_u8_x_tied1:
-** lsl z0\.b, z0\.b, #1
+** add z0\.b, z0\.b, z0\.b
** ret
*/
TEST_UNIFORM_Z (lsl_1_u8_x_tied1, svuint8_t,
/*
** lsl_1_u8_x_untied:
-** lsl z0\.b, z1\.b, #1
+** add z0\.b, z1\.b, z1\.b
** ret
*/
TEST_UNIFORM_Z (lsl_1_u8_x_untied, svuint8_t,
/*
** lsl_wide_1_s16_x_tied1:
-** lsl z0\.h, z0\.h, #1
+** add z0\.h, z0\.h, z0\.h
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_s16_x_tied1, svint16_t,
/*
** lsl_wide_1_s16_x_untied:
-** lsl z0\.h, z1\.h, #1
+** add z0\.h, z1\.h, z1\.h
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_s16_x_untied, svint16_t,
/*
** lsl_wide_1_s32_x_tied1:
-** lsl z0\.s, z0\.s, #1
+** add z0\.s, z0\.s, z0\.s
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_s32_x_tied1, svint32_t,
/*
** lsl_wide_1_s32_x_untied:
-** lsl z0\.s, z1\.s, #1
+** add z0\.s, z1\.s, z1\.s
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_s32_x_untied, svint32_t,
/*
** lsl_wide_1_s8_x_tied1:
-** lsl z0\.b, z0\.b, #1
+** add z0\.b, z0\.b, z0\.b
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_s8_x_tied1, svint8_t,
/*
** lsl_wide_1_s8_x_untied:
-** lsl z0\.b, z1\.b, #1
+** add z0\.b, z1\.b, z1\.b
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_s8_x_untied, svint8_t,
/*
** lsl_wide_1_u16_x_tied1:
-** lsl z0\.h, z0\.h, #1
+** add z0\.h, z0\.h, z0\.h
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_u16_x_tied1, svuint16_t,
/*
** lsl_wide_1_u16_x_untied:
-** lsl z0\.h, z1\.h, #1
+** add z0\.h, z1\.h, z1\.h
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_u16_x_untied, svuint16_t,
/*
** lsl_wide_1_u32_x_tied1:
-** lsl z0\.s, z0\.s, #1
+** add z0\.s, z0\.s, z0\.s
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_u32_x_tied1, svuint32_t,
/*
** lsl_wide_1_u32_x_untied:
-** lsl z0\.s, z1\.s, #1
+** add z0\.s, z1\.s, z1\.s
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_u32_x_untied, svuint32_t,
/*
** lsl_wide_1_u8_x_tied1:
-** lsl z0\.b, z0\.b, #1
+** add z0\.b, z0\.b, z0\.b
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_u8_x_tied1, svuint8_t,
/*
** lsl_wide_1_u8_x_untied:
-** lsl z0\.b, z1\.b, #1
+** add z0\.b, z1\.b, z1\.b
** ret
*/
TEST_UNIFORM_Z (lsl_wide_1_u8_x_untied, svuint8_t,
TEST_ALL (LOOP)
-/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.b,} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b,} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.b,} 4 } } */
/* { dg-final { scan-assembler-not {\tadr\tz[0-9]+\.b,} } } */
-/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.h,} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h,} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.h,} 4 } } */
/* { dg-final { scan-assembler-not {\tadr\tz[0-9]+\.h,} } } */
/* { dg-final { scan-assembler-not {\tadd\tz[0-9]+\.s,} } } */
TEST_TYPE (int32_t, 128)
TEST_TYPE (uint32_t, 128)
-/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.b,} 6 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b,} 6 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.b,} 8 } } */
+/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b,} 4 } } */
/* { dg-final { scan-assembler-times {\tadr\tz[0-9]+\.s, \[z[0-9]+\.s, z[0-9]+\.s, lsl #?1\]\n} 4 } } */
/* { dg-final { scan-assembler-times {\tadr\tz[0-9]+\.s, \[z[0-9]+\.s, z[0-9]+\.s, lsl #?2\]\n} 4 } } */
TEST_ALL (DEF_LOOP)
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b, z[0-9]+\.b, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b, z[0-9]+\.b, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b, z[0-9]+\.b, #7\n} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h, z[0-9]+\.h, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h, z[0-9]+\.h, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h, z[0-9]+\.h, #15\n} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.s, z[0-9]+\.s, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.s, z[0-9]+\.s, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.s, z[0-9]+\.s, #31\n} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.d, z[0-9]+\.d, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.d, z[0-9]+\.d, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.d, z[0-9]+\.d, #63\n} 2 } } */
TEST_ALL (DEF_LOOP)
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b, z[0-9]+\.b, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b, z[0-9]+\.b, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b, z[0-9]+\.b, #7\n} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h, z[0-9]+\.h, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h, z[0-9]+\.h, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h, z[0-9]+\.h, #15\n} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.s, z[0-9]+\.s, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.s, z[0-9]+\.s, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.s, z[0-9]+\.s, #31\n} 2 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.d, z[0-9]+\.d, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.d, z[0-9]+\.d, z[0-9]+\.d\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.d, z[0-9]+\.d, #2\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.d, z[0-9]+\.d, #63\n} 2 } } */
/* { dg-final { scan-assembler-times {\tasr\tz[0-9]+\.h, p[0-7]/m, z[0-9]+\.h, z[0-9]+\.h\n} 2 } } */
/* { dg-final { scan-assembler-times {\tasr\tz[0-9]+\.s, p[0-7]/m, z[0-9]+\.s, z[0-9]+\.s\n} 1 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.b, z[0-9]+\.b, #1\n} 6 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.h, z[0-9]+\.h, #1\n} 4 } } */
-/* { dg-final { scan-assembler-times {\tlsl\tz[0-9]+\.s, z[0-9]+\.s, #1\n} 2 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.b, z[0-9]+\.b, z[0-9]+\.b\n} 6 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.h, z[0-9]+\.h, z[0-9]+\.h\n} 4 } } */
+/* { dg-final { scan-assembler-times {\tadd\tz[0-9]+\.s, z[0-9]+\.s, z[0-9]+\.s\n} 2 } } */
/* { dg-final { scan-assembler-times {\tlsr\tz[0-9]+\.b, z[0-9]+\.b, #1\n} 3 } } */
/* { dg-final { scan-assembler-times {\tlsr\tz[0-9]+\.h, z[0-9]+\.h, #1\n} 2 } } */
--- /dev/null
+/* { dg-do compile } */
+/* { dg-additional-options "-O3" } */
+/* { dg-final { check-function-bodies "**" "" "" } } */
+
+#define FUNC(NAME, OPERATION, IMMEDIATE) \
+void NAME(int n) { \
+  for (int i = 0; i < n; i++) \
+    out[i] = in[i] OPERATION IMMEDIATE; \
+} \
+
+#define N 1024
+
+int out[N], in[N];
+
+/*
+** foo:
+** ...
+** add z[0-9]+.s, z[0-9]+.s, z[0-9]+.s
+** ...
+*/
+FUNC(foo, <<, 1)
+
+/*
+** foo2:
+** ...
+** lsl z[0-9]+.s, z[0-9]+.s, #15
+** ...
+*/
+FUNC(foo2, <<, 15)
+
+/*
+** foo3:
+** ...
+** asr z[0-9]+.s, z[0-9]+.s, #1
+** ...
+*/
+FUNC(foo3, >>, 1)
+
+/*
+** foo4:
+** ...
+** asr z[0-9]+.s, z[0-9]+.s, #10
+** ...
+*/
+FUNC(foo4, >>, 10)
/*
** ldnt1sh_gather_x0_s64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_tied1_s64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_untied_s64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_x0_s64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_tied1_s64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_untied_s64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_x0_u64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_tied1_u64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_untied_u64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_x0_u64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_tied1_u64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1sh_gather_untied_u64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1sh z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_x0_s64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_tied1_s64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_untied_s64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_x0_s64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_tied1_s64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_untied_s64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_x0_u64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_tied1_u64_s64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_untied_u64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_x0_u64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_tied1_u64_u64index:
-** lsl (z[0-9]+\.d), z0\.d, #1
+** add (z[0-9]+\.d), z0\.d, z0\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** ldnt1uh_gather_untied_u64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** ldnt1h z0\.d, p0/z, \[\1, x0\]
** ret
*/
/*
** rshl_1_s16_x_tied1:
-** lsl z0\.h, z0\.h, #1
+** add z0\.h, z0\.h, z0\.h
** ret
*/
TEST_UNIFORM_Z (rshl_1_s16_x_tied1, svint16_t,
/*
** rshl_1_s16_x_untied:
-** lsl z0\.h, z1\.h, #1
+** add z0\.h, z1\.h, z1\.h
** ret
*/
TEST_UNIFORM_Z (rshl_1_s16_x_untied, svint16_t,
/*
** rshl_1_s32_x_tied1:
-** lsl z0\.s, z0\.s, #1
+** add z0\.s, z0\.s, z0\.s
** ret
*/
TEST_UNIFORM_Z (rshl_1_s32_x_tied1, svint32_t,
/*
** rshl_1_s32_x_untied:
-** lsl z0\.s, z1\.s, #1
+** add z0\.s, z1\.s, z1\.s
** ret
*/
TEST_UNIFORM_Z (rshl_1_s32_x_untied, svint32_t,
/*
** rshl_1_s64_x_tied1:
-** lsl z0\.d, z0\.d, #1
+** add z0\.d, z0\.d, z0\.d
** ret
*/
TEST_UNIFORM_Z (rshl_1_s64_x_tied1, svint64_t,
/*
** rshl_1_s64_x_untied:
-** lsl z0\.d, z1\.d, #1
+** add z0\.d, z1\.d, z1\.d
** ret
*/
TEST_UNIFORM_Z (rshl_1_s64_x_untied, svint64_t,
/*
** rshl_1_s8_x_tied1:
-** lsl z0\.b, z0\.b, #1
+** add z0\.b, z0\.b, z0\.b
** ret
*/
TEST_UNIFORM_Z (rshl_1_s8_x_tied1, svint8_t,
/*
** rshl_1_s8_x_untied:
-** lsl z0\.b, z1\.b, #1
+** add z0\.b, z1\.b, z1\.b
** ret
*/
TEST_UNIFORM_Z (rshl_1_s8_x_untied, svint8_t,
/*
** rshl_1_u16_x_tied1:
-** lsl z0\.h, z0\.h, #1
+** add z0\.h, z0\.h, z0\.h
** ret
*/
TEST_UNIFORM_Z (rshl_1_u16_x_tied1, svuint16_t,
/*
** rshl_1_u16_x_untied:
-** lsl z0\.h, z1\.h, #1
+** add z0\.h, z1\.h, z1\.h
** ret
*/
TEST_UNIFORM_Z (rshl_1_u16_x_untied, svuint16_t,
/*
** rshl_1_u32_x_tied1:
-** lsl z0\.s, z0\.s, #1
+** add z0\.s, z0\.s, z0\.s
** ret
*/
TEST_UNIFORM_Z (rshl_1_u32_x_tied1, svuint32_t,
/*
** rshl_1_u32_x_untied:
-** lsl z0\.s, z1\.s, #1
+** add z0\.s, z1\.s, z1\.s
** ret
*/
TEST_UNIFORM_Z (rshl_1_u32_x_untied, svuint32_t,
/*
** rshl_1_u64_x_tied1:
-** lsl z0\.d, z0\.d, #1
+** add z0\.d, z0\.d, z0\.d
** ret
*/
TEST_UNIFORM_Z (rshl_1_u64_x_tied1, svuint64_t,
/*
** rshl_1_u64_x_untied:
-** lsl z0\.d, z1\.d, #1
+** add z0\.d, z1\.d, z1\.d
** ret
*/
TEST_UNIFORM_Z (rshl_1_u64_x_untied, svuint64_t,
/*
** rshl_1_u8_x_tied1:
-** lsl z0\.b, z0\.b, #1
+** add z0\.b, z0\.b, z0\.b
** ret
*/
TEST_UNIFORM_Z (rshl_1_u8_x_tied1, svuint8_t,
/*
** rshl_1_u8_x_untied:
-** lsl z0\.b, z1\.b, #1
+** add z0\.b, z1\.b, z1\.b
** ret
*/
TEST_UNIFORM_Z (rshl_1_u8_x_untied, svuint8_t,
/*
** stnt1h_scatter_x0_s64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/
/*
** stnt1h_scatter_s64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/
/*
** stnt1h_scatter_x0_s64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/
/*
** stnt1h_scatter_s64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/
/*
** stnt1h_scatter_x0_u64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/
/*
** stnt1h_scatter_u64_s64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/
/*
** stnt1h_scatter_x0_u64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/
/*
** stnt1h_scatter_u64_u64index:
-** lsl (z[0-9]+\.d), z1\.d, #1
+** add (z[0-9]+\.d), z1\.d, z1\.d
** stnt1h z0\.d, p0, \[\1, x0\]
** ret
*/