/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
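
/* Check that both svsra_n_s8 and the overloaded svsra form produce a
   single SSRA instruction for shift amounts 1, 2 and 8, including the
   movprfx sequences needed when the accumulator is not already in z0.  */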

/*
** sra_1_s8_tied1:
**	ssra	z0\.b, z1\.b, #1
**	ret
*/
TEST_UNIFORM_Z (sra_1_s8_tied1, svint8_t,
		z0 = svsra_n_s8 (z0, z1, 1),
		z0 = svsra (z0, z1, 1))

/*
** sra_1_s8_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	ssra	z0\.b, \1\.b, #1
**	ret
*/
TEST_UNIFORM_Z (sra_1_s8_tied2, svint8_t,
		z0 = svsra_n_s8 (z1, z0, 1),
		z0 = svsra (z1, z0, 1))

/*
** sra_1_s8_untied:
**	movprfx	z0, z1
**	ssra	z0\.b, z2\.b, #1
**	ret
*/
TEST_UNIFORM_Z (sra_1_s8_untied, svint8_t,
		z0 = svsra_n_s8 (z1, z2, 1),
		z0 = svsra (z1, z2, 1))

/*
** sra_2_s8_tied1:
**	ssra	z0\.b, z1\.b, #2
**	ret
*/
TEST_UNIFORM_Z (sra_2_s8_tied1, svint8_t,
		z0 = svsra_n_s8 (z0, z1, 2),
		z0 = svsra (z0, z1, 2))

/*
** sra_2_s8_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	ssra	z0\.b, \1\.b, #2
**	ret
*/
TEST_UNIFORM_Z (sra_2_s8_tied2, svint8_t,
		z0 = svsra_n_s8 (z1, z0, 2),
		z0 = svsra (z1, z0, 2))

/*
** sra_2_s8_untied:
**	movprfx	z0, z1
**	ssra	z0\.b, z2\.b, #2
**	ret
*/
TEST_UNIFORM_Z (sra_2_s8_untied, svint8_t,
		z0 = svsra_n_s8 (z1, z2, 2),
		z0 = svsra (z1, z2, 2))

/*
** sra_8_s8_tied1:
**	ssra	z0\.b, z1\.b, #8
**	ret
*/
TEST_UNIFORM_Z (sra_8_s8_tied1, svint8_t,
		z0 = svsra_n_s8 (z0, z1, 8),
		z0 = svsra (z0, z1, 8))

/*
** sra_8_s8_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	ssra	z0\.b, \1\.b, #8
**	ret
*/
TEST_UNIFORM_Z (sra_8_s8_tied2, svint8_t,
		z0 = svsra_n_s8 (z1, z0, 8),
		z0 = svsra (z1, z0, 8))

/*
** sra_8_s8_untied:
**	movprfx	z0, z1
**	ssra	z0\.b, z2\.b, #8
**	ret
*/
TEST_UNIFORM_Z (sra_8_s8_untied, svint8_t,
		z0 = svsra_n_s8 (z1, z2, 8),
		z0 = svsra (z1, z2, 8))