/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
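/* Tests for the SVE2 svrshr (rounding shift right by immediate)
   intrinsics on signed 64-bit elements.  Each TEST_UNIFORM_Z invocation
   exercises both the explicitly typed form (svrshr_n_s64_*) and the
   type-generic overload (svrshr_*), and check-function-bodies matches
   the generated code against the preceding "**" template.  "_tied1"
   tests reuse the input register as the destination; "_untied" tests
   expect a MOVPRFX to copy the input first.  For .d elements the
   immediate must be in [1, 64], so 1, 2 and 64 cover both bounds.
   The _m (merging) forms below leave inactive elements unchanged.  */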

/*
** rshr_1_s64_m_tied1:
**	srshr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (rshr_1_s64_m_tied1, svint64_t,
		z0 = svrshr_n_s64_m (p0, z0, 1),
		z0 = svrshr_m (p0, z0, 1))

/*
** rshr_1_s64_m_untied:
**	movprfx	z0, z1
**	srshr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (rshr_1_s64_m_untied, svint64_t,
		z0 = svrshr_n_s64_m (p0, z1, 1),
		z0 = svrshr_m (p0, z1, 1))

/*
** rshr_2_s64_m_tied1:
**	srshr	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (rshr_2_s64_m_tied1, svint64_t,
		z0 = svrshr_n_s64_m (p0, z0, 2),
		z0 = svrshr_m (p0, z0, 2))

/*
** rshr_2_s64_m_untied:
**	movprfx	z0, z1
**	srshr	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (rshr_2_s64_m_untied, svint64_t,
		z0 = svrshr_n_s64_m (p0, z1, 2),
		z0 = svrshr_m (p0, z1, 2))

/*
** rshr_64_s64_m_tied1:
**	srshr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (rshr_64_s64_m_tied1, svint64_t,
		z0 = svrshr_n_s64_m (p0, z0, 64),
		z0 = svrshr_m (p0, z0, 64))

/*
** rshr_64_s64_m_untied:
**	movprfx	z0, z1
**	srshr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (rshr_64_s64_m_untied, svint64_t,
		z0 = svrshr_n_s64_m (p0, z1, 64),
		z0 = svrshr_m (p0, z1, 64))

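/* _z (zeroing) forms: inactive elements must be zero, hence the extra
   MOVPRFX with /z predication before the destructive SRSHR.  */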
/*
** rshr_1_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	srshr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (rshr_1_s64_z_tied1, svint64_t,
		z0 = svrshr_n_s64_z (p0, z0, 1),
		z0 = svrshr_z (p0, z0, 1))

/*
** rshr_1_s64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	srshr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (rshr_1_s64_z_untied, svint64_t,
		z0 = svrshr_n_s64_z (p0, z1, 1),
		z0 = svrshr_z (p0, z1, 1))

/*
** rshr_2_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	srshr	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (rshr_2_s64_z_tied1, svint64_t,
		z0 = svrshr_n_s64_z (p0, z0, 2),
		z0 = svrshr_z (p0, z0, 2))

/*
** rshr_2_s64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	srshr	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (rshr_2_s64_z_untied, svint64_t,
		z0 = svrshr_n_s64_z (p0, z1, 2),
		z0 = svrshr_z (p0, z1, 2))

/*
** rshr_64_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	srshr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (rshr_64_s64_z_tied1, svint64_t,
		z0 = svrshr_n_s64_z (p0, z0, 64),
		z0 = svrshr_z (p0, z0, 64))

/*
** rshr_64_s64_z_untied:
**	movprfx	z0\.d, p0/z, z1\.d
**	srshr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (rshr_64_s64_z_untied, svint64_t,
		z0 = svrshr_n_s64_z (p0, z1, 64),
		z0 = svrshr_z (p0, z1, 64))

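/* _x ("don't care") forms: inactive elements may take any value, so the
   compiler is free to use the merging instruction directly and only
   needs a plain MOVPRFX in the untied cases.  */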
/*
** rshr_1_s64_x_tied1:
**	srshr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (rshr_1_s64_x_tied1, svint64_t,
		z0 = svrshr_n_s64_x (p0, z0, 1),
		z0 = svrshr_x (p0, z0, 1))

/*
** rshr_1_s64_x_untied:
**	movprfx	z0, z1
**	srshr	z0\.d, p0/m, z0\.d, #1
**	ret
*/
TEST_UNIFORM_Z (rshr_1_s64_x_untied, svint64_t,
		z0 = svrshr_n_s64_x (p0, z1, 1),
		z0 = svrshr_x (p0, z1, 1))

/*
** rshr_2_s64_x_tied1:
**	srshr	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (rshr_2_s64_x_tied1, svint64_t,
		z0 = svrshr_n_s64_x (p0, z0, 2),
		z0 = svrshr_x (p0, z0, 2))

/*
** rshr_2_s64_x_untied:
**	movprfx	z0, z1
**	srshr	z0\.d, p0/m, z0\.d, #2
**	ret
*/
TEST_UNIFORM_Z (rshr_2_s64_x_untied, svint64_t,
		z0 = svrshr_n_s64_x (p0, z1, 2),
		z0 = svrshr_x (p0, z1, 2))

/*
** rshr_64_s64_x_tied1:
**	srshr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (rshr_64_s64_x_tied1, svint64_t,
		z0 = svrshr_n_s64_x (p0, z0, 64),
		z0 = svrshr_x (p0, z0, 64))

/*
** rshr_64_s64_x_untied:
**	movprfx	z0, z1
**	srshr	z0\.d, p0/m, z0\.d, #64
**	ret
*/
TEST_UNIFORM_Z (rshr_64_s64_x_untied, svint64_t,
		z0 = svrshr_n_s64_x (p0, z1, 64),
		z0 = svrshr_x (p0, z1, 64))