/* gcc/testsuite/gcc.target/aarch64/sve2/acle/asm/rshl_u64.c  */
1 /* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */
3 #include "test_sve_acle.h"
7 ** urshl z0\.d, p0/m, z0\.d, z4\.d
10 TEST_DUAL_Z (rshl_u64_m_tied1
, svuint64_t
, svint64_t
,
11 z0
= svrshl_u64_m (p0
, z0
, z4
),
12 z0
= svrshl_m (p0
, z0
, z4
))
16 ** mov (z[0-9]+\.d), z0\.d
18 ** urshl z0\.d, p0/m, z0\.d, \1
21 TEST_DUAL_Z_REV (rshl_u64_m_tied2
, svuint64_t
, svint64_t
,
22 z0_res
= svrshl_u64_m (p0
, z4
, z0
),
23 z0_res
= svrshl_m (p0
, z4
, z0
))
28 ** urshl z0\.d, p0/m, z0\.d, z4\.d
31 TEST_DUAL_Z (rshl_u64_m_untied
, svuint64_t
, svint64_t
,
32 z0
= svrshl_u64_m (p0
, z1
, z4
),
33 z0
= svrshl_m (p0
, z1
, z4
))
36 ** rshl_x0_u64_m_tied1:
37 ** mov (z[0-9]+\.d), x0
38 ** urshl z0\.d, p0/m, z0\.d, \1
41 TEST_UNIFORM_ZX (rshl_x0_u64_m_tied1
, svuint64_t
, int64_t,
42 z0
= svrshl_n_u64_m (p0
, z0
, x0
),
43 z0
= svrshl_m (p0
, z0
, x0
))
46 ** rshl_x0_u64_m_untied:
47 ** mov (z[0-9]+\.d), x0
49 ** urshl z0\.d, p0/m, z0\.d, \1
52 TEST_UNIFORM_ZX (rshl_x0_u64_m_untied
, svuint64_t
, int64_t,
53 z0
= svrshl_n_u64_m (p0
, z1
, x0
),
54 z0
= svrshl_m (p0
, z1
, x0
))
58 ** urshr z0\.d, p0/m, z0\.d, #64
61 TEST_UNIFORM_Z (rshl_m64_u64_m
, svuint64_t
,
62 z0
= svrshl_n_u64_m (p0
, z0
, -64),
63 z0
= svrshl_m (p0
, z0
, -64))
67 ** urshr z0\.d, p0/m, z0\.d, #2
70 TEST_UNIFORM_Z (rshl_m2_u64_m
, svuint64_t
,
71 z0
= svrshl_n_u64_m (p0
, z0
, -2),
72 z0
= svrshl_m (p0
, z0
, -2))
75 ** rshl_m1_u64_m_tied1:
76 ** urshr z0\.d, p0/m, z0\.d, #1
79 TEST_UNIFORM_Z (rshl_m1_u64_m_tied1
, svuint64_t
,
80 z0
= svrshl_n_u64_m (p0
, z0
, -1),
81 z0
= svrshl_m (p0
, z0
, -1))
84 ** rshl_m1_u64_m_untied:
86 ** urshr z0\.d, p0/m, z0\.d, #1
89 TEST_UNIFORM_Z (rshl_m1_u64_m_untied
, svuint64_t
,
90 z0
= svrshl_n_u64_m (p0
, z1
, -1),
91 z0
= svrshl_m (p0
, z1
, -1))
94 ** rshl_1_u64_m_tied1:
95 ** lsl z0\.d, p0/m, z0\.d, #1
98 TEST_UNIFORM_Z (rshl_1_u64_m_tied1
, svuint64_t
,
99 z0
= svrshl_n_u64_m (p0
, z0
, 1),
100 z0
= svrshl_m (p0
, z0
, 1))
103 ** rshl_1_u64_m_untied:
105 ** lsl z0\.d, p0/m, z0\.d, #1
108 TEST_UNIFORM_Z (rshl_1_u64_m_untied
, svuint64_t
,
109 z0
= svrshl_n_u64_m (p0
, z1
, 1),
110 z0
= svrshl_m (p0
, z1
, 1))
114 ** lsl z0\.d, p0/m, z0\.d, #2
117 TEST_UNIFORM_Z (rshl_2_u64_m
, svuint64_t
,
118 z0
= svrshl_n_u64_m (p0
, z0
, 2),
119 z0
= svrshl_m (p0
, z0
, 2))
123 ** lsl z0\.d, p0/m, z0\.d, #63
126 TEST_UNIFORM_Z (rshl_63_u64_m
, svuint64_t
,
127 z0
= svrshl_n_u64_m (p0
, z0
, 63),
128 z0
= svrshl_m (p0
, z0
, 63))
132 ** movprfx z0\.d, p0/z, z0\.d
133 ** urshl z0\.d, p0/m, z0\.d, z4\.d
136 TEST_DUAL_Z (rshl_u64_z_tied1
, svuint64_t
, svint64_t
,
137 z0
= svrshl_u64_z (p0
, z0
, z4
),
138 z0
= svrshl_z (p0
, z0
, z4
))
142 ** movprfx z0\.d, p0/z, z0\.d
143 ** urshlr z0\.d, p0/m, z0\.d, z4\.d
146 TEST_DUAL_Z_REV (rshl_u64_z_tied2
, svuint64_t
, svint64_t
,
147 z0_res
= svrshl_u64_z (p0
, z4
, z0
),
148 z0_res
= svrshl_z (p0
, z4
, z0
))
151 ** rshl_u64_z_untied:
153 ** movprfx z0\.d, p0/z, z1\.d
154 ** urshl z0\.d, p0/m, z0\.d, z4\.d
156 ** movprfx z0\.d, p0/z, z4\.d
157 ** urshlr z0\.d, p0/m, z0\.d, z1\.d
161 TEST_DUAL_Z (rshl_u64_z_untied
, svuint64_t
, svint64_t
,
162 z0
= svrshl_u64_z (p0
, z1
, z4
),
163 z0
= svrshl_z (p0
, z1
, z4
))
166 ** rshl_x0_u64_z_tied1:
167 ** mov (z[0-9]+\.d), x0
168 ** movprfx z0\.d, p0/z, z0\.d
169 ** urshl z0\.d, p0/m, z0\.d, \1
172 TEST_UNIFORM_ZX (rshl_x0_u64_z_tied1
, svuint64_t
, int64_t,
173 z0
= svrshl_n_u64_z (p0
, z0
, x0
),
174 z0
= svrshl_z (p0
, z0
, x0
))
177 ** rshl_x0_u64_z_untied:
178 ** mov (z[0-9]+\.d), x0
180 ** movprfx z0\.d, p0/z, z1\.d
181 ** urshl z0\.d, p0/m, z0\.d, \1
183 ** movprfx z0\.d, p0/z, \1
184 ** urshlr z0\.d, p0/m, z0\.d, z1\.d
188 TEST_UNIFORM_ZX (rshl_x0_u64_z_untied
, svuint64_t
, int64_t,
189 z0
= svrshl_n_u64_z (p0
, z1
, x0
),
190 z0
= svrshl_z (p0
, z1
, x0
))
194 ** movprfx z0\.d, p0/z, z0\.d
195 ** urshr z0\.d, p0/m, z0\.d, #64
198 TEST_UNIFORM_Z (rshl_m64_u64_z
, svuint64_t
,
199 z0
= svrshl_n_u64_z (p0
, z0
, -64),
200 z0
= svrshl_z (p0
, z0
, -64))
204 ** movprfx z0\.d, p0/z, z0\.d
205 ** urshr z0\.d, p0/m, z0\.d, #2
208 TEST_UNIFORM_Z (rshl_m2_u64_z
, svuint64_t
,
209 z0
= svrshl_n_u64_z (p0
, z0
, -2),
210 z0
= svrshl_z (p0
, z0
, -2))
213 ** rshl_m1_u64_z_tied1:
214 ** movprfx z0\.d, p0/z, z0\.d
215 ** urshr z0\.d, p0/m, z0\.d, #1
218 TEST_UNIFORM_Z (rshl_m1_u64_z_tied1
, svuint64_t
,
219 z0
= svrshl_n_u64_z (p0
, z0
, -1),
220 z0
= svrshl_z (p0
, z0
, -1))
223 ** rshl_m1_u64_z_untied:
224 ** movprfx z0\.d, p0/z, z1\.d
225 ** urshr z0\.d, p0/m, z0\.d, #1
228 TEST_UNIFORM_Z (rshl_m1_u64_z_untied
, svuint64_t
,
229 z0
= svrshl_n_u64_z (p0
, z1
, -1),
230 z0
= svrshl_z (p0
, z1
, -1))
233 ** rshl_1_u64_z_tied1:
234 ** movprfx z0\.d, p0/z, z0\.d
235 ** lsl z0\.d, p0/m, z0\.d, #1
238 TEST_UNIFORM_Z (rshl_1_u64_z_tied1
, svuint64_t
,
239 z0
= svrshl_n_u64_z (p0
, z0
, 1),
240 z0
= svrshl_z (p0
, z0
, 1))
243 ** rshl_1_u64_z_untied:
244 ** movprfx z0\.d, p0/z, z1\.d
245 ** lsl z0\.d, p0/m, z0\.d, #1
248 TEST_UNIFORM_Z (rshl_1_u64_z_untied
, svuint64_t
,
249 z0
= svrshl_n_u64_z (p0
, z1
, 1),
250 z0
= svrshl_z (p0
, z1
, 1))
254 ** movprfx z0\.d, p0/z, z0\.d
255 ** lsl z0\.d, p0/m, z0\.d, #2
258 TEST_UNIFORM_Z (rshl_2_u64_z
, svuint64_t
,
259 z0
= svrshl_n_u64_z (p0
, z0
, 2),
260 z0
= svrshl_z (p0
, z0
, 2))
264 ** movprfx z0\.d, p0/z, z0\.d
265 ** lsl z0\.d, p0/m, z0\.d, #63
268 TEST_UNIFORM_Z (rshl_63_u64_z
, svuint64_t
,
269 z0
= svrshl_n_u64_z (p0
, z0
, 63),
270 z0
= svrshl_z (p0
, z0
, 63))
274 ** urshl z0\.d, p0/m, z0\.d, z4\.d
277 TEST_DUAL_Z (rshl_u64_x_tied1
, svuint64_t
, svint64_t
,
278 z0
= svrshl_u64_x (p0
, z0
, z4
),
279 z0
= svrshl_x (p0
, z0
, z4
))
283 ** urshlr z0\.d, p0/m, z0\.d, z4\.d
286 TEST_DUAL_Z_REV (rshl_u64_x_tied2
, svuint64_t
, svint64_t
,
287 z0_res
= svrshl_u64_x (p0
, z4
, z0
),
288 z0_res
= svrshl_x (p0
, z4
, z0
))
291 ** rshl_u64_x_untied:
294 ** urshl z0\.d, p0/m, z0\.d, z4\.d
297 ** urshlr z0\.d, p0/m, z0\.d, z1\.d
301 TEST_DUAL_Z (rshl_u64_x_untied
, svuint64_t
, svint64_t
,
302 z0
= svrshl_u64_x (p0
, z1
, z4
),
303 z0
= svrshl_x (p0
, z1
, z4
))
306 ** rshl_x0_u64_x_tied1:
307 ** mov (z[0-9]+\.d), x0
308 ** urshl z0\.d, p0/m, z0\.d, \1
311 TEST_UNIFORM_ZX (rshl_x0_u64_x_tied1
, svuint64_t
, int64_t,
312 z0
= svrshl_n_u64_x (p0
, z0
, x0
),
313 z0
= svrshl_x (p0
, z0
, x0
))
316 ** rshl_x0_u64_x_untied:
318 ** urshlr z0\.d, p0/m, z0\.d, z1\.d
321 TEST_UNIFORM_ZX (rshl_x0_u64_x_untied
, svuint64_t
, int64_t,
322 z0
= svrshl_n_u64_x (p0
, z1
, x0
),
323 z0
= svrshl_x (p0
, z1
, x0
))
327 ** urshr z0\.d, p0/m, z0\.d, #64
330 TEST_UNIFORM_Z (rshl_m64_u64_x
, svuint64_t
,
331 z0
= svrshl_n_u64_x (p0
, z0
, -64),
332 z0
= svrshl_x (p0
, z0
, -64))
336 ** urshr z0\.d, p0/m, z0\.d, #2
339 TEST_UNIFORM_Z (rshl_m2_u64_x
, svuint64_t
,
340 z0
= svrshl_n_u64_x (p0
, z0
, -2),
341 z0
= svrshl_x (p0
, z0
, -2))
344 ** rshl_m1_u64_x_tied1:
345 ** urshr z0\.d, p0/m, z0\.d, #1
348 TEST_UNIFORM_Z (rshl_m1_u64_x_tied1
, svuint64_t
,
349 z0
= svrshl_n_u64_x (p0
, z0
, -1),
350 z0
= svrshl_x (p0
, z0
, -1))
353 ** rshl_m1_u64_x_untied:
355 ** urshr z0\.d, p0/m, z0\.d, #1
358 TEST_UNIFORM_Z (rshl_m1_u64_x_untied
, svuint64_t
,
359 z0
= svrshl_n_u64_x (p0
, z1
, -1),
360 z0
= svrshl_x (p0
, z1
, -1))
363 ** rshl_1_u64_x_tied1:
364 ** lsl z0\.d, z0\.d, #1
367 TEST_UNIFORM_Z (rshl_1_u64_x_tied1
, svuint64_t
,
368 z0
= svrshl_n_u64_x (p0
, z0
, 1),
369 z0
= svrshl_x (p0
, z0
, 1))
372 ** rshl_1_u64_x_untied:
373 ** lsl z0\.d, z1\.d, #1
376 TEST_UNIFORM_Z (rshl_1_u64_x_untied
, svuint64_t
,
377 z0
= svrshl_n_u64_x (p0
, z1
, 1),
378 z0
= svrshl_x (p0
, z1
, 1))
382 ** lsl z0\.d, z0\.d, #2
385 TEST_UNIFORM_Z (rshl_2_u64_x
, svuint64_t
,
386 z0
= svrshl_n_u64_x (p0
, z0
, 2),
387 z0
= svrshl_x (p0
, z0
, 2))
391 ** lsl z0\.d, z0\.d, #63
394 TEST_UNIFORM_Z (rshl_63_u64_x
, svuint64_t
,
395 z0
= svrshl_n_u64_x (p0
, z0
, 63),
396 z0
= svrshl_x (p0
, z0
, 63))