/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
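
/* Each TEST_UNIFORM_Z/TEST_UNIFORM_ZX invocation below defines a test
   function (the macros come from test_sve_acle.h); check-function-bodies
   compares the assembly generated for that function against the preceding
   "**" regex block.  The intrinsic suffixes follow the ACLE naming scheme:
   _m merges inactive lanes with the first operand, _z zeroes them, _x
   leaves them unspecified, and _n takes a scalar second operand.  */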

/*
** qsub_u8_tied1:
**	uqsub	z0\.b, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_tied1, svuint8_t,
		z0 = svqsub_u8 (z0, z1),
		z0 = svqsub (z0, z1))

/*
** qsub_u8_tied2:
**	uqsub	z0\.b, z1\.b, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_tied2, svuint8_t,
		z0 = svqsub_u8 (z1, z0),
		z0 = svqsub (z1, z0))

/*
** qsub_u8_untied:
**	uqsub	z0\.b, z1\.b, z2\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_untied, svuint8_t,
		z0 = svqsub_u8 (z1, z2),
		z0 = svqsub (z1, z2))

/*
** qsub_w0_u8_tied1:
**	mov	(z[0-9]+\.b), w0
**	uqsub	z0\.b, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_tied1, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8 (z0, x0),
		 z0 = svqsub (z0, x0))

/*
** qsub_w0_u8_untied:
**	mov	(z[0-9]+\.b), w0
**	uqsub	z0\.b, z1\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_untied, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8 (z1, x0),
		 z0 = svqsub (z1, x0))

/*
** qsub_1_u8_tied1:
**	uqsub	z0\.b, z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_tied1, svuint8_t,
		z0 = svqsub_n_u8 (z0, 1),
		z0 = svqsub (z0, 1))

/*
** qsub_1_u8_untied:
**	movprfx	z0, z1
**	uqsub	z0\.b, z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_untied, svuint8_t,
		z0 = svqsub_n_u8 (z1, 1),
		z0 = svqsub (z1, 1))

/*
** qsub_127_u8:
**	uqsub	z0\.b, z0\.b, #127
**	ret
*/
TEST_UNIFORM_Z (qsub_127_u8, svuint8_t,
		z0 = svqsub_n_u8 (z0, 127),
		z0 = svqsub (z0, 127))

/*
** qsub_128_u8:
**	uqsub	z0\.b, z0\.b, #128
**	ret
*/
TEST_UNIFORM_Z (qsub_128_u8, svuint8_t,
		z0 = svqsub_n_u8 (z0, 128),
		z0 = svqsub (z0, 128))

/*
** qsub_255_u8:
**	uqsub	z0\.b, z0\.b, #255
**	ret
*/
TEST_UNIFORM_Z (qsub_255_u8, svuint8_t,
		z0 = svqsub_n_u8 (z0, 255),
		z0 = svqsub (z0, 255))
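
/* A negative immediate is converted to the unsigned element type, so the
   -1, -127 and -128 cases below are expected to use the byte immediates
   #255, #129 and #128 respectively.  */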

/*
** qsub_m1_u8:
**	uqsub	z0\.b, z0\.b, #255
**	ret
*/
TEST_UNIFORM_Z (qsub_m1_u8, svuint8_t,
		z0 = svqsub_n_u8 (z0, -1),
		z0 = svqsub (z0, -1))

/*
** qsub_m127_u8:
**	uqsub	z0\.b, z0\.b, #129
**	ret
*/
TEST_UNIFORM_Z (qsub_m127_u8, svuint8_t,
		z0 = svqsub_n_u8 (z0, -127),
		z0 = svqsub (z0, -127))

/*
** qsub_m128_u8:
**	uqsub	z0\.b, z0\.b, #128
**	ret
*/
TEST_UNIFORM_Z (qsub_m128_u8, svuint8_t,
		z0 = svqsub_n_u8 (z0, -128),
		z0 = svqsub (z0, -128))

/*
** qsub_u8_m_tied1:
**	uqsub	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_m_tied1, svuint8_t,
		z0 = svqsub_u8_m (p0, z0, z1),
		z0 = svqsub_m (p0, z0, z1))

/*
** qsub_u8_m_tied2:
**	mov	(z[0-9]+)\.d, z0\.d
**	movprfx	z0, z1
**	uqsub	z0\.b, p0/m, z0\.b, \1\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_m_tied2, svuint8_t,
		z0 = svqsub_u8_m (p0, z1, z0),
		z0 = svqsub_m (p0, z1, z0))

/*
** qsub_u8_m_untied:
**	movprfx	z0, z1
**	uqsub	z0\.b, p0/m, z0\.b, z2\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_m_untied, svuint8_t,
		z0 = svqsub_u8_m (p0, z1, z2),
		z0 = svqsub_m (p0, z1, z2))

/*
** qsub_w0_u8_m_tied1:
**	mov	(z[0-9]+\.b), w0
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_m_tied1, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8_m (p0, z0, x0),
		 z0 = svqsub_m (p0, z0, x0))

/*
** qsub_w0_u8_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.b), w0
**	movprfx	z0, z1
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_m_untied, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8_m (p0, z1, x0),
		 z0 = svqsub_m (p0, z1, x0))

/*
** qsub_1_u8_m_tied1:
**	mov	(z[0-9]+\.b), #1
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_m_tied1, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z0, 1),
		z0 = svqsub_m (p0, z0, 1))

/*
** qsub_1_u8_m_untied: { xfail *-*-* }
**	mov	(z[0-9]+\.b), #1
**	movprfx	z0, z1
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_m_untied, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z1, 1),
		z0 = svqsub_m (p0, z1, 1))

/*
** qsub_127_u8_m:
**	mov	(z[0-9]+\.b), #127
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_127_u8_m, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z0, 127),
		z0 = svqsub_m (p0, z0, 127))

/*
** qsub_128_u8_m:
**	mov	(z[0-9]+\.b), #-128
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_128_u8_m, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z0, 128),
		z0 = svqsub_m (p0, z0, 128))

/*
** qsub_255_u8_m:
**	mov	(z[0-9]+\.b), #-1
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_255_u8_m, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z0, 255),
		z0 = svqsub_m (p0, z0, 255))

/*
** qsub_m1_u8_m:
**	mov	(z[0-9]+\.b), #-1
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_m1_u8_m, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z0, -1),
		z0 = svqsub_m (p0, z0, -1))

/*
** qsub_m127_u8_m:
**	mov	(z[0-9]+\.b), #-127
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_m127_u8_m, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z0, -127),
		z0 = svqsub_m (p0, z0, -127))

/*
** qsub_m128_u8_m:
**	mov	(z[0-9]+\.b), #-128
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_m128_u8_m, svuint8_t,
		z0 = svqsub_n_u8_m (p0, z0, -128),
		z0 = svqsub_m (p0, z0, -128))

/*
** qsub_u8_z_tied1:
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_z_tied1, svuint8_t,
		z0 = svqsub_u8_z (p0, z0, z1),
		z0 = svqsub_z (p0, z0, z1))
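
/* When the destination register is tied to the second operand of the
   subtraction, the reversed form UQSUBR is expected instead of an extra
   register copy, as in the tied2 and untied tests below.  */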

/*
** qsub_u8_z_tied2:
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsubr	z0\.b, p0/m, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_z_tied2, svuint8_t,
		z0 = svqsub_u8_z (p0, z1, z0),
		z0 = svqsub_z (p0, z1, z0))

/*
** qsub_u8_z_untied:
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uqsub	z0\.b, p0/m, z0\.b, z2\.b
** |
**	movprfx	z0\.b, p0/z, z2\.b
**	uqsubr	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_z_untied, svuint8_t,
		z0 = svqsub_u8_z (p0, z1, z2),
		z0 = svqsub_z (p0, z1, z2))

/*
** qsub_w0_u8_z_tied1:
**	mov	(z[0-9]+\.b), w0
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_z_tied1, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8_z (p0, z0, x0),
		 z0 = svqsub_z (p0, z0, x0))

/*
** qsub_w0_u8_z_untied:
**	mov	(z[0-9]+\.b), w0
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
** |
**	movprfx	z0\.b, p0/z, \1
**	uqsubr	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_z_untied, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8_z (p0, z1, x0),
		 z0 = svqsub_z (p0, z1, x0))

/*
** qsub_1_u8_z_tied1:
**	mov	(z[0-9]+\.b), #1
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_z_tied1, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z0, 1),
		z0 = svqsub_z (p0, z0, 1))

/*
** qsub_1_u8_z_untied:
**	mov	(z[0-9]+\.b), #1
** (
**	movprfx	z0\.b, p0/z, z1\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
** |
**	movprfx	z0\.b, p0/z, \1
**	uqsubr	z0\.b, p0/m, z0\.b, z1\.b
** )
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_z_untied, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z1, 1),
		z0 = svqsub_z (p0, z1, 1))

/*
** qsub_127_u8_z:
**	mov	(z[0-9]+\.b), #127
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_127_u8_z, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z0, 127),
		z0 = svqsub_z (p0, z0, 127))

/*
** qsub_128_u8_z:
**	mov	(z[0-9]+\.b), #-128
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_128_u8_z, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z0, 128),
		z0 = svqsub_z (p0, z0, 128))

/*
** qsub_255_u8_z:
**	mov	(z[0-9]+\.b), #-1
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_255_u8_z, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z0, 255),
		z0 = svqsub_z (p0, z0, 255))

/*
** qsub_m1_u8_z:
**	mov	(z[0-9]+\.b), #-1
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_m1_u8_z, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z0, -1),
		z0 = svqsub_z (p0, z0, -1))

/*
** qsub_m127_u8_z:
**	mov	(z[0-9]+\.b), #-127
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_m127_u8_z, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z0, -127),
		z0 = svqsub_z (p0, z0, -127))

/*
** qsub_m128_u8_z:
**	mov	(z[0-9]+\.b), #-128
**	movprfx	z0\.b, p0/z, z0\.b
**	uqsub	z0\.b, p0/m, z0\.b, \1
**	ret
*/
TEST_UNIFORM_Z (qsub_m128_u8_z, svuint8_t,
		z0 = svqsub_n_u8_z (p0, z0, -128),
		z0 = svqsub_z (p0, z0, -128))

/*
** qsub_u8_x_tied1:
**	uqsub	z0\.b, z0\.b, z1\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_x_tied1, svuint8_t,
		z0 = svqsub_u8_x (p0, z0, z1),
		z0 = svqsub_x (p0, z0, z1))

/*
** qsub_u8_x_tied2:
**	uqsub	z0\.b, z1\.b, z0\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_x_tied2, svuint8_t,
		z0 = svqsub_u8_x (p0, z1, z0),
		z0 = svqsub_x (p0, z1, z0))

/*
** qsub_u8_x_untied:
**	uqsub	z0\.b, z1\.b, z2\.b
**	ret
*/
TEST_UNIFORM_Z (qsub_u8_x_untied, svuint8_t,
		z0 = svqsub_u8_x (p0, z1, z2),
		z0 = svqsub_x (p0, z1, z2))

/*
** qsub_w0_u8_x_tied1:
**	mov	(z[0-9]+\.b), w0
**	uqsub	z0\.b, z0\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_x_tied1, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8_x (p0, z0, x0),
		 z0 = svqsub_x (p0, z0, x0))

/*
** qsub_w0_u8_x_untied:
**	mov	(z[0-9]+\.b), w0
**	uqsub	z0\.b, z1\.b, \1
**	ret
*/
TEST_UNIFORM_ZX (qsub_w0_u8_x_untied, svuint8_t, uint8_t,
		 z0 = svqsub_n_u8_x (p0, z1, x0),
		 z0 = svqsub_x (p0, z1, x0))

/*
** qsub_1_u8_x_tied1:
**	uqsub	z0\.b, z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_x_tied1, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z0, 1),
		z0 = svqsub_x (p0, z0, 1))

/*
** qsub_1_u8_x_untied:
**	movprfx	z0, z1
**	uqsub	z0\.b, z0\.b, #1
**	ret
*/
TEST_UNIFORM_Z (qsub_1_u8_x_untied, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z1, 1),
		z0 = svqsub_x (p0, z1, 1))

/*
** qsub_127_u8_x:
**	uqsub	z0\.b, z0\.b, #127
**	ret
*/
TEST_UNIFORM_Z (qsub_127_u8_x, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z0, 127),
		z0 = svqsub_x (p0, z0, 127))

/*
** qsub_128_u8_x:
**	uqsub	z0\.b, z0\.b, #128
**	ret
*/
TEST_UNIFORM_Z (qsub_128_u8_x, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z0, 128),
		z0 = svqsub_x (p0, z0, 128))

/*
** qsub_255_u8_x:
**	uqsub	z0\.b, z0\.b, #255
**	ret
*/
TEST_UNIFORM_Z (qsub_255_u8_x, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z0, 255),
		z0 = svqsub_x (p0, z0, 255))

/*
** qsub_m1_u8_x:
**	uqsub	z0\.b, z0\.b, #255
**	ret
*/
TEST_UNIFORM_Z (qsub_m1_u8_x, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z0, -1),
		z0 = svqsub_x (p0, z0, -1))

/*
** qsub_m127_u8_x:
**	uqsub	z0\.b, z0\.b, #129
**	ret
*/
TEST_UNIFORM_Z (qsub_m127_u8_x, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z0, -127),
		z0 = svqsub_x (p0, z0, -127))

/*
** qsub_m128_u8_x:
**	uqsub	z0\.b, z0\.b, #128
**	ret
*/
TEST_UNIFORM_Z (qsub_m128_u8_x, svuint8_t,
		z0 = svqsub_n_u8_x (p0, z0, -128),
		z0 = svqsub_x (p0, z0, -128))