/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"

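/* BSL1N ("bitwise select with first input inverted") operates on raw
   bits: svbsl1n (op1, op2, op3) computes (~op1 & op3) | (op2 & ~op3).
   The instruction only has a .d form, so every element type uses the
   z<n>.d operands checked below.  */
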
/*
** bsl1n_s32_tied1:
**	bsl1n	z0\.d, z0\.d, z1\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (bsl1n_s32_tied1, svint32_t,
		z0 = svbsl1n_s32 (z0, z1, z2),
		z0 = svbsl1n (z0, z1, z2))

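/* BSL1N is destructive on its first source register.  When the result
   must end up in z0 but z0 holds one of the other arguments, the
   compiler is expected to save z0 in a temporary and MOVPRFX the first
   argument into z0, as in the tied2 and tied3 cases below.  */
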
/*
** bsl1n_s32_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	bsl1n	z0\.d, z0\.d, \1, z2\.d
**	ret
*/
TEST_UNIFORM_Z (bsl1n_s32_tied2, svint32_t,
		z0 = svbsl1n_s32 (z1, z0, z2),
		z0 = svbsl1n (z1, z0, z2))

/*
** bsl1n_s32_tied3:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	bsl1n	z0\.d, z0\.d, z2\.d, \1
**	ret
*/
TEST_UNIFORM_Z (bsl1n_s32_tied3, svint32_t,
		z0 = svbsl1n_s32 (z1, z2, z0),
		z0 = svbsl1n (z1, z2, z0))

/*
** bsl1n_s32_untied:
**	movprfx	z0, z1
**	bsl1n	z0\.d, z0\.d, z2\.d, z3\.d
**	ret
*/
TEST_UNIFORM_Z (bsl1n_s32_untied, svint32_t,
		z0 = svbsl1n_s32 (z1, z2, z3),
		z0 = svbsl1n (z1, z2, z3))

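/* For the _n forms, the scalar third operand is first broadcast to a
   vector register with a MOV from w0.  */
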
/*
** bsl1n_w0_s32_tied1:
**	mov	(z[0-9]+)\.s, w0
**	bsl1n	z0\.d, z0\.d, z1\.d, \1\.d
**	ret
*/
TEST_UNIFORM_ZX (bsl1n_w0_s32_tied1, svint32_t, int32_t,
		 z0 = svbsl1n_n_s32 (z0, z1, x0),
		 z0 = svbsl1n (z0, z1, x0))

/*
** bsl1n_w0_s32_tied2:
**	mov	(z[0-9]+)\.s, w0
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	bsl1n	z0\.d, z0\.d, \2, \1\.d
**	ret
*/
TEST_UNIFORM_ZX (bsl1n_w0_s32_tied2, svint32_t, int32_t,
		 z0 = svbsl1n_n_s32 (z1, z0, x0),
		 z0 = svbsl1n (z1, z0, x0))

/*
** bsl1n_w0_s32_untied:
**	mov	(z[0-9]+)\.s, w0
**	movprfx	z0, z1
**	bsl1n	z0\.d, z0\.d, z2\.d, \1\.d
**	ret
*/
TEST_UNIFORM_ZX (bsl1n_w0_s32_untied, svint32_t, int32_t,
		 z0 = svbsl1n_n_s32 (z1, z2, x0),
		 z0 = svbsl1n (z1, z2, x0))

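/* BSL1N has no immediate encoding, so a constant third operand is
   likewise materialized in a vector register first.  */
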
/*
** bsl1n_11_s32_tied1:
**	mov	(z[0-9]+)\.s, #11
**	bsl1n	z0\.d, z0\.d, z1\.d, \1\.d
**	ret
*/
TEST_UNIFORM_Z (bsl1n_11_s32_tied1, svint32_t,
		z0 = svbsl1n_n_s32 (z0, z1, 11),
		z0 = svbsl1n (z0, z1, 11))

/*
** bsl1n_11_s32_tied2:
**	mov	(z[0-9]+)\.s, #11
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	bsl1n	z0\.d, z0\.d, \2, \1\.d
**	ret
*/
TEST_UNIFORM_Z (bsl1n_11_s32_tied2, svint32_t,
		z0 = svbsl1n_n_s32 (z1, z0, 11),
		z0 = svbsl1n (z1, z0, 11))

/*
** bsl1n_11_s32_untied:
**	mov	(z[0-9]+)\.s, #11
**	movprfx	z0, z1
**	bsl1n	z0\.d, z0\.d, z2\.d, \1\.d
**	ret
*/
TEST_UNIFORM_Z (bsl1n_11_s32_untied, svint32_t,
		z0 = svbsl1n_n_s32 (z1, z2, 11),
		z0 = svbsl1n (z1, z2, 11))