/* { dg-final { check-function-bodies "**" "" "-DCHECK_ASM" } } */

#include "test_sve_acle.h"
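
/* Each test below checks both the type-suffixed intrinsic (svrhadd_s64_*)
   and the overloaded form (svrhadd_*).  The _m, _z and _x suffixes select
   merging, zeroing and "don't care" predication; "tied1" and "tied2" mean
   that the first or second input is allocated to the result register z0,
   while "untied" means the result lives in a different register.  */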

/*
** rhadd_s64_m_tied1:
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_m_tied1, svint64_t,
		z0 = svrhadd_s64_m (p0, z0, z1),
		z0 = svrhadd_m (p0, z0, z1))

/*
** rhadd_s64_m_tied2:
**	mov	(z[0-9]+\.d), z0\.d
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_m_tied2, svint64_t,
		z0 = svrhadd_s64_m (p0, z1, z0),
		z0 = svrhadd_m (p0, z1, z0))

/*
** rhadd_s64_m_untied:
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, z2\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_m_untied, svint64_t,
		z0 = svrhadd_s64_m (p0, z1, z2),
		z0 = svrhadd_m (p0, z1, z2))

/*
** rhadd_x0_s64_m_tied1:
**	mov	(z[0-9]+\.d), x0
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_m_tied1, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_m (p0, z0, x0),
		 z0 = svrhadd_m (p0, z0, x0))

/*
** rhadd_x0_s64_m_untied:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_m_untied, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_m (p0, z1, x0),
		 z0 = svrhadd_m (p0, z1, x0))

/*
** rhadd_11_s64_m_tied1:
**	mov	(z[0-9]+\.d), #11
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_m_tied1, svint64_t,
		z0 = svrhadd_n_s64_m (p0, z0, 11),
		z0 = svrhadd_m (p0, z0, 11))
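
/* The body check below is expected to fail (see the xfail marker): GCC
   does not currently emit the ideal mov/movprfx sequence for the untied
   immediate case.  */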

/*
** rhadd_11_s64_m_untied:: { xfail *-*-*}
**	mov	(z[0-9]+\.d), #11
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_m_untied, svint64_t,
		z0 = svrhadd_n_s64_m (p0, z1, 11),
		z0 = svrhadd_m (p0, z1, 11))

/*
** rhadd_s64_z_tied1:
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_z_tied1, svint64_t,
		z0 = svrhadd_s64_z (p0, z0, z1),
		z0 = svrhadd_z (p0, z0, z1))

/*
** rhadd_s64_z_tied2:
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_z_tied2, svint64_t,
		z0 = svrhadd_s64_z (p0, z1, z0),
		z0 = svrhadd_z (p0, z1, z0))
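
/* In the patterns below, the "(", "|" and ")" lines are alternatives
   understood by check-function-bodies: SRHADD is commutative, so the
   compiler may movprfx either input into the result register.  */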

/*
** rhadd_s64_z_untied:
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	srhadd	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0\.d, p0/z, z2\.d
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_z_untied, svint64_t,
		z0 = svrhadd_s64_z (p0, z1, z2),
		z0 = svrhadd_z (p0, z1, z2))

/*
** rhadd_x0_s64_z_tied1:
**	mov	(z[0-9]+\.d), x0
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_z_tied1, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_z (p0, z0, x0),
		 z0 = svrhadd_z (p0, z0, x0))

/*
** rhadd_x0_s64_z_untied:
**	mov	(z[0-9]+\.d), x0
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_z_untied, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_z (p0, z1, x0),
		 z0 = svrhadd_z (p0, z1, x0))

/*
** rhadd_11_s64_z_tied1:
**	mov	(z[0-9]+\.d), #11
**	movprfx	z0\.d, p0/z, z0\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_z_tied1, svint64_t,
		z0 = svrhadd_n_s64_z (p0, z0, 11),
		z0 = svrhadd_z (p0, z0, 11))

/*
** rhadd_11_s64_z_untied:
**	mov	(z[0-9]+\.d), #11
** (
**	movprfx	z0\.d, p0/z, z1\.d
**	srhadd	z0\.d, p0/m, z0\.d, \1
** |
**	movprfx	z0\.d, p0/z, \1
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_z_untied, svint64_t,
		z0 = svrhadd_n_s64_z (p0, z1, 11),
		z0 = svrhadd_z (p0, z1, 11))
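
/* With _x ("don't care") predication the inactive elements are undefined,
   so the compiler is free to overwrite either input; the tied2 case
   therefore needs no temporary move.  */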

/*
** rhadd_s64_x_tied1:
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_x_tied1, svint64_t,
		z0 = svrhadd_s64_x (p0, z0, z1),
		z0 = svrhadd_x (p0, z0, z1))

/*
** rhadd_s64_x_tied2:
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_x_tied2, svint64_t,
		z0 = svrhadd_s64_x (p0, z1, z0),
		z0 = svrhadd_x (p0, z1, z0))

/*
** rhadd_s64_x_untied:
** (
**	movprfx	z0, z1
**	srhadd	z0\.d, p0/m, z0\.d, z2\.d
** |
**	movprfx	z0, z2
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
** )
**	ret
*/
TEST_UNIFORM_Z (rhadd_s64_x_untied, svint64_t,
		z0 = svrhadd_s64_x (p0, z1, z2),
		z0 = svrhadd_x (p0, z1, z2))

/*
** rhadd_x0_s64_x_tied1:
**	mov	(z[0-9]+\.d), x0
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_x_tied1, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_x (p0, z0, x0),
		 z0 = svrhadd_x (p0, z0, x0))

/*
** rhadd_x0_s64_x_untied:
**	mov	z0\.d, x0
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_ZX (rhadd_x0_s64_x_untied, svint64_t, int64_t,
		 z0 = svrhadd_n_s64_x (p0, z1, x0),
		 z0 = svrhadd_x (p0, z1, x0))

/*
** rhadd_11_s64_x_tied1:
**	mov	(z[0-9]+\.d), #11
**	srhadd	z0\.d, p0/m, z0\.d, \1
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_x_tied1, svint64_t,
		z0 = svrhadd_n_s64_x (p0, z0, 11),
		z0 = svrhadd_x (p0, z0, 11))

/*
** rhadd_11_s64_x_untied:
**	mov	z0\.d, #11
**	srhadd	z0\.d, p0/m, z0\.d, z1\.d
**	ret
*/
TEST_UNIFORM_Z (rhadd_11_s64_x_untied, svint64_t,
		z0 = svrhadd_n_s64_x (p0, z1, 11),
		z0 = svrhadd_x (p0, z1, 11))