]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/aarch64/aarch64-builtins.c
invoke.texi (Optimization Options): Add missing @gol to the end of a line.
[thirdparty/gcc.git] / gcc / config / aarch64 / aarch64-builtins.c
CommitLineData
43e9d192 1/* Builtins' description for AArch64 SIMD architecture.
23a5b65a 2 Copyright (C) 2011-2014 Free Software Foundation, Inc.
43e9d192
IB
3 Contributed by ARM Ltd.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tree.h"
d8a2d370
DN
27#include "stor-layout.h"
28#include "stringpool.h"
29#include "calls.h"
43e9d192
IB
30#include "expr.h"
31#include "tm_p.h"
32#include "recog.h"
33#include "langhooks.h"
34#include "diagnostic-core.h"
35#include "optabs.h"
2fb9a547
AM
36#include "hash-table.h"
37#include "vec.h"
38#include "ggc.h"
39#include "basic-block.h"
40#include "tree-ssa-alias.h"
41#include "internal-fn.h"
42#include "gimple-fold.h"
43#include "tree-eh.h"
44#include "gimple-expr.h"
45#include "is-a.h"
0ac198d3 46#include "gimple.h"
5be5c238 47#include "gimple-iterator.h"
43e9d192 48
bc5e395d
JG
/* Map the lower-case mode names used in aarch64-simd-builtins.def to
   the machine_mode enumerators.  UP (x) expands "x" to "x_UP", giving
   the mode for that builtin variant.  */
#define v8qi_UP  V8QImode
#define v4hi_UP  V4HImode
#define v2si_UP  V2SImode
#define v2sf_UP  V2SFmode
#define v1df_UP  V1DFmode
#define di_UP    DImode
#define df_UP    DFmode
#define v16qi_UP V16QImode
#define v8hi_UP  V8HImode
#define v4si_UP  V4SImode
#define v4sf_UP  V4SFmode
#define v2di_UP  V2DImode
#define v2df_UP  V2DFmode
#define ti_UP    TImode
#define ei_UP    EImode
#define oi_UP    OImode
#define ci_UP    CImode
#define xi_UP    XImode
#define si_UP    SImode
#define sf_UP    SFmode
#define hi_UP    HImode
#define qi_UP    QImode

#define UP(X) X##_UP
72
b5828b4b
JG
/* Maximum number of operands (return value plus arguments) a SIMD
   builtin may have.  */
#define SIMD_MAX_BUILTIN_ARGS 5

/* Bit-flags describing the type of each operand of a builtin; one
   entry per operand is stored in a SIMD_MAX_BUILTIN_ARGS-sized array
   (see the aarch64_types_* tables below).  */
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0,
  /* unsigned T foo.  */
  qualifier_unsigned = 1 << 0,
  /* const T foo.  */
  qualifier_const = 1 << 1,
  /* T *foo.  */
  qualifier_pointer = 1 << 2,
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 1 << 3,
  qualifier_maybe_immediate = 1 << 4,
  /* void foo (...).  */
  qualifier_void = 1 << 5,
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 1 << 6,
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 1 << 7,
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = qualifier_pointer | qualifier_map_mode,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode
    = qualifier_const | qualifier_pointer | qualifier_map_mode,
  /* Polynomial types.  */
  qualifier_poly = 1 << 8
};
43e9d192
IB
104
/* Static description of one SIMD builtin: everything needed to
   register it and later expand calls to it.  */
typedef struct
{
  const char *name;		/* Builtin name suffix (mode appended).  */
  enum machine_mode mode;	/* User-facing mode, used with
				   qualifier_map_mode.  */
  const enum insn_code code;	/* RTL pattern implementing the builtin.  */
  unsigned int fcode;		/* Function code, filled in at init time.  */
  enum aarch64_type_qualifiers *qualifiers;  /* Per-operand type flags.  */
} aarch64_simd_builtin_datum;
113
096c59be
AL
/* Qualifier tables: entry 0 describes the return value, the rest the
   arguments, in the order of the RTL pattern's operands.  The TYPES_*
   macros name the shapes used by aarch64-simd-builtins.def.

   The qualifier_internal allows generation of a unary builtin from
   a pattern with a third pseudo-operand such as a match_scratch.  */
static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_internal };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
#define TYPES_CREATE (aarch64_types_unop_qualifiers)
#define TYPES_REINTERP_SS (aarch64_types_unop_qualifiers)
/* Reinterpret casts between signedness/polynomial views: the letter
   pairs read <result><argument> (S=signed, U=unsigned, P=poly).  */
static enum aarch64_type_qualifiers
aarch64_types_unop_su_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned };
#define TYPES_REINTERP_SU (aarch64_types_unop_su_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_sp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_poly };
#define TYPES_REINTERP_SP (aarch64_types_unop_sp_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_us_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_REINTERP_US (aarch64_types_unop_us_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unop_ps_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_none };
#define TYPES_REINTERP_PS (aarch64_types_unop_ps_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopv_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_none, qualifier_none };
#define TYPES_BINOPV (aarch64_types_binopv_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_none };
#define TYPES_QUADOP (aarch64_types_quadop_qualifiers)

/* Lane accessors and immediate shifts share a shape: last operand is
   an immediate lane number / shift amount.  */
static enum aarch64_type_qualifiers
aarch64_types_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETLANE (aarch64_types_getlane_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_getlane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETLANE (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_setlane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)


static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)

/* vbsl variants: first argument (the selector) is always unsigned.  */
static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_none };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
b5828b4b 253
0ddec79f
JG
/* CF<MAP> macros map a builtin name plus mode to its insn_code.  MAP
   selects the naming scheme of the RTL pattern (0 = plain
   aarch64_<name><mode>, 1..4 = <name><mode><suffix>, 10 = no suffix).  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

/* VAR<n> expands to <n> table entries for builtin N, one per listed
   mode.  Each entry initialises an aarch64_simd_builtin_datum; fcode
   is 0 here and assigned at initialisation time.  */
#define VAR1(T, N, MAP, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
342be7f7
JG
296
/* BUILTIN_<ITERATOR> macros should expand to cover the same range of
   modes as is given for each define_mode_iterator in
   config/aarch64/iterators.md.  */

#define BUILTIN_DX(T, N, MAP) \
  VAR2 (T, N, MAP, di, df)
#define BUILTIN_GPF(T, N, MAP) \
  VAR2 (T, N, MAP, sf, df)
#define BUILTIN_SDQ_I(T, N, MAP) \
  VAR4 (T, N, MAP, qi, hi, si, di)
#define BUILTIN_SD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, hi, si)
#define BUILTIN_V2F(T, N, MAP) \
  VAR2 (T, N, MAP, v2sf, v2df)
#define BUILTIN_VALL(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df)
#define BUILTIN_VALLDI(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di)
#define BUILTIN_VALLDIF(T, N, MAP) \
  VAR12 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di, df)
#define BUILTIN_VB(T, N, MAP) \
  VAR2 (T, N, MAP, v8qi, v16qi)
#define BUILTIN_VD(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v4hi, v2si, v2sf)
#define BUILTIN_VD1(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v4hi, v2si, v2sf, v1df)
#define BUILTIN_VDC(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VDIC(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VDN(T, N, MAP) \
  VAR3 (T, N, MAP, v4hi, v2si, di)
#define BUILTIN_VDQ(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDQF(T, N, MAP) \
  VAR3 (T, N, MAP, v2sf, v4sf, v2df)
#define BUILTIN_VDQF_DF(T, N, MAP) \
  VAR4 (T, N, MAP, v2sf, v4sf, v2df, df)
#define BUILTIN_VDQH(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v8hi)
#define BUILTIN_VDQHS(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQIF(T, N, MAP) \
  VAR9 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
#define BUILTIN_VDQM(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQV(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v4si)
#define BUILTIN_VDQQH(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v16qi, v4hi, v8hi)
#define BUILTIN_VDQ_BHSI(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQ_I(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDW(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_BHSI(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v2si)
#define BUILTIN_VQ(T, N, MAP) \
  VAR6 (T, N, MAP, v16qi, v8hi, v4si, v2di, v4sf, v2df)
#define BUILTIN_VQN(T, N, MAP) \
  VAR3 (T, N, MAP, v8hi, v4si, v2di)
#define BUILTIN_VQW(T, N, MAP) \
  VAR3 (T, N, MAP, v16qi, v8hi, v4si)
#define BUILTIN_VQ_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v8hi, v4si)
#define BUILTIN_VQ_S(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VSDQ_HSI(T, N, MAP) \
  VAR6 (T, N, MAP, v4hi, v8hi, v2si, v4si, hi, si)
#define BUILTIN_VSDQ_I(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
#define BUILTIN_VSDQ_I_BHSI(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
#define BUILTIN_VSDQ_I_DI(T, N, MAP) \
  VAR8 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
#define BUILTIN_VSD_HSI(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v2si, hi, si)
#define BUILTIN_VSQN_HSDI(T, N, MAP) \
  VAR6 (T, N, MAP, v8hi, v4si, v2di, hi, si, di)
#define BUILTIN_VSTRUCT(T, N, MAP) \
  VAR3 (T, N, MAP, oi, ci, xi)
43e9d192
IB
384
/* Table of all SIMD builtins, one entry per (builtin, mode) pair,
   generated by expanding the BUILTIN_* invocations in the .def file.  */
static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};
388
5d357f26
KT
/* There's only 8 CRC32 builtins.  Probably not worth their own .def file.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)

/* Static description of one CRC32 builtin.  */
typedef struct
{
  const char *name;		/* Full builtin name.  */
  enum machine_mode mode;	/* Mode of the data argument.  */
  const enum insn_code icode;	/* RTL pattern implementing it.  */
  unsigned int fcode;		/* Function code.  */
} aarch64_crc_builtin_datum;
407
/* First expansion of CRC32_BUILTIN: yield enumerator names for the
   function-code enum below.  */
#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

/* Re-purpose VAR1 (previously a table initialiser) to yield
   enumerator names, so re-including the .def file enumerates the
   same builtins in the same order as aarch64_simd_builtin_data.  */
#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

/* Function codes for all AArch64 builtins.  Order matters: SIMD
   fcodes are assigned sequentially from AARCH64_SIMD_BUILTIN_BASE + 1
   during initialisation.  */
enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			     + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  AARCH64_BUILTIN_MAX
};
433
5d357f26
KT
/* Second expansion of CRC32_BUILTIN: yield full table entries for the
   CRC32 builtin data table.  */
#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};

#undef CRC32_BUILTIN
443
119103ca
JG
/* FUNCTION_DECL for each registered builtin, indexed by fcode;
   GC-rooted so the decls survive collection.  */
static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];
445
43e9d192
IB
/* Number of distinct element types for D (64-bit) and Q (128-bit)
   vector registers.  */
#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6
448
b5828b4b
JG
/* Return a tree for a signed or unsigned argument of either
   the mode specified by MODE, or the inner mode of MODE.
   Scalar types are built lazily and cached in function-local statics,
   one cache slot per (mode, signed/unsigned/poly) combination, so
   repeated requests return the identical tree node.  */
tree
aarch64_build_scalar_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
/* The integer modes for which we keep cached type nodes.  */
#undef INT_TYPES
#define INT_TYPES \
  AARCH64_TYPE_BUILDER (QI) \
  AARCH64_TYPE_BUILDER (HI) \
  AARCH64_TYPE_BUILDER (SI) \
  AARCH64_TYPE_BUILDER (DI) \
  AARCH64_TYPE_BUILDER (EI) \
  AARCH64_TYPE_BUILDER (OI) \
  AARCH64_TYPE_BUILDER (CI) \
  AARCH64_TYPE_BUILDER (XI) \
  AARCH64_TYPE_BUILDER (TI) \

/* Statically declare all the possible types we might need.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_p = NULL; \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL;

  INT_TYPES

  static tree float_aarch64_type_node = NULL;
  static tree double_aarch64_type_node = NULL;

  gcc_assert (!VECTOR_MODE_P (mode));

/* If we've already initialised this type, don't initialise it again,
   otherwise ask for a new type of the correct size.
   (Poly types are represented as unsigned integers of the mode's
   precision.)  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return (X##_aarch64_type_node_u \
	      ? X##_aarch64_type_node_u \
	      : X##_aarch64_type_node_u \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else if (poly_p) \
      return (X##_aarch64_type_node_p \
	      ? X##_aarch64_type_node_p \
	      : X##_aarch64_type_node_p \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else \
      return (X##_aarch64_type_node_s \
	      ? X##_aarch64_type_node_s \
	      : X##_aarch64_type_node_s \
		  = make_signed_type (GET_MODE_PRECISION (mode))); \
    break;

  switch (mode)
    {
      INT_TYPES
    case SFmode:
      if (!float_aarch64_type_node)
	{
	  float_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (float_aarch64_type_node) = FLOAT_TYPE_SIZE;
	  layout_type (float_aarch64_type_node);
	}
      return float_aarch64_type_node;
      break;
    case DFmode:
      if (!double_aarch64_type_node)
	{
	  double_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (double_aarch64_type_node) = DOUBLE_TYPE_SIZE;
	  layout_type (double_aarch64_type_node);
	}
      return double_aarch64_type_node;
      break;
    default:
      gcc_unreachable ();
    }
}
529
/* Return a (possibly cached) vector type tree for MODE with the given
   signedness/polynomial-ness.  The element type is built via
   aarch64_build_scalar_type on the inner mode.  Modes listed in
   VECTOR_TYPES get one cache slot per variant; any other vector mode
   falls through to the default case and is built fresh each call.  */
tree
aarch64_build_vector_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
  tree eltype;

/* The vector modes for which we keep cached type nodes.  */
#define VECTOR_TYPES \
  AARCH64_TYPE_BUILDER (V16QI) \
  AARCH64_TYPE_BUILDER (V8HI) \
  AARCH64_TYPE_BUILDER (V4SI) \
  AARCH64_TYPE_BUILDER (V2DI) \
  AARCH64_TYPE_BUILDER (V8QI) \
  AARCH64_TYPE_BUILDER (V4HI) \
  AARCH64_TYPE_BUILDER (V2SI) \
  \
  AARCH64_TYPE_BUILDER (V4SF) \
  AARCH64_TYPE_BUILDER (V2DF) \
  AARCH64_TYPE_BUILDER (V2SF) \
/* Declare our "cache" of values.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL; \
  static tree X##_aarch64_type_node_p = NULL;

  VECTOR_TYPES

  gcc_assert (VECTOR_MODE_P (mode));

/* Build (or fetch from cache) the variant selected by unsigned_p /
   poly_p for each cached mode.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return X##_aarch64_type_node_u \
	     ? X##_aarch64_type_node_u \
	     : X##_aarch64_type_node_u \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p, poly_p), mode); \
    else if (poly_p) \
      return X##_aarch64_type_node_p \
	     ? X##_aarch64_type_node_p \
	     : X##_aarch64_type_node_p \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p, poly_p), mode); \
    else \
      return X##_aarch64_type_node_s \
	     ? X##_aarch64_type_node_s \
	     : X##_aarch64_type_node_s \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p, poly_p), mode); \
    break;

  switch (mode)
    {
    default:
      eltype = aarch64_build_scalar_type (GET_MODE_INNER (mode),
					  unsigned_p, poly_p);
      return build_vector_type_for_mode (eltype, mode);
      break;
    VECTOR_TYPES
    }
}
596
597tree
6db1ec94 598aarch64_build_type (enum machine_mode mode, bool unsigned_p, bool poly_p)
b5828b4b
JG
599{
600 if (VECTOR_MODE_P (mode))
6db1ec94 601 return aarch64_build_vector_type (mode, unsigned_p, poly_p);
b5828b4b 602 else
6db1ec94
JG
603 return aarch64_build_scalar_type (mode, unsigned_p, poly_p);
604}
605
/* Convenience wrapper: signed (non-poly) type for MODE.  */
tree
aarch64_build_signed_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, false);
}
611
/* Convenience wrapper: unsigned type for MODE.  */
tree
aarch64_build_unsigned_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, true, false);
}
617
/* Convenience wrapper: polynomial type for MODE.  */
tree
aarch64_build_poly_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, true);
}
623
af55e82d 624static void
342be7f7 625aarch64_init_simd_builtins (void)
43e9d192 626{
342be7f7 627 unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;
43e9d192 628
6db1ec94
JG
629 /* Signed scalar type nodes. */
630 tree aarch64_simd_intQI_type_node = aarch64_build_signed_type (QImode);
631 tree aarch64_simd_intHI_type_node = aarch64_build_signed_type (HImode);
632 tree aarch64_simd_intSI_type_node = aarch64_build_signed_type (SImode);
633 tree aarch64_simd_intDI_type_node = aarch64_build_signed_type (DImode);
634 tree aarch64_simd_intTI_type_node = aarch64_build_signed_type (TImode);
635 tree aarch64_simd_intEI_type_node = aarch64_build_signed_type (EImode);
636 tree aarch64_simd_intOI_type_node = aarch64_build_signed_type (OImode);
637 tree aarch64_simd_intCI_type_node = aarch64_build_signed_type (CImode);
638 tree aarch64_simd_intXI_type_node = aarch64_build_signed_type (XImode);
639
640 /* Unsigned scalar type nodes. */
641 tree aarch64_simd_intUQI_type_node = aarch64_build_unsigned_type (QImode);
642 tree aarch64_simd_intUHI_type_node = aarch64_build_unsigned_type (HImode);
643 tree aarch64_simd_intUSI_type_node = aarch64_build_unsigned_type (SImode);
644 tree aarch64_simd_intUDI_type_node = aarch64_build_unsigned_type (DImode);
645
646 /* Poly scalar type nodes. */
647 tree aarch64_simd_polyQI_type_node = aarch64_build_poly_type (QImode);
648 tree aarch64_simd_polyHI_type_node = aarch64_build_poly_type (HImode);
7baa225d
TB
649 tree aarch64_simd_polyDI_type_node = aarch64_build_poly_type (DImode);
650 tree aarch64_simd_polyTI_type_node = aarch64_build_poly_type (TImode);
b5828b4b
JG
651
652 /* Float type nodes. */
6db1ec94
JG
653 tree aarch64_simd_float_type_node = aarch64_build_signed_type (SFmode);
654 tree aarch64_simd_double_type_node = aarch64_build_signed_type (DFmode);
43e9d192
IB
655
656 /* Define typedefs which exactly correspond to the modes we are basing vector
657 types on. If you change these names you'll need to change
658 the table used by aarch64_mangle_type too. */
659 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intQI_type_node,
660 "__builtin_aarch64_simd_qi");
661 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intHI_type_node,
662 "__builtin_aarch64_simd_hi");
663 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intSI_type_node,
664 "__builtin_aarch64_simd_si");
665 (*lang_hooks.types.register_builtin_type) (aarch64_simd_float_type_node,
666 "__builtin_aarch64_simd_sf");
667 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intDI_type_node,
668 "__builtin_aarch64_simd_di");
669 (*lang_hooks.types.register_builtin_type) (aarch64_simd_double_type_node,
670 "__builtin_aarch64_simd_df");
671 (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyQI_type_node,
672 "__builtin_aarch64_simd_poly8");
673 (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyHI_type_node,
674 "__builtin_aarch64_simd_poly16");
7baa225d
TB
675 (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyDI_type_node,
676 "__builtin_aarch64_simd_poly64");
677 (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyTI_type_node,
678 "__builtin_aarch64_simd_poly128");
b5828b4b 679 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intTI_type_node,
43e9d192 680 "__builtin_aarch64_simd_ti");
b5828b4b 681 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intEI_type_node,
43e9d192 682 "__builtin_aarch64_simd_ei");
b5828b4b 683 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intOI_type_node,
43e9d192 684 "__builtin_aarch64_simd_oi");
b5828b4b 685 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intCI_type_node,
43e9d192 686 "__builtin_aarch64_simd_ci");
b5828b4b 687 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intXI_type_node,
43e9d192
IB
688 "__builtin_aarch64_simd_xi");
689
b5828b4b
JG
690 /* Unsigned integer types for various mode sizes. */
691 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUQI_type_node,
692 "__builtin_aarch64_simd_uqi");
693 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUHI_type_node,
694 "__builtin_aarch64_simd_uhi");
695 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUSI_type_node,
696 "__builtin_aarch64_simd_usi");
697 (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUDI_type_node,
698 "__builtin_aarch64_simd_udi");
43e9d192 699
342be7f7 700 for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
43e9d192 701 {
b5828b4b
JG
702 bool print_type_signature_p = false;
703 char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
43e9d192 704 aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
342be7f7
JG
705 char namebuf[60];
706 tree ftype = NULL;
119103ca 707 tree fndecl = NULL;
342be7f7 708
342be7f7 709 d->fcode = fcode;
43e9d192 710
b5828b4b
JG
711 /* We must track two variables here. op_num is
712 the operand number as in the RTL pattern. This is
713 required to access the mode (e.g. V4SF mode) of the
714 argument, from which the base type can be derived.
715 arg_num is an index in to the qualifiers data, which
716 gives qualifiers to the type (e.g. const unsigned).
717 The reason these two variables may differ by one is the
718 void return type. While all return types take the 0th entry
719 in the qualifiers array, there is no operand for them in the
720 RTL pattern. */
721 int op_num = insn_data[d->code].n_operands - 1;
722 int arg_num = d->qualifiers[0] & qualifier_void
723 ? op_num + 1
724 : op_num;
725 tree return_type = void_type_node, args = void_list_node;
726 tree eltype;
727
728 /* Build a function type directly from the insn_data for this
729 builtin. The build_function_type () function takes care of
730 removing duplicates for us. */
731 for (; op_num >= 0; arg_num--, op_num--)
43e9d192 732 {
b5828b4b
JG
733 enum machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
734 enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];
43e9d192 735
b5828b4b
JG
736 if (qualifiers & qualifier_unsigned)
737 {
738 type_signature[arg_num] = 'u';
739 print_type_signature_p = true;
740 }
6db1ec94
JG
741 else if (qualifiers & qualifier_poly)
742 {
743 type_signature[arg_num] = 'p';
744 print_type_signature_p = true;
745 }
b5828b4b
JG
746 else
747 type_signature[arg_num] = 's';
748
749 /* Skip an internal operand for vget_{low, high}. */
750 if (qualifiers & qualifier_internal)
751 continue;
752
753 /* Some builtins have different user-facing types
754 for certain arguments, encoded in d->mode. */
755 if (qualifiers & qualifier_map_mode)
bc5e395d 756 op_mode = d->mode;
b5828b4b
JG
757
758 /* For pointers, we want a pointer to the basic type
759 of the vector. */
760 if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
761 op_mode = GET_MODE_INNER (op_mode);
762
763 eltype = aarch64_build_type (op_mode,
6db1ec94
JG
764 qualifiers & qualifier_unsigned,
765 qualifiers & qualifier_poly);
b5828b4b
JG
766
767 /* Add qualifiers. */
768 if (qualifiers & qualifier_const)
769 eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);
770
771 if (qualifiers & qualifier_pointer)
772 eltype = build_pointer_type (eltype);
773
774 /* If we have reached arg_num == 0, we are at a non-void
775 return type. Otherwise, we are still processing
776 arguments. */
777 if (arg_num == 0)
778 return_type = eltype;
779 else
780 args = tree_cons (NULL_TREE, eltype, args);
781 }
342be7f7 782
b5828b4b 783 ftype = build_function_type (return_type, args);
43e9d192 784
342be7f7 785 gcc_assert (ftype != NULL);
43e9d192 786
b5828b4b 787 if (print_type_signature_p)
bc5e395d
JG
788 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
789 d->name, type_signature);
b5828b4b 790 else
bc5e395d
JG
791 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
792 d->name);
43e9d192 793
119103ca
JG
794 fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
795 NULL, NULL_TREE);
796 aarch64_builtin_decls[fcode] = fndecl;
43e9d192
IB
797 }
798}
799
5d357f26
KT
/* Register the CRC32 builtins.  Each takes the running unsigned-int
   CRC plus a data value of the table-specified mode and returns the
   updated unsigned-int CRC.  */
static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_build_unsigned_type (SImode);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_build_unsigned_type (d->mode);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
      tree fndecl = add_builtin_function (d->name, ftype, d->fcode,
					  BUILT_IN_MD, NULL, NULL_TREE);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
817
342be7f7
JG
818void
819aarch64_init_builtins (void)
43e9d192 820{
aa87aced
KV
821 tree ftype_set_fpr
822 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
823 tree ftype_get_fpr
824 = build_function_type_list (unsigned_type_node, NULL);
825
826 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
827 = add_builtin_function ("__builtin_aarch64_get_fpcr", ftype_get_fpr,
828 AARCH64_BUILTIN_GET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
829 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
830 = add_builtin_function ("__builtin_aarch64_set_fpcr", ftype_set_fpr,
831 AARCH64_BUILTIN_SET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
832 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
833 = add_builtin_function ("__builtin_aarch64_get_fpsr", ftype_get_fpr,
834 AARCH64_BUILTIN_GET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
835 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
836 = add_builtin_function ("__builtin_aarch64_set_fpsr", ftype_set_fpr,
837 AARCH64_BUILTIN_SET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
838
342be7f7
JG
839 if (TARGET_SIMD)
840 aarch64_init_simd_builtins ();
5d357f26
KT
841 if (TARGET_CRC32)
842 aarch64_init_crc32_builtins ();
43e9d192
IB
843}
844
119103ca
JG
845tree
846aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
847{
848 if (code >= AARCH64_BUILTIN_MAX)
849 return error_mark_node;
850
851 return aarch64_builtin_decls[code];
852}
853
43e9d192
IB
/* Classification of an AdvSIMD builtin operand, used by
   aarch64_simd_expand_args to decide how each argument is expanded.  */
854typedef enum
855{
856  SIMD_ARG_COPY_TO_REG,	/* Operand must be forced into a register.  */
857  SIMD_ARG_CONSTANT,	/* Operand must satisfy its constant predicate.  */
858  SIMD_ARG_STOP	/* Sentinel terminating the argument list.  */
859} builtin_simd_arg;
860
43e9d192
IB
861static rtx
862aarch64_simd_expand_args (rtx target, int icode, int have_retval,
8d3d350a 863 tree exp, builtin_simd_arg *args)
43e9d192 864{
43e9d192
IB
865 rtx pat;
866 tree arg[SIMD_MAX_BUILTIN_ARGS];
867 rtx op[SIMD_MAX_BUILTIN_ARGS];
868 enum machine_mode tmode = insn_data[icode].operand[0].mode;
869 enum machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
870 int argc = 0;
871
872 if (have_retval
873 && (!target
874 || GET_MODE (target) != tmode
875 || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
876 target = gen_reg_rtx (tmode);
877
43e9d192
IB
878 for (;;)
879 {
8d3d350a 880 builtin_simd_arg thisarg = args[argc];
43e9d192
IB
881
882 if (thisarg == SIMD_ARG_STOP)
883 break;
884 else
885 {
886 arg[argc] = CALL_EXPR_ARG (exp, argc);
887 op[argc] = expand_normal (arg[argc]);
888 mode[argc] = insn_data[icode].operand[argc + have_retval].mode;
889
890 switch (thisarg)
891 {
892 case SIMD_ARG_COPY_TO_REG:
2888c331
YZ
893 if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
894 op[argc] = convert_memory_address (Pmode, op[argc]);
43e9d192
IB
895 /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
896 if (!(*insn_data[icode].operand[argc + have_retval].predicate)
897 (op[argc], mode[argc]))
898 op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
899 break;
900
901 case SIMD_ARG_CONSTANT:
902 if (!(*insn_data[icode].operand[argc + have_retval].predicate)
903 (op[argc], mode[argc]))
904 error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
905 "expected %<const int%>", argc + 1);
906 break;
907
908 case SIMD_ARG_STOP:
909 gcc_unreachable ();
910 }
911
912 argc++;
913 }
914 }
915
43e9d192
IB
916 if (have_retval)
917 switch (argc)
918 {
919 case 1:
920 pat = GEN_FCN (icode) (target, op[0]);
921 break;
922
923 case 2:
924 pat = GEN_FCN (icode) (target, op[0], op[1]);
925 break;
926
927 case 3:
928 pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
929 break;
930
931 case 4:
932 pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
933 break;
934
935 case 5:
936 pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
937 break;
938
939 default:
940 gcc_unreachable ();
941 }
942 else
943 switch (argc)
944 {
945 case 1:
946 pat = GEN_FCN (icode) (op[0]);
947 break;
948
949 case 2:
950 pat = GEN_FCN (icode) (op[0], op[1]);
951 break;
952
953 case 3:
954 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
955 break;
956
957 case 4:
958 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
959 break;
960
961 case 5:
962 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
963 break;
964
965 default:
966 gcc_unreachable ();
967 }
968
969 if (!pat)
970 return 0;
971
972 emit_insn (pat);
973
974 return target;
975}
976
977/* Expand an AArch64 AdvSIMD builtin(intrinsic). */
978rtx
979aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
980{
342be7f7
JG
981 aarch64_simd_builtin_datum *d =
982 &aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
342be7f7 983 enum insn_code icode = d->code;
b5828b4b
JG
984 builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
985 int num_args = insn_data[d->code].n_operands;
986 int is_void = 0;
987 int k;
43e9d192 988
b5828b4b 989 is_void = !!(d->qualifiers[0] & qualifier_void);
43e9d192 990
b5828b4b
JG
991 num_args += is_void;
992
993 for (k = 1; k < num_args; k++)
994 {
995 /* We have four arrays of data, each indexed in a different fashion.
996 qualifiers - element 0 always describes the function return type.
997 operands - element 0 is either the operand for return value (if
998 the function has a non-void return type) or the operand for the
999 first argument.
1000 expr_args - element 0 always holds the first argument.
1001 args - element 0 is always used for the return type. */
1002 int qualifiers_k = k;
1003 int operands_k = k - is_void;
1004 int expr_args_k = k - 1;
1005
1006 if (d->qualifiers[qualifiers_k] & qualifier_immediate)
1007 args[k] = SIMD_ARG_CONSTANT;
1008 else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
1009 {
1010 rtx arg
1011 = expand_normal (CALL_EXPR_ARG (exp,
1012 (expr_args_k)));
1013 /* Handle constants only if the predicate allows it. */
1014 bool op_const_int_p =
1015 (CONST_INT_P (arg)
1016 && (*insn_data[icode].operand[operands_k].predicate)
1017 (arg, insn_data[icode].operand[operands_k].mode));
1018 args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
1019 }
1020 else
1021 args[k] = SIMD_ARG_COPY_TO_REG;
43e9d192 1022
43e9d192 1023 }
b5828b4b
JG
1024 args[k] = SIMD_ARG_STOP;
1025
1026 /* The interface to aarch64_simd_expand_args expects a 0 if
1027 the function is void, and a 1 if it is not. */
1028 return aarch64_simd_expand_args
8d3d350a 1029 (target, icode, !is_void, exp, &args[1]);
43e9d192 1030}
342be7f7 1031
5d357f26
KT
1032rtx
1033aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
1034{
1035 rtx pat;
1036 aarch64_crc_builtin_datum *d
1037 = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
1038 enum insn_code icode = d->icode;
1039 tree arg0 = CALL_EXPR_ARG (exp, 0);
1040 tree arg1 = CALL_EXPR_ARG (exp, 1);
1041 rtx op0 = expand_normal (arg0);
1042 rtx op1 = expand_normal (arg1);
1043 enum machine_mode tmode = insn_data[icode].operand[0].mode;
1044 enum machine_mode mode0 = insn_data[icode].operand[1].mode;
1045 enum machine_mode mode1 = insn_data[icode].operand[2].mode;
1046
1047 if (! target
1048 || GET_MODE (target) != tmode
1049 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
1050 target = gen_reg_rtx (tmode);
1051
1052 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
1053 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
1054
1055 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
1056 op0 = copy_to_mode_reg (mode0, op0);
1057 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
1058 op1 = copy_to_mode_reg (mode1, op1);
1059
1060 pat = GEN_FCN (icode) (target, op0, op1);
1061 if (! pat)
1062 return 0;
1063 emit_insn (pat);
1064 return target;
1065}
1066
342be7f7
JG
1067/* Expand an expression EXP that calls a built-in function,
1068 with result going to TARGET if that's convenient. */
1069rtx
1070aarch64_expand_builtin (tree exp,
1071 rtx target,
1072 rtx subtarget ATTRIBUTE_UNUSED,
1073 enum machine_mode mode ATTRIBUTE_UNUSED,
1074 int ignore ATTRIBUTE_UNUSED)
1075{
1076 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
1077 int fcode = DECL_FUNCTION_CODE (fndecl);
aa87aced
KV
1078 int icode;
1079 rtx pat, op0;
1080 tree arg0;
1081
1082 switch (fcode)
1083 {
1084 case AARCH64_BUILTIN_GET_FPCR:
1085 case AARCH64_BUILTIN_SET_FPCR:
1086 case AARCH64_BUILTIN_GET_FPSR:
1087 case AARCH64_BUILTIN_SET_FPSR:
1088 if ((fcode == AARCH64_BUILTIN_GET_FPCR)
1089 || (fcode == AARCH64_BUILTIN_GET_FPSR))
1090 {
1091 icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
1092 CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
1093 target = gen_reg_rtx (SImode);
1094 pat = GEN_FCN (icode) (target);
1095 }
1096 else
1097 {
1098 target = NULL_RTX;
1099 icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
1100 CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
1101 arg0 = CALL_EXPR_ARG (exp, 0);
1102 op0 = expand_normal (arg0);
1103 pat = GEN_FCN (icode) (op0);
1104 }
1105 emit_insn (pat);
1106 return target;
1107 }
342be7f7 1108
5d357f26 1109 if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
342be7f7 1110 return aarch64_simd_expand_builtin (fcode, exp, target);
5d357f26
KT
1111 else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
1112 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7
JG
1113
1114 return NULL_RTX;
1115}
42fc9a7f
JG
1116
/* Return the decl of the AArch64 SIMD builtin that implements the
   vectorized form of the standard builtin FNDECL for vector types
   TYPE_OUT/TYPE_IN, or NULL_TREE when no suitable variant exists.
   AARCH64_CHECK_BUILTIN_MODE is #undef'd and redefined before each
   group of cases below to describe the in/out element modes and lane
   counts that group requires, so case order here is significant.  */
1117tree
1118aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
1119{
1120  enum machine_mode in_mode, out_mode;
1121  int in_n, out_n;
1122
1123  if (TREE_CODE (type_out) != VECTOR_TYPE
1124      || TREE_CODE (type_in) != VECTOR_TYPE)
1125    return NULL_TREE;
1126
1127  out_mode = TYPE_MODE (TREE_TYPE (type_out));
1128  out_n = TYPE_VECTOR_SUBPARTS (type_out);
1129  in_mode = TYPE_MODE (TREE_TYPE (type_in));
1130  in_n = TYPE_VECTOR_SUBPARTS (type_in);
1131
1132#undef AARCH64_CHECK_BUILTIN_MODE
1133#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
1134#define AARCH64_FIND_FRINT_VARIANT(N) \
1135  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
e993fea1 1136   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
42fc9a7f 1137   : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
e993fea1 1138      ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
42fc9a7f 1139      : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
e993fea1 1140	 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
42fc9a7f
JG
1141	 : NULL_TREE)))
1142  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1143    {
1144      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
1145      switch (fn)
1146	{
/* Rounding/sqrt variants: float in, float out, same mode and width.  */
1147#undef AARCH64_CHECK_BUILTIN_MODE
1148#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1149  (out_mode == N##Fmode && out_n == C \
1150   && in_mode == N##Fmode && in_n == C)
1151	case BUILT_IN_FLOOR:
1152	case BUILT_IN_FLOORF:
0659ce6f 1153	  return AARCH64_FIND_FRINT_VARIANT (floor);
42fc9a7f
JG
1154	case BUILT_IN_CEIL:
1155	case BUILT_IN_CEILF:
0659ce6f 1156	  return AARCH64_FIND_FRINT_VARIANT (ceil);
42fc9a7f
JG
1157	case BUILT_IN_TRUNC:
1158	case BUILT_IN_TRUNCF:
0659ce6f 1159	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
42fc9a7f
JG
1160	case BUILT_IN_ROUND:
1161	case BUILT_IN_ROUNDF:
0659ce6f 1162	  return AARCH64_FIND_FRINT_VARIANT (round);
42fc9a7f
JG
1163	case BUILT_IN_NEARBYINT:
1164	case BUILT_IN_NEARBYINTF:
0659ce6f 1165	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
4dcd1054
JG
1166	case BUILT_IN_SQRT:
1167	case BUILT_IN_SQRTF:
1168	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
42fc9a7f 1169#undef AARCH64_CHECK_BUILTIN_MODE
b5574232
VP
/* CLZ: integer in, SImode out, matching lane count.  */
1170#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1171  (out_mode == SImode && out_n == C \
1172   && in_mode == N##Imode && in_n == C)
1173	case BUILT_IN_CLZ:
1174	  {
1175	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1176	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
b5574232
VP
1177	    return NULL_TREE;
1178	  }
1179#undef AARCH64_CHECK_BUILTIN_MODE
42fc9a7f
JG
/* Float-to-int rounding conversions: float in, integer out.  */
1180#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1181  (out_mode == N##Imode && out_n == C \
1182   && in_mode == N##Fmode && in_n == C)
1183	case BUILT_IN_LFLOOR:
bf0f324e
YZ
1184	case BUILT_IN_LFLOORF:
1185	case BUILT_IN_LLFLOOR:
0386b123 1186	case BUILT_IN_IFLOORF:
ce966824 1187	  {
e993fea1 1188	    enum aarch64_builtins builtin;
ce966824 1189	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
e993fea1 1190	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
ce966824 1191	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1192	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
ce966824 1193	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
e993fea1
JG
1194	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
1195	    else
1196	      return NULL_TREE;
1197
1198	    return aarch64_builtin_decls[builtin];
ce966824 1199	  }
42fc9a7f 1200	case BUILT_IN_LCEIL:
bf0f324e
YZ
1201	case BUILT_IN_LCEILF:
1202	case BUILT_IN_LLCEIL:
0386b123 1203	case BUILT_IN_ICEILF:
ce966824 1204	  {
e993fea1 1205	    enum aarch64_builtins builtin;
ce966824 1206	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
e993fea1 1207	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
ce966824 1208	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1209	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
ce966824 1210	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
e993fea1
JG
1211	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
1212	    else
1213	      return NULL_TREE;
1214
1215	    return aarch64_builtin_decls[builtin];
ce966824 1216	  }
0386b123
JG
1217	case BUILT_IN_LROUND:
1218	case BUILT_IN_IROUNDF:
1219	  {
e993fea1 1220	    enum aarch64_builtins builtin;
0386b123 1221	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
e993fea1 1222	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
0386b123 1223	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1224	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
0386b123 1225	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
e993fea1
JG
1226	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
1227	    else
1228	      return NULL_TREE;
1229
1230	    return aarch64_builtin_decls[builtin];
0386b123 1231	  }
c7f28cd5
KT
1232	case BUILT_IN_BSWAP16:
/* Byte swaps: integer in, integer out, same mode and width.  */
1233#undef AARCH64_CHECK_BUILTIN_MODE
1234#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1235  (out_mode == N##Imode && out_n == C \
1236   && in_mode == N##Imode && in_n == C)
1237	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
1238	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
1239	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
1240	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
1241	  else
1242	    return NULL_TREE;
1243	case BUILT_IN_BSWAP32:
1244	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
1245	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
1246	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
1247	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
1248	  else
1249	    return NULL_TREE;
1250	case BUILT_IN_BSWAP64:
1251	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
1252	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
1253	  else
1254	    return NULL_TREE;
42fc9a7f
JG
1255	default:
1256	  return NULL_TREE;
1257	}
1258    }
1259
1260  return NULL_TREE;
1261}
0ac198d3
JG
1262
/* Redefine VAR1 to expand to a 'case' label for one AdvSIMD builtin
   enum value, so the BUILTIN_*/VAR* iterator macros used below
   generate the case lists of the folding switches.  */
1263#undef VAR1
1264#define VAR1(T, N, MAP, A) \
e993fea1 1265  case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 1266
9697e620
JG
/* Fold a call to an AArch64 builtin into a generic tree expression
   where one exists: vector abs -> ABS_EXPR, the reinterpret family ->
   VIEW_CONVERT_EXPR, and int-to-float conversions -> FLOAT_EXPR.
   Returns the folded tree, or NULL_TREE when no folding applies.
   The BUILTIN_* / VAR1 macros expand to the matching case labels.  */
1267tree
1268aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
1269		      bool ignore ATTRIBUTE_UNUSED)
1270{
1271  int fcode = DECL_FUNCTION_CODE (fndecl);
  /* TYPE is the return type of the builtin, used as the folded
     expression's type.  */
1272  tree type = TREE_TYPE (TREE_TYPE (fndecl));
1273
1274  switch (fcode)
1275    {
d05d0709 1276      BUILTIN_VALLDI (UNOP, abs, 2)
9697e620
JG
1277	return fold_build1 (ABS_EXPR, type, args[0]);
1278	break;
c6a29a09
AL
1279      VAR1 (REINTERP_SS, reinterpretdi, 0, v1df)
1280      VAR1 (REINTERP_SS, reinterpretv8qi, 0, v1df)
1281      VAR1 (REINTERP_SS, reinterpretv4hi, 0, v1df)
1282      VAR1 (REINTERP_SS, reinterpretv2si, 0, v1df)
1283      VAR1 (REINTERP_SS, reinterpretv2sf, 0, v1df)
1284      BUILTIN_VD (REINTERP_SS, reinterpretv1df, 0)
1285      BUILTIN_VD (REINTERP_SU, reinterpretv1df, 0)
1286      VAR1 (REINTERP_US, reinterpretdi, 0, v1df)
1287      VAR1 (REINTERP_US, reinterpretv8qi, 0, v1df)
1288      VAR1 (REINTERP_US, reinterpretv4hi, 0, v1df)
1289      VAR1 (REINTERP_US, reinterpretv2si, 0, v1df)
1290      VAR1 (REINTERP_US, reinterpretv2sf, 0, v1df)
1291      BUILTIN_VD (REINTERP_SP, reinterpretv1df, 0)
1292      VAR1 (REINTERP_PS, reinterpretdi, 0, v1df)
1293      VAR1 (REINTERP_PS, reinterpretv8qi, 0, v1df)
1294      VAR1 (REINTERP_PS, reinterpretv4hi, 0, v1df)
1295      VAR1 (REINTERP_PS, reinterpretv2sf, 0, v1df)
bcd48995 1296	return fold_build1 (VIEW_CONVERT_EXPR, type, args[0]);
1709ff9b
JG
1297      VAR1 (UNOP, floatv2si, 2, v2sf)
1298      VAR1 (UNOP, floatv4si, 2, v4sf)
1299      VAR1 (UNOP, floatv2di, 2, v2df)
1300	return fold_build1 (FLOAT_EXPR, type, args[0]);
9697e620
JG
1301    default:
1302      break;
1303    }
1304
1305  return NULL_TREE;
1306}
1307
0ac198d3
JG
/* Fold reduction builtins at the gimple level into the corresponding
   REDUC_{PLUS,MAX,MIN}_EXPR assignment.  GSI points at the call
   statement; returns true when the statement was replaced.  The
   BUILTIN_* macros expand to the case labels for each mode variant.  */
1308bool
1309aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
1310{
1311  bool changed = false;
1312  gimple stmt = gsi_stmt (*gsi);
1313  tree call = gimple_call_fn (stmt);
1314  tree fndecl;
1315  gimple new_stmt = NULL;
22756ccf
JG
1316
1317  /* The operations folded below are reduction operations. These are
1318     defined to leave their result in the 0'th element (from the perspective
1319     of GCC). The architectural instruction we are folding will leave the
1320     result in the 0'th element (from the perspective of the architecture).
1321     For big-endian systems, these perspectives are not aligned.
1322
1323     It is therefore wrong to perform this fold on big-endian. There
1324     are some tricks we could play with shuffling, but the mid-end is
1325     inconsistent in the way it treats reduction operations, so we will
1326     end up in difficulty. Until we fix the ambiguity - just bail out. */
1327  if (BYTES_BIG_ENDIAN)
1328    return false;
1329
0ac198d3
JG
1330  if (call)
1331    {
1332      fndecl = gimple_call_fndecl (stmt);
1333      if (fndecl)
1334	{
1335	  int fcode = DECL_FUNCTION_CODE (fndecl);
1336	  int nargs = gimple_call_num_args (stmt);
	  /* Fall back to &error_mark_node for a zero-argument call so
	     args[0] below is never a wild pointer.  */
1337	  tree *args = (nargs > 0
1338			? gimple_call_arg_ptr (stmt, 0)
1339			: &error_mark_node);
1340
1341	  switch (fcode)
1342	    {
36054fab 1343	      BUILTIN_VALL (UNOP, reduc_splus_, 10)
0ac198d3
JG
1344		new_stmt = gimple_build_assign_with_ops (
1345						REDUC_PLUS_EXPR,
1346						gimple_call_lhs (stmt),
1347						args[0],
1348						NULL_TREE);
1349		break;
1598945b
JG
1350	      BUILTIN_VDQIF (UNOP, reduc_smax_, 10)
1351		new_stmt = gimple_build_assign_with_ops (
1352						REDUC_MAX_EXPR,
1353						gimple_call_lhs (stmt),
1354						args[0],
1355						NULL_TREE);
1356		break;
1357	      BUILTIN_VDQIF (UNOP, reduc_smin_, 10)
1358		new_stmt = gimple_build_assign_with_ops (
1359						REDUC_MIN_EXPR,
1360						gimple_call_lhs (stmt),
1361						args[0],
1362						NULL_TREE);
1363		break;
1364
0ac198d3
JG
1365	    default:
1366	      break;
1367	    }
1368	}
1369    }
1370
1371  if (new_stmt)
1372    {
1373      gsi_replace (gsi, new_stmt, true);
1374      changed = true;
1375    }
1376
1377  return changed;
1378}
1379
aa87aced
KV
/* Build the trees used to save (*HOLD), clear (*CLEAR) and
   restore-and-raise (*UPDATE) the floating-point environment around a
   C11 atomic floating-point compound assignment, using the
   __builtin_aarch64_{get,set}_{fpcr,fpsr} builtins registered in
   aarch64_init_builtins.  */
1380void
1381aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
1382{
  /* FPSR exception-flag bits; the matching trap-enable bits in the
     FPCR sit AARCH64_FE_EXCEPT_SHIFT positions higher.  */
1383  const unsigned AARCH64_FE_INVALID = 1;
1384  const unsigned AARCH64_FE_DIVBYZERO = 2;
1385  const unsigned AARCH64_FE_OVERFLOW = 4;
1386  const unsigned AARCH64_FE_UNDERFLOW = 8;
1387  const unsigned AARCH64_FE_INEXACT = 16;
1388  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
1389						      | AARCH64_FE_DIVBYZERO
1390						      | AARCH64_FE_OVERFLOW
1391						      | AARCH64_FE_UNDERFLOW
1392						      | AARCH64_FE_INEXACT);
1393  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
1394  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
1395  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
1396  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
1397  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;
1398
1399  /* Generate the equivalence of :
1400       unsigned int fenv_cr;
1401       fenv_cr = __builtin_aarch64_get_fpcr ();
1402
1403       unsigned int fenv_sr;
1404       fenv_sr = __builtin_aarch64_get_fpsr ();
1405
1406       Now set all exceptions to non-stop
1407       unsigned int mask_cr
1408		= ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
1409       unsigned int masked_cr;
1410       masked_cr = fenv_cr & mask_cr;
1411
1412       And clear all exception flags
1413       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
1414       unsigned int masked_sr;
1415       masked_sr = fenv_sr & mask_sr;
1416
1417       __builtin_aarch64_set_fpcr (masked_cr);
1418       __builtin_aarch64_set_fpsr (masked_sr);  */
1419
1420  fenv_cr = create_tmp_var (unsigned_type_node, NULL);
1421  fenv_sr = create_tmp_var (unsigned_type_node, NULL);
1422
1423  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
1424  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
1425  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
1426  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];
1427
1428  mask_cr = build_int_cst (unsigned_type_node,
1429			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
1430  mask_sr = build_int_cst (unsigned_type_node,
1431			   ~(AARCH64_FE_ALL_EXCEPT));
1432
1433  ld_fenv_cr = build2 (MODIFY_EXPR, unsigned_type_node,
1434		    fenv_cr, build_call_expr (get_fpcr, 0));
1435  ld_fenv_sr = build2 (MODIFY_EXPR, unsigned_type_node,
1436		    fenv_sr, build_call_expr (get_fpsr, 0));
1437
1438  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
1439  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);
1440
1441  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
1442  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);
1443
1444  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
1445			hold_fnclex_sr);
1446  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
1447			masked_fenv_sr);
1448  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);
1449
1450  *hold = build2 (COMPOUND_EXPR, void_type_node,
1451		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
1452		  hold_fnclex);
1453
1454  /* Store the value of masked_fenv to clear the exceptions:
1455     __builtin_aarch64_set_fpsr (masked_fenv_sr);  */
1456
1457  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);
1458
1459  /* Generate the equivalent of :
1460       unsigned int new_fenv_var;
1461       new_fenv_var = __builtin_aarch64_get_fpsr ();
1462
1463       __builtin_aarch64_set_fpsr (fenv_sr);
1464
1465       __atomic_feraiseexcept (new_fenv_var);  */
1466
1467  new_fenv_var = create_tmp_var (unsigned_type_node, NULL);
1468  reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
1469			new_fenv_var, build_call_expr (get_fpsr, 0));
1470  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
1471  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
1472  update_call = build_call_expr (atomic_feraiseexcept, 1,
1473				 fold_convert (integer_type_node, new_fenv_var));
1474  *update = build2 (COMPOUND_EXPR, void_type_node,
1475		    build2 (COMPOUND_EXPR, void_type_node,
1476			    reload_fenv, restore_fnenv), update_call);
1477}
1478
1479
42fc9a7f
JG
1480#undef AARCH64_CHECK_BUILTIN_MODE
1481#undef AARCH64_FIND_FRINT_VARIANT
0ddec79f
JG
1482#undef BUILTIN_DX
1483#undef BUILTIN_SDQ_I
1484#undef BUILTIN_SD_HSI
1485#undef BUILTIN_V2F
1486#undef BUILTIN_VALL
1487#undef BUILTIN_VB
1488#undef BUILTIN_VD
c6a29a09 1489#undef BUILTIN_VD1
0ddec79f
JG
1490#undef BUILTIN_VDC
1491#undef BUILTIN_VDIC
1492#undef BUILTIN_VDN
1493#undef BUILTIN_VDQ
1494#undef BUILTIN_VDQF
1495#undef BUILTIN_VDQH
1496#undef BUILTIN_VDQHS
1497#undef BUILTIN_VDQIF
1498#undef BUILTIN_VDQM
1499#undef BUILTIN_VDQV
1500#undef BUILTIN_VDQ_BHSI
1501#undef BUILTIN_VDQ_I
1502#undef BUILTIN_VDW
1503#undef BUILTIN_VD_BHSI
1504#undef BUILTIN_VD_HSI
0ddec79f
JG
1505#undef BUILTIN_VQ
1506#undef BUILTIN_VQN
1507#undef BUILTIN_VQW
1508#undef BUILTIN_VQ_HSI
1509#undef BUILTIN_VQ_S
1510#undef BUILTIN_VSDQ_HSI
1511#undef BUILTIN_VSDQ_I
1512#undef BUILTIN_VSDQ_I_BHSI
1513#undef BUILTIN_VSDQ_I_DI
1514#undef BUILTIN_VSD_HSI
1515#undef BUILTIN_VSQN_HSDI
1516#undef BUILTIN_VSTRUCT
1517#undef CF0
1518#undef CF1
1519#undef CF2
1520#undef CF3
1521#undef CF4
1522#undef CF10
1523#undef VAR1
1524#undef VAR2
1525#undef VAR3
1526#undef VAR4
1527#undef VAR5
1528#undef VAR6
1529#undef VAR7
1530#undef VAR8
1531#undef VAR9
1532#undef VAR10
1533#undef VAR11
1534