]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/aarch64/aarch64-builtins.c
Fix bogus ChangeLog entry from r218521
[thirdparty/gcc.git] / gcc / config / aarch64 / aarch64-builtins.c
CommitLineData
43e9d192 1/* Builtins' description for AArch64 SIMD architecture.
23a5b65a 2 Copyright (C) 2011-2014 Free Software Foundation, Inc.
43e9d192
IB
3 Contributed by ARM Ltd.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tree.h"
d8a2d370
DN
27#include "stor-layout.h"
28#include "stringpool.h"
29#include "calls.h"
43e9d192
IB
30#include "expr.h"
31#include "tm_p.h"
32#include "recog.h"
33#include "langhooks.h"
34#include "diagnostic-core.h"
b0710fe1 35#include "insn-codes.h"
43e9d192 36#include "optabs.h"
2fb9a547
AM
37#include "hash-table.h"
38#include "vec.h"
39#include "ggc.h"
60393bbc
AM
40#include "predict.h"
41#include "hashtab.h"
42#include "hash-set.h"
43#include "machmode.h"
44#include "hard-reg-set.h"
45#include "input.h"
46#include "function.h"
47#include "dominance.h"
48#include "cfg.h"
49#include "cfgrtl.h"
50#include "cfganal.h"
51#include "lcm.h"
52#include "cfgbuild.h"
53#include "cfgcleanup.h"
2fb9a547
AM
54#include "basic-block.h"
55#include "tree-ssa-alias.h"
56#include "internal-fn.h"
57#include "gimple-fold.h"
58#include "tree-eh.h"
59#include "gimple-expr.h"
60#include "is-a.h"
0ac198d3 61#include "gimple.h"
5be5c238 62#include "gimple-iterator.h"
43e9d192 63
bc5e395d
JG
/* Map the lower-case mode suffix used in builtin names (e.g. the "v8qi"
   in __builtin_aarch64_xxxv8qi) to its machine_mode.  The suffix is
   pasted with "_UP" by the UP() macro below.  */
#define v8qi_UP  V8QImode
#define v4hi_UP  V4HImode
#define v2si_UP  V2SImode
#define v2sf_UP  V2SFmode
#define v1df_UP  V1DFmode
#define di_UP    DImode
#define df_UP    DFmode
#define v16qi_UP V16QImode
#define v8hi_UP  V8HImode
#define v4si_UP  V4SImode
#define v4sf_UP  V4SFmode
#define v2di_UP  V2DImode
#define v2df_UP  V2DFmode
#define ti_UP    TImode
#define ei_UP    EImode
#define oi_UP    OImode
#define ci_UP    CImode
#define xi_UP    XImode
#define si_UP    SImode
#define sf_UP    SFmode
#define hi_UP    HImode
#define qi_UP    QImode

/* Expand a mode suffix X to its machine_mode via the *_UP defines above.  */
#define UP(X) X##_UP

/* Maximum number of entries (return value plus arguments) in the
   qualifier arrays below.  */
#define SIMD_MAX_BUILTIN_ARGS 5
89
/* Bit-flags describing how the return value and each operand of a SIMD
   builtin should be typed.  Single bits may be OR-ed together to form
   composite qualifiers (see qualifier_pointer_map_mode below).  */
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for bigendian.  */
  qualifier_lane_index = 0x200
};
43e9d192
IB
121
/* Description of a single AdvSIMD builtin, expanded from the entries in
   aarch64-simd-builtins.def via the VARn macros below.  */
typedef struct
{
  /* Builtin base name plus mode suffix (e.g. "addv8qi").  */
  const char *name;
  /* Mode substituted for an operand when its qualifier has
     qualifier_map_mode set.  */
  machine_mode mode;
  /* RTL pattern implementing this builtin.  */
  const enum insn_code code;
  /* Function code; assigned at initialisation time (see
     aarch64_init_simd_builtins).  */
  unsigned int fcode;
  /* Qualifiers of the return value (entry 0) and each argument.  */
  enum aarch64_type_qualifiers *qualifiers;
} aarch64_simd_builtin_datum;
130
096c59be
AL
/* Qualifier tables.  Each TYPES_* macro names an array giving, for one
   shape of builtin, the qualifier of the return value (entry 0)
   followed by the qualifiers of each argument.  These names are
   referenced from aarch64-simd-builtins.def.  */

/* The qualifier_internal allows generation of a unary builtin from
   a pattern with a third pseudo-operand such as a match_scratch.  */
static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_internal };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
/* unsigned T (unsigned T).  */
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
/* T (T, T-or-immediate).  */
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
/* void (T, T).  */
static enum aarch64_type_qualifiers
aarch64_types_binopv_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_none, qualifier_none };
#define TYPES_BINOPV (aarch64_types_binopv_qualifiers)
/* unsigned T (unsigned T, unsigned T).  */
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
/* unsigned T (unsigned T, T).  */
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
/* T (T, unsigned T).  */
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
/* poly T (poly T, poly T).  */
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

/* T (T, T, T).  */
static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
/* T (T, T, lane index).  */
static enum aarch64_type_qualifiers
aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
/* unsigned T (unsigned T, unsigned T, unsigned T).  */
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)

/* T (T, T, T, lane index).  */
static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_index };
#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)

/* T (T, immediate).  */
static enum aarch64_type_qualifiers
aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
/* unsigned T (T, immediate).  */
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
/* unsigned T (unsigned T, immediate).  */
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)

/* T (T, T, immediate).  */
static enum aarch64_type_qualifiers
aarch64_types_ternop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETREG (aarch64_types_ternop_imm_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_ternop_imm_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_ternop_imm_qualifiers)

/* unsigned T (unsigned T, unsigned T, immediate).  */
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)


/* T (T, T).  */
static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

/* T (const T *), pointer built from the element mode.  */
static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
/* T (const T *, T, lane).  */
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_none };
#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)

/* Poly, signed and unsigned variants of a three-operand builtin whose
   first argument is always unsigned (the selector mask).  */
static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
/* void (T *, T, lane).  */
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_none };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
b5828b4b 261
0ddec79f
JG
/* insn_code lookup helpers.  The MAP digit in a VARn entry selects how
   the builtin name maps onto a CODE_FOR_* pattern name: 0 prefixes
   "aarch64_", 1-4 append that digit, and 10 uses the name unchanged.  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

/* VARn (T, N, MAP, A, ...) expands to n aarch64_simd_builtin_datum
   initialisers for builtin N with qualifiers TYPES_##T, one per mode
   suffix A....  The fcode field starts as 0 and is assigned later.  */
#define VAR1(T, N, MAP, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
342be7f7 304
f421c516 305#include "aarch64-builtin-iterators.h"
43e9d192
IB
306
/* Table of all AdvSIMD builtins, one entry per VARn expansion in the
   .def file.  */
static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};
310
5d357f26
KT
/* There's only 8 CRC32 builtins.  Probably not worth their own .def file.  */
/* X-macro list: each client defines CRC32_BUILTIN (name, mode) before
   expanding AARCH64_CRC32_BUILTINS.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)

/* Description of a single CRC32 builtin.  */
typedef struct
{
  /* Full user-visible builtin name.  */
  const char *name;
  /* Mode of the data (second) argument.  */
  machine_mode mode;
  /* RTL pattern implementing the builtin.  */
  const enum insn_code icode;
  /* Function code (an aarch64_builtins enumerator).  */
  unsigned int fcode;
} aarch64_crc_builtin_datum;
329
/* First expansion of the CRC32 list: generate one enumerator per
   builtin inside enum aarch64_builtins below.  */
#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

/* Redefine VAR1 so that re-including the SIMD .def file now yields
   enumerators instead of data-table initialisers.  */
#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  /* The SIMD range is contiguous: BASE + 1 .. BASE + table size.  */
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			      + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  AARCH64_BUILTIN_MAX
};
355
5d357f26
KT
/* Second expansion of the CRC32 list: generate the data table, pairing
   each builtin's printable name and mode with its pattern and fcode.  */
#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};

#undef CRC32_BUILTIN

/* Declarations of every registered builtin, indexed by fcode.  GTY so
   the garbage collector keeps the trees alive.  */
static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];
367
43e9d192
IB
#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types are implementation defined.  The list is
   NULL-terminated; aarch64_mangle_builtin_scalar_type walks it.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  NULL
};
b5828b4b 396
f9d53c27
TB
/* Enumerate all the AdvSIMD vector types from the .def file.  */
#define ENTRY(E, M, Q, G) E,
enum aarch64_simd_type
{
#include "aarch64-simd-builtin-types.def"
  ARM_NEON_H_TYPES_LAST
};
#undef ENTRY

struct aarch64_simd_type_info
{
  enum aarch64_simd_type type;

  /* Internal type name.  */
  const char *name;

  /* Internal type name(mangled).  The mangled names conform to the
     AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
     Appendix A).  To qualify for emission with the mangled names defined in
     that document, a vector type must not only be of the correct mode but also
     be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t); these
     types are registered by aarch64_init_simd_builtin_types ().  In other
     words, vector types defined in other ways e.g. via vector_size attribute
     will get default mangled names.  */
  const char *mangle;

  /* Internal type.  */
  tree itype;

  /* Element type.  */
  tree eltype;

  /* Machine mode the internal type maps to.  */
  enum machine_mode mode;

  /* Qualifiers.  */
  enum aarch64_type_qualifiers q;
};

/* Re-expand the .def file to build the info table; itype/eltype start
   as NULL_TREE and are filled in by aarch64_init_simd_builtin_types.  */
#define ENTRY(E, M, Q, G)  \
  {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
static struct aarch64_simd_type_info aarch64_simd_types [] = {
#include "aarch64-simd-builtin-types.def"
};
#undef ENTRY

/* Large internal integer types (built lazily in
   aarch64_init_simd_builtin_types).  */
static tree aarch64_simd_intOI_type_node = NULL_TREE;
static tree aarch64_simd_intEI_type_node = NULL_TREE;
static tree aarch64_simd_intCI_type_node = NULL_TREE;
static tree aarch64_simd_intXI_type_node = NULL_TREE;
446
447static const char *
448aarch64_mangle_builtin_scalar_type (const_tree type)
449{
450 int i = 0;
451
452 while (aarch64_scalar_builtin_types[i] != NULL)
b5828b4b 453 {
f9d53c27
TB
454 const char *name = aarch64_scalar_builtin_types[i];
455
456 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
457 && DECL_NAME (TYPE_NAME (type))
458 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
459 return aarch64_scalar_builtin_types[i];
460 i++;
461 }
462 return NULL;
b5828b4b
JG
463}
464
f9d53c27
TB
465static const char *
466aarch64_mangle_builtin_vector_type (const_tree type)
b5828b4b 467{
f9d53c27
TB
468 int i;
469 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
470
471 for (i = 0; i < nelts; i++)
472 if (aarch64_simd_types[i].mode == TYPE_MODE (type)
473 && TYPE_NAME (type)
474 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
475 && DECL_NAME (TYPE_NAME (type))
476 && !strcmp
477 (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
478 aarch64_simd_types[i].name))
479 return aarch64_simd_types[i].mangle;
480
481 return NULL;
6db1ec94
JG
482}
483
f9d53c27
TB
484const char *
485aarch64_mangle_builtin_type (const_tree type)
6db1ec94 486{
f9d53c27
TB
487 const char *mangle;
488 /* Walk through all the AArch64 builtins types tables to filter out the
489 incoming type. */
490 if ((mangle = aarch64_mangle_builtin_vector_type (type))
491 || (mangle = aarch64_mangle_builtin_scalar_type (type)))
492 return mangle;
493
494 return NULL;
6db1ec94
JG
495}
496
f9d53c27
TB
/* Return the standard scalar type node for MODE, signed or unsigned
   according to Q.  Integer modes map to the int<M>/unsigned_int<M>
   nodes, OI/EI/CI/XI map to the internal large integer types, and
   SF/DF map to float/double.  Any other mode is a bug here.  */
static tree
aarch64_simd_builtin_std_type (enum machine_mode mode,
			       enum aarch64_type_qualifiers q)
{
/* Select the signed or unsigned int<M> node based on Q.  */
#define QUAL_TYPE(M)  \
  ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
  switch (mode)
    {
    case QImode:
      return QUAL_TYPE (QI);
    case HImode:
      return QUAL_TYPE (HI);
    case SImode:
      return QUAL_TYPE (SI);
    case DImode:
      return QUAL_TYPE (DI);
    case TImode:
      return QUAL_TYPE (TI);
    case OImode:
      return aarch64_simd_intOI_type_node;
    case EImode:
      return aarch64_simd_intEI_type_node;
    case CImode:
      return aarch64_simd_intCI_type_node;
    case XImode:
      return aarch64_simd_intXI_type_node;
    case SFmode:
      return float_type_node;
    case DFmode:
      return double_type_node;
    default:
      gcc_unreachable ();
    }
#undef QUAL_TYPE
}
532
f9d53c27
TB
533static tree
534aarch64_lookup_simd_builtin_type (enum machine_mode mode,
535 enum aarch64_type_qualifiers q)
6db1ec94 536{
f9d53c27
TB
537 int i;
538 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
539
540 /* Non-poly scalar modes map to standard types not in the table. */
541 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
542 return aarch64_simd_builtin_std_type (mode, q);
543
544 for (i = 0; i < nelts; i++)
545 if (aarch64_simd_types[i].mode == mode
546 && aarch64_simd_types[i].q == q)
547 return aarch64_simd_types[i].itype;
548
549 return NULL_TREE;
b5828b4b
JG
550}
551
f9d53c27
TB
552static tree
553aarch64_simd_builtin_type (enum machine_mode mode,
554 bool unsigned_p, bool poly_p)
555{
556 if (poly_p)
557 return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
558 else if (unsigned_p)
559 return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
560 else
561 return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
562}
563
/* Build and register the internal AdvSIMD vector types (filling in the
   eltype/itype fields of aarch64_simd_types) plus the large OI/EI/CI/XI
   integer types, and attach builtin TYPE_DECL names to all of them.  */
static void
aarch64_init_simd_builtin_types (void)
{
  int i;
  int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
  tree tdecl;

  /* Init all the element types built by the front-end.  */
  aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
  aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
  aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
  aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
  aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
  aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
  aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
  aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
  aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
  aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;

  /* Poly types are a world of their own.  They get distinct copies of
     the unsigned nodes so they mangle differently.  */
  aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
    build_distinct_type_copy (unsigned_intQI_type_node);
  aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
    build_distinct_type_copy (unsigned_intHI_type_node);
  aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
    build_distinct_type_copy (unsigned_intDI_type_node);
  aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
    build_distinct_type_copy (unsigned_intTI_type_node);
  /* Init poly vector element types with scalar poly types.  */
  aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
  aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;

  /* Continue with standard types.  */
  aarch64_simd_types[Float32x2_t].eltype = float_type_node;
  aarch64_simd_types[Float32x4_t].eltype = float_type_node;
  aarch64_simd_types[Float64x1_t].eltype = double_type_node;
  aarch64_simd_types[Float64x2_t].eltype = double_type_node;

  /* Build each vector type from its element type (poly scalars above
     already have itype set) and register it under its internal name.  */
  for (i = 0; i < nelts; i++)
    {
      tree eltype = aarch64_simd_types[i].eltype;
      enum machine_mode mode = aarch64_simd_types[i].mode;

      if (aarch64_simd_types[i].itype == NULL)
	aarch64_simd_types[i].itype =
	  build_distinct_type_copy
	    (build_vector_type (eltype, GET_MODE_NUNITS (mode)));

      tdecl = add_builtin_type (aarch64_simd_types[i].name,
				aarch64_simd_types[i].itype);
      TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
      SET_TYPE_STRUCTURAL_EQUALITY (aarch64_simd_types[i].itype);
    }

/* Make a signed integer type wide enough for the given mode.  */
#define AARCH64_BUILD_SIGNED_TYPE(mode)  \
  make_signed_type (GET_MODE_PRECISION (mode));
  aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
  aarch64_simd_intEI_type_node = AARCH64_BUILD_SIGNED_TYPE (EImode);
  aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
  aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
#undef AARCH64_BUILD_SIGNED_TYPE

  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_ei" , aarch64_simd_intEI_type_node);
  TYPE_NAME (aarch64_simd_intEI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
  TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
  TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
  TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
}
649
650static void
651aarch64_init_simd_builtin_scalar_types (void)
652{
653 /* Define typedefs for all the standard scalar types. */
654 (*lang_hooks.types.register_builtin_type) (intQI_type_node,
43e9d192 655 "__builtin_aarch64_simd_qi");
f9d53c27 656 (*lang_hooks.types.register_builtin_type) (intHI_type_node,
43e9d192 657 "__builtin_aarch64_simd_hi");
f9d53c27 658 (*lang_hooks.types.register_builtin_type) (intSI_type_node,
43e9d192 659 "__builtin_aarch64_simd_si");
f9d53c27 660 (*lang_hooks.types.register_builtin_type) (float_type_node,
43e9d192 661 "__builtin_aarch64_simd_sf");
f9d53c27 662 (*lang_hooks.types.register_builtin_type) (intDI_type_node,
43e9d192 663 "__builtin_aarch64_simd_di");
f9d53c27 664 (*lang_hooks.types.register_builtin_type) (double_type_node,
43e9d192 665 "__builtin_aarch64_simd_df");
f9d53c27 666 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
43e9d192 667 "__builtin_aarch64_simd_poly8");
f9d53c27 668 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
43e9d192 669 "__builtin_aarch64_simd_poly16");
f9d53c27 670 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
7baa225d 671 "__builtin_aarch64_simd_poly64");
f9d53c27 672 (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
7baa225d 673 "__builtin_aarch64_simd_poly128");
f9d53c27 674 (*lang_hooks.types.register_builtin_type) (intTI_type_node,
43e9d192 675 "__builtin_aarch64_simd_ti");
b5828b4b 676 /* Unsigned integer types for various mode sizes. */
f9d53c27 677 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
b5828b4b 678 "__builtin_aarch64_simd_uqi");
f9d53c27 679 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
b5828b4b 680 "__builtin_aarch64_simd_uhi");
f9d53c27 681 (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
b5828b4b 682 "__builtin_aarch64_simd_usi");
f9d53c27 683 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
b5828b4b 684 "__builtin_aarch64_simd_udi");
f9d53c27
TB
685}
686
/* Register every AdvSIMD builtin: first the types, then one
   __builtin_aarch64_* function per entry in aarch64_simd_builtin_data,
   with its signature derived from the insn_data operand modes and the
   entry's qualifier array.  */
static void
aarch64_init_simd_builtins (void)
{
  /* fcodes for SIMD builtins are allocated contiguously from
     AARCH64_SIMD_BUILTIN_BASE + 1 (matching enum aarch64_builtins).  */
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  It can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();
 
  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      /* Per-argument 'u'/'p'/'s' codes, appended to the name when any
	 argument is unsigned or poly.  */
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  Operands are walked from last to
	 first so the args list comes out in declaration order.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[arg_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	      op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	      eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
801
5d357f26
KT
/* Register the eight CRC32 builtins from aarch64_crc_builtin_data.
   Each has type: unsigned int (unsigned int crc, unsigned <mode> data).  */
static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      /* The data argument's width varies per builtin (QI..DI).  */
      tree argtype = aarch64_simd_builtin_std_type (d->mode,
						    qualifier_unsigned);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
      tree fndecl = add_builtin_function (d->name, ftype, d->fcode,
					  BUILT_IN_MD, NULL, NULL_TREE);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
820
342be7f7
JG
/* Implement TARGET_INIT_BUILTINS for AArch64.  Always registers the four
   FPCR/FPSR access builtins (used for fenv support), then conditionally
   registers the AdvSIMD and CRC32 builtins depending on the enabled
   target features.  */
void
aarch64_init_builtins (void)
{
  /* Setters take one unsigned int and return void; getters take no
     arguments and return unsigned int.  */
  tree ftype_set_fpr
    = build_function_type_list (void_type_node, unsigned_type_node, NULL);
  tree ftype_get_fpr
    = build_function_type_list (unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
    = add_builtin_function ("__builtin_aarch64_get_fpcr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
    = add_builtin_function ("__builtin_aarch64_set_fpcr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
    = add_builtin_function ("__builtin_aarch64_get_fpsr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
    = add_builtin_function ("__builtin_aarch64_set_fpsr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);

  /* Feature-gated builtin groups.  */
  if (TARGET_SIMD)
    aarch64_init_simd_builtins ();
  if (TARGET_CRC32)
    aarch64_init_crc32_builtins ();
}
847
119103ca
JG
848tree
849aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
850{
851 if (code >= AARCH64_BUILTIN_MAX)
852 return error_mark_node;
853
854 return aarch64_builtin_decls[code];
855}
856
43e9d192
IB
/* Classification of a SIMD builtin operand, used by
   aarch64_simd_expand_args to decide how each operand is expanded.  */
typedef enum
{
  SIMD_ARG_COPY_TO_REG,	 /* Force the operand into a register if the insn
			    predicate rejects it as-is.  */
  SIMD_ARG_CONSTANT,	 /* Operand must satisfy the insn's (immediate)
			    predicate; otherwise an error is reported.  */
  SIMD_ARG_LANE_INDEX,	 /* Lane index into the preceding vector operand;
			    bounds-checked and endian-adjusted.  */
  SIMD_ARG_STOP		 /* Sentinel terminating the argument list.  */
} builtin_simd_arg;
864
43e9d192
IB
865static rtx
866aarch64_simd_expand_args (rtx target, int icode, int have_retval,
8d3d350a 867 tree exp, builtin_simd_arg *args)
43e9d192 868{
43e9d192 869 rtx pat;
d9e80f49
AL
870 rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand. */
871 int opc = 0;
872
873 if (have_retval)
874 {
875 machine_mode tmode = insn_data[icode].operand[0].mode;
876 if (!target
43e9d192 877 || GET_MODE (target) != tmode
d9e80f49
AL
878 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
879 target = gen_reg_rtx (tmode);
880 op[opc++] = target;
881 }
43e9d192 882
43e9d192
IB
883 for (;;)
884 {
d9e80f49 885 builtin_simd_arg thisarg = args[opc - have_retval];
43e9d192
IB
886
887 if (thisarg == SIMD_ARG_STOP)
888 break;
889 else
890 {
d9e80f49
AL
891 tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
892 enum machine_mode mode = insn_data[icode].operand[opc].mode;
893 op[opc] = expand_normal (arg);
43e9d192
IB
894
895 switch (thisarg)
896 {
897 case SIMD_ARG_COPY_TO_REG:
d9e80f49
AL
898 if (POINTER_TYPE_P (TREE_TYPE (arg)))
899 op[opc] = convert_memory_address (Pmode, op[opc]);
900 /*gcc_assert (GET_MODE (op[opc]) == mode); */
901 if (!(*insn_data[icode].operand[opc].predicate)
902 (op[opc], mode))
903 op[opc] = copy_to_mode_reg (mode, op[opc]);
43e9d192
IB
904 break;
905
2a49c16d
AL
906 case SIMD_ARG_LANE_INDEX:
907 /* Must be a previous operand into which this is an index. */
d9e80f49
AL
908 gcc_assert (opc > 0);
909 if (CONST_INT_P (op[opc]))
2a49c16d 910 {
d9e80f49
AL
911 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
912 aarch64_simd_lane_bounds (op[opc],
46ed6024 913 0, GET_MODE_NUNITS (vmode), exp);
2a49c16d 914 /* Keep to GCC-vector-extension lane indices in the RTL. */
d9e80f49 915 op[opc] = GEN_INT (ENDIAN_LANE_N (vmode, INTVAL (op[opc])));
2a49c16d
AL
916 }
917 /* Fall through - if the lane index isn't a constant then
918 the next case will error. */
43e9d192 919 case SIMD_ARG_CONSTANT:
d9e80f49
AL
920 if (!(*insn_data[icode].operand[opc].predicate)
921 (op[opc], mode))
d5a29419 922 {
43e9d192 923 error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
d9e80f49 924 "expected %<const int%>", opc + 1);
d5a29419
KT
925 return const0_rtx;
926 }
43e9d192
IB
927 break;
928
929 case SIMD_ARG_STOP:
930 gcc_unreachable ();
931 }
932
d9e80f49 933 opc++;
43e9d192
IB
934 }
935 }
936
d9e80f49
AL
937 switch (opc)
938 {
939 case 1:
940 pat = GEN_FCN (icode) (op[0]);
941 break;
43e9d192 942
d9e80f49
AL
943 case 2:
944 pat = GEN_FCN (icode) (op[0], op[1]);
945 break;
43e9d192 946
d9e80f49
AL
947 case 3:
948 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
949 break;
43e9d192 950
d9e80f49
AL
951 case 4:
952 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
953 break;
43e9d192 954
d9e80f49
AL
955 case 5:
956 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
957 break;
43e9d192 958
d9e80f49
AL
959 case 6:
960 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
961 break;
43e9d192 962
d9e80f49
AL
963 default:
964 gcc_unreachable ();
965 }
43e9d192
IB
966
967 if (!pat)
d5a29419 968 return NULL_RTX;
43e9d192
IB
969
970 emit_insn (pat);
971
972 return target;
973}
974
/* Expand an AArch64 AdvSIMD builtin(intrinsic).  FCODE selects the entry
   in aarch64_simd_builtin_data; each argument is classified from the
   entry's qualifiers into a builtin_simd_arg, and expansion is delegated
   to aarch64_simd_expand_args.  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  /* For a void builtin the insn has no result operand, so account for the
     extra qualifier slot describing the (void) return type.  */
  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
	args[k] = SIMD_ARG_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
    (target, icode, !is_void, exp, &args[1]);
}
342be7f7 1031
5d357f26
KT
/* Expand a CRC32 extension builtin call EXP (two operands: accumulator and
   data) into its insn, placing the result in TARGET if convenient.
   Returns the result rtx, or NULL_RTX if the generator produced no
   pattern.  */
rtx
aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
{
  rtx pat;
  aarch64_crc_builtin_datum *d
    = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
  enum insn_code icode = d->icode;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Reuse TARGET only if it matches the result operand's mode and
     predicate.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* VOIDmode is permitted for constant operands.  */
  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
1067
342be7f7
JG
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient.  Handles the FPCR/FPSR
   access builtins directly, and dispatches AdvSIMD and CRC32 builtins to
   their dedicated expanders based on the function-code range.  */
rtx
aarch64_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  int fcode = DECL_FUNCTION_CODE (fndecl);
  int icode;
  rtx pat, op0;
  tree arg0;

  switch (fcode)
    {
    case AARCH64_BUILTIN_GET_FPCR:
    case AARCH64_BUILTIN_SET_FPCR:
    case AARCH64_BUILTIN_GET_FPSR:
    case AARCH64_BUILTIN_SET_FPSR:
      if ((fcode == AARCH64_BUILTIN_GET_FPCR)
	  || (fcode == AARCH64_BUILTIN_GET_FPSR))
	{
	  /* Getters: no arguments, SImode result.  */
	  icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
	    CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
	  target = gen_reg_rtx (SImode);
	  pat = GEN_FCN (icode) (target);
	}
      else
	{
	  /* Setters: one argument, no result (return NULL_RTX).  */
	  target = NULL_RTX;
	  icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
	    CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
	  arg0 = CALL_EXPR_ARG (exp, 0);
	  op0 = expand_normal (arg0);
	  pat = GEN_FCN (icode) (op0);
	}
      emit_insn (pat);
      return target;
    }

  /* Dispatch by function-code range to the group-specific expanders.  */
  if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
    return aarch64_simd_expand_builtin (fcode, exp, target);
  else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
    return aarch64_crc32_expand_builtin (fcode, exp, target);

  gcc_unreachable ();
}
42fc9a7f
JG
1117
/* Implement TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION: return the decl
   of an AArch64 SIMD builtin that computes the scalar builtin FNDECL
   element-wise, for vector output type TYPE_OUT and input type TYPE_IN,
   or NULL_TREE if no suitable variant exists.  The helper macro
   AARCH64_CHECK_BUILTIN_MODE is redefined before each group of cases to
   encode that group's expected in/out mode relationship.  */
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
/* Select the v2df, v4sf or v2sf variant of unary builtin N according to
   the mode checks in force at the point of use.  */
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
   : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
      ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
      : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	 : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
	  /* Float -> float rounding operations: same float mode and
	     element count on both sides.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
	  /* Integer bit-counting: SImode elements out, N-mode ints in.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	case BUILT_IN_CLZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	    return NULL_TREE;
	  }
	case BUILT_IN_CTZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
	    return NULL_TREE;
	  }
	  /* Float -> integer conversions: integer out, float in.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_BSWAP16:
	  /* Byte-swap: same integer mode and element count on both
	     sides.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP32:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP64:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
	  else
	    return NULL_TREE;
	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}
0ac198d3
JG
1271
1272#undef VAR1
1273#define VAR1(T, N, MAP, A) \
e993fea1 1274 case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 1275
9697e620
JG
/* Implement TARGET_FOLD_BUILTIN: fold a call to an AArch64 builtin FNDECL
   with arguments ARGS into an equivalent generic tree where one exists.
   The BUILTIN_* / VAR1 macros expand into the case labels for each mode
   variant of the named builtin.  Returns the folded tree or NULL_TREE.  */
tree
aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
		      bool ignore ATTRIBUTE_UNUSED)
{
  int fcode = DECL_FUNCTION_CODE (fndecl);
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  switch (fcode)
    {
      /* Absolute value on all relevant modes folds to ABS_EXPR.  */
      BUILTIN_VALLDI (UNOP, abs, 2)
	return fold_build1 (ABS_EXPR, type, args[0]);
	break;
      /* Integer -> float conversions fold to FLOAT_EXPR.  */
      VAR1 (UNOP, floatv2si, 2, v2sf)
      VAR1 (UNOP, floatv4si, 2, v4sf)
      VAR1 (UNOP, floatv2di, 2, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
      default:
	break;
    }

  return NULL_TREE;
}
1298
0ac198d3
JG
/* Implement TARGET_GIMPLE_FOLD_BUILTIN: try to replace the builtin call at
   GSI with an assignment using a generic reduction tree code.  Returns
   true if the statement was replaced.  */
bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;

  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  /* We use gimple's REDUC_(PLUS|MIN|MAX)_EXPRs for float, signed int
	     and unsigned int; it will distinguish according to the types of
	     the arguments to the __builtin.  */
	  switch (fcode)
	    {
	      BUILTIN_VALL (UNOP, reduc_plus_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_PLUS_EXPR, args[0]);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_MAX_EXPR, args[0]);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_MIN_EXPR, args[0]);
		break;

	    default:
	      break;
	    }
	}
    }

  if (new_stmt)
    {
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}
1353
aa87aced
KV
/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV: build the HOLD, CLEAR and
   UPDATE tree sequences used around an atomic compare-exchange on a
   floating-point object, using the FPCR/FPSR builtins registered in
   aarch64_init_builtins.  */
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  /* FPSR exception-flag bits; in FPCR the corresponding trap-enable bits
     sit AARCH64_FE_EXCEPT_SHIFT positions higher.  */
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalence of :
       unsigned int fenv_cr;
       fenv_cr = __builtin_aarch64_get_fpcr ();

       unsigned int fenv_sr;
       fenv_sr = __builtin_aarch64_get_fpsr ();

     Now set all exceptions to non-stop
       unsigned int mask_cr
	 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
       unsigned int masked_cr;
       masked_cr = fenv_cr & mask_cr;

     And clear all exception flags
       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
       unsigned int masked_sr;
       masked_sr = fenv_sr & mask_sr;

       __builtin_aarch64_set_fpcr (masked_cr);
       __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var (unsigned_type_node);
  fenv_sr = create_tmp_var (unsigned_type_node);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0));
  ld_fenv_sr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0));

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
     __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of :
       unsigned int new_fenv_var;
       new_fenv_var = __builtin_aarch64_get_fpsr ();

       __builtin_aarch64_set_fpsr (fenv_sr);

       __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var (unsigned_type_node);
  reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0));
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}
1452
1453
42fc9a7f
JG
1454#undef AARCH64_CHECK_BUILTIN_MODE
1455#undef AARCH64_FIND_FRINT_VARIANT
0ddec79f
JG
1456#undef CF0
1457#undef CF1
1458#undef CF2
1459#undef CF3
1460#undef CF4
1461#undef CF10
1462#undef VAR1
1463#undef VAR2
1464#undef VAR3
1465#undef VAR4
1466#undef VAR5
1467#undef VAR6
1468#undef VAR7
1469#undef VAR8
1470#undef VAR9
1471#undef VAR10
1472#undef VAR11
1473
3c03d39d 1474#include "gt-aarch64-builtins.h"