/* gcc/config/aarch64/aarch64-builtins.c
   (recovered from a git-blame export; blame annotations removed).  */
/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
40e23961
MC
26#include "hash-set.h"
27#include "machmode.h"
28#include "vec.h"
29#include "double-int.h"
30#include "input.h"
31#include "alias.h"
32#include "symtab.h"
33#include "wide-int.h"
34#include "inchash.h"
43e9d192 35#include "tree.h"
40e23961 36#include "fold-const.h"
d8a2d370
DN
37#include "stor-layout.h"
38#include "stringpool.h"
39#include "calls.h"
36566b39
PK
40#include "hashtab.h"
41#include "hard-reg-set.h"
42#include "function.h"
43#include "flags.h"
44#include "statistics.h"
45#include "real.h"
46#include "fixed-value.h"
47#include "insn-config.h"
48#include "expmed.h"
49#include "dojump.h"
50#include "explow.h"
51#include "emit-rtl.h"
52#include "varasm.h"
53#include "stmt.h"
43e9d192
IB
54#include "expr.h"
55#include "tm_p.h"
56#include "recog.h"
57#include "langhooks.h"
58#include "diagnostic-core.h"
b0710fe1 59#include "insn-codes.h"
43e9d192 60#include "optabs.h"
2fb9a547 61#include "hash-table.h"
2fb9a547 62#include "ggc.h"
60393bbc 63#include "predict.h"
60393bbc
AM
64#include "dominance.h"
65#include "cfg.h"
66#include "cfgrtl.h"
67#include "cfganal.h"
68#include "lcm.h"
69#include "cfgbuild.h"
70#include "cfgcleanup.h"
2fb9a547
AM
71#include "basic-block.h"
72#include "tree-ssa-alias.h"
73#include "internal-fn.h"
74#include "gimple-fold.h"
75#include "tree-eh.h"
76#include "gimple-expr.h"
77#include "is-a.h"
0ac198d3 78#include "gimple.h"
5be5c238 79#include "gimple-iterator.h"
43e9d192 80
/* Map the lower-case mode suffixes used in aarch64-simd-builtins.def
   onto the corresponding machine_mode enumerators.  */
#define v8qi_UP  V8QImode
#define v4hi_UP  V4HImode
#define v2si_UP  V2SImode
#define v2sf_UP  V2SFmode
#define v1df_UP  V1DFmode
#define di_UP    DImode
#define df_UP    DFmode
#define v16qi_UP V16QImode
#define v8hi_UP  V8HImode
#define v4si_UP  V4SImode
#define v4sf_UP  V4SFmode
#define v2di_UP  V2DImode
#define v2df_UP  V2DFmode
#define ti_UP    TImode
#define ei_UP    EImode
#define oi_UP    OImode
#define ci_UP    CImode
#define xi_UP    XImode
#define si_UP    SImode
#define sf_UP    SFmode
#define hi_UP    HImode
#define qi_UP    QImode

/* UP (x) expands to the machine mode named by suffix x.  */
#define UP(X) X##_UP
/* Maximum number of operands (including the return value) that a SIMD
   builtin's prototype may describe.  */
#define SIMD_MAX_BUILTIN_ARGS 5

/* Bit-flags describing the type and qualifiers of each operand of a
   builtin; entry 0 of a qualifier table always describes the return
   type.  */
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for bigendian.  */
  qualifier_lane_index = 0x200
};
43e9d192
IB
138
139typedef struct
140{
141 const char *name;
ef4bddc2 142 machine_mode mode;
342be7f7
JG
143 const enum insn_code code;
144 unsigned int fcode;
b5828b4b 145 enum aarch64_type_qualifiers *qualifiers;
43e9d192
IB
146} aarch64_simd_builtin_datum;
147
b5828b4b
JG
148static enum aarch64_type_qualifiers
149aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
8f905d69 150 = { qualifier_none, qualifier_none };
b5828b4b 151#define TYPES_UNOP (aarch64_types_unop_qualifiers)
5a7a4e80
TB
152static enum aarch64_type_qualifiers
153aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
154 = { qualifier_unsigned, qualifier_unsigned };
155#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
b5828b4b
JG
156static enum aarch64_type_qualifiers
157aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
158 = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
159#define TYPES_BINOP (aarch64_types_binop_qualifiers)
160static enum aarch64_type_qualifiers
5a7a4e80
TB
161aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
162 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
163#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
7baa225d 164static enum aarch64_type_qualifiers
de10bcce
AL
165aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
166 = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
167#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
168static enum aarch64_type_qualifiers
918621d3
AL
169aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
170 = { qualifier_none, qualifier_none, qualifier_unsigned };
171#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
172static enum aarch64_type_qualifiers
7baa225d
TB
173aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
174 = { qualifier_poly, qualifier_poly, qualifier_poly };
175#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
176
5a7a4e80 177static enum aarch64_type_qualifiers
b5828b4b
JG
178aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
179 = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
180#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
30442682 181static enum aarch64_type_qualifiers
2a49c16d
AL
182aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
183 = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
184#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
185static enum aarch64_type_qualifiers
30442682
TB
186aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
187 = { qualifier_unsigned, qualifier_unsigned,
188 qualifier_unsigned, qualifier_unsigned };
189#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
190
b5828b4b 191static enum aarch64_type_qualifiers
2a49c16d 192aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 193 = { qualifier_none, qualifier_none, qualifier_none,
2a49c16d
AL
194 qualifier_none, qualifier_lane_index };
195#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
b5828b4b
JG
196
197static enum aarch64_type_qualifiers
2a49c16d 198aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 199 = { qualifier_none, qualifier_none, qualifier_immediate };
2a49c16d
AL
200#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
201#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
b5828b4b 202static enum aarch64_type_qualifiers
de10bcce
AL
203aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
204 = { qualifier_unsigned, qualifier_none, qualifier_immediate };
205#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
206static enum aarch64_type_qualifiers
252c7556
AV
207aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
208 = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
209#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)
de10bcce 210
252c7556 211static enum aarch64_type_qualifiers
2a49c16d 212aarch64_types_ternop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 213 = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
2a49c16d
AL
214#define TYPES_SETREG (aarch64_types_ternop_imm_qualifiers)
215#define TYPES_SHIFTINSERT (aarch64_types_ternop_imm_qualifiers)
216#define TYPES_SHIFTACC (aarch64_types_ternop_imm_qualifiers)
b5828b4b 217
de10bcce
AL
218static enum aarch64_type_qualifiers
219aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
220 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
221 qualifier_immediate };
222#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)
223
224
b5828b4b
JG
225static enum aarch64_type_qualifiers
226aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
227 = { qualifier_none, qualifier_none, qualifier_none };
228#define TYPES_COMBINE (aarch64_types_combine_qualifiers)
229
230static enum aarch64_type_qualifiers
231aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
232 = { qualifier_none, qualifier_const_pointer_map_mode };
233#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
234#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
3ec1be97
CB
235static enum aarch64_type_qualifiers
236aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
237 = { qualifier_none, qualifier_const_pointer_map_mode,
238 qualifier_none, qualifier_none };
239#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)
b5828b4b 240
46e778c4
JG
241static enum aarch64_type_qualifiers
242aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
243 = { qualifier_poly, qualifier_unsigned,
244 qualifier_poly, qualifier_poly };
245#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
246static enum aarch64_type_qualifiers
247aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
248 = { qualifier_none, qualifier_unsigned,
249 qualifier_none, qualifier_none };
250#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
251static enum aarch64_type_qualifiers
252aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
253 = { qualifier_unsigned, qualifier_unsigned,
254 qualifier_unsigned, qualifier_unsigned };
255#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)
256
b5828b4b
JG
257/* The first argument (return type) of a store should be void type,
258 which we represent with qualifier_void. Their first operand will be
259 a DImode pointer to the location to store to, so we must use
260 qualifier_map_mode | qualifier_pointer to build a pointer to the
261 element type of the vector. */
262static enum aarch64_type_qualifiers
263aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
264 = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
265#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
266#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
ba081b77
JG
267static enum aarch64_type_qualifiers
268aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
269 = { qualifier_void, qualifier_pointer_map_mode,
270 qualifier_none, qualifier_none };
271#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
b5828b4b 272
/* CFn maps a builtin name N and mode X onto the CODE_FOR_* enumerator of
   the insn pattern implementing it; the MAP argument of VARn selects
   which naming scheme applies.  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

/* VARn expands to n aarch64_simd_builtin_datum initializers, one per
   mode A..L for which builtin N is available.  */
#define VAR1(T, N, MAP, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
342be7f7 315
f421c516 316#include "aarch64-builtin-iterators.h"
43e9d192
IB
317
318static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
342be7f7
JG
319#include "aarch64-simd-builtins.def"
320};
321
5d357f26
KT
322/* There's only 8 CRC32 builtins. Probably not worth their own .def file. */
323#define AARCH64_CRC32_BUILTINS \
324 CRC32_BUILTIN (crc32b, QI) \
325 CRC32_BUILTIN (crc32h, HI) \
326 CRC32_BUILTIN (crc32w, SI) \
327 CRC32_BUILTIN (crc32x, DI) \
328 CRC32_BUILTIN (crc32cb, QI) \
329 CRC32_BUILTIN (crc32ch, HI) \
330 CRC32_BUILTIN (crc32cw, SI) \
331 CRC32_BUILTIN (crc32cx, DI)
332
333typedef struct
334{
335 const char *name;
ef4bddc2 336 machine_mode mode;
5d357f26
KT
337 const enum insn_code icode;
338 unsigned int fcode;
339} aarch64_crc_builtin_datum;
340
341#define CRC32_BUILTIN(N, M) \
342 AARCH64_BUILTIN_##N,
343
342be7f7 344#undef VAR1
0ddec79f 345#define VAR1(T, N, MAP, A) \
e993fea1 346 AARCH64_SIMD_BUILTIN_##T##_##N##A,
342be7f7
JG
347
348enum aarch64_builtins
349{
350 AARCH64_BUILTIN_MIN,
aa87aced
KV
351
352 AARCH64_BUILTIN_GET_FPCR,
353 AARCH64_BUILTIN_SET_FPCR,
354 AARCH64_BUILTIN_GET_FPSR,
355 AARCH64_BUILTIN_SET_FPSR,
356
342be7f7 357 AARCH64_SIMD_BUILTIN_BASE,
661fce82 358 AARCH64_SIMD_BUILTIN_LANE_CHECK,
342be7f7 359#include "aarch64-simd-builtins.def"
661fce82
AL
360 /* The first enum element which is based on an insn_data pattern. */
361 AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
362 AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
363 + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
5d357f26
KT
364 AARCH64_CRC32_BUILTIN_BASE,
365 AARCH64_CRC32_BUILTINS
366 AARCH64_CRC32_BUILTIN_MAX,
342be7f7 367 AARCH64_BUILTIN_MAX
43e9d192
IB
368};
369
5d357f26
KT
370#undef CRC32_BUILTIN
371#define CRC32_BUILTIN(N, M) \
372 {"__builtin_aarch64_"#N, M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},
373
374static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
375 AARCH64_CRC32_BUILTINS
376};
377
378#undef CRC32_BUILTIN
379
119103ca
JG
380static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];
381
43e9d192
IB
382#define NUM_DREG_TYPES 6
383#define NUM_QREG_TYPES 6
384
/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types are implementation defined.  The table is
   NULL-terminated so it can be walked without a separate count.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  NULL
};
b5828b4b 410
f9d53c27
TB
411#define ENTRY(E, M, Q, G) E,
412enum aarch64_simd_type
413{
414#include "aarch64-simd-builtin-types.def"
415 ARM_NEON_H_TYPES_LAST
416};
417#undef ENTRY
b5828b4b 418
f9d53c27 419struct aarch64_simd_type_info
b5828b4b 420{
f9d53c27
TB
421 enum aarch64_simd_type type;
422
423 /* Internal type name. */
424 const char *name;
425
426 /* Internal type name(mangled). The mangled names conform to the
427 AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
428 Appendix A). To qualify for emission with the mangled names defined in
429 that document, a vector type must not only be of the correct mode but also
430 be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t); these
431 types are registered by aarch64_init_simd_builtin_types (). In other
432 words, vector types defined in other ways e.g. via vector_size attribute
433 will get default mangled names. */
434 const char *mangle;
435
436 /* Internal type. */
437 tree itype;
438
439 /* Element type. */
b5828b4b
JG
440 tree eltype;
441
f9d53c27
TB
442 /* Machine mode the internal type maps to. */
443 enum machine_mode mode;
b5828b4b 444
f9d53c27
TB
445 /* Qualifiers. */
446 enum aarch64_type_qualifiers q;
447};
448
449#define ENTRY(E, M, Q, G) \
450 {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
451static struct aarch64_simd_type_info aarch64_simd_types [] = {
452#include "aarch64-simd-builtin-types.def"
453};
454#undef ENTRY
455
456static tree aarch64_simd_intOI_type_node = NULL_TREE;
457static tree aarch64_simd_intEI_type_node = NULL_TREE;
458static tree aarch64_simd_intCI_type_node = NULL_TREE;
459static tree aarch64_simd_intXI_type_node = NULL_TREE;
460
461static const char *
462aarch64_mangle_builtin_scalar_type (const_tree type)
463{
464 int i = 0;
465
466 while (aarch64_scalar_builtin_types[i] != NULL)
b5828b4b 467 {
f9d53c27
TB
468 const char *name = aarch64_scalar_builtin_types[i];
469
470 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
471 && DECL_NAME (TYPE_NAME (type))
472 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
473 return aarch64_scalar_builtin_types[i];
474 i++;
475 }
476 return NULL;
b5828b4b
JG
477}
478
f9d53c27
TB
479static const char *
480aarch64_mangle_builtin_vector_type (const_tree type)
b5828b4b 481{
f9d53c27
TB
482 int i;
483 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
484
485 for (i = 0; i < nelts; i++)
486 if (aarch64_simd_types[i].mode == TYPE_MODE (type)
487 && TYPE_NAME (type)
488 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
489 && DECL_NAME (TYPE_NAME (type))
490 && !strcmp
491 (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
492 aarch64_simd_types[i].name))
493 return aarch64_simd_types[i].mangle;
494
495 return NULL;
6db1ec94
JG
496}
497
f9d53c27
TB
498const char *
499aarch64_mangle_builtin_type (const_tree type)
6db1ec94 500{
f9d53c27
TB
501 const char *mangle;
502 /* Walk through all the AArch64 builtins types tables to filter out the
503 incoming type. */
504 if ((mangle = aarch64_mangle_builtin_vector_type (type))
505 || (mangle = aarch64_mangle_builtin_scalar_type (type)))
506 return mangle;
507
508 return NULL;
6db1ec94
JG
509}
510
f9d53c27
TB
511static tree
512aarch64_simd_builtin_std_type (enum machine_mode mode,
513 enum aarch64_type_qualifiers q)
6db1ec94 514{
f9d53c27
TB
515#define QUAL_TYPE(M) \
516 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
517 switch (mode)
518 {
519 case QImode:
520 return QUAL_TYPE (QI);
521 case HImode:
522 return QUAL_TYPE (HI);
523 case SImode:
524 return QUAL_TYPE (SI);
525 case DImode:
526 return QUAL_TYPE (DI);
527 case TImode:
528 return QUAL_TYPE (TI);
529 case OImode:
530 return aarch64_simd_intOI_type_node;
531 case EImode:
532 return aarch64_simd_intEI_type_node;
533 case CImode:
534 return aarch64_simd_intCI_type_node;
535 case XImode:
536 return aarch64_simd_intXI_type_node;
537 case SFmode:
538 return float_type_node;
539 case DFmode:
540 return double_type_node;
541 default:
542 gcc_unreachable ();
543 }
544#undef QUAL_TYPE
6db1ec94
JG
545}
546
f9d53c27
TB
547static tree
548aarch64_lookup_simd_builtin_type (enum machine_mode mode,
549 enum aarch64_type_qualifiers q)
6db1ec94 550{
f9d53c27
TB
551 int i;
552 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
553
554 /* Non-poly scalar modes map to standard types not in the table. */
555 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
556 return aarch64_simd_builtin_std_type (mode, q);
557
558 for (i = 0; i < nelts; i++)
559 if (aarch64_simd_types[i].mode == mode
560 && aarch64_simd_types[i].q == q)
561 return aarch64_simd_types[i].itype;
562
563 return NULL_TREE;
b5828b4b
JG
564}
565
f9d53c27
TB
566static tree
567aarch64_simd_builtin_type (enum machine_mode mode,
568 bool unsigned_p, bool poly_p)
569{
570 if (poly_p)
571 return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
572 else if (unsigned_p)
573 return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
574 else
575 return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
576}
577
af55e82d 578static void
f9d53c27 579aarch64_init_simd_builtin_types (void)
43e9d192 580{
f9d53c27
TB
581 int i;
582 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
583 tree tdecl;
584
585 /* Init all the element types built by the front-end. */
586 aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
587 aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
588 aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
589 aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
590 aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
591 aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
592 aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
593 aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
594 aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
595 aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
596 aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
597 aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
598 aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
599 aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
600 aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
601 aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;
602
603 /* Poly types are a world of their own. */
604 aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
605 build_distinct_type_copy (unsigned_intQI_type_node);
606 aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
607 build_distinct_type_copy (unsigned_intHI_type_node);
608 aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
609 build_distinct_type_copy (unsigned_intDI_type_node);
610 aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
611 build_distinct_type_copy (unsigned_intTI_type_node);
612 /* Init poly vector element types with scalar poly types. */
613 aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
614 aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
615 aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
616 aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
617 aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
618 aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;
619
620 /* Continue with standard types. */
621 aarch64_simd_types[Float32x2_t].eltype = float_type_node;
622 aarch64_simd_types[Float32x4_t].eltype = float_type_node;
623 aarch64_simd_types[Float64x1_t].eltype = double_type_node;
624 aarch64_simd_types[Float64x2_t].eltype = double_type_node;
625
626 for (i = 0; i < nelts; i++)
627 {
628 tree eltype = aarch64_simd_types[i].eltype;
629 enum machine_mode mode = aarch64_simd_types[i].mode;
630
631 if (aarch64_simd_types[i].itype == NULL)
632 aarch64_simd_types[i].itype =
633 build_distinct_type_copy
634 (build_vector_type (eltype, GET_MODE_NUNITS (mode)));
635
636 tdecl = add_builtin_type (aarch64_simd_types[i].name,
637 aarch64_simd_types[i].itype);
638 TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
639 SET_TYPE_STRUCTURAL_EQUALITY (aarch64_simd_types[i].itype);
640 }
43e9d192 641
f9d53c27
TB
642#define AARCH64_BUILD_SIGNED_TYPE(mode) \
643 make_signed_type (GET_MODE_PRECISION (mode));
644 aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
645 aarch64_simd_intEI_type_node = AARCH64_BUILD_SIGNED_TYPE (EImode);
646 aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
647 aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
648#undef AARCH64_BUILD_SIGNED_TYPE
649
650 tdecl = add_builtin_type
651 ("__builtin_aarch64_simd_ei" , aarch64_simd_intEI_type_node);
652 TYPE_NAME (aarch64_simd_intEI_type_node) = tdecl;
653 tdecl = add_builtin_type
654 ("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
655 TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
656 tdecl = add_builtin_type
657 ("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
658 TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
659 tdecl = add_builtin_type
660 ("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
661 TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
662}
663
664static void
665aarch64_init_simd_builtin_scalar_types (void)
666{
667 /* Define typedefs for all the standard scalar types. */
668 (*lang_hooks.types.register_builtin_type) (intQI_type_node,
43e9d192 669 "__builtin_aarch64_simd_qi");
f9d53c27 670 (*lang_hooks.types.register_builtin_type) (intHI_type_node,
43e9d192 671 "__builtin_aarch64_simd_hi");
f9d53c27 672 (*lang_hooks.types.register_builtin_type) (intSI_type_node,
43e9d192 673 "__builtin_aarch64_simd_si");
f9d53c27 674 (*lang_hooks.types.register_builtin_type) (float_type_node,
43e9d192 675 "__builtin_aarch64_simd_sf");
f9d53c27 676 (*lang_hooks.types.register_builtin_type) (intDI_type_node,
43e9d192 677 "__builtin_aarch64_simd_di");
f9d53c27 678 (*lang_hooks.types.register_builtin_type) (double_type_node,
43e9d192 679 "__builtin_aarch64_simd_df");
f9d53c27 680 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
43e9d192 681 "__builtin_aarch64_simd_poly8");
f9d53c27 682 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
43e9d192 683 "__builtin_aarch64_simd_poly16");
f9d53c27 684 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
7baa225d 685 "__builtin_aarch64_simd_poly64");
f9d53c27 686 (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
7baa225d 687 "__builtin_aarch64_simd_poly128");
f9d53c27 688 (*lang_hooks.types.register_builtin_type) (intTI_type_node,
43e9d192 689 "__builtin_aarch64_simd_ti");
b5828b4b 690 /* Unsigned integer types for various mode sizes. */
f9d53c27 691 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
b5828b4b 692 "__builtin_aarch64_simd_uqi");
f9d53c27 693 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
b5828b4b 694 "__builtin_aarch64_simd_uhi");
f9d53c27 695 (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
b5828b4b 696 "__builtin_aarch64_simd_usi");
f9d53c27 697 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
b5828b4b 698 "__builtin_aarch64_simd_udi");
f9d53c27
TB
699}
700
701static void
702aarch64_init_simd_builtins (void)
703{
661fce82 704 unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;
f9d53c27
TB
705
706 aarch64_init_simd_builtin_types ();
43e9d192 707
f9d53c27
TB
708 /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
709 Therefore we need to preserve the old __builtin scalar types. It can be
710 removed once all the intrinsics become strongly typed using the qualifier
711 system. */
712 aarch64_init_simd_builtin_scalar_types ();
713
661fce82 714 tree lane_check_fpr = build_function_type_list (void_type_node,
9c4f25cc
AP
715 size_type_node,
716 size_type_node,
661fce82
AL
717 intSI_type_node,
718 NULL);
719 aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK] =
720 add_builtin_function ("__builtin_aarch64_im_lane_boundsi", lane_check_fpr,
721 AARCH64_SIMD_BUILTIN_LANE_CHECK, BUILT_IN_MD,
722 NULL, NULL_TREE);
723
342be7f7 724 for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
43e9d192 725 {
b5828b4b
JG
726 bool print_type_signature_p = false;
727 char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
43e9d192 728 aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
342be7f7
JG
729 char namebuf[60];
730 tree ftype = NULL;
119103ca 731 tree fndecl = NULL;
342be7f7 732
342be7f7 733 d->fcode = fcode;
43e9d192 734
b5828b4b
JG
735 /* We must track two variables here. op_num is
736 the operand number as in the RTL pattern. This is
737 required to access the mode (e.g. V4SF mode) of the
738 argument, from which the base type can be derived.
739 arg_num is an index in to the qualifiers data, which
740 gives qualifiers to the type (e.g. const unsigned).
741 The reason these two variables may differ by one is the
742 void return type. While all return types take the 0th entry
743 in the qualifiers array, there is no operand for them in the
744 RTL pattern. */
745 int op_num = insn_data[d->code].n_operands - 1;
746 int arg_num = d->qualifiers[0] & qualifier_void
747 ? op_num + 1
748 : op_num;
749 tree return_type = void_type_node, args = void_list_node;
750 tree eltype;
751
752 /* Build a function type directly from the insn_data for this
753 builtin. The build_function_type () function takes care of
754 removing duplicates for us. */
755 for (; op_num >= 0; arg_num--, op_num--)
43e9d192 756 {
ef4bddc2 757 machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
b5828b4b 758 enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];
43e9d192 759
b5828b4b
JG
760 if (qualifiers & qualifier_unsigned)
761 {
762 type_signature[arg_num] = 'u';
763 print_type_signature_p = true;
764 }
6db1ec94
JG
765 else if (qualifiers & qualifier_poly)
766 {
767 type_signature[arg_num] = 'p';
768 print_type_signature_p = true;
769 }
b5828b4b
JG
770 else
771 type_signature[arg_num] = 's';
772
773 /* Skip an internal operand for vget_{low, high}. */
774 if (qualifiers & qualifier_internal)
775 continue;
776
777 /* Some builtins have different user-facing types
778 for certain arguments, encoded in d->mode. */
779 if (qualifiers & qualifier_map_mode)
bc5e395d 780 op_mode = d->mode;
b5828b4b
JG
781
782 /* For pointers, we want a pointer to the basic type
783 of the vector. */
784 if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
785 op_mode = GET_MODE_INNER (op_mode);
786
f9d53c27
TB
787 eltype = aarch64_simd_builtin_type
788 (op_mode,
789 (qualifiers & qualifier_unsigned) != 0,
790 (qualifiers & qualifier_poly) != 0);
791 gcc_assert (eltype != NULL);
b5828b4b
JG
792
793 /* Add qualifiers. */
794 if (qualifiers & qualifier_const)
795 eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);
796
797 if (qualifiers & qualifier_pointer)
798 eltype = build_pointer_type (eltype);
799
800 /* If we have reached arg_num == 0, we are at a non-void
801 return type. Otherwise, we are still processing
802 arguments. */
803 if (arg_num == 0)
804 return_type = eltype;
805 else
806 args = tree_cons (NULL_TREE, eltype, args);
807 }
342be7f7 808
b5828b4b 809 ftype = build_function_type (return_type, args);
43e9d192 810
342be7f7 811 gcc_assert (ftype != NULL);
43e9d192 812
b5828b4b 813 if (print_type_signature_p)
bc5e395d
JG
814 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
815 d->name, type_signature);
b5828b4b 816 else
bc5e395d
JG
817 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
818 d->name);
43e9d192 819
119103ca
JG
820 fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
821 NULL, NULL_TREE);
822 aarch64_builtin_decls[fcode] = fndecl;
43e9d192
IB
823 }
824}
825
5d357f26
KT
826static void
827aarch64_init_crc32_builtins ()
828{
f9d53c27 829 tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
5d357f26
KT
830 unsigned int i = 0;
831
832 for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
833 {
834 aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
f9d53c27
TB
835 tree argtype = aarch64_simd_builtin_std_type (d->mode,
836 qualifier_unsigned);
5d357f26
KT
837 tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
838 tree fndecl = add_builtin_function (d->name, ftype, d->fcode,
839 BUILT_IN_MD, NULL, NULL_TREE);
840
841 aarch64_builtin_decls[d->fcode] = fndecl;
842 }
843}
844
342be7f7
JG
845void
846aarch64_init_builtins (void)
43e9d192 847{
aa87aced
KV
848 tree ftype_set_fpr
849 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
850 tree ftype_get_fpr
851 = build_function_type_list (unsigned_type_node, NULL);
852
853 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
854 = add_builtin_function ("__builtin_aarch64_get_fpcr", ftype_get_fpr,
855 AARCH64_BUILTIN_GET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
856 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
857 = add_builtin_function ("__builtin_aarch64_set_fpcr", ftype_set_fpr,
858 AARCH64_BUILTIN_SET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
859 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
860 = add_builtin_function ("__builtin_aarch64_get_fpsr", ftype_get_fpr,
861 AARCH64_BUILTIN_GET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
862 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
863 = add_builtin_function ("__builtin_aarch64_set_fpsr", ftype_set_fpr,
864 AARCH64_BUILTIN_SET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
865
342be7f7
JG
866 if (TARGET_SIMD)
867 aarch64_init_simd_builtins ();
5d357f26
KT
868 if (TARGET_CRC32)
869 aarch64_init_crc32_builtins ();
43e9d192
IB
870}
871
119103ca
JG
872tree
873aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
874{
875 if (code >= AARCH64_BUILTIN_MAX)
876 return error_mark_node;
877
878 return aarch64_builtin_decls[code];
879}
880
43e9d192
IB
/* Classification of how aarch64_simd_expand_args should treat each
   operand of a SIMD builtin.  */
typedef enum
{
  SIMD_ARG_COPY_TO_REG,	/* Force the operand into a register.  */
  SIMD_ARG_CONSTANT,	/* Operand must be a constant immediate.  */
  SIMD_ARG_LANE_INDEX,	/* Constant lane index into the previous operand.  */
  SIMD_ARG_STOP		/* Terminator for the argument array.  */
} builtin_simd_arg;
888
43e9d192
IB
889static rtx
890aarch64_simd_expand_args (rtx target, int icode, int have_retval,
8d3d350a 891 tree exp, builtin_simd_arg *args)
43e9d192 892{
43e9d192 893 rtx pat;
d9e80f49
AL
894 rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand. */
895 int opc = 0;
896
897 if (have_retval)
898 {
899 machine_mode tmode = insn_data[icode].operand[0].mode;
900 if (!target
43e9d192 901 || GET_MODE (target) != tmode
d9e80f49
AL
902 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
903 target = gen_reg_rtx (tmode);
904 op[opc++] = target;
905 }
43e9d192 906
43e9d192
IB
907 for (;;)
908 {
d9e80f49 909 builtin_simd_arg thisarg = args[opc - have_retval];
43e9d192
IB
910
911 if (thisarg == SIMD_ARG_STOP)
912 break;
913 else
914 {
d9e80f49
AL
915 tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
916 enum machine_mode mode = insn_data[icode].operand[opc].mode;
917 op[opc] = expand_normal (arg);
43e9d192
IB
918
919 switch (thisarg)
920 {
921 case SIMD_ARG_COPY_TO_REG:
d9e80f49
AL
922 if (POINTER_TYPE_P (TREE_TYPE (arg)))
923 op[opc] = convert_memory_address (Pmode, op[opc]);
924 /*gcc_assert (GET_MODE (op[opc]) == mode); */
925 if (!(*insn_data[icode].operand[opc].predicate)
926 (op[opc], mode))
927 op[opc] = copy_to_mode_reg (mode, op[opc]);
43e9d192
IB
928 break;
929
2a49c16d
AL
930 case SIMD_ARG_LANE_INDEX:
931 /* Must be a previous operand into which this is an index. */
d9e80f49
AL
932 gcc_assert (opc > 0);
933 if (CONST_INT_P (op[opc]))
2a49c16d 934 {
d9e80f49
AL
935 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
936 aarch64_simd_lane_bounds (op[opc],
46ed6024 937 0, GET_MODE_NUNITS (vmode), exp);
2a49c16d 938 /* Keep to GCC-vector-extension lane indices in the RTL. */
d9e80f49 939 op[opc] = GEN_INT (ENDIAN_LANE_N (vmode, INTVAL (op[opc])));
2a49c16d
AL
940 }
941 /* Fall through - if the lane index isn't a constant then
942 the next case will error. */
43e9d192 943 case SIMD_ARG_CONSTANT:
d9e80f49
AL
944 if (!(*insn_data[icode].operand[opc].predicate)
945 (op[opc], mode))
d5a29419 946 {
fca051af
AL
947 error ("%Kargument %d must be a constant immediate",
948 exp, opc + 1 - have_retval);
d5a29419
KT
949 return const0_rtx;
950 }
43e9d192
IB
951 break;
952
953 case SIMD_ARG_STOP:
954 gcc_unreachable ();
955 }
956
d9e80f49 957 opc++;
43e9d192
IB
958 }
959 }
960
d9e80f49
AL
961 switch (opc)
962 {
963 case 1:
964 pat = GEN_FCN (icode) (op[0]);
965 break;
43e9d192 966
d9e80f49
AL
967 case 2:
968 pat = GEN_FCN (icode) (op[0], op[1]);
969 break;
43e9d192 970
d9e80f49
AL
971 case 3:
972 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
973 break;
43e9d192 974
d9e80f49
AL
975 case 4:
976 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
977 break;
43e9d192 978
d9e80f49
AL
979 case 5:
980 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
981 break;
43e9d192 982
d9e80f49
AL
983 case 6:
984 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
985 break;
43e9d192 986
d9e80f49
AL
987 default:
988 gcc_unreachable ();
989 }
43e9d192
IB
990
991 if (!pat)
d5a29419 992 return NULL_RTX;
43e9d192
IB
993
994 emit_insn (pat);
995
996 return target;
997}
998
/* Expand an AArch64 AdvSIMD builtin(intrinsic).  FCODE selects the
   builtin, EXP is the call expression, TARGET is a suggested result
   location.  The special lane-check pseudo-builtin only diagnoses and
   generates no RTL.  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
    {
      /* Arguments: total vector size in bytes, element size in bytes,
	 and the lane index to validate.  */
      rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
      rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
      if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
	  && UINTVAL (elementsize) != 0
	  && UINTVAL (totalsize) != 0)
	{
	  rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
	  if (CONST_INT_P (lane_idx))
	    aarch64_simd_lane_bounds (lane_idx, 0,
				      UINTVAL (totalsize)
				       / UINTVAL (elementsize),
				      exp);
	  else
	    error ("%Klane index must be a constant immediate", exp);
	}
      else
	error ("%Ktotal size and element size must be a non-zero constant immediate", exp);
      /* Don't generate any RTL.  */
      return const0_rtx;
    }
  aarch64_simd_builtin_datum *d =
		&aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  /* For a void builtin the insn has no result operand, so qualifiers
     and operands are indexed differently below.  */
  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
	args[k] = SIMD_ARG_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp, &args[1]);
}
342be7f7 1077
5d357f26
KT
/* Expand a CRC32 builtin call EXP into RTL.  FCODE identifies the
   builtin; TARGET is a suggested location for the unsigned int result.
   Returns the register holding the result, or NULL_RTX if the insn
   generator fails.  */
rtx
aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
{
  rtx pat;
  /* Table is indexed from the first code after the group base.  */
  aarch64_crc_builtin_datum *d
    = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
  enum insn_code icode = d->icode;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Reuse TARGET only if it matches the insn's result operand.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  /* VOIDmode covers constants, which carry no mode of their own.  */
  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
1113
342be7f7
JG
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient.  Implements
   TARGET_EXPAND_BUILTIN: handles the FPCR/FPSR accessors inline and
   dispatches SIMD and CRC32 codes to their group expanders.  */
rtx
aarch64_expand_builtin (tree exp,
		     rtx target,
		     rtx subtarget ATTRIBUTE_UNUSED,
		     machine_mode mode ATTRIBUTE_UNUSED,
		     int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  int fcode = DECL_FUNCTION_CODE (fndecl);
  int icode;
  rtx pat, op0;
  tree arg0;

  switch (fcode)
    {
    case AARCH64_BUILTIN_GET_FPCR:
    case AARCH64_BUILTIN_SET_FPCR:
    case AARCH64_BUILTIN_GET_FPSR:
    case AARCH64_BUILTIN_SET_FPSR:
      if ((fcode == AARCH64_BUILTIN_GET_FPCR)
	  || (fcode == AARCH64_BUILTIN_GET_FPSR))
	{
	  /* Getters take no argument and produce an SImode result.  */
	  icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
	    CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
	  target = gen_reg_rtx (SImode);
	  pat = GEN_FCN (icode) (target);
	}
      else
	{
	  /* Setters consume one argument and return nothing.  */
	  target = NULL_RTX;
	  icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
	    CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
	  arg0 = CALL_EXPR_ARG (exp, 0);
	  op0 = expand_normal (arg0);
	  pat = GEN_FCN (icode) (op0);
	}
      emit_insn (pat);
      return target;
    }

  if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
    return aarch64_simd_expand_builtin (fcode, exp, target);
  else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
    return aarch64_crc32_expand_builtin (fcode, exp, target);

  /* Every registered builtin code belongs to one of the groups above.  */
  gcc_unreachable ();
}
42fc9a7f
JG
1163
/* Implement TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION: return the
   AArch64 SIMD builtin decl that vectorizes scalar builtin FNDECL for
   the vector types TYPE_OUT -> TYPE_IN, or NULL_TREE if there is no
   suitable variant.  */
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  /* Element mode and lane count of each side; the CHECK macros below
     match these against candidate variants.  */
  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
   : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
      ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
      : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	 : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
	  /* Float -> float rounding operations.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
	  /* Integer bit-counting operations (SImode result lanes).  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	case BUILT_IN_CLZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	    return NULL_TREE;
	  }
	case BUILT_IN_CTZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
	    return NULL_TREE;
	  }
	  /* Float -> integer rounding conversions.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	  /* Byte-swap operations (integer lanes, same mode both sides).  */
	case BUILT_IN_BSWAP16:
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP32:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP64:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
	  else
	    return NULL_TREE;
	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}
0ac198d3
JG
1317
1318#undef VAR1
1319#define VAR1(T, N, MAP, A) \
e993fea1 1320 case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 1321
9697e620
JG
1322tree
1323aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
1324 bool ignore ATTRIBUTE_UNUSED)
1325{
1326 int fcode = DECL_FUNCTION_CODE (fndecl);
1327 tree type = TREE_TYPE (TREE_TYPE (fndecl));
1328
1329 switch (fcode)
1330 {
8f905d69 1331 BUILTIN_VDQF (UNOP, abs, 2)
9697e620
JG
1332 return fold_build1 (ABS_EXPR, type, args[0]);
1333 break;
1709ff9b
JG
1334 VAR1 (UNOP, floatv2si, 2, v2sf)
1335 VAR1 (UNOP, floatv4si, 2, v4sf)
1336 VAR1 (UNOP, floatv2di, 2, v2df)
1337 return fold_build1 (FLOAT_EXPR, type, args[0]);
9697e620
JG
1338 default:
1339 break;
1340 }
1341
1342 return NULL_TREE;
1343}
1344
0ac198d3
JG
/* Implement TARGET_GIMPLE_FOLD_BUILTIN: try to replace the builtin call
   at GSI with equivalent generic gimple.  Returns true if the statement
   was changed.  The BUILTIN_* macros expand to case-label runs for each
   per-mode variant.  */
bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;

  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  /* We use gimple's REDUC_(PLUS|MIN|MAX)_EXPRs for float, signed int
	     and unsigned int; it will distinguish according to the types of
	     the arguments to the __builtin.  */
	  switch (fcode)
	    {
	      BUILTIN_VALL (UNOP, reduc_plus_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_PLUS_EXPR, args[0]);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_MAX_EXPR, args[0]);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10)
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						REDUC_MIN_EXPR, args[0]);
		break;

	    default:
	      break;
	    }
	}
    }

  if (new_stmt)
    {
      /* Swap the original call for the reduction assignment in place.  */
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}
1399
aa87aced
KV
/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV: build the trees that an
   atomic compound assignment on floating point needs to save (HOLD),
   clear (CLEAR) and restore-and-raise (UPDATE) the FP environment,
   using the FPCR/FPSR accessor builtins registered above.  */
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  /* Exception flag bit positions within FPSR, and their trap-enable
     counterparts shifted left by AARCH64_FE_EXCEPT_SHIFT within FPCR.  */
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalence of :
     unsigned int fenv_cr;
     fenv_cr = __builtin_aarch64_get_fpcr ();

     unsigned int fenv_sr;
     fenv_sr = __builtin_aarch64_get_fpsr ();

     Now set all exceptions to non-stop
     unsigned int mask_cr
       = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
     unsigned int masked_cr;
     masked_cr = fenv_cr & mask_cr;

     And clear all exception flags
     unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
     unsigned int masked_sr;
     masked_sr = fenv_sr & mask_sr;

     __builtin_aarch64_set_cr (masked_cr);
     __builtin_aarch64_set_sr (masked_sr);  */

  fenv_cr = create_tmp_var (unsigned_type_node);
  fenv_sr = create_tmp_var (unsigned_type_node);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0));
  ld_fenv_sr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0));

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
     __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of :
     unsigned int new_fenv_var;
     new_fenv_var = __builtin_aarch64_get_fpsr ();

     __builtin_aarch64_set_fpsr (fenv_sr);

     __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var (unsigned_type_node);
  reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0));
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}
1498
1499
42fc9a7f
JG
1500#undef AARCH64_CHECK_BUILTIN_MODE
1501#undef AARCH64_FIND_FRINT_VARIANT
0ddec79f
JG
1502#undef CF0
1503#undef CF1
1504#undef CF2
1505#undef CF3
1506#undef CF4
1507#undef CF10
1508#undef VAR1
1509#undef VAR2
1510#undef VAR3
1511#undef VAR4
1512#undef VAR5
1513#undef VAR6
1514#undef VAR7
1515#undef VAR8
1516#undef VAR9
1517#undef VAR10
1518#undef VAR11
1519
3c03d39d 1520#include "gt-aarch64-builtins.h"