/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
20
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "cfghooks.h"
#include "basic-block.h"
#include "cfg.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "calls.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "recog.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "insn-codes.h"
#include "optabs.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
43e9d192 61
/* Map lower-case mode suffixes used by the .def file onto the
   corresponding machine mode; UP(X) performs the expansion.  */
#define v8qi_UP  V8QImode
#define v4hi_UP  V4HImode
#define v4hf_UP  V4HFmode
#define v2si_UP  V2SImode
#define v2sf_UP  V2SFmode
#define v1df_UP  V1DFmode
#define di_UP    DImode
#define df_UP    DFmode
#define v16qi_UP V16QImode
#define v8hi_UP  V8HImode
#define v8hf_UP  V8HFmode
#define v4si_UP  V4SImode
#define v4sf_UP  V4SFmode
#define v2di_UP  V2DImode
#define v2df_UP  V2DFmode
#define ti_UP    TImode
#define ei_UP    EImode
#define oi_UP    OImode
#define ci_UP    CImode
#define xi_UP    XImode
#define si_UP    SImode
#define sf_UP    SFmode
#define hi_UP    HImode
#define qi_UP    QImode
#define UP(X) X##_UP

#define SIMD_MAX_BUILTIN_ARGS 5

enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for bigendian.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400
};
43e9d192
IB
123
124typedef struct
125{
126 const char *name;
ef4bddc2 127 machine_mode mode;
342be7f7
JG
128 const enum insn_code code;
129 unsigned int fcode;
b5828b4b 130 enum aarch64_type_qualifiers *qualifiers;
43e9d192
IB
131} aarch64_simd_builtin_datum;
132
b5828b4b
JG
133static enum aarch64_type_qualifiers
134aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
8f905d69 135 = { qualifier_none, qualifier_none };
b5828b4b 136#define TYPES_UNOP (aarch64_types_unop_qualifiers)
5a7a4e80
TB
137static enum aarch64_type_qualifiers
138aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
139 = { qualifier_unsigned, qualifier_unsigned };
140#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
b5828b4b
JG
141static enum aarch64_type_qualifiers
142aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
143 = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
144#define TYPES_BINOP (aarch64_types_binop_qualifiers)
145static enum aarch64_type_qualifiers
5a7a4e80
TB
146aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
147 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
148#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
7baa225d 149static enum aarch64_type_qualifiers
de10bcce
AL
150aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
151 = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
152#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
153static enum aarch64_type_qualifiers
918621d3
AL
154aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
155 = { qualifier_none, qualifier_none, qualifier_unsigned };
156#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
157static enum aarch64_type_qualifiers
7baa225d
TB
158aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
159 = { qualifier_poly, qualifier_poly, qualifier_poly };
160#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
161
5a7a4e80 162static enum aarch64_type_qualifiers
b5828b4b
JG
163aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
164 = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
165#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
30442682 166static enum aarch64_type_qualifiers
2a49c16d
AL
167aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
168 = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
169#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
170static enum aarch64_type_qualifiers
30442682
TB
171aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
172 = { qualifier_unsigned, qualifier_unsigned,
173 qualifier_unsigned, qualifier_unsigned };
174#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
175
b5828b4b 176static enum aarch64_type_qualifiers
2a49c16d 177aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 178 = { qualifier_none, qualifier_none, qualifier_none,
2a49c16d
AL
179 qualifier_none, qualifier_lane_index };
180#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
b5828b4b
JG
181
182static enum aarch64_type_qualifiers
2a49c16d 183aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 184 = { qualifier_none, qualifier_none, qualifier_immediate };
2a49c16d
AL
185#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
186#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
b5828b4b 187static enum aarch64_type_qualifiers
de10bcce
AL
188aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
189 = { qualifier_unsigned, qualifier_none, qualifier_immediate };
190#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
191static enum aarch64_type_qualifiers
252c7556
AV
192aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
193 = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
194#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)
de10bcce 195
252c7556 196static enum aarch64_type_qualifiers
2a49c16d 197aarch64_types_ternop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 198 = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
2a49c16d
AL
199#define TYPES_SETREG (aarch64_types_ternop_imm_qualifiers)
200#define TYPES_SHIFTINSERT (aarch64_types_ternop_imm_qualifiers)
201#define TYPES_SHIFTACC (aarch64_types_ternop_imm_qualifiers)
b5828b4b 202
de10bcce
AL
203static enum aarch64_type_qualifiers
204aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
205 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
206 qualifier_immediate };
207#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)
208
209
b5828b4b
JG
210static enum aarch64_type_qualifiers
211aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
212 = { qualifier_none, qualifier_none, qualifier_none };
213#define TYPES_COMBINE (aarch64_types_combine_qualifiers)
214
215static enum aarch64_type_qualifiers
216aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
217 = { qualifier_none, qualifier_const_pointer_map_mode };
218#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
219#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
3ec1be97
CB
220static enum aarch64_type_qualifiers
221aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
222 = { qualifier_none, qualifier_const_pointer_map_mode,
4d0a0237 223 qualifier_none, qualifier_struct_load_store_lane_index };
3ec1be97 224#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)
b5828b4b 225
46e778c4
JG
226static enum aarch64_type_qualifiers
227aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
228 = { qualifier_poly, qualifier_unsigned,
229 qualifier_poly, qualifier_poly };
230#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
231static enum aarch64_type_qualifiers
232aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
233 = { qualifier_none, qualifier_unsigned,
234 qualifier_none, qualifier_none };
235#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
236static enum aarch64_type_qualifiers
237aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
238 = { qualifier_unsigned, qualifier_unsigned,
239 qualifier_unsigned, qualifier_unsigned };
240#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)
241
b5828b4b
JG
242/* The first argument (return type) of a store should be void type,
243 which we represent with qualifier_void. Their first operand will be
244 a DImode pointer to the location to store to, so we must use
245 qualifier_map_mode | qualifier_pointer to build a pointer to the
246 element type of the vector. */
247static enum aarch64_type_qualifiers
248aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
249 = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
250#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
251#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
ba081b77
JG
252static enum aarch64_type_qualifiers
253aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
254 = { qualifier_void, qualifier_pointer_map_mode,
4d0a0237 255 qualifier_none, qualifier_struct_load_store_lane_index };
ba081b77 256#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)

/* CF<MAP> selects how a builtin name maps onto its CODE_FOR_ pattern;
   VAR<n> expands one .def entry into <n> aarch64_simd_builtin_datum
   initialisers, one per mode suffix.  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

#define VAR1(T, N, MAP, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
342be7f7 300
f421c516 301#include "aarch64-builtin-iterators.h"
43e9d192
IB
302
303static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
342be7f7
JG
304#include "aarch64-simd-builtins.def"
305};
306
5d357f26
KT
307/* There's only 8 CRC32 builtins. Probably not worth their own .def file. */
308#define AARCH64_CRC32_BUILTINS \
309 CRC32_BUILTIN (crc32b, QI) \
310 CRC32_BUILTIN (crc32h, HI) \
311 CRC32_BUILTIN (crc32w, SI) \
312 CRC32_BUILTIN (crc32x, DI) \
313 CRC32_BUILTIN (crc32cb, QI) \
314 CRC32_BUILTIN (crc32ch, HI) \
315 CRC32_BUILTIN (crc32cw, SI) \
316 CRC32_BUILTIN (crc32cx, DI)
317
318typedef struct
319{
320 const char *name;
ef4bddc2 321 machine_mode mode;
5d357f26
KT
322 const enum insn_code icode;
323 unsigned int fcode;
324} aarch64_crc_builtin_datum;
325
326#define CRC32_BUILTIN(N, M) \
327 AARCH64_BUILTIN_##N,
328
342be7f7 329#undef VAR1
0ddec79f 330#define VAR1(T, N, MAP, A) \
e993fea1 331 AARCH64_SIMD_BUILTIN_##T##_##N##A,
342be7f7
JG
332
333enum aarch64_builtins
334{
335 AARCH64_BUILTIN_MIN,
aa87aced
KV
336
337 AARCH64_BUILTIN_GET_FPCR,
338 AARCH64_BUILTIN_SET_FPCR,
339 AARCH64_BUILTIN_GET_FPSR,
340 AARCH64_BUILTIN_SET_FPSR,
341
342be7f7 342 AARCH64_SIMD_BUILTIN_BASE,
661fce82 343 AARCH64_SIMD_BUILTIN_LANE_CHECK,
342be7f7 344#include "aarch64-simd-builtins.def"
661fce82
AL
345 /* The first enum element which is based on an insn_data pattern. */
346 AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
347 AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
348 + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
5d357f26
KT
349 AARCH64_CRC32_BUILTIN_BASE,
350 AARCH64_CRC32_BUILTINS
351 AARCH64_CRC32_BUILTIN_MAX,
342be7f7 352 AARCH64_BUILTIN_MAX
43e9d192
IB
353};
354
5d357f26
KT
355#undef CRC32_BUILTIN
356#define CRC32_BUILTIN(N, M) \
357 {"__builtin_aarch64_"#N, M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},
358
359static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
360 AARCH64_CRC32_BUILTINS
361};
362
363#undef CRC32_BUILTIN
364
119103ca
JG
365static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];
366
43e9d192
IB
367#define NUM_DREG_TYPES 6
368#define NUM_QREG_TYPES 6
369
f9d53c27
TB
370/* Internal scalar builtin types. These types are used to support
371 neon intrinsic builtins. They are _not_ user-visible types. Therefore
372 the mangling for these types are implementation defined. */
373const char *aarch64_scalar_builtin_types[] = {
374 "__builtin_aarch64_simd_qi",
375 "__builtin_aarch64_simd_hi",
376 "__builtin_aarch64_simd_si",
377 "__builtin_aarch64_simd_sf",
378 "__builtin_aarch64_simd_di",
379 "__builtin_aarch64_simd_df",
380 "__builtin_aarch64_simd_poly8",
381 "__builtin_aarch64_simd_poly16",
382 "__builtin_aarch64_simd_poly64",
383 "__builtin_aarch64_simd_poly128",
384 "__builtin_aarch64_simd_ti",
385 "__builtin_aarch64_simd_uqi",
386 "__builtin_aarch64_simd_uhi",
387 "__builtin_aarch64_simd_usi",
388 "__builtin_aarch64_simd_udi",
389 "__builtin_aarch64_simd_ei",
390 "__builtin_aarch64_simd_oi",
391 "__builtin_aarch64_simd_ci",
392 "__builtin_aarch64_simd_xi",
393 NULL
394};
b5828b4b 395
f9d53c27
TB
396#define ENTRY(E, M, Q, G) E,
397enum aarch64_simd_type
398{
399#include "aarch64-simd-builtin-types.def"
400 ARM_NEON_H_TYPES_LAST
401};
402#undef ENTRY
b5828b4b 403
f9d53c27 404struct aarch64_simd_type_info
b5828b4b 405{
f9d53c27
TB
406 enum aarch64_simd_type type;
407
408 /* Internal type name. */
409 const char *name;
410
411 /* Internal type name(mangled). The mangled names conform to the
412 AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
413 Appendix A). To qualify for emission with the mangled names defined in
414 that document, a vector type must not only be of the correct mode but also
415 be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t); these
416 types are registered by aarch64_init_simd_builtin_types (). In other
417 words, vector types defined in other ways e.g. via vector_size attribute
418 will get default mangled names. */
419 const char *mangle;
420
421 /* Internal type. */
422 tree itype;
423
424 /* Element type. */
b5828b4b
JG
425 tree eltype;
426
f9d53c27
TB
427 /* Machine mode the internal type maps to. */
428 enum machine_mode mode;
b5828b4b 429
f9d53c27
TB
430 /* Qualifiers. */
431 enum aarch64_type_qualifiers q;
432};
433
434#define ENTRY(E, M, Q, G) \
435 {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
436static struct aarch64_simd_type_info aarch64_simd_types [] = {
437#include "aarch64-simd-builtin-types.def"
438};
439#undef ENTRY
440
c2ec330c
AL
441/* This type is not SIMD-specific; it is the user-visible __fp16. */
442static tree aarch64_fp16_type_node = NULL_TREE;
443
f9d53c27
TB
444static tree aarch64_simd_intOI_type_node = NULL_TREE;
445static tree aarch64_simd_intEI_type_node = NULL_TREE;
446static tree aarch64_simd_intCI_type_node = NULL_TREE;
447static tree aarch64_simd_intXI_type_node = NULL_TREE;
448
449static const char *
450aarch64_mangle_builtin_scalar_type (const_tree type)
451{
452 int i = 0;
453
454 while (aarch64_scalar_builtin_types[i] != NULL)
b5828b4b 455 {
f9d53c27
TB
456 const char *name = aarch64_scalar_builtin_types[i];
457
458 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
459 && DECL_NAME (TYPE_NAME (type))
460 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
461 return aarch64_scalar_builtin_types[i];
462 i++;
463 }
464 return NULL;
b5828b4b
JG
465}
466
f9d53c27
TB
467static const char *
468aarch64_mangle_builtin_vector_type (const_tree type)
b5828b4b 469{
f9d53c27
TB
470 int i;
471 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
472
473 for (i = 0; i < nelts; i++)
474 if (aarch64_simd_types[i].mode == TYPE_MODE (type)
475 && TYPE_NAME (type)
476 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
477 && DECL_NAME (TYPE_NAME (type))
478 && !strcmp
479 (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
480 aarch64_simd_types[i].name))
481 return aarch64_simd_types[i].mangle;
482
483 return NULL;
6db1ec94
JG
484}
485
f9d53c27
TB
486const char *
487aarch64_mangle_builtin_type (const_tree type)
6db1ec94 488{
f9d53c27
TB
489 const char *mangle;
490 /* Walk through all the AArch64 builtins types tables to filter out the
491 incoming type. */
492 if ((mangle = aarch64_mangle_builtin_vector_type (type))
493 || (mangle = aarch64_mangle_builtin_scalar_type (type)))
494 return mangle;
495
496 return NULL;
6db1ec94
JG
497}
498
f9d53c27
TB
499static tree
500aarch64_simd_builtin_std_type (enum machine_mode mode,
501 enum aarch64_type_qualifiers q)
6db1ec94 502{
f9d53c27
TB
503#define QUAL_TYPE(M) \
504 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
505 switch (mode)
506 {
507 case QImode:
508 return QUAL_TYPE (QI);
509 case HImode:
510 return QUAL_TYPE (HI);
511 case SImode:
512 return QUAL_TYPE (SI);
513 case DImode:
514 return QUAL_TYPE (DI);
515 case TImode:
516 return QUAL_TYPE (TI);
517 case OImode:
518 return aarch64_simd_intOI_type_node;
519 case EImode:
520 return aarch64_simd_intEI_type_node;
521 case CImode:
522 return aarch64_simd_intCI_type_node;
523 case XImode:
524 return aarch64_simd_intXI_type_node;
71a11456
AL
525 case HFmode:
526 return aarch64_fp16_type_node;
f9d53c27
TB
527 case SFmode:
528 return float_type_node;
529 case DFmode:
530 return double_type_node;
531 default:
532 gcc_unreachable ();
533 }
534#undef QUAL_TYPE
6db1ec94
JG
535}
536
f9d53c27
TB
537static tree
538aarch64_lookup_simd_builtin_type (enum machine_mode mode,
539 enum aarch64_type_qualifiers q)
6db1ec94 540{
f9d53c27
TB
541 int i;
542 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
543
544 /* Non-poly scalar modes map to standard types not in the table. */
545 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
546 return aarch64_simd_builtin_std_type (mode, q);
547
548 for (i = 0; i < nelts; i++)
549 if (aarch64_simd_types[i].mode == mode
550 && aarch64_simd_types[i].q == q)
551 return aarch64_simd_types[i].itype;
552
553 return NULL_TREE;
b5828b4b
JG
554}
555
f9d53c27
TB
556static tree
557aarch64_simd_builtin_type (enum machine_mode mode,
558 bool unsigned_p, bool poly_p)
559{
560 if (poly_p)
561 return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
562 else if (unsigned_p)
563 return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
564 else
565 return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
566}
567
af55e82d 568static void
f9d53c27 569aarch64_init_simd_builtin_types (void)
43e9d192 570{
f9d53c27
TB
571 int i;
572 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
573 tree tdecl;
574
575 /* Init all the element types built by the front-end. */
576 aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
577 aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
578 aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
579 aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
580 aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
581 aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
582 aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
583 aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
584 aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
585 aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
586 aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
587 aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
588 aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
589 aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
590 aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
591 aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;
592
593 /* Poly types are a world of their own. */
594 aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
595 build_distinct_type_copy (unsigned_intQI_type_node);
596 aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
597 build_distinct_type_copy (unsigned_intHI_type_node);
598 aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
599 build_distinct_type_copy (unsigned_intDI_type_node);
600 aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
601 build_distinct_type_copy (unsigned_intTI_type_node);
602 /* Init poly vector element types with scalar poly types. */
603 aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
604 aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
605 aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
606 aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
607 aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
608 aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;
609
610 /* Continue with standard types. */
71a11456
AL
611 aarch64_simd_types[Float16x4_t].eltype = aarch64_fp16_type_node;
612 aarch64_simd_types[Float16x8_t].eltype = aarch64_fp16_type_node;
f9d53c27
TB
613 aarch64_simd_types[Float32x2_t].eltype = float_type_node;
614 aarch64_simd_types[Float32x4_t].eltype = float_type_node;
615 aarch64_simd_types[Float64x1_t].eltype = double_type_node;
616 aarch64_simd_types[Float64x2_t].eltype = double_type_node;
617
618 for (i = 0; i < nelts; i++)
619 {
620 tree eltype = aarch64_simd_types[i].eltype;
621 enum machine_mode mode = aarch64_simd_types[i].mode;
622
623 if (aarch64_simd_types[i].itype == NULL)
624 aarch64_simd_types[i].itype =
625 build_distinct_type_copy
626 (build_vector_type (eltype, GET_MODE_NUNITS (mode)));
627
628 tdecl = add_builtin_type (aarch64_simd_types[i].name,
629 aarch64_simd_types[i].itype);
630 TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
631 SET_TYPE_STRUCTURAL_EQUALITY (aarch64_simd_types[i].itype);
632 }
43e9d192 633
f9d53c27
TB
634#define AARCH64_BUILD_SIGNED_TYPE(mode) \
635 make_signed_type (GET_MODE_PRECISION (mode));
636 aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
637 aarch64_simd_intEI_type_node = AARCH64_BUILD_SIGNED_TYPE (EImode);
638 aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
639 aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
640#undef AARCH64_BUILD_SIGNED_TYPE
641
642 tdecl = add_builtin_type
643 ("__builtin_aarch64_simd_ei" , aarch64_simd_intEI_type_node);
644 TYPE_NAME (aarch64_simd_intEI_type_node) = tdecl;
645 tdecl = add_builtin_type
646 ("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
647 TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
648 tdecl = add_builtin_type
649 ("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
650 TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
651 tdecl = add_builtin_type
652 ("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
653 TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
654}
655
656static void
657aarch64_init_simd_builtin_scalar_types (void)
658{
659 /* Define typedefs for all the standard scalar types. */
660 (*lang_hooks.types.register_builtin_type) (intQI_type_node,
43e9d192 661 "__builtin_aarch64_simd_qi");
f9d53c27 662 (*lang_hooks.types.register_builtin_type) (intHI_type_node,
43e9d192 663 "__builtin_aarch64_simd_hi");
f9d53c27 664 (*lang_hooks.types.register_builtin_type) (intSI_type_node,
43e9d192 665 "__builtin_aarch64_simd_si");
f9d53c27 666 (*lang_hooks.types.register_builtin_type) (float_type_node,
43e9d192 667 "__builtin_aarch64_simd_sf");
f9d53c27 668 (*lang_hooks.types.register_builtin_type) (intDI_type_node,
43e9d192 669 "__builtin_aarch64_simd_di");
f9d53c27 670 (*lang_hooks.types.register_builtin_type) (double_type_node,
43e9d192 671 "__builtin_aarch64_simd_df");
f9d53c27 672 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
43e9d192 673 "__builtin_aarch64_simd_poly8");
f9d53c27 674 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
43e9d192 675 "__builtin_aarch64_simd_poly16");
f9d53c27 676 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
7baa225d 677 "__builtin_aarch64_simd_poly64");
f9d53c27 678 (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
7baa225d 679 "__builtin_aarch64_simd_poly128");
f9d53c27 680 (*lang_hooks.types.register_builtin_type) (intTI_type_node,
43e9d192 681 "__builtin_aarch64_simd_ti");
b5828b4b 682 /* Unsigned integer types for various mode sizes. */
f9d53c27 683 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
b5828b4b 684 "__builtin_aarch64_simd_uqi");
f9d53c27 685 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
b5828b4b 686 "__builtin_aarch64_simd_uhi");
f9d53c27 687 (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
b5828b4b 688 "__builtin_aarch64_simd_usi");
f9d53c27 689 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
b5828b4b 690 "__builtin_aarch64_simd_udi");
f9d53c27
TB
691}
692
e95a988a
KT
693static bool aarch64_simd_builtins_initialized_p = false;
694
695void
f9d53c27
TB
696aarch64_init_simd_builtins (void)
697{
661fce82 698 unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;
f9d53c27 699
e95a988a
KT
700 if (aarch64_simd_builtins_initialized_p)
701 return;
702
703 aarch64_simd_builtins_initialized_p = true;
704
f9d53c27 705 aarch64_init_simd_builtin_types ();
43e9d192 706
f9d53c27
TB
707 /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
708 Therefore we need to preserve the old __builtin scalar types. It can be
709 removed once all the intrinsics become strongly typed using the qualifier
710 system. */
711 aarch64_init_simd_builtin_scalar_types ();
712
661fce82 713 tree lane_check_fpr = build_function_type_list (void_type_node,
9c4f25cc
AP
714 size_type_node,
715 size_type_node,
661fce82
AL
716 intSI_type_node,
717 NULL);
718 aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK] =
719 add_builtin_function ("__builtin_aarch64_im_lane_boundsi", lane_check_fpr,
720 AARCH64_SIMD_BUILTIN_LANE_CHECK, BUILT_IN_MD,
721 NULL, NULL_TREE);
722
342be7f7 723 for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
43e9d192 724 {
b5828b4b
JG
725 bool print_type_signature_p = false;
726 char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
43e9d192 727 aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
342be7f7
JG
728 char namebuf[60];
729 tree ftype = NULL;
119103ca 730 tree fndecl = NULL;
342be7f7 731
342be7f7 732 d->fcode = fcode;
43e9d192 733
b5828b4b
JG
734 /* We must track two variables here. op_num is
735 the operand number as in the RTL pattern. This is
736 required to access the mode (e.g. V4SF mode) of the
737 argument, from which the base type can be derived.
738 arg_num is an index in to the qualifiers data, which
739 gives qualifiers to the type (e.g. const unsigned).
740 The reason these two variables may differ by one is the
741 void return type. While all return types take the 0th entry
742 in the qualifiers array, there is no operand for them in the
743 RTL pattern. */
744 int op_num = insn_data[d->code].n_operands - 1;
745 int arg_num = d->qualifiers[0] & qualifier_void
746 ? op_num + 1
747 : op_num;
748 tree return_type = void_type_node, args = void_list_node;
749 tree eltype;
750
751 /* Build a function type directly from the insn_data for this
752 builtin. The build_function_type () function takes care of
753 removing duplicates for us. */
754 for (; op_num >= 0; arg_num--, op_num--)
43e9d192 755 {
ef4bddc2 756 machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
b5828b4b 757 enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];
43e9d192 758
b5828b4b
JG
759 if (qualifiers & qualifier_unsigned)
760 {
761 type_signature[arg_num] = 'u';
762 print_type_signature_p = true;
763 }
6db1ec94
JG
764 else if (qualifiers & qualifier_poly)
765 {
766 type_signature[arg_num] = 'p';
767 print_type_signature_p = true;
768 }
b5828b4b
JG
769 else
770 type_signature[arg_num] = 's';
771
772 /* Skip an internal operand for vget_{low, high}. */
773 if (qualifiers & qualifier_internal)
774 continue;
775
776 /* Some builtins have different user-facing types
777 for certain arguments, encoded in d->mode. */
778 if (qualifiers & qualifier_map_mode)
bc5e395d 779 op_mode = d->mode;
b5828b4b
JG
780
781 /* For pointers, we want a pointer to the basic type
782 of the vector. */
783 if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
784 op_mode = GET_MODE_INNER (op_mode);
785
f9d53c27
TB
786 eltype = aarch64_simd_builtin_type
787 (op_mode,
788 (qualifiers & qualifier_unsigned) != 0,
789 (qualifiers & qualifier_poly) != 0);
790 gcc_assert (eltype != NULL);
b5828b4b
JG
791
792 /* Add qualifiers. */
793 if (qualifiers & qualifier_const)
794 eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);
795
796 if (qualifiers & qualifier_pointer)
797 eltype = build_pointer_type (eltype);
798
799 /* If we have reached arg_num == 0, we are at a non-void
800 return type. Otherwise, we are still processing
801 arguments. */
802 if (arg_num == 0)
803 return_type = eltype;
804 else
805 args = tree_cons (NULL_TREE, eltype, args);
806 }
342be7f7 807
b5828b4b 808 ftype = build_function_type (return_type, args);
43e9d192 809
342be7f7 810 gcc_assert (ftype != NULL);
43e9d192 811
b5828b4b 812 if (print_type_signature_p)
bc5e395d
JG
813 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
814 d->name, type_signature);
b5828b4b 815 else
bc5e395d
JG
816 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
817 d->name);
43e9d192 818
119103ca
JG
819 fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
820 NULL, NULL_TREE);
821 aarch64_builtin_decls[fcode] = fndecl;
43e9d192
IB
822 }
823}
824
5d357f26
KT
825static void
826aarch64_init_crc32_builtins ()
827{
f9d53c27 828 tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
5d357f26
KT
829 unsigned int i = 0;
830
831 for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
832 {
833 aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
f9d53c27
TB
834 tree argtype = aarch64_simd_builtin_std_type (d->mode,
835 qualifier_unsigned);
5d357f26
KT
836 tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
837 tree fndecl = add_builtin_function (d->name, ftype, d->fcode,
838 BUILT_IN_MD, NULL, NULL_TREE);
839
840 aarch64_builtin_decls[d->fcode] = fndecl;
841 }
842}
843
342be7f7
JG
844void
845aarch64_init_builtins (void)
43e9d192 846{
aa87aced
KV
847 tree ftype_set_fpr
848 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
849 tree ftype_get_fpr
850 = build_function_type_list (unsigned_type_node, NULL);
851
852 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
853 = add_builtin_function ("__builtin_aarch64_get_fpcr", ftype_get_fpr,
854 AARCH64_BUILTIN_GET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
855 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
856 = add_builtin_function ("__builtin_aarch64_set_fpcr", ftype_set_fpr,
857 AARCH64_BUILTIN_SET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
858 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
859 = add_builtin_function ("__builtin_aarch64_get_fpsr", ftype_get_fpr,
860 AARCH64_BUILTIN_GET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
861 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
862 = add_builtin_function ("__builtin_aarch64_set_fpsr", ftype_set_fpr,
863 AARCH64_BUILTIN_SET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
864
c2ec330c
AL
865 aarch64_fp16_type_node = make_node (REAL_TYPE);
866 TYPE_PRECISION (aarch64_fp16_type_node) = 16;
867 layout_type (aarch64_fp16_type_node);
868
869 (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node, "__fp16");
870
342be7f7
JG
871 if (TARGET_SIMD)
872 aarch64_init_simd_builtins ();
e95a988a
KT
873
874 aarch64_init_crc32_builtins ();
43e9d192
IB
875}
876
119103ca
JG
877tree
878aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
879{
880 if (code >= AARCH64_BUILTIN_MAX)
881 return error_mark_node;
882
883 return aarch64_builtin_decls[code];
884}
885
43e9d192
IB
/* Classification of a SIMD builtin operand, driving how
   aarch64_simd_expand_args prepares it.  Enumerator order is part of
   the contract with that function and must not change.  */
typedef enum
{
  SIMD_ARG_COPY_TO_REG,			/* Force the operand into a register.  */
  SIMD_ARG_CONSTANT,			/* Must satisfy the predicate as-is.  */
  SIMD_ARG_LANE_INDEX,			/* Lane number into the previous operand.  */
  SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX, /* Lane number for ld/st-lane forms.  */
  SIMD_ARG_STOP				/* Terminator of the argument list.  */
} builtin_simd_arg;
894
e95a988a 895
43e9d192
IB
896static rtx
897aarch64_simd_expand_args (rtx target, int icode, int have_retval,
4d0a0237
CB
898 tree exp, builtin_simd_arg *args,
899 enum machine_mode builtin_mode)
43e9d192 900{
43e9d192 901 rtx pat;
d9e80f49
AL
902 rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand. */
903 int opc = 0;
904
905 if (have_retval)
906 {
907 machine_mode tmode = insn_data[icode].operand[0].mode;
908 if (!target
43e9d192 909 || GET_MODE (target) != tmode
d9e80f49
AL
910 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
911 target = gen_reg_rtx (tmode);
912 op[opc++] = target;
913 }
43e9d192 914
43e9d192
IB
915 for (;;)
916 {
d9e80f49 917 builtin_simd_arg thisarg = args[opc - have_retval];
43e9d192
IB
918
919 if (thisarg == SIMD_ARG_STOP)
920 break;
921 else
922 {
d9e80f49
AL
923 tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
924 enum machine_mode mode = insn_data[icode].operand[opc].mode;
925 op[opc] = expand_normal (arg);
43e9d192
IB
926
927 switch (thisarg)
928 {
929 case SIMD_ARG_COPY_TO_REG:
d9e80f49
AL
930 if (POINTER_TYPE_P (TREE_TYPE (arg)))
931 op[opc] = convert_memory_address (Pmode, op[opc]);
932 /*gcc_assert (GET_MODE (op[opc]) == mode); */
933 if (!(*insn_data[icode].operand[opc].predicate)
934 (op[opc], mode))
935 op[opc] = copy_to_mode_reg (mode, op[opc]);
43e9d192
IB
936 break;
937
4d0a0237
CB
938 case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
939 gcc_assert (opc > 1);
940 if (CONST_INT_P (op[opc]))
941 {
942 aarch64_simd_lane_bounds (op[opc], 0,
943 GET_MODE_NUNITS (builtin_mode),
944 exp);
945 /* Keep to GCC-vector-extension lane indices in the RTL. */
946 op[opc] =
947 GEN_INT (ENDIAN_LANE_N (builtin_mode, INTVAL (op[opc])));
948 }
949 goto constant_arg;
950
2a49c16d
AL
951 case SIMD_ARG_LANE_INDEX:
952 /* Must be a previous operand into which this is an index. */
d9e80f49
AL
953 gcc_assert (opc > 0);
954 if (CONST_INT_P (op[opc]))
2a49c16d 955 {
d9e80f49
AL
956 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
957 aarch64_simd_lane_bounds (op[opc],
46ed6024 958 0, GET_MODE_NUNITS (vmode), exp);
2a49c16d 959 /* Keep to GCC-vector-extension lane indices in the RTL. */
d9e80f49 960 op[opc] = GEN_INT (ENDIAN_LANE_N (vmode, INTVAL (op[opc])));
2a49c16d
AL
961 }
962 /* Fall through - if the lane index isn't a constant then
963 the next case will error. */
43e9d192 964 case SIMD_ARG_CONSTANT:
4d0a0237 965constant_arg:
d9e80f49
AL
966 if (!(*insn_data[icode].operand[opc].predicate)
967 (op[opc], mode))
d5a29419 968 {
fca051af
AL
969 error ("%Kargument %d must be a constant immediate",
970 exp, opc + 1 - have_retval);
d5a29419
KT
971 return const0_rtx;
972 }
43e9d192
IB
973 break;
974
975 case SIMD_ARG_STOP:
976 gcc_unreachable ();
977 }
978
d9e80f49 979 opc++;
43e9d192
IB
980 }
981 }
982
d9e80f49
AL
983 switch (opc)
984 {
985 case 1:
986 pat = GEN_FCN (icode) (op[0]);
987 break;
43e9d192 988
d9e80f49
AL
989 case 2:
990 pat = GEN_FCN (icode) (op[0], op[1]);
991 break;
43e9d192 992
d9e80f49
AL
993 case 3:
994 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
995 break;
43e9d192 996
d9e80f49
AL
997 case 4:
998 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
999 break;
43e9d192 1000
d9e80f49
AL
1001 case 5:
1002 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
1003 break;
43e9d192 1004
d9e80f49
AL
1005 case 6:
1006 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
1007 break;
43e9d192 1008
d9e80f49
AL
1009 default:
1010 gcc_unreachable ();
1011 }
43e9d192
IB
1012
1013 if (!pat)
d5a29419 1014 return NULL_RTX;
43e9d192
IB
1015
1016 emit_insn (pat);
1017
1018 return target;
1019}
1020
1021/* Expand an AArch64 AdvSIMD builtin(intrinsic). */
1022rtx
1023aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
1024{
661fce82
AL
1025 if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
1026 {
9c4f25cc
AP
1027 rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
1028 rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
1029 if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
1030 && UINTVAL (elementsize) != 0
1031 && UINTVAL (totalsize) != 0)
1032 {
1033 rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
1034 if (CONST_INT_P (lane_idx))
1035 aarch64_simd_lane_bounds (lane_idx, 0,
1036 UINTVAL (totalsize)
1037 / UINTVAL (elementsize),
1038 exp);
1039 else
1040 error ("%Klane index must be a constant immediate", exp);
1041 }
661fce82 1042 else
9c4f25cc 1043 error ("%Ktotal size and element size must be a non-zero constant immediate", exp);
661fce82
AL
1044 /* Don't generate any RTL. */
1045 return const0_rtx;
1046 }
342be7f7 1047 aarch64_simd_builtin_datum *d =
661fce82 1048 &aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
342be7f7 1049 enum insn_code icode = d->code;
0ff2bf46 1050 builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
b5828b4b
JG
1051 int num_args = insn_data[d->code].n_operands;
1052 int is_void = 0;
1053 int k;
43e9d192 1054
b5828b4b 1055 is_void = !!(d->qualifiers[0] & qualifier_void);
43e9d192 1056
b5828b4b
JG
1057 num_args += is_void;
1058
1059 for (k = 1; k < num_args; k++)
1060 {
1061 /* We have four arrays of data, each indexed in a different fashion.
1062 qualifiers - element 0 always describes the function return type.
1063 operands - element 0 is either the operand for return value (if
1064 the function has a non-void return type) or the operand for the
1065 first argument.
1066 expr_args - element 0 always holds the first argument.
1067 args - element 0 is always used for the return type. */
1068 int qualifiers_k = k;
1069 int operands_k = k - is_void;
1070 int expr_args_k = k - 1;
1071
2a49c16d
AL
1072 if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
1073 args[k] = SIMD_ARG_LANE_INDEX;
4d0a0237
CB
1074 else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
1075 args[k] = SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX;
2a49c16d 1076 else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
b5828b4b
JG
1077 args[k] = SIMD_ARG_CONSTANT;
1078 else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
1079 {
1080 rtx arg
1081 = expand_normal (CALL_EXPR_ARG (exp,
1082 (expr_args_k)));
1083 /* Handle constants only if the predicate allows it. */
1084 bool op_const_int_p =
1085 (CONST_INT_P (arg)
1086 && (*insn_data[icode].operand[operands_k].predicate)
1087 (arg, insn_data[icode].operand[operands_k].mode));
1088 args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
1089 }
1090 else
1091 args[k] = SIMD_ARG_COPY_TO_REG;
43e9d192 1092
43e9d192 1093 }
b5828b4b
JG
1094 args[k] = SIMD_ARG_STOP;
1095
1096 /* The interface to aarch64_simd_expand_args expects a 0 if
1097 the function is void, and a 1 if it is not. */
1098 return aarch64_simd_expand_args
4d0a0237 1099 (target, icode, !is_void, exp, &args[1], d->mode);
43e9d192 1100}
342be7f7 1101
5d357f26
KT
1102rtx
1103aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
1104{
1105 rtx pat;
1106 aarch64_crc_builtin_datum *d
1107 = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
1108 enum insn_code icode = d->icode;
1109 tree arg0 = CALL_EXPR_ARG (exp, 0);
1110 tree arg1 = CALL_EXPR_ARG (exp, 1);
1111 rtx op0 = expand_normal (arg0);
1112 rtx op1 = expand_normal (arg1);
ef4bddc2
RS
1113 machine_mode tmode = insn_data[icode].operand[0].mode;
1114 machine_mode mode0 = insn_data[icode].operand[1].mode;
1115 machine_mode mode1 = insn_data[icode].operand[2].mode;
5d357f26
KT
1116
1117 if (! target
1118 || GET_MODE (target) != tmode
1119 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
1120 target = gen_reg_rtx (tmode);
1121
1122 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
1123 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
1124
1125 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
1126 op0 = copy_to_mode_reg (mode0, op0);
1127 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
1128 op1 = copy_to_mode_reg (mode1, op1);
1129
1130 pat = GEN_FCN (icode) (target, op0, op1);
d5a29419
KT
1131 if (!pat)
1132 return NULL_RTX;
1133
5d357f26
KT
1134 emit_insn (pat);
1135 return target;
1136}
1137
342be7f7
JG
1138/* Expand an expression EXP that calls a built-in function,
1139 with result going to TARGET if that's convenient. */
1140rtx
1141aarch64_expand_builtin (tree exp,
1142 rtx target,
1143 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 1144 machine_mode mode ATTRIBUTE_UNUSED,
342be7f7
JG
1145 int ignore ATTRIBUTE_UNUSED)
1146{
1147 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
1148 int fcode = DECL_FUNCTION_CODE (fndecl);
aa87aced
KV
1149 int icode;
1150 rtx pat, op0;
1151 tree arg0;
1152
1153 switch (fcode)
1154 {
1155 case AARCH64_BUILTIN_GET_FPCR:
1156 case AARCH64_BUILTIN_SET_FPCR:
1157 case AARCH64_BUILTIN_GET_FPSR:
1158 case AARCH64_BUILTIN_SET_FPSR:
1159 if ((fcode == AARCH64_BUILTIN_GET_FPCR)
1160 || (fcode == AARCH64_BUILTIN_GET_FPSR))
1161 {
1162 icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
1163 CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
1164 target = gen_reg_rtx (SImode);
1165 pat = GEN_FCN (icode) (target);
1166 }
1167 else
1168 {
1169 target = NULL_RTX;
1170 icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
1171 CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
1172 arg0 = CALL_EXPR_ARG (exp, 0);
1173 op0 = expand_normal (arg0);
1174 pat = GEN_FCN (icode) (op0);
1175 }
1176 emit_insn (pat);
1177 return target;
1178 }
342be7f7 1179
5d357f26 1180 if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
342be7f7 1181 return aarch64_simd_expand_builtin (fcode, exp, target);
5d357f26
KT
1182 else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
1183 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7 1184
d5a29419 1185 gcc_unreachable ();
342be7f7 1186}
42fc9a7f
JG
1187
1188tree
1189aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
1190{
ef4bddc2 1191 machine_mode in_mode, out_mode;
42fc9a7f
JG
1192 int in_n, out_n;
1193
1194 if (TREE_CODE (type_out) != VECTOR_TYPE
1195 || TREE_CODE (type_in) != VECTOR_TYPE)
1196 return NULL_TREE;
1197
1198 out_mode = TYPE_MODE (TREE_TYPE (type_out));
1199 out_n = TYPE_VECTOR_SUBPARTS (type_out);
1200 in_mode = TYPE_MODE (TREE_TYPE (type_in));
1201 in_n = TYPE_VECTOR_SUBPARTS (type_in);
1202
1203#undef AARCH64_CHECK_BUILTIN_MODE
1204#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
1205#define AARCH64_FIND_FRINT_VARIANT(N) \
1206 (AARCH64_CHECK_BUILTIN_MODE (2, D) \
e993fea1 1207 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
42fc9a7f 1208 : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
e993fea1 1209 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
42fc9a7f 1210 : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
e993fea1 1211 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
42fc9a7f
JG
1212 : NULL_TREE)))
1213 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1214 {
1215 enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
1216 switch (fn)
1217 {
1218#undef AARCH64_CHECK_BUILTIN_MODE
1219#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1220 (out_mode == N##Fmode && out_n == C \
1221 && in_mode == N##Fmode && in_n == C)
1222 case BUILT_IN_FLOOR:
1223 case BUILT_IN_FLOORF:
0659ce6f 1224 return AARCH64_FIND_FRINT_VARIANT (floor);
42fc9a7f
JG
1225 case BUILT_IN_CEIL:
1226 case BUILT_IN_CEILF:
0659ce6f 1227 return AARCH64_FIND_FRINT_VARIANT (ceil);
42fc9a7f
JG
1228 case BUILT_IN_TRUNC:
1229 case BUILT_IN_TRUNCF:
0659ce6f 1230 return AARCH64_FIND_FRINT_VARIANT (btrunc);
42fc9a7f
JG
1231 case BUILT_IN_ROUND:
1232 case BUILT_IN_ROUNDF:
0659ce6f 1233 return AARCH64_FIND_FRINT_VARIANT (round);
42fc9a7f
JG
1234 case BUILT_IN_NEARBYINT:
1235 case BUILT_IN_NEARBYINTF:
0659ce6f 1236 return AARCH64_FIND_FRINT_VARIANT (nearbyint);
4dcd1054
JG
1237 case BUILT_IN_SQRT:
1238 case BUILT_IN_SQRTF:
1239 return AARCH64_FIND_FRINT_VARIANT (sqrt);
42fc9a7f 1240#undef AARCH64_CHECK_BUILTIN_MODE
b5574232
VP
1241#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1242 (out_mode == SImode && out_n == C \
1243 && in_mode == N##Imode && in_n == C)
1244 case BUILT_IN_CLZ:
1245 {
1246 if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1247 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
b5574232
VP
1248 return NULL_TREE;
1249 }
5e32e83b
JW
1250 case BUILT_IN_CTZ:
1251 {
1252 if (AARCH64_CHECK_BUILTIN_MODE (2, S))
1253 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
1254 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
1255 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
1256 return NULL_TREE;
1257 }
b5574232 1258#undef AARCH64_CHECK_BUILTIN_MODE
42fc9a7f
JG
1259#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1260 (out_mode == N##Imode && out_n == C \
1261 && in_mode == N##Fmode && in_n == C)
1262 case BUILT_IN_LFLOOR:
bf0f324e
YZ
1263 case BUILT_IN_LFLOORF:
1264 case BUILT_IN_LLFLOOR:
0386b123 1265 case BUILT_IN_IFLOORF:
ce966824 1266 {
e993fea1 1267 enum aarch64_builtins builtin;
ce966824 1268 if (AARCH64_CHECK_BUILTIN_MODE (2, D))
e993fea1 1269 builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
ce966824 1270 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1271 builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
ce966824 1272 else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
e993fea1
JG
1273 builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
1274 else
1275 return NULL_TREE;
1276
1277 return aarch64_builtin_decls[builtin];
ce966824 1278 }
42fc9a7f 1279 case BUILT_IN_LCEIL:
bf0f324e
YZ
1280 case BUILT_IN_LCEILF:
1281 case BUILT_IN_LLCEIL:
0386b123 1282 case BUILT_IN_ICEILF:
ce966824 1283 {
e993fea1 1284 enum aarch64_builtins builtin;
ce966824 1285 if (AARCH64_CHECK_BUILTIN_MODE (2, D))
e993fea1 1286 builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
ce966824 1287 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1288 builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
ce966824 1289 else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
e993fea1
JG
1290 builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
1291 else
1292 return NULL_TREE;
1293
1294 return aarch64_builtin_decls[builtin];
ce966824 1295 }
0386b123
JG
1296 case BUILT_IN_LROUND:
1297 case BUILT_IN_IROUNDF:
1298 {
e993fea1 1299 enum aarch64_builtins builtin;
0386b123 1300 if (AARCH64_CHECK_BUILTIN_MODE (2, D))
e993fea1 1301 builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
0386b123 1302 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
e993fea1 1303 builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
0386b123 1304 else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
e993fea1
JG
1305 builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
1306 else
1307 return NULL_TREE;
1308
1309 return aarch64_builtin_decls[builtin];
0386b123 1310 }
c7f28cd5
KT
1311 case BUILT_IN_BSWAP16:
1312#undef AARCH64_CHECK_BUILTIN_MODE
1313#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1314 (out_mode == N##Imode && out_n == C \
1315 && in_mode == N##Imode && in_n == C)
1316 if (AARCH64_CHECK_BUILTIN_MODE (4, H))
1317 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
1318 else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
1319 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
1320 else
1321 return NULL_TREE;
1322 case BUILT_IN_BSWAP32:
1323 if (AARCH64_CHECK_BUILTIN_MODE (2, S))
1324 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
1325 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
1326 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
1327 else
1328 return NULL_TREE;
1329 case BUILT_IN_BSWAP64:
1330 if (AARCH64_CHECK_BUILTIN_MODE (2, D))
1331 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
1332 else
1333 return NULL_TREE;
42fc9a7f
JG
1334 default:
1335 return NULL_TREE;
1336 }
1337 }
1338
1339 return NULL_TREE;
1340}
0ac198d3
JG
1341
1342#undef VAR1
1343#define VAR1(T, N, MAP, A) \
e993fea1 1344 case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 1345
9697e620
JG
1346tree
1347aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
1348 bool ignore ATTRIBUTE_UNUSED)
1349{
1350 int fcode = DECL_FUNCTION_CODE (fndecl);
1351 tree type = TREE_TYPE (TREE_TYPE (fndecl));
1352
1353 switch (fcode)
1354 {
8f905d69 1355 BUILTIN_VDQF (UNOP, abs, 2)
9697e620
JG
1356 return fold_build1 (ABS_EXPR, type, args[0]);
1357 break;
1709ff9b
JG
1358 VAR1 (UNOP, floatv2si, 2, v2sf)
1359 VAR1 (UNOP, floatv4si, 2, v4sf)
1360 VAR1 (UNOP, floatv2di, 2, v2df)
1361 return fold_build1 (FLOAT_EXPR, type, args[0]);
9697e620
JG
1362 default:
1363 break;
1364 }
1365
1366 return NULL_TREE;
1367}
1368
0ac198d3
JG
1369bool
1370aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
1371{
1372 bool changed = false;
1373 gimple stmt = gsi_stmt (*gsi);
1374 tree call = gimple_call_fn (stmt);
1375 tree fndecl;
1376 gimple new_stmt = NULL;
22756ccf 1377
0ac198d3
JG
1378 if (call)
1379 {
1380 fndecl = gimple_call_fndecl (stmt);
1381 if (fndecl)
1382 {
1383 int fcode = DECL_FUNCTION_CODE (fndecl);
1384 int nargs = gimple_call_num_args (stmt);
1385 tree *args = (nargs > 0
1386 ? gimple_call_arg_ptr (stmt, 0)
1387 : &error_mark_node);
1388
fc72cba7
AL
1389 /* We use gimple's REDUC_(PLUS|MIN|MAX)_EXPRs for float, signed int
1390 and unsigned int; it will distinguish according to the types of
1391 the arguments to the __builtin. */
0ac198d3
JG
1392 switch (fcode)
1393 {
fc72cba7 1394 BUILTIN_VALL (UNOP, reduc_plus_scal_, 10)
0d3d8152
JJ
1395 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1396 REDUC_PLUS_EXPR, args[0]);
0ac198d3 1397 break;
fc72cba7
AL
1398 BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10)
1399 BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10)
0d3d8152
JJ
1400 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1401 REDUC_MAX_EXPR, args[0]);
1598945b 1402 break;
fc72cba7
AL
1403 BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10)
1404 BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10)
0d3d8152
JJ
1405 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
1406 REDUC_MIN_EXPR, args[0]);
1598945b
JG
1407 break;
1408
0ac198d3
JG
1409 default:
1410 break;
1411 }
1412 }
1413 }
1414
1415 if (new_stmt)
1416 {
1417 gsi_replace (gsi, new_stmt, true);
1418 changed = true;
1419 }
1420
1421 return changed;
1422}
1423
aa87aced
KV
1424void
1425aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
1426{
1427 const unsigned AARCH64_FE_INVALID = 1;
1428 const unsigned AARCH64_FE_DIVBYZERO = 2;
1429 const unsigned AARCH64_FE_OVERFLOW = 4;
1430 const unsigned AARCH64_FE_UNDERFLOW = 8;
1431 const unsigned AARCH64_FE_INEXACT = 16;
1432 const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
1433 | AARCH64_FE_DIVBYZERO
1434 | AARCH64_FE_OVERFLOW
1435 | AARCH64_FE_UNDERFLOW
1436 | AARCH64_FE_INEXACT);
1437 const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
1438 tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
1439 tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
1440 tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
1441 tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;
1442
1443 /* Generate the equivalence of :
1444 unsigned int fenv_cr;
1445 fenv_cr = __builtin_aarch64_get_fpcr ();
1446
1447 unsigned int fenv_sr;
1448 fenv_sr = __builtin_aarch64_get_fpsr ();
1449
1450 Now set all exceptions to non-stop
1451 unsigned int mask_cr
1452 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
1453 unsigned int masked_cr;
1454 masked_cr = fenv_cr & mask_cr;
1455
1456 And clear all exception flags
1457 unsigned int maske_sr = ~AARCH64_FE_ALL_EXCEPT;
1458 unsigned int masked_cr;
1459 masked_sr = fenv_sr & mask_sr;
1460
1461 __builtin_aarch64_set_cr (masked_cr);
1462 __builtin_aarch64_set_sr (masked_sr); */
1463
9b489f31
JJ
1464 fenv_cr = create_tmp_var (unsigned_type_node);
1465 fenv_sr = create_tmp_var (unsigned_type_node);
aa87aced
KV
1466
1467 get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
1468 set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
1469 get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
1470 set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];
1471
1472 mask_cr = build_int_cst (unsigned_type_node,
1473 ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
1474 mask_sr = build_int_cst (unsigned_type_node,
1475 ~(AARCH64_FE_ALL_EXCEPT));
1476
1477 ld_fenv_cr = build2 (MODIFY_EXPR, unsigned_type_node,
1478 fenv_cr, build_call_expr (get_fpcr, 0));
1479 ld_fenv_sr = build2 (MODIFY_EXPR, unsigned_type_node,
1480 fenv_sr, build_call_expr (get_fpsr, 0));
1481
1482 masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
1483 masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);
1484
1485 hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
1486 hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);
1487
1488 hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
1489 hold_fnclex_sr);
1490 masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
1491 masked_fenv_sr);
1492 ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);
1493
1494 *hold = build2 (COMPOUND_EXPR, void_type_node,
1495 build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
1496 hold_fnclex);
1497
1498 /* Store the value of masked_fenv to clear the exceptions:
1499 __builtin_aarch64_set_fpsr (masked_fenv_sr); */
1500
1501 *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);
1502
1503 /* Generate the equivalent of :
1504 unsigned int new_fenv_var;
1505 new_fenv_var = __builtin_aarch64_get_fpsr ();
1506
1507 __builtin_aarch64_set_fpsr (fenv_sr);
1508
1509 __atomic_feraiseexcept (new_fenv_var); */
1510
9b489f31 1511 new_fenv_var = create_tmp_var (unsigned_type_node);
aa87aced
KV
1512 reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
1513 new_fenv_var, build_call_expr (get_fpsr, 0));
1514 restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
1515 atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
1516 update_call = build_call_expr (atomic_feraiseexcept, 1,
1517 fold_convert (integer_type_node, new_fenv_var));
1518 *update = build2 (COMPOUND_EXPR, void_type_node,
1519 build2 (COMPOUND_EXPR, void_type_node,
1520 reload_fenv, restore_fnenv), update_call);
1521}
1522
1523
42fc9a7f
JG
1524#undef AARCH64_CHECK_BUILTIN_MODE
1525#undef AARCH64_FIND_FRINT_VARIANT
0ddec79f
JG
1526#undef CF0
1527#undef CF1
1528#undef CF2
1529#undef CF3
1530#undef CF4
1531#undef CF10
1532#undef VAR1
1533#undef VAR2
1534#undef VAR3
1535#undef VAR4
1536#undef VAR5
1537#undef VAR6
1538#undef VAR7
1539#undef VAR8
1540#undef VAR9
1541#undef VAR10
1542#undef VAR11
1543
3c03d39d 1544#include "gt-aarch64-builtins.h"