aarch64: Lower vcombine to GIMPLE
[thirdparty/gcc.git] / gcc / config / aarch64 / aarch64-builtins.cc
43e9d192 1/* Builtins' description for AArch64 SIMD architecture.
7adcbafe 2 Copyright (C) 2011-2022 Free Software Foundation, Inc.
43e9d192
IB
3 Contributed by ARM Ltd.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
8fcc61f8
RS
21#define IN_TARGET_CODE 1
22
43e9d192
IB
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
c7131fb2 27#include "function.h"
c7131fb2 28#include "basic-block.h"
e11c4407 29#include "rtl.h"
c7131fb2
AM
30#include "tree.h"
31#include "gimple.h"
03312cbd 32#include "ssa.h"
4d0cdd0c 33#include "memmodel.h"
e11c4407
AM
34#include "tm_p.h"
35#include "expmed.h"
36#include "optabs.h"
37#include "recog.h"
38#include "diagnostic-core.h"
40e23961 39#include "fold-const.h"
d8a2d370 40#include "stor-layout.h"
36566b39 41#include "explow.h"
43e9d192 42#include "expr.h"
43e9d192 43#include "langhooks.h"
5be5c238 44#include "gimple-iterator.h"
10766209 45#include "case-cfn-macros.h"
9d63f43b 46#include "emit-rtl.h"
31427b97
RS
47#include "stringpool.h"
48#include "attribs.h"
ad44c6a5 49#include "gimple-fold.h"
43e9d192 50
0d4a1197 51#define v8qi_UP E_V8QImode
fdcddba8 52#define v8di_UP E_V8DImode
0d4a1197
RS
53#define v4hi_UP E_V4HImode
54#define v4hf_UP E_V4HFmode
55#define v2si_UP E_V2SImode
56#define v2sf_UP E_V2SFmode
57#define v1df_UP E_V1DFmode
5ba864c5 58#define v1di_UP E_V1DImode
0d4a1197
RS
59#define di_UP E_DImode
60#define df_UP E_DFmode
61#define v16qi_UP E_V16QImode
62#define v8hi_UP E_V8HImode
63#define v8hf_UP E_V8HFmode
64#define v4si_UP E_V4SImode
65#define v4sf_UP E_V4SFmode
66#define v2di_UP E_V2DImode
67#define v2df_UP E_V2DFmode
68#define ti_UP E_TImode
69#define oi_UP E_OImode
70#define ci_UP E_CImode
71#define xi_UP E_XImode
72#define si_UP E_SImode
73#define sf_UP E_SFmode
74#define hi_UP E_HImode
75#define hf_UP E_HFmode
76#define qi_UP E_QImode
abbe1ed2
SMW
77#define bf_UP E_BFmode
78#define v4bf_UP E_V4BFmode
79#define v8bf_UP E_V8BFmode
66f206b8
JW
80#define v2x8qi_UP E_V2x8QImode
81#define v2x4hi_UP E_V2x4HImode
82#define v2x4hf_UP E_V2x4HFmode
83#define v2x4bf_UP E_V2x4BFmode
84#define v2x2si_UP E_V2x2SImode
85#define v2x2sf_UP E_V2x2SFmode
86#define v2x1di_UP E_V2x1DImode
87#define v2x1df_UP E_V2x1DFmode
88#define v2x16qi_UP E_V2x16QImode
89#define v2x8hi_UP E_V2x8HImode
90#define v2x8hf_UP E_V2x8HFmode
91#define v2x8bf_UP E_V2x8BFmode
92#define v2x4si_UP E_V2x4SImode
93#define v2x4sf_UP E_V2x4SFmode
94#define v2x2di_UP E_V2x2DImode
95#define v2x2df_UP E_V2x2DFmode
96#define v3x8qi_UP E_V3x8QImode
97#define v3x4hi_UP E_V3x4HImode
98#define v3x4hf_UP E_V3x4HFmode
99#define v3x4bf_UP E_V3x4BFmode
100#define v3x2si_UP E_V3x2SImode
101#define v3x2sf_UP E_V3x2SFmode
102#define v3x1di_UP E_V3x1DImode
103#define v3x1df_UP E_V3x1DFmode
104#define v3x16qi_UP E_V3x16QImode
105#define v3x8hi_UP E_V3x8HImode
106#define v3x8hf_UP E_V3x8HFmode
107#define v3x8bf_UP E_V3x8BFmode
108#define v3x4si_UP E_V3x4SImode
109#define v3x4sf_UP E_V3x4SFmode
110#define v3x2di_UP E_V3x2DImode
111#define v3x2df_UP E_V3x2DFmode
112#define v4x8qi_UP E_V4x8QImode
113#define v4x4hi_UP E_V4x4HImode
114#define v4x4hf_UP E_V4x4HFmode
115#define v4x4bf_UP E_V4x4BFmode
116#define v4x2si_UP E_V4x2SImode
117#define v4x2sf_UP E_V4x2SFmode
118#define v4x1di_UP E_V4x1DImode
119#define v4x1df_UP E_V4x1DFmode
120#define v4x16qi_UP E_V4x16QImode
121#define v4x8hi_UP E_V4x8HImode
122#define v4x8hf_UP E_V4x8HFmode
123#define v4x8bf_UP E_V4x8BFmode
124#define v4x4si_UP E_V4x4SImode
125#define v4x4sf_UP E_V4x4SFmode
126#define v4x2di_UP E_V4x2DImode
127#define v4x2df_UP E_V4x2DFmode
43e9d192
IB
128#define UP(X) X##_UP
129
b5828b4b
JG
130#define SIMD_MAX_BUILTIN_ARGS 5
131
132enum aarch64_type_qualifiers
43e9d192 133{
b5828b4b
JG
134 /* T foo. */
135 qualifier_none = 0x0,
136 /* unsigned T foo. */
137 qualifier_unsigned = 0x1, /* 1 << 0 */
138 /* const T foo. */
139 qualifier_const = 0x2, /* 1 << 1 */
140 /* T *foo. */
141 qualifier_pointer = 0x4, /* 1 << 2 */
b5828b4b
JG
142 /* Used when expanding arguments if an operand could
143 be an immediate. */
144 qualifier_immediate = 0x8, /* 1 << 3 */
145 qualifier_maybe_immediate = 0x10, /* 1 << 4 */
146 /* void foo (...). */
147 qualifier_void = 0x20, /* 1 << 5 */
91259dd8 148 /* 1 << 6 is now unused. */
b5828b4b
JG
149 /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
150 rather than using the type of the operand. */
151 qualifier_map_mode = 0x80, /* 1 << 7 */
152 /* qualifier_pointer | qualifier_map_mode */
153 qualifier_pointer_map_mode = 0x84,
e625e715 154 /* qualifier_const | qualifier_pointer | qualifier_map_mode */
6db1ec94
JG
155 qualifier_const_pointer_map_mode = 0x86,
156 /* Polynomial types. */
2a49c16d
AL
157 qualifier_poly = 0x100,
158 /* Lane indices - must be in range, and flipped for bigendian. */
4d0a0237
CB
159 qualifier_lane_index = 0x200,
160 /* Lane indices for single lane structure loads and stores. */
9d63f43b
TC
161 qualifier_struct_load_store_lane_index = 0x400,
 162 /* Lane indices selected in pairs - must be in range, and flipped for
163 bigendian. */
164 qualifier_lane_pair_index = 0x800,
8c197c85
SMW
 165 /* Lane indices selected in quadtuplets - must be in range, and flipped for
166 bigendian. */
167 qualifier_lane_quadtup_index = 0x1000,
b5828b4b 168};
43e9d192 169
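/* Illustrative sketch, not part of the original file: the composite
   qualifiers above are plain bitwise ORs of the single-bit values, so a
   check along these lines (assuming the enum is in scope) would hold.  */
static_assert ((qualifier_const | qualifier_pointer | qualifier_map_mode)
	       == qualifier_const_pointer_map_mode,
	       "0x2 | 0x4 | 0x80 == 0x86");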
bf592b2f 170/* Flags that describe what a function might do. */
171const unsigned int FLAG_NONE = 0U;
172const unsigned int FLAG_READ_FPCR = 1U << 0;
173const unsigned int FLAG_RAISE_FP_EXCEPTIONS = 1U << 1;
174const unsigned int FLAG_READ_MEMORY = 1U << 2;
175const unsigned int FLAG_PREFETCH_MEMORY = 1U << 3;
176const unsigned int FLAG_WRITE_MEMORY = 1U << 4;
177
35ffd4d1 178/* Not all FP intrinsics raise FP exceptions or read the FPCR register;
 179 use this flag to suppress the automatic addition of FLAG_FP. */
180const unsigned int FLAG_AUTO_FP = 1U << 5;
181
bf592b2f 182const unsigned int FLAG_FP = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS;
183const unsigned int FLAG_ALL = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS
184 | FLAG_READ_MEMORY | FLAG_PREFETCH_MEMORY | FLAG_WRITE_MEMORY;
2d5aad69 185const unsigned int FLAG_STORE = FLAG_WRITE_MEMORY | FLAG_AUTO_FP;
e8062ad4 186const unsigned int FLAG_LOAD = FLAG_READ_MEMORY | FLAG_AUTO_FP;
bf592b2f 187
43e9d192
IB
188typedef struct
189{
190 const char *name;
ef4bddc2 191 machine_mode mode;
342be7f7
JG
192 const enum insn_code code;
193 unsigned int fcode;
b5828b4b 194 enum aarch64_type_qualifiers *qualifiers;
bf592b2f 195 unsigned int flags;
43e9d192
IB
196} aarch64_simd_builtin_datum;
197
b5828b4b
JG
198static enum aarch64_type_qualifiers
199aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
8f905d69 200 = { qualifier_none, qualifier_none };
b5828b4b 201#define TYPES_UNOP (aarch64_types_unop_qualifiers)
5a7a4e80
TB
202static enum aarch64_type_qualifiers
203aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
204 = { qualifier_unsigned, qualifier_unsigned };
205#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
b5828b4b 206static enum aarch64_type_qualifiers
a579f4c7
JW
207aarch64_types_unopus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
208 = { qualifier_unsigned, qualifier_none };
209#define TYPES_UNOPUS (aarch64_types_unopus_qualifiers)
210static enum aarch64_type_qualifiers
b5828b4b
JG
211aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
212 = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
213#define TYPES_BINOP (aarch64_types_binop_qualifiers)
214static enum aarch64_type_qualifiers
5a7a4e80
TB
215aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
216 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
217#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
7baa225d 218static enum aarch64_type_qualifiers
de10bcce
AL
219aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
220 = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
221#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
222static enum aarch64_type_qualifiers
918621d3
AL
223aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
224 = { qualifier_none, qualifier_none, qualifier_unsigned };
225#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
226static enum aarch64_type_qualifiers
daef0a8c
JW
227aarch64_types_binop_uss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
228 = { qualifier_unsigned, qualifier_none, qualifier_none };
229#define TYPES_BINOP_USS (aarch64_types_binop_uss_qualifiers)
230static enum aarch64_type_qualifiers
7baa225d
TB
231aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
232 = { qualifier_poly, qualifier_poly, qualifier_poly };
233#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
3caf7f87
JW
234static enum aarch64_type_qualifiers
235aarch64_types_binop_ppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
236 = { qualifier_poly, qualifier_poly, qualifier_unsigned };
237#define TYPES_BINOP_PPU (aarch64_types_binop_ppu_qualifiers)
7baa225d 238
5a7a4e80 239static enum aarch64_type_qualifiers
b5828b4b
JG
240aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
241 = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
242#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
30442682 243static enum aarch64_type_qualifiers
2a49c16d
AL
244aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
245 = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
246#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
247static enum aarch64_type_qualifiers
30442682
TB
248aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
249 = { qualifier_unsigned, qualifier_unsigned,
250 qualifier_unsigned, qualifier_unsigned };
251#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
27086ea3 252static enum aarch64_type_qualifiers
0b839322
WD
253aarch64_types_ternopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
254 = { qualifier_unsigned, qualifier_unsigned,
255 qualifier_unsigned, qualifier_lane_index };
256#define TYPES_TERNOPU_LANE (aarch64_types_ternopu_lane_qualifiers)
257static enum aarch64_type_qualifiers
27086ea3
MC
258aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
259 = { qualifier_unsigned, qualifier_unsigned,
260 qualifier_unsigned, qualifier_immediate };
261#define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
8c197c85 262static enum aarch64_type_qualifiers
3caf7f87
JW
263aarch64_types_ternop_sssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
264 = { qualifier_none, qualifier_none, qualifier_none, qualifier_unsigned };
265#define TYPES_TERNOP_SSSU (aarch64_types_ternop_sssu_qualifiers)
266static enum aarch64_type_qualifiers
8c197c85
SMW
267aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
268 = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
269#define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)
2050ac1a
TC
270static enum aarch64_type_qualifiers
271aarch64_types_ternop_suss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
272 = { qualifier_none, qualifier_unsigned, qualifier_none, qualifier_none };
273#define TYPES_TERNOP_SUSS (aarch64_types_ternop_suss_qualifiers)
3caf7f87
JW
274static enum aarch64_type_qualifiers
275aarch64_types_binop_pppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
276 = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_unsigned };
277#define TYPES_TERNOP_PPPU (aarch64_types_binop_pppu_qualifiers)
27086ea3 278
9d63f43b
TC
279static enum aarch64_type_qualifiers
280aarch64_types_quadop_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
281 = { qualifier_none, qualifier_none, qualifier_none,
282 qualifier_none, qualifier_lane_pair_index };
283#define TYPES_QUADOP_LANE_PAIR (aarch64_types_quadop_lane_pair_qualifiers)
b5828b4b 284static enum aarch64_type_qualifiers
2a49c16d 285aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 286 = { qualifier_none, qualifier_none, qualifier_none,
2a49c16d
AL
287 qualifier_none, qualifier_lane_index };
288#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
7a08d813
TC
289static enum aarch64_type_qualifiers
290aarch64_types_quadopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
291 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
292 qualifier_unsigned, qualifier_lane_index };
293#define TYPES_QUADOPU_LANE (aarch64_types_quadopu_lane_qualifiers)
b5828b4b 294
8c197c85
SMW
295static enum aarch64_type_qualifiers
296aarch64_types_quadopssus_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
297 = { qualifier_none, qualifier_none, qualifier_unsigned,
298 qualifier_none, qualifier_lane_quadtup_index };
299#define TYPES_QUADOPSSUS_LANE_QUADTUP \
300 (aarch64_types_quadopssus_lane_quadtup_qualifiers)
301static enum aarch64_type_qualifiers
302aarch64_types_quadopsssu_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
303 = { qualifier_none, qualifier_none, qualifier_none,
304 qualifier_unsigned, qualifier_lane_quadtup_index };
305#define TYPES_QUADOPSSSU_LANE_QUADTUP \
306 (aarch64_types_quadopsssu_lane_quadtup_qualifiers)
307
27086ea3
MC
308static enum aarch64_type_qualifiers
309aarch64_types_quadopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
310 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
311 qualifier_unsigned, qualifier_immediate };
312#define TYPES_QUADOPUI (aarch64_types_quadopu_imm_qualifiers)
313
b5828b4b 314static enum aarch64_type_qualifiers
2a49c16d 315aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
b5828b4b 316 = { qualifier_none, qualifier_none, qualifier_immediate };
2a49c16d
AL
317#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
318#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
b5828b4b 319static enum aarch64_type_qualifiers
de10bcce
AL
320aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
321 = { qualifier_unsigned, qualifier_none, qualifier_immediate };
322#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
323static enum aarch64_type_qualifiers
1f0e9e34
JG
324aarch64_types_fcvt_from_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
325 = { qualifier_none, qualifier_unsigned, qualifier_immediate };
326#define TYPES_FCVTIMM_SUS (aarch64_types_fcvt_from_unsigned_qualifiers)
327static enum aarch64_type_qualifiers
252c7556
AV
328aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
329 = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
330#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)
05f1883c
DC
331#define TYPES_USHIFT2IMM (aarch64_types_ternopu_imm_qualifiers)
332static enum aarch64_type_qualifiers
333aarch64_types_shift2_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
334 = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_immediate };
335#define TYPES_SHIFT2IMM_UUSS (aarch64_types_shift2_to_unsigned_qualifiers)
de10bcce 336
159b8724
TC
337static enum aarch64_type_qualifiers
338aarch64_types_ternop_s_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
339 = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate};
340#define TYPES_SETREG (aarch64_types_ternop_s_imm_qualifiers)
341#define TYPES_SHIFTINSERT (aarch64_types_ternop_s_imm_qualifiers)
342#define TYPES_SHIFTACC (aarch64_types_ternop_s_imm_qualifiers)
05f1883c 343#define TYPES_SHIFT2IMM (aarch64_types_ternop_s_imm_qualifiers)
159b8724
TC
344
345static enum aarch64_type_qualifiers
346aarch64_types_ternop_p_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
347 = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_immediate};
348#define TYPES_SHIFTINSERTP (aarch64_types_ternop_p_imm_qualifiers)
b5828b4b 349
de10bcce
AL
350static enum aarch64_type_qualifiers
351aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
352 = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
353 qualifier_immediate };
354#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)
355
b5828b4b
JG
356static enum aarch64_type_qualifiers
357aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
358 = { qualifier_none, qualifier_const_pointer_map_mode };
359#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
360#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
66f206b8
JW
361static enum aarch64_type_qualifiers
362aarch64_types_load1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
363 = { qualifier_unsigned, qualifier_const_pointer_map_mode };
1716ddd1 364#define TYPES_LOAD1_U (aarch64_types_load1_u_qualifiers)
66f206b8
JW
365#define TYPES_LOADSTRUCT_U (aarch64_types_load1_u_qualifiers)
366static enum aarch64_type_qualifiers
367aarch64_types_load1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
368 = { qualifier_poly, qualifier_const_pointer_map_mode };
1716ddd1 369#define TYPES_LOAD1_P (aarch64_types_load1_p_qualifiers)
66f206b8
JW
370#define TYPES_LOADSTRUCT_P (aarch64_types_load1_p_qualifiers)
371
3ec1be97
CB
372static enum aarch64_type_qualifiers
373aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
374 = { qualifier_none, qualifier_const_pointer_map_mode,
4d0a0237 375 qualifier_none, qualifier_struct_load_store_lane_index };
3ec1be97 376#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)
66f206b8
JW
377static enum aarch64_type_qualifiers
378aarch64_types_loadstruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
379 = { qualifier_unsigned, qualifier_const_pointer_map_mode,
380 qualifier_unsigned, qualifier_struct_load_store_lane_index };
381#define TYPES_LOADSTRUCT_LANE_U (aarch64_types_loadstruct_lane_u_qualifiers)
382static enum aarch64_type_qualifiers
383aarch64_types_loadstruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
384 = { qualifier_poly, qualifier_const_pointer_map_mode,
385 qualifier_poly, qualifier_struct_load_store_lane_index };
386#define TYPES_LOADSTRUCT_LANE_P (aarch64_types_loadstruct_lane_p_qualifiers)
b5828b4b 387
46e778c4
JG
388static enum aarch64_type_qualifiers
389aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
390 = { qualifier_poly, qualifier_unsigned,
391 qualifier_poly, qualifier_poly };
392#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
393static enum aarch64_type_qualifiers
394aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
395 = { qualifier_none, qualifier_unsigned,
396 qualifier_none, qualifier_none };
397#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
398static enum aarch64_type_qualifiers
399aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
400 = { qualifier_unsigned, qualifier_unsigned,
401 qualifier_unsigned, qualifier_unsigned };
402#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)
403
b5828b4b
JG
 404/* The return type of a store should be void, which we represent
 405 with qualifier_void. A store's first operand will be a DImode
 406 pointer to the location to store to, so we must use
 407 qualifier_map_mode | qualifier_pointer to build a pointer to the
 408 element type of the vector. */
409static enum aarch64_type_qualifiers
410aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
411 = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
412#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
413#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
66f206b8
JW
414static enum aarch64_type_qualifiers
415aarch64_types_store1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
416 = { qualifier_void, qualifier_pointer_map_mode, qualifier_unsigned };
1716ddd1 417#define TYPES_STORE1_U (aarch64_types_store1_u_qualifiers)
66f206b8
JW
418#define TYPES_STORESTRUCT_U (aarch64_types_store1_u_qualifiers)
419static enum aarch64_type_qualifiers
420aarch64_types_store1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
421 = { qualifier_void, qualifier_pointer_map_mode, qualifier_poly };
1716ddd1 422#define TYPES_STORE1_P (aarch64_types_store1_p_qualifiers)
66f206b8
JW
423#define TYPES_STORESTRUCT_P (aarch64_types_store1_p_qualifiers)
424
ba081b77
JG
425static enum aarch64_type_qualifiers
426aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
427 = { qualifier_void, qualifier_pointer_map_mode,
4d0a0237 428 qualifier_none, qualifier_struct_load_store_lane_index };
ba081b77 429#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
66f206b8
JW
430static enum aarch64_type_qualifiers
431aarch64_types_storestruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
432 = { qualifier_void, qualifier_pointer_map_mode,
433 qualifier_unsigned, qualifier_struct_load_store_lane_index };
434#define TYPES_STORESTRUCT_LANE_U (aarch64_types_storestruct_lane_u_qualifiers)
435static enum aarch64_type_qualifiers
436aarch64_types_storestruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
437 = { qualifier_void, qualifier_pointer_map_mode,
438 qualifier_poly, qualifier_struct_load_store_lane_index };
439#define TYPES_STORESTRUCT_LANE_P (aarch64_types_storestruct_lane_p_qualifiers)
b5828b4b 440
0ddec79f
JG
441#define CF0(N, X) CODE_FOR_aarch64_##N##X
442#define CF1(N, X) CODE_FOR_##N##X##1
443#define CF2(N, X) CODE_FOR_##N##X##2
444#define CF3(N, X) CODE_FOR_##N##X##3
445#define CF4(N, X) CODE_FOR_##N##X##4
446#define CF10(N, X) CODE_FOR_##N##X
447
bf592b2f 448#define VAR1(T, N, MAP, FLAG, A) \
449 {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T, FLAG_##FLAG},
450#define VAR2(T, N, MAP, FLAG, A, B) \
451 VAR1 (T, N, MAP, FLAG, A) \
452 VAR1 (T, N, MAP, FLAG, B)
453#define VAR3(T, N, MAP, FLAG, A, B, C) \
454 VAR2 (T, N, MAP, FLAG, A, B) \
455 VAR1 (T, N, MAP, FLAG, C)
456#define VAR4(T, N, MAP, FLAG, A, B, C, D) \
457 VAR3 (T, N, MAP, FLAG, A, B, C) \
458 VAR1 (T, N, MAP, FLAG, D)
459#define VAR5(T, N, MAP, FLAG, A, B, C, D, E) \
460 VAR4 (T, N, MAP, FLAG, A, B, C, D) \
461 VAR1 (T, N, MAP, FLAG, E)
462#define VAR6(T, N, MAP, FLAG, A, B, C, D, E, F) \
463 VAR5 (T, N, MAP, FLAG, A, B, C, D, E) \
464 VAR1 (T, N, MAP, FLAG, F)
465#define VAR7(T, N, MAP, FLAG, A, B, C, D, E, F, G) \
466 VAR6 (T, N, MAP, FLAG, A, B, C, D, E, F) \
467 VAR1 (T, N, MAP, FLAG, G)
468#define VAR8(T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
469 VAR7 (T, N, MAP, FLAG, A, B, C, D, E, F, G) \
470 VAR1 (T, N, MAP, FLAG, H)
471#define VAR9(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
472 VAR8 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
473 VAR1 (T, N, MAP, FLAG, I)
474#define VAR10(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
475 VAR9 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
476 VAR1 (T, N, MAP, FLAG, J)
477#define VAR11(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
478 VAR10 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
479 VAR1 (T, N, MAP, FLAG, K)
480#define VAR12(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
481 VAR11 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
482 VAR1 (T, N, MAP, FLAG, L)
483#define VAR13(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
484 VAR12 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
485 VAR1 (T, N, MAP, FLAG, M)
486#define VAR14(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
487 VAR13 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
488 VAR1 (T, X, MAP, FLAG, N)
489#define VAR15(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
490 VAR14 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
491 VAR1 (T, X, MAP, FLAG, O)
492#define VAR16(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) \
493 VAR15 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
494 VAR1 (T, X, MAP, FLAG, P)
342be7f7 495
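/* For illustration, using a hypothetical entry rather than one from the
   original file: an occurrence of
     VAR2 (BINOP, add, 0, NONE, v8qi, v16qi)
   in aarch64-simd-builtins.def would expand, via VAR1, UP and CF0 above,
   to the two initializers
     {"addv8qi", E_V8QImode, CODE_FOR_aarch64_addv8qi, 0, TYPES_BINOP, FLAG_NONE},
     {"addv16qi", E_V16QImode, CODE_FOR_aarch64_addv16qi, 0, TYPES_BINOP, FLAG_NONE},
   with the zero fcode field filled in later by
   aarch64_init_simd_builtin_functions.  */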
f421c516 496#include "aarch64-builtin-iterators.h"
43e9d192
IB
497
498static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
342be7f7
JG
499#include "aarch64-simd-builtins.def"
500};
501
5d357f26
KT
 502/* There are only 8 CRC32 builtins. Probably not worth their own .def file. */
503#define AARCH64_CRC32_BUILTINS \
504 CRC32_BUILTIN (crc32b, QI) \
505 CRC32_BUILTIN (crc32h, HI) \
506 CRC32_BUILTIN (crc32w, SI) \
507 CRC32_BUILTIN (crc32x, DI) \
508 CRC32_BUILTIN (crc32cb, QI) \
509 CRC32_BUILTIN (crc32ch, HI) \
510 CRC32_BUILTIN (crc32cw, SI) \
511 CRC32_BUILTIN (crc32cx, DI)
512
9d63f43b
TC
 513/* The next 8 FCMLA intrinsics require some special handling compared to the
 514 normal simd intrinsics. */
515#define AARCH64_SIMD_FCMLA_LANEQ_BUILTINS \
516 FCMLA_LANEQ_BUILTIN (0, v2sf, fcmla, V2SF, false) \
517 FCMLA_LANEQ_BUILTIN (90, v2sf, fcmla, V2SF, false) \
518 FCMLA_LANEQ_BUILTIN (180, v2sf, fcmla, V2SF, false) \
519 FCMLA_LANEQ_BUILTIN (270, v2sf, fcmla, V2SF, false) \
520 FCMLA_LANEQ_BUILTIN (0, v4hf, fcmla_laneq, V4HF, true) \
521 FCMLA_LANEQ_BUILTIN (90, v4hf, fcmla_laneq, V4HF, true) \
522 FCMLA_LANEQ_BUILTIN (180, v4hf, fcmla_laneq, V4HF, true) \
523 FCMLA_LANEQ_BUILTIN (270, v4hf, fcmla_laneq, V4HF, true) \
524
5d357f26
KT
525typedef struct
526{
527 const char *name;
ef4bddc2 528 machine_mode mode;
5d357f26
KT
529 const enum insn_code icode;
530 unsigned int fcode;
531} aarch64_crc_builtin_datum;
532
9d63f43b
TC
533/* Hold information about how to expand the FCMLA_LANEQ builtins. */
534typedef struct
535{
536 const char *name;
537 machine_mode mode;
538 const enum insn_code icode;
539 unsigned int fcode;
540 bool lane;
541} aarch64_fcmla_laneq_builtin_datum;
542
5d357f26
KT
543#define CRC32_BUILTIN(N, M) \
544 AARCH64_BUILTIN_##N,
545
9d63f43b
TC
546#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
547 AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M,
548
342be7f7 549#undef VAR1
bf592b2f 550#define VAR1(T, N, MAP, FLAG, A) \
e993fea1 551 AARCH64_SIMD_BUILTIN_##T##_##N##A,
342be7f7
JG
552
553enum aarch64_builtins
554{
555 AARCH64_BUILTIN_MIN,
aa87aced
KV
556
557 AARCH64_BUILTIN_GET_FPCR,
558 AARCH64_BUILTIN_SET_FPCR,
559 AARCH64_BUILTIN_GET_FPSR,
560 AARCH64_BUILTIN_SET_FPSR,
561
0d7e5fa6
AC
562 AARCH64_BUILTIN_GET_FPCR64,
563 AARCH64_BUILTIN_SET_FPCR64,
564 AARCH64_BUILTIN_GET_FPSR64,
565 AARCH64_BUILTIN_SET_FPSR64,
566
a6fc00da
BH
567 AARCH64_BUILTIN_RSQRT_DF,
568 AARCH64_BUILTIN_RSQRT_SF,
569 AARCH64_BUILTIN_RSQRT_V2DF,
570 AARCH64_BUILTIN_RSQRT_V2SF,
571 AARCH64_BUILTIN_RSQRT_V4SF,
342be7f7 572 AARCH64_SIMD_BUILTIN_BASE,
661fce82 573 AARCH64_SIMD_BUILTIN_LANE_CHECK,
342be7f7 574#include "aarch64-simd-builtins.def"
661fce82
AL
575 /* The first enum element which is based on an insn_data pattern. */
576 AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
577 AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
578 + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
5d357f26
KT
579 AARCH64_CRC32_BUILTIN_BASE,
580 AARCH64_CRC32_BUILTINS
581 AARCH64_CRC32_BUILTIN_MAX,
312492bd
JW
582 /* ARMv8.3-A Pointer Authentication Builtins. */
583 AARCH64_PAUTH_BUILTIN_AUTIA1716,
584 AARCH64_PAUTH_BUILTIN_PACIA1716,
8fc16d72
ST
585 AARCH64_PAUTH_BUILTIN_AUTIB1716,
586 AARCH64_PAUTH_BUILTIN_PACIB1716,
312492bd 587 AARCH64_PAUTH_BUILTIN_XPACLRI,
9d63f43b
TC
588 /* Special cased Armv8.3-A Complex FMA by Lane quad Builtins. */
589 AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE,
590 AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
e1d5d19e
KT
591 /* Builtin for Arm8.3-a Javascript conversion instruction. */
592 AARCH64_JSCVT,
89626179
SD
593 /* TME builtins. */
594 AARCH64_TME_BUILTIN_TSTART,
595 AARCH64_TME_BUILTIN_TCOMMIT,
596 AARCH64_TME_BUILTIN_TTEST,
597 AARCH64_TME_BUILTIN_TCANCEL,
c5dc215d
KT
598 /* Armv8.5-a RNG instruction builtins. */
599 AARCH64_BUILTIN_RNG_RNDR,
600 AARCH64_BUILTIN_RNG_RNDRRS,
ef01e6bb
DZ
601 /* MEMTAG builtins. */
602 AARCH64_MEMTAG_BUILTIN_START,
603 AARCH64_MEMTAG_BUILTIN_IRG,
604 AARCH64_MEMTAG_BUILTIN_GMI,
605 AARCH64_MEMTAG_BUILTIN_SUBP,
606 AARCH64_MEMTAG_BUILTIN_INC_TAG,
607 AARCH64_MEMTAG_BUILTIN_SET_TAG,
608 AARCH64_MEMTAG_BUILTIN_GET_TAG,
609 AARCH64_MEMTAG_BUILTIN_END,
fdcddba8
PW
610 /* LS64 builtins. */
611 AARCH64_LS64_BUILTIN_LD64B,
612 AARCH64_LS64_BUILTIN_ST64B,
613 AARCH64_LS64_BUILTIN_ST64BV,
614 AARCH64_LS64_BUILTIN_ST64BV0,
eb966d39
ASDV
615 AARCH64_REV16,
616 AARCH64_REV16L,
617 AARCH64_REV16LL,
618 AARCH64_RBIT,
619 AARCH64_RBITL,
620 AARCH64_RBITLL,
342be7f7 621 AARCH64_BUILTIN_MAX
43e9d192
IB
622};
623
5d357f26
KT
624#undef CRC32_BUILTIN
625#define CRC32_BUILTIN(N, M) \
0d4a1197 626 {"__builtin_aarch64_"#N, E_##M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},
5d357f26
KT
627
628static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
629 AARCH64_CRC32_BUILTINS
630};
631
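/* For illustration, derived from the macros above: under this second
   definition of CRC32_BUILTIN, the CRC32_BUILTIN (crc32b, QI) entry of
   AARCH64_CRC32_BUILTINS expands to
     {"__builtin_aarch64_crc32b", E_QImode, CODE_FOR_aarch64_crc32b,
      AARCH64_BUILTIN_crc32b},
   whereas the first definition expanded the same list into the
   AARCH64_BUILTIN_* enumerators - the usual X-macro pattern.  */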
9d63f43b
TC
632
633#undef FCMLA_LANEQ_BUILTIN
634#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
635 {"__builtin_aarch64_fcmla_laneq"#I#N, E_##M##mode, CODE_FOR_aarch64_##X##I##N, \
636 AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M, T},
637
 638/* This structure describes how to map the builtin to the instruction to
 639 generate in the backend and how to invoke that instruction. */
5eb9ac1e 640static aarch64_fcmla_laneq_builtin_datum aarch64_fcmla_lane_builtin_data[] = {
9d63f43b
TC
641 AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
642};
643
5d357f26
KT
644#undef CRC32_BUILTIN
645
119103ca
JG
646static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];
647
43e9d192
IB
648#define NUM_DREG_TYPES 6
649#define NUM_QREG_TYPES 6
650
f9d53c27
TB
651/* Internal scalar builtin types. These types are used to support
652 neon intrinsic builtins. They are _not_ user-visible types. Therefore
 653 the mangling for these types is implementation defined. */
654const char *aarch64_scalar_builtin_types[] = {
655 "__builtin_aarch64_simd_qi",
656 "__builtin_aarch64_simd_hi",
657 "__builtin_aarch64_simd_si",
7c369485 658 "__builtin_aarch64_simd_hf",
f9d53c27
TB
659 "__builtin_aarch64_simd_sf",
660 "__builtin_aarch64_simd_di",
661 "__builtin_aarch64_simd_df",
662 "__builtin_aarch64_simd_poly8",
663 "__builtin_aarch64_simd_poly16",
664 "__builtin_aarch64_simd_poly64",
665 "__builtin_aarch64_simd_poly128",
666 "__builtin_aarch64_simd_ti",
667 "__builtin_aarch64_simd_uqi",
668 "__builtin_aarch64_simd_uhi",
669 "__builtin_aarch64_simd_usi",
670 "__builtin_aarch64_simd_udi",
671 "__builtin_aarch64_simd_ei",
672 "__builtin_aarch64_simd_oi",
673 "__builtin_aarch64_simd_ci",
674 "__builtin_aarch64_simd_xi",
e603cd43 675 "__builtin_aarch64_simd_bf",
f9d53c27
TB
676 NULL
677};
b5828b4b 678
f9d53c27
TB
679#define ENTRY(E, M, Q, G) E,
680enum aarch64_simd_type
681{
682#include "aarch64-simd-builtin-types.def"
683 ARM_NEON_H_TYPES_LAST
684};
685#undef ENTRY
b5828b4b 686
98f1dd02 687struct GTY(()) aarch64_simd_type_info
b5828b4b 688{
f9d53c27
TB
689 enum aarch64_simd_type type;
690
691 /* Internal type name. */
692 const char *name;
693
694 /* Internal type name(mangled). The mangled names conform to the
695 AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
696 Appendix A). To qualify for emission with the mangled names defined in
697 that document, a vector type must not only be of the correct mode but also
698 be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t); these
699 types are registered by aarch64_init_simd_builtin_types (). In other
700 words, vector types defined in other ways e.g. via vector_size attribute
701 will get default mangled names. */
702 const char *mangle;
703
704 /* Internal type. */
705 tree itype;
706
707 /* Element type. */
b5828b4b
JG
708 tree eltype;
709
f9d53c27
TB
710 /* Machine mode the internal type maps to. */
711 enum machine_mode mode;
b5828b4b 712
f9d53c27
TB
713 /* Qualifiers. */
714 enum aarch64_type_qualifiers q;
715};
716
717#define ENTRY(E, M, Q, G) \
0d4a1197 718 {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, E_##M##mode, qualifier_##Q},
98f1dd02 719static GTY(()) struct aarch64_simd_type_info aarch64_simd_types [] = {
f9d53c27
TB
720#include "aarch64-simd-builtin-types.def"
721};
722#undef ENTRY
723
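/* For illustration, using one plausible entry (the real entries live in
   aarch64-simd-builtin-types.def): ENTRY (Int8x8_t, V8QI, none, 10)
   would expand under the ENTRY definition above to
     {Int8x8_t, "__Int8x8_t", "10__Int8x8_t", NULL_TREE, NULL_TREE,
      E_V8QImode, qualifier_none},
   i.e. the internal name is the type name with a "__" prefix, and the
   mangled name is the length-prefixed source-name encoding used for
   mangling under AAPCS64.  */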
14814e20 724static machine_mode aarch64_simd_tuple_modes[ARM_NEON_H_TYPES_LAST][3];
66f206b8
JW
725static GTY(()) tree aarch64_simd_tuple_types[ARM_NEON_H_TYPES_LAST][3];
726
98f1dd02
AP
727static GTY(()) tree aarch64_simd_intOI_type_node = NULL_TREE;
728static GTY(()) tree aarch64_simd_intCI_type_node = NULL_TREE;
729static GTY(()) tree aarch64_simd_intXI_type_node = NULL_TREE;
f9d53c27 730
1b62ed4f
JG
731/* The user-visible __fp16 type, and a pointer to that type. Used
732 across the back-end. */
733tree aarch64_fp16_type_node = NULL_TREE;
734tree aarch64_fp16_ptr_type_node = NULL_TREE;
735
abbe1ed2
SMW
736/* Back-end node type for brain float (bfloat) types. */
737tree aarch64_bf16_type_node = NULL_TREE;
738tree aarch64_bf16_ptr_type_node = NULL_TREE;
739
6d4d616a 740/* Wrapper around add_builtin_function. NAME is the name of the built-in
072a8b8f 741 function, TYPE is the function type, CODE is the function subcode
742 (relative to AARCH64_BUILTIN_GENERAL), and ATTRS is the function
743 attributes. */
6d4d616a 744static tree
072a8b8f 745aarch64_general_add_builtin (const char *name, tree type, unsigned int code,
746 tree attrs = NULL_TREE)
6d4d616a
RS
747{
748 code = (code << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
749 return add_builtin_function (name, type, code, BUILT_IN_MD,
072a8b8f 750 NULL, attrs);
6d4d616a
RS
751}
752
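/* A minimal sketch, not part of the original file, of how the packed
   function code is meant to be decoded again (AARCH64_BUILTIN_SHIFT and
   AARCH64_BUILTIN_GENERAL come from the backend headers; the helper name
   here is hypothetical):  */
static unsigned int
aarch64_subcode_sketch (unsigned int packed_code)
{
  /* The low bits tag the builtin class (AARCH64_BUILTIN_GENERAL for
     everything registered in this file); the remaining bits are the
     subcode relative to that class.  */
  return packed_code >> AARCH64_BUILTIN_SHIFT;
}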
f9d53c27
TB
753static const char *
754aarch64_mangle_builtin_scalar_type (const_tree type)
755{
756 int i = 0;
757
758 while (aarch64_scalar_builtin_types[i] != NULL)
b5828b4b 759 {
f9d53c27
TB
760 const char *name = aarch64_scalar_builtin_types[i];
761
762 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
763 && DECL_NAME (TYPE_NAME (type))
764 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
765 return aarch64_scalar_builtin_types[i];
766 i++;
767 }
768 return NULL;
b5828b4b
JG
769}
770
f9d53c27
TB
771static const char *
772aarch64_mangle_builtin_vector_type (const_tree type)
b5828b4b 773{
31427b97
RS
774 tree attrs = TYPE_ATTRIBUTES (type);
775 if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
776 {
777 tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
778 return IDENTIFIER_POINTER (mangled_name);
779 }
f9d53c27
TB
780
781 return NULL;
6db1ec94
JG
782}
783
f9d53c27 784const char *
6d4d616a 785aarch64_general_mangle_builtin_type (const_tree type)
6db1ec94 786{
f9d53c27
TB
787 const char *mangle;
 788 /* Walk through all the AArch64 builtin type tables to look up the
 789 incoming type. */
790 if ((mangle = aarch64_mangle_builtin_vector_type (type))
791 || (mangle = aarch64_mangle_builtin_scalar_type (type)))
792 return mangle;
793
794 return NULL;
6db1ec94
JG
795}
796
f9d53c27 797static tree
b8506a8a 798aarch64_simd_builtin_std_type (machine_mode mode,
f9d53c27 799 enum aarch64_type_qualifiers q)
6db1ec94 800{
f9d53c27
TB
801#define QUAL_TYPE(M) \
802 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
803 switch (mode)
804 {
4e10a5a7 805 case E_QImode:
f9d53c27 806 return QUAL_TYPE (QI);
4e10a5a7 807 case E_HImode:
f9d53c27 808 return QUAL_TYPE (HI);
4e10a5a7 809 case E_SImode:
f9d53c27 810 return QUAL_TYPE (SI);
4e10a5a7 811 case E_DImode:
f9d53c27 812 return QUAL_TYPE (DI);
4e10a5a7 813 case E_TImode:
f9d53c27 814 return QUAL_TYPE (TI);
4e10a5a7 815 case E_OImode:
f9d53c27 816 return aarch64_simd_intOI_type_node;
4e10a5a7 817 case E_CImode:
f9d53c27 818 return aarch64_simd_intCI_type_node;
4e10a5a7 819 case E_XImode:
f9d53c27 820 return aarch64_simd_intXI_type_node;
4e10a5a7 821 case E_HFmode:
71a11456 822 return aarch64_fp16_type_node;
4e10a5a7 823 case E_SFmode:
f9d53c27 824 return float_type_node;
4e10a5a7 825 case E_DFmode:
f9d53c27 826 return double_type_node;
abbe1ed2
SMW
827 case E_BFmode:
828 return aarch64_bf16_type_node;
f9d53c27
TB
829 default:
830 gcc_unreachable ();
831 }
832#undef QUAL_TYPE
6db1ec94
JG
833}
834
f9d53c27 835static tree
b8506a8a 836aarch64_lookup_simd_builtin_type (machine_mode mode,
f9d53c27 837 enum aarch64_type_qualifiers q)
6db1ec94 838{
f9d53c27 839 int i;
ca32b29e 840 int nelts = ARRAY_SIZE (aarch64_simd_types);
f9d53c27
TB
841
842 /* Non-poly scalar modes map to standard types not in the table. */
843 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
844 return aarch64_simd_builtin_std_type (mode, q);
845
846 for (i = 0; i < nelts; i++)
66f206b8
JW
847 {
848 if (aarch64_simd_types[i].mode == mode
849 && aarch64_simd_types[i].q == q)
850 return aarch64_simd_types[i].itype;
851 if (aarch64_simd_tuple_types[i][0] != NULL_TREE)
852 for (int j = 0; j < 3; j++)
14814e20 853 if (aarch64_simd_tuple_modes[i][j] == mode
66f206b8
JW
854 && aarch64_simd_types[i].q == q)
855 return aarch64_simd_tuple_types[i][j];
856 }
f9d53c27
TB
857
858 return NULL_TREE;
b5828b4b
JG
859}
860
f9d53c27 861static tree
b8506a8a 862aarch64_simd_builtin_type (machine_mode mode,
f9d53c27
TB
863 bool unsigned_p, bool poly_p)
864{
865 if (poly_p)
866 return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
867 else if (unsigned_p)
868 return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
869 else
870 return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
871}
872
af55e82d 873static void
f9d53c27 874aarch64_init_simd_builtin_types (void)
43e9d192 875{
f9d53c27 876 int i;
ca32b29e 877 int nelts = ARRAY_SIZE (aarch64_simd_types);
f9d53c27
TB
878 tree tdecl;
879
880 /* Init all the element types built by the front-end. */
881 aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
882 aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
883 aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
884 aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
885 aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
886 aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
887 aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
888 aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
889 aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
890 aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
891 aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
892 aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
893 aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
894 aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
895 aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
896 aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;
897
898 /* Poly types are a world of their own. */
899 aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
900 build_distinct_type_copy (unsigned_intQI_type_node);
bcee52c4
MS
901 /* Prevent front-ends from transforming Poly8_t arrays into string
902 literals. */
903 TYPE_STRING_FLAG (aarch64_simd_types[Poly8_t].eltype) = false;
904
f9d53c27
TB
905 aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
906 build_distinct_type_copy (unsigned_intHI_type_node);
907 aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
908 build_distinct_type_copy (unsigned_intDI_type_node);
909 aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
910 build_distinct_type_copy (unsigned_intTI_type_node);
911 /* Init poly vector element types with scalar poly types. */
912 aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
913 aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
914 aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
915 aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
916 aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
917 aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;
918
919 /* Continue with standard types. */
71a11456
AL
920 aarch64_simd_types[Float16x4_t].eltype = aarch64_fp16_type_node;
921 aarch64_simd_types[Float16x8_t].eltype = aarch64_fp16_type_node;
f9d53c27
TB
922 aarch64_simd_types[Float32x2_t].eltype = float_type_node;
923 aarch64_simd_types[Float32x4_t].eltype = float_type_node;
924 aarch64_simd_types[Float64x1_t].eltype = double_type_node;
925 aarch64_simd_types[Float64x2_t].eltype = double_type_node;
926
abbe1ed2
SMW
 927 /* Init Bfloat vector types with the underlying __bf16 type. */
928 aarch64_simd_types[Bfloat16x4_t].eltype = aarch64_bf16_type_node;
929 aarch64_simd_types[Bfloat16x8_t].eltype = aarch64_bf16_type_node;
930
f9d53c27
TB
931 for (i = 0; i < nelts; i++)
932 {
933 tree eltype = aarch64_simd_types[i].eltype;
b8506a8a 934 machine_mode mode = aarch64_simd_types[i].mode;
f9d53c27
TB
935
936 if (aarch64_simd_types[i].itype == NULL)
b96824c4 937 {
31427b97
RS
938 tree type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
939 type = build_distinct_type_copy (type);
940 SET_TYPE_STRUCTURAL_EQUALITY (type);
941
942 tree mangled_name = get_identifier (aarch64_simd_types[i].mangle);
943 tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
944 TYPE_ATTRIBUTES (type)
945 = tree_cons (get_identifier ("Advanced SIMD type"), value,
946 TYPE_ATTRIBUTES (type));
947 aarch64_simd_types[i].itype = type;
b96824c4 948 }
f9d53c27
TB
949
950 tdecl = add_builtin_type (aarch64_simd_types[i].name,
951 aarch64_simd_types[i].itype);
952 TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
f9d53c27 953 }
43e9d192 954
f9d53c27
TB
955#define AARCH64_BUILD_SIGNED_TYPE(mode) \
956 make_signed_type (GET_MODE_PRECISION (mode));
957 aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
f9d53c27
TB
958 aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
959 aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
960#undef AARCH64_BUILD_SIGNED_TYPE
961
f9d53c27
TB
962 tdecl = add_builtin_type
963 ("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
964 TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
965 tdecl = add_builtin_type
966 ("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
967 TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
968 tdecl = add_builtin_type
969 ("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
970 TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
971}
972
973static void
974aarch64_init_simd_builtin_scalar_types (void)
975{
976 /* Define typedefs for all the standard scalar types. */
977 (*lang_hooks.types.register_builtin_type) (intQI_type_node,
43e9d192 978 "__builtin_aarch64_simd_qi");
f9d53c27 979 (*lang_hooks.types.register_builtin_type) (intHI_type_node,
43e9d192 980 "__builtin_aarch64_simd_hi");
7c369485
AL
981 (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node,
982 "__builtin_aarch64_simd_hf");
f9d53c27 983 (*lang_hooks.types.register_builtin_type) (intSI_type_node,
43e9d192 984 "__builtin_aarch64_simd_si");
f9d53c27 985 (*lang_hooks.types.register_builtin_type) (float_type_node,
43e9d192 986 "__builtin_aarch64_simd_sf");
f9d53c27 987 (*lang_hooks.types.register_builtin_type) (intDI_type_node,
43e9d192 988 "__builtin_aarch64_simd_di");
f9d53c27 989 (*lang_hooks.types.register_builtin_type) (double_type_node,
43e9d192 990 "__builtin_aarch64_simd_df");
f9d53c27 991 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
43e9d192 992 "__builtin_aarch64_simd_poly8");
f9d53c27 993 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
43e9d192 994 "__builtin_aarch64_simd_poly16");
f9d53c27 995 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
7baa225d 996 "__builtin_aarch64_simd_poly64");
f9d53c27 997 (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
7baa225d 998 "__builtin_aarch64_simd_poly128");
f9d53c27 999 (*lang_hooks.types.register_builtin_type) (intTI_type_node,
43e9d192 1000 "__builtin_aarch64_simd_ti");
e603cd43
MI
1001 (*lang_hooks.types.register_builtin_type) (aarch64_bf16_type_node,
1002 "__builtin_aarch64_simd_bf");
b5828b4b 1003 /* Unsigned integer types for various mode sizes. */
f9d53c27 1004 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
b5828b4b 1005 "__builtin_aarch64_simd_uqi");
f9d53c27 1006 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
b5828b4b 1007 "__builtin_aarch64_simd_uhi");
f9d53c27 1008 (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
b5828b4b 1009 "__builtin_aarch64_simd_usi");
f9d53c27 1010 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
b5828b4b 1011 "__builtin_aarch64_simd_udi");
f9d53c27
TB
1012}
1013
079c23cf
KT
1014/* Return a set of FLAG_* flags derived from FLAGS
1015 that describe what a function with result MODE could do,
072a8b8f 1016 taking the command-line flags into account. */
1017static unsigned int
079c23cf 1018aarch64_call_properties (unsigned int flags, machine_mode mode)
072a8b8f 1019{
079c23cf 1020 if (!(flags & FLAG_AUTO_FP) && FLOAT_MODE_P (mode))
35ffd4d1 1021 flags |= FLAG_FP;
072a8b8f 1022
1023 /* -fno-trapping-math means that we can assume any FP exceptions
1024 are not user-visible. */
1025 if (!flag_trapping_math)
1026 flags &= ~FLAG_RAISE_FP_EXCEPTIONS;
1027
1028 return flags;
1029}
1030
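/* Worked example, not part of the original file: for a hypothetical
   float-mode intrinsic, aarch64_call_properties (FLAG_NONE, E_V4SFmode)
   yields FLAG_FP (FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS), because
   the result mode is a floating-point mode and FLAG_AUTO_FP is not set;
   under -fno-trapping-math the FLAG_RAISE_FP_EXCEPTIONS bit is then
   cleared again.  */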
079c23cf
KT
1031/* Return true if calls to a function with flags F and mode MODE
1032 could modify some form of global state. */
072a8b8f 1033static bool
079c23cf 1034aarch64_modifies_global_state_p (unsigned int f, machine_mode mode)
072a8b8f 1035{
079c23cf 1036 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1037
1038 if (flags & FLAG_RAISE_FP_EXCEPTIONS)
1039 return true;
1040
1041 if (flags & FLAG_PREFETCH_MEMORY)
1042 return true;
1043
1044 return flags & FLAG_WRITE_MEMORY;
1045}
1046
079c23cf
KT
1047/* Return true if calls to a function with flags F and mode MODE
1048 could read some form of global state. */
072a8b8f 1049static bool
079c23cf 1050aarch64_reads_global_state_p (unsigned int f, machine_mode mode)
072a8b8f 1051{
079c23cf 1052 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1053
1054 if (flags & FLAG_READ_FPCR)
1055 return true;
1056
1057 return flags & FLAG_READ_MEMORY;
1058}
1059
079c23cf
KT
1060/* Return true if calls to a function with flags F and mode MODE
1061 could raise a signal. */
072a8b8f 1062static bool
079c23cf 1063aarch64_could_trap_p (unsigned int f, machine_mode mode)
072a8b8f 1064{
079c23cf 1065 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1066
1067 if (flags & FLAG_RAISE_FP_EXCEPTIONS)
1068 return true;
1069
1070 if (flags & (FLAG_READ_MEMORY | FLAG_WRITE_MEMORY))
1071 return true;
1072
1073 return false;
1074}
1075
1076/* Add attribute NAME to ATTRS. */
1077static tree
1078aarch64_add_attribute (const char *name, tree attrs)
1079{
1080 return tree_cons (get_identifier (name), NULL_TREE, attrs);
1081}
1082
079c23cf
KT
1083/* Return the appropriate attributes for a function that has
1084 flags F and mode MODE. */
072a8b8f 1085static tree
079c23cf 1086aarch64_get_attributes (unsigned int f, machine_mode mode)
072a8b8f 1087{
1088 tree attrs = NULL_TREE;
1089
079c23cf 1090 if (!aarch64_modifies_global_state_p (f, mode))
072a8b8f 1091 {
079c23cf 1092 if (aarch64_reads_global_state_p (f, mode))
072a8b8f 1093 attrs = aarch64_add_attribute ("pure", attrs);
1094 else
1095 attrs = aarch64_add_attribute ("const", attrs);
1096 }
1097
079c23cf 1098 if (!flag_non_call_exceptions || !aarch64_could_trap_p (f, mode))
072a8b8f 1099 attrs = aarch64_add_attribute ("nothrow", attrs);
1100
1101 return aarch64_add_attribute ("leaf", attrs);
1102}
1103
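/* For illustration, hypothetical uses of the function above (integer
   result mode, default command-line flags):
     aarch64_get_attributes (FLAG_NONE, E_SImode)  -> const, nothrow, leaf
     aarch64_get_attributes (FLAG_LOAD, E_SImode)  -> pure, nothrow, leaf
     aarch64_get_attributes (FLAG_STORE, E_SImode) -> nothrow, leaf
   With -fnon-call-exceptions the two memory-touching cases lose
   "nothrow", since aarch64_could_trap_p returns true for them.  */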
e95a988a
KT
1104static bool aarch64_simd_builtins_initialized_p = false;
1105
9d63f43b
TC
 1106/* Because the architecture does not provide a lane variant of the fcmla
 1107 instructions, we can't use the standard simd builtin expansion code, but we
 1108 still want the majority of the validation that would normally be done. */
1109
1110void
1111aarch64_init_fcmla_laneq_builtins (void)
1112{
1113 unsigned int i = 0;
1114
1115 for (i = 0; i < ARRAY_SIZE (aarch64_fcmla_lane_builtin_data); ++i)
1116 {
1117 aarch64_fcmla_laneq_builtin_datum* d
1118 = &aarch64_fcmla_lane_builtin_data[i];
1119 tree argtype = aarch64_lookup_simd_builtin_type (d->mode, qualifier_none);
1120 machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
1121 tree quadtype
1122 = aarch64_lookup_simd_builtin_type (quadmode, qualifier_none);
1123 tree lanetype
1124 = aarch64_simd_builtin_std_type (SImode, qualifier_lane_pair_index);
1125 tree ftype = build_function_type_list (argtype, argtype, argtype,
1126 quadtype, lanetype, NULL_TREE);
079c23cf
KT
1127 tree attrs = aarch64_get_attributes (FLAG_FP, d->mode);
1128 tree fndecl
1129 = aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);
9d63f43b
TC
1130
1131 aarch64_builtin_decls[d->fcode] = fndecl;
1132 }
1133}
1134
e95a988a 1135void
8197ab94 1136aarch64_init_simd_builtin_functions (bool called_from_pragma)
f9d53c27 1137{
661fce82 1138 unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;
f9d53c27 1139
8197ab94
JW
1140 if (!called_from_pragma)
1141 {
1142 tree lane_check_fpr = build_function_type_list (void_type_node,
1143 size_type_node,
1144 size_type_node,
1145 intSI_type_node,
1146 NULL);
1147 aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK]
1148 = aarch64_general_add_builtin ("__builtin_aarch64_im_lane_boundsi",
1149 lane_check_fpr,
1150 AARCH64_SIMD_BUILTIN_LANE_CHECK);
1151 }
661fce82 1152
342be7f7 1153 for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
43e9d192 1154 {
b5828b4b 1155 bool print_type_signature_p = false;
cae83731 1156 char type_signature[SIMD_MAX_BUILTIN_ARGS + 1] = { 0 };
43e9d192 1157 aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
342be7f7
JG
1158 char namebuf[60];
1159 tree ftype = NULL;
119103ca 1160 tree fndecl = NULL;
342be7f7 1161
342be7f7 1162 d->fcode = fcode;
43e9d192 1163
b5828b4b
JG
1164 /* We must track two variables here. op_num is
1165 the operand number as in the RTL pattern. This is
1166 required to access the mode (e.g. V4SF mode) of the
1167 argument, from which the base type can be derived.
 1168 arg_num is an index into the qualifiers data, which
1169 gives qualifiers to the type (e.g. const unsigned).
1170 The reason these two variables may differ by one is the
1171 void return type. While all return types take the 0th entry
1172 in the qualifiers array, there is no operand for them in the
1173 RTL pattern. */
1174 int op_num = insn_data[d->code].n_operands - 1;
1175 int arg_num = d->qualifiers[0] & qualifier_void
1176 ? op_num + 1
1177 : op_num;
1178 tree return_type = void_type_node, args = void_list_node;
1179 tree eltype;
1180
8197ab94
JW
1181 int struct_mode_args = 0;
1182 for (int j = op_num; j >= 0; j--)
1183 {
1184 machine_mode op_mode = insn_data[d->code].operand[j].mode;
1185 if (aarch64_advsimd_struct_mode_p (op_mode))
1186 struct_mode_args++;
1187 }
1188
1189 if ((called_from_pragma && struct_mode_args == 0)
1190 || (!called_from_pragma && struct_mode_args > 0))
1191 continue;
1192
b5828b4b
JG
1193 /* Build a function type directly from the insn_data for this
1194 builtin. The build_function_type () function takes care of
1195 removing duplicates for us. */
1196 for (; op_num >= 0; arg_num--, op_num--)
43e9d192 1197 {
ef4bddc2 1198 machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
b5828b4b 1199 enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];
43e9d192 1200
b5828b4b
JG
1201 if (qualifiers & qualifier_unsigned)
1202 {
9fd2074d 1203 type_signature[op_num] = 'u';
b5828b4b
JG
1204 print_type_signature_p = true;
1205 }
6db1ec94
JG
1206 else if (qualifiers & qualifier_poly)
1207 {
9fd2074d 1208 type_signature[op_num] = 'p';
6db1ec94
JG
1209 print_type_signature_p = true;
1210 }
b5828b4b 1211 else
9fd2074d 1212 type_signature[op_num] = 's';
b5828b4b 1213
b5828b4b
JG
1214 /* Some builtins have different user-facing types
1215 for certain arguments, encoded in d->mode. */
1216 if (qualifiers & qualifier_map_mode)
bc5e395d 1217 op_mode = d->mode;
b5828b4b
JG
1218
1219 /* For pointers, we want a pointer to the basic type
1220 of the vector. */
1221 if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
1222 op_mode = GET_MODE_INNER (op_mode);
1223
f9d53c27
TB
1224 eltype = aarch64_simd_builtin_type
1225 (op_mode,
1226 (qualifiers & qualifier_unsigned) != 0,
1227 (qualifiers & qualifier_poly) != 0);
1228 gcc_assert (eltype != NULL);
b5828b4b
JG
1229
1230 /* Add qualifiers. */
1231 if (qualifiers & qualifier_const)
1232 eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);
1233
1234 if (qualifiers & qualifier_pointer)
1235 eltype = build_pointer_type (eltype);
1236
1237 /* If we have reached arg_num == 0, we are at a non-void
1238 return type. Otherwise, we are still processing
1239 arguments. */
1240 if (arg_num == 0)
1241 return_type = eltype;
1242 else
1243 args = tree_cons (NULL_TREE, eltype, args);
1244 }
342be7f7 1245
b5828b4b 1246 ftype = build_function_type (return_type, args);
43e9d192 1247
342be7f7 1248 gcc_assert (ftype != NULL);
43e9d192 1249
b5828b4b 1250 if (print_type_signature_p)
bc5e395d
JG
1251 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
1252 d->name, type_signature);
b5828b4b 1253 else
bc5e395d
JG
1254 snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
1255 d->name);
43e9d192 1256
079c23cf 1257 tree attrs = aarch64_get_attributes (d->flags, d->mode);
072a8b8f 1258
66f206b8
JW
1259 if (called_from_pragma)
1260 {
1261 unsigned int raw_code
1262 = (fcode << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
1263 fndecl = simulate_builtin_function_decl (input_location, namebuf,
1264 ftype, raw_code, NULL,
1265 attrs);
1266 }
1267 else
1268 fndecl = aarch64_general_add_builtin (namebuf, ftype, fcode, attrs);
1269
119103ca 1270 aarch64_builtin_decls[fcode] = fndecl;
43e9d192 1271 }
8197ab94
JW
1272}
1273
1274/* Register the tuple type that contains NUM_VECTORS of the AdvSIMD type
1275 indexed by TYPE_INDEX. */
1276static void
1277register_tuple_type (unsigned int num_vectors, unsigned int type_index)
1278{
1279 aarch64_simd_type_info *type = &aarch64_simd_types[type_index];
1280
1281 /* Synthesize the name of the user-visible vector tuple type. */
1282 const char *vector_type_name = type->name;
1283 char tuple_type_name[sizeof ("bfloat16x4x2_t")];
1284 snprintf (tuple_type_name, sizeof (tuple_type_name), "%.*sx%d_t",
1285 (int) strlen (vector_type_name) - 4, vector_type_name + 2,
1286 num_vectors);
1287 tuple_type_name[0] = TOLOWER (tuple_type_name[0]);
1288
1289 tree vector_type = type->itype;
1290 tree array_type = build_array_type_nelts (vector_type, num_vectors);
66f206b8
JW
1291 if (type->mode == DImode)
1292 {
1293 if (num_vectors == 2)
1294 SET_TYPE_MODE (array_type, V2x1DImode);
1295 else if (num_vectors == 3)
1296 SET_TYPE_MODE (array_type, V3x1DImode);
1297 else if (num_vectors == 4)
1298 SET_TYPE_MODE (array_type, V4x1DImode);
1299 }
1300
8197ab94 1301 unsigned int alignment
14814e20
RS
1302 = known_eq (GET_MODE_SIZE (type->mode), 16) ? 128 : 64;
1303 machine_mode tuple_mode = TYPE_MODE_RAW (array_type);
1304 gcc_assert (VECTOR_MODE_P (tuple_mode)
1305 && TYPE_MODE (array_type) == tuple_mode
8197ab94
JW
1306 && TYPE_ALIGN (array_type) == alignment);
1307
1308 tree field = build_decl (input_location, FIELD_DECL,
1309 get_identifier ("val"), array_type);
1310
1311 tree t = lang_hooks.types.simulate_record_decl (input_location,
1312 tuple_type_name,
1313 make_array_slice (&field,
1314 1));
1315 gcc_assert (TYPE_MODE_RAW (t) == TYPE_MODE (t)
14814e20
RS
1316 && (flag_pack_struct
1317 || maximum_field_alignment
1318 || (TYPE_MODE_RAW (t) == tuple_mode
1319 && TYPE_ALIGN (t) == alignment)));
1320
1321 aarch64_simd_tuple_modes[type_index][num_vectors - 2] = tuple_mode;
1322 aarch64_simd_tuple_types[type_index][num_vectors - 2] = t;
8197ab94
JW
1323}
1324
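/* Standalone sketch, not part of the original file, of the name
   synthesis above, using the "__"-prefixed internal name registered for
   Int8x8_t and num_vectors == 2 (plain tolower stands in for GCC's
   TOLOWER macro):  */
#include <stdio.h>
#include <string.h>
#include <ctype.h>

int
main (void)
{
  const char *vector_type_name = "__Int8x8_t";	/* type->name */
  char tuple_type_name[sizeof ("bfloat16x4x2_t")];
  /* Drop the "__" prefix and the "_t" suffix, then append "x<N>_t".  */
  snprintf (tuple_type_name, sizeof (tuple_type_name), "%.*sx%d_t",
	    (int) strlen (vector_type_name) - 4, vector_type_name + 2, 2);
  tuple_type_name[0] = tolower ((unsigned char) tuple_type_name[0]);
  printf ("%s\n", tuple_type_name);	/* Prints "int8x8x2_t".  */
  return 0;
}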
1325static bool
1326aarch64_scalar_builtin_type_p (aarch64_simd_type t)
1327{
1328 return (t == Poly8_t || t == Poly16_t || t == Poly64_t || t == Poly128_t);
1329}
1330
14814e20
RS
1331/* Enable AARCH64_FL_* flags EXTRA_FLAGS on top of the base Advanced SIMD
1332 set. */
1333aarch64_simd_switcher::aarch64_simd_switcher (unsigned int extra_flags)
1334 : m_old_isa_flags (aarch64_isa_flags),
1335 m_old_general_regs_only (TARGET_GENERAL_REGS_ONLY)
1336{
1337 /* Changing the ISA flags should be enough here. We shouldn't need to
1338 pay the compile-time cost of a full target switch. */
1339 aarch64_isa_flags = AARCH64_FL_FP | AARCH64_FL_SIMD | extra_flags;
1340 global_options.x_target_flags &= ~MASK_GENERAL_REGS_ONLY;
1341}
1342
1343aarch64_simd_switcher::~aarch64_simd_switcher ()
1344{
1345 if (m_old_general_regs_only)
1346 global_options.x_target_flags |= MASK_GENERAL_REGS_ONLY;
1347 aarch64_isa_flags = m_old_isa_flags;
1348}
1349
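/* Minimal usage sketch, mirroring handle_arm_neon_h below: code that
   needs the Advanced SIMD ISA temporarily enabled constructs a switcher
   on the stack, and the destructor restores the previous flags (the
   helper name here is hypothetical).  */
static void
aarch64_with_simd_enabled_sketch (void)
{
  aarch64_simd_switcher simd;
  /* ...register Advanced SIMD builtins here...  */
}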
8197ab94
JW
1350/* Implement #pragma GCC aarch64 "arm_neon.h". */
1351void
1352handle_arm_neon_h (void)
1353{
14814e20
RS
1354 aarch64_simd_switcher simd;
1355
1356 /* Register the AdvSIMD vector tuple types. */
1357 for (unsigned int i = 0; i < ARM_NEON_H_TYPES_LAST; i++)
1358 for (unsigned int count = 2; count <= 4; ++count)
1359 if (!aarch64_scalar_builtin_type_p (aarch64_simd_types[i].type))
1360 register_tuple_type (count, i);
1361
1362 aarch64_init_simd_builtin_functions (true);
1363}
1364
1365void
1366aarch64_init_simd_builtins (void)
1367{
1368 if (aarch64_simd_builtins_initialized_p)
1369 return;
1370
1371 aarch64_simd_builtins_initialized_p = true;
1372
1373 aarch64_init_simd_builtin_types ();
1374
1375 /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
 1376 Therefore we need to preserve the old __builtin scalar types. They can
 1377 be removed once all the intrinsics become strongly typed using the
 1378 qualifier system. */
1379 aarch64_init_simd_builtin_scalar_types ();
1380
1381 aarch64_init_simd_builtin_functions (false);
1382 if (in_lto_p)
1383 handle_arm_neon_h ();
280d970b 1384
1385 /* Initialize the remaining fcmla_laneq intrinsics. */
1386 aarch64_init_fcmla_laneq_builtins ();
1387}
1388
1389static void
1390aarch64_init_crc32_builtins ()
1391{
f9d53c27 1392 tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
1393 unsigned int i = 0;
1394
1395 for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
1396 {
1397 aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
1398 tree argtype = aarch64_simd_builtin_std_type (d->mode,
1399 qualifier_unsigned);
5d357f26 1400 tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
1401 tree attrs = aarch64_get_attributes (FLAG_NONE, d->mode);
1402 tree fndecl
1403 = aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);
1404
1405 aarch64_builtin_decls[d->fcode] = fndecl;
1406 }
1407}
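/* Illustrative shape of the declarations registered above, assuming the
 usual mode-suffixed naming of the aarch64_crc_builtin_data entries:

 uint32_t __builtin_aarch64_crc32b (uint32_t, uint8_t);

 i.e. an unsigned SImode accumulator plus one unsigned argument in the
 entry's mode. */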
1408
1409/* Add builtins for reciprocal square root. */
1410
1411void
1412aarch64_init_builtin_rsqrt (void)
1413{
1414 tree fndecl = NULL;
1415 tree ftype = NULL;
1416
1417 tree V2SF_type_node = build_vector_type (float_type_node, 2);
1418 tree V2DF_type_node = build_vector_type (double_type_node, 2);
1419 tree V4SF_type_node = build_vector_type (float_type_node, 4);
1420
1421 struct builtin_decls_data
1422 {
1423 tree type_node;
1424 const char *builtin_name;
1425 int function_code;
1426 };
1427
1428 builtin_decls_data bdda[] =
1429 {
1430 { double_type_node, "__builtin_aarch64_rsqrt_df", AARCH64_BUILTIN_RSQRT_DF },
1431 { float_type_node, "__builtin_aarch64_rsqrt_sf", AARCH64_BUILTIN_RSQRT_SF },
1432 { V2DF_type_node, "__builtin_aarch64_rsqrt_v2df", AARCH64_BUILTIN_RSQRT_V2DF },
1433 { V2SF_type_node, "__builtin_aarch64_rsqrt_v2sf", AARCH64_BUILTIN_RSQRT_V2SF },
1434 { V4SF_type_node, "__builtin_aarch64_rsqrt_v4sf", AARCH64_BUILTIN_RSQRT_V4SF }
1435 };
1436
1437 builtin_decls_data *bdd = bdda;
ca32b29e 1438 builtin_decls_data *bdd_end = bdd + (ARRAY_SIZE (bdda));
1439
1440 for (; bdd < bdd_end; bdd++)
1441 {
1442 ftype = build_function_type_list (bdd->type_node, bdd->type_node, NULL_TREE);
079c23cf 1443 tree attrs = aarch64_get_attributes (FLAG_FP, TYPE_MODE (bdd->type_node));
6d4d616a 1444 fndecl = aarch64_general_add_builtin (bdd->builtin_name,
079c23cf 1445 ftype, bdd->function_code, attrs);
1446 aarch64_builtin_decls[bdd->function_code] = fndecl;
1447 }
1448}
1449
1450/* Initialize the backend types that support the user-visible __fp16
1451 type, also initialize a pointer to that type, to be used when
1452 forming HFAs. */
1453
1454static void
1455aarch64_init_fp16_types (void)
1456{
1457 aarch64_fp16_type_node = make_node (REAL_TYPE);
1458 TYPE_PRECISION (aarch64_fp16_type_node) = 16;
1459 layout_type (aarch64_fp16_type_node);
1460
1461 (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node, "__fp16");
1462 aarch64_fp16_ptr_type_node = build_pointer_type (aarch64_fp16_type_node);
1463}
1464
1465/* Initialize the backend REAL_TYPE type supporting bfloat types. */
1466static void
1467aarch64_init_bf16_types (void)
1468{
1469 aarch64_bf16_type_node = make_node (REAL_TYPE);
1470 TYPE_PRECISION (aarch64_bf16_type_node) = 16;
1471 SET_TYPE_MODE (aarch64_bf16_type_node, BFmode);
1472 layout_type (aarch64_bf16_type_node);
1473
1474 lang_hooks.types.register_builtin_type (aarch64_bf16_type_node, "__bf16");
1475 aarch64_bf16_ptr_type_node = build_pointer_type (aarch64_bf16_type_node);
1476}
1477
1478/* Pointer authentication builtins that will become NOPs on legacy platforms.
1479 Currently, these builtins are for internal use only (libgcc EH unwinder). */
1480
1481void
1482aarch64_init_pauth_hint_builtins (void)
1483{
1484 /* Pointer Authentication builtins. */
1485 tree ftype_pointer_auth
1486 = build_function_type_list (ptr_type_node, ptr_type_node,
1487 unsigned_intDI_type_node, NULL_TREE);
1488 tree ftype_pointer_strip
1489 = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
1490
1491 aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIA1716]
1492 = aarch64_general_add_builtin ("__builtin_aarch64_autia1716",
1493 ftype_pointer_auth,
1494 AARCH64_PAUTH_BUILTIN_AUTIA1716);
312492bd 1495 aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIA1716]
1496 = aarch64_general_add_builtin ("__builtin_aarch64_pacia1716",
1497 ftype_pointer_auth,
1498 AARCH64_PAUTH_BUILTIN_PACIA1716);
8fc16d72 1499 aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIB1716]
1500 = aarch64_general_add_builtin ("__builtin_aarch64_autib1716",
1501 ftype_pointer_auth,
1502 AARCH64_PAUTH_BUILTIN_AUTIB1716);
8fc16d72 1503 aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIB1716]
1504 = aarch64_general_add_builtin ("__builtin_aarch64_pacib1716",
1505 ftype_pointer_auth,
1506 AARCH64_PAUTH_BUILTIN_PACIB1716);
312492bd 1507 aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_XPACLRI]
1508 = aarch64_general_add_builtin ("__builtin_aarch64_xpaclri",
1509 ftype_pointer_strip,
1510 AARCH64_PAUTH_BUILTIN_XPACLRI);
1511}
1512
1513/* Initialize the transactional memory extension (TME) builtins. */
1514static void
1515aarch64_init_tme_builtins (void)
1516{
1517 tree ftype_uint64_void
1518 = build_function_type_list (uint64_type_node, NULL);
1519 tree ftype_void_void
1520 = build_function_type_list (void_type_node, NULL);
1521 tree ftype_void_uint64
1522 = build_function_type_list (void_type_node, uint64_type_node, NULL);
1523
1524 aarch64_builtin_decls[AARCH64_TME_BUILTIN_TSTART]
1525 = aarch64_general_add_builtin ("__builtin_aarch64_tstart",
1526 ftype_uint64_void,
1527 AARCH64_TME_BUILTIN_TSTART);
89626179 1528 aarch64_builtin_decls[AARCH64_TME_BUILTIN_TTEST]
1529 = aarch64_general_add_builtin ("__builtin_aarch64_ttest",
1530 ftype_uint64_void,
1531 AARCH64_TME_BUILTIN_TTEST);
89626179 1532 aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCOMMIT]
1533 = aarch64_general_add_builtin ("__builtin_aarch64_tcommit",
1534 ftype_void_void,
1535 AARCH64_TME_BUILTIN_TCOMMIT);
89626179 1536 aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCANCEL]
1537 = aarch64_general_add_builtin ("__builtin_aarch64_tcancel",
1538 ftype_void_uint64,
1539 AARCH64_TME_BUILTIN_TCANCEL);
1540}
1541
1542/* Add builtins for Random Number instructions. */
1543
1544static void
1545aarch64_init_rng_builtins (void)
1546{
1547 tree unsigned_ptr_type = build_pointer_type (unsigned_intDI_type_node);
1548 tree ftype
1549 = build_function_type_list (integer_type_node, unsigned_ptr_type, NULL);
1550 aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDR]
1551 = aarch64_general_add_builtin ("__builtin_aarch64_rndr", ftype,
1552 AARCH64_BUILTIN_RNG_RNDR);
1553 aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDRRS]
1554 = aarch64_general_add_builtin ("__builtin_aarch64_rndrrs", ftype,
1555 AARCH64_BUILTIN_RNG_RNDRRS);
1556}
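/* A hypothetical use of the declarations above (user code would normally go
 through the ACLE __rndr/__rndrrs wrappers):

 uint64_t val;
 int failed = __builtin_aarch64_rndr (&val);

 where a return value of 0 is expected to indicate that VAL holds a valid
 random number. */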
1557
1558/* Initialize the memory tagging extension (MTE) builtins. */
1559struct
1560{
1561 tree ftype;
1562 enum insn_code icode;
1563} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
1564 AARCH64_MEMTAG_BUILTIN_START - 1];
1565
1566static void
1567aarch64_init_memtag_builtins (void)
1568{
1569 tree fntype = NULL;
1570
1571#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
1572 aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
1573 = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
1574 T, AARCH64_MEMTAG_BUILTIN_##F); \
1575 aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
1576 AARCH64_MEMTAG_BUILTIN_START - 1] = \
1577 {T, CODE_FOR_##I};
1578
1579 fntype = build_function_type_list (ptr_type_node, ptr_type_node,
1580 uint64_type_node, NULL);
1581 AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);
1582
1583 fntype = build_function_type_list (uint64_type_node, ptr_type_node,
1584 uint64_type_node, NULL);
1585 AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);
1586
1587 fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
1588 ptr_type_node, NULL);
1589 AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);
1590
1591 fntype = build_function_type_list (ptr_type_node, ptr_type_node,
1592 unsigned_type_node, NULL);
1593 AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);
1594
1595 fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
1596 AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);
1597
1598 fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
1599 AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);
1600
1601#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
1602}
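/* For reference, a use such as
 AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype)
 registers "__builtin_aarch64_memtag_irg" and records {fntype, CODE_FOR_irg}
 in the corresponding aarch64_memtag_builtin_data slot. */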
c5dc215d 1603
1604/* Add builtins for Load/store 64 Byte instructions. */
1605
1606typedef struct
1607{
1608 const char *name;
1609 unsigned int code;
1610 tree type;
1611} ls64_builtins_data;
1612
1613static GTY(()) tree ls64_arm_data_t = NULL_TREE;
1614
1615static void
1616aarch64_init_ls64_builtins_types (void)
1617{
1618 /* Synthesize:
1619
1620 typedef struct {
1621 uint64_t val[8];
1622 } __arm_data512_t; */
1623 const char *tuple_type_name = "__arm_data512_t";
1624 tree node_type = get_typenode_from_name (UINT64_TYPE);
1625 tree array_type = build_array_type_nelts (node_type, 8);
1626 SET_TYPE_MODE (array_type, V8DImode);
1627
1628 gcc_assert (TYPE_MODE_RAW (array_type) == TYPE_MODE (array_type));
1629 gcc_assert (TYPE_ALIGN (array_type) == 64);
1630
1631 tree field = build_decl (input_location, FIELD_DECL,
1632 get_identifier ("val"), array_type);
1633
1634 ls64_arm_data_t = lang_hooks.types.simulate_record_decl (input_location,
1635 tuple_type_name,
1636 make_array_slice (&field, 1));
1637
1638 gcc_assert (TYPE_MODE (ls64_arm_data_t) == V8DImode);
1639 gcc_assert (TYPE_MODE_RAW (ls64_arm_data_t) == TYPE_MODE (ls64_arm_data_t));
1640 gcc_assert (TYPE_ALIGN (ls64_arm_data_t) == 64);
1641}
1642
1643static void
1644aarch64_init_ls64_builtins (void)
1645{
1646 aarch64_init_ls64_builtins_types ();
1647
1648 ls64_builtins_data data[4] = {
1649 {"__builtin_aarch64_ld64b", AARCH64_LS64_BUILTIN_LD64B,
1650 build_function_type_list (ls64_arm_data_t,
1651 const_ptr_type_node, NULL_TREE)},
1652 {"__builtin_aarch64_st64b", AARCH64_LS64_BUILTIN_ST64B,
1653 build_function_type_list (void_type_node, ptr_type_node,
1654 ls64_arm_data_t, NULL_TREE)},
1655 {"__builtin_aarch64_st64bv", AARCH64_LS64_BUILTIN_ST64BV,
1656 build_function_type_list (uint64_type_node, ptr_type_node,
1657 ls64_arm_data_t, NULL_TREE)},
1658 {"__builtin_aarch64_st64bv0", AARCH64_LS64_BUILTIN_ST64BV0,
1659 build_function_type_list (uint64_type_node, ptr_type_node,
1660 ls64_arm_data_t, NULL_TREE)},
1661 };
1662
1663 for (size_t i = 0; i < ARRAY_SIZE (data); ++i)
1664 aarch64_builtin_decls[data[i].code]
1665 = aarch64_general_add_builtin (data[i].name, data[i].type, data[i].code);
1666}
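/* A hedged usage sketch of the LS64 builtins registered above, with SRC and
 DST standing for hypothetical device addresses:

 __arm_data512_t data = __builtin_aarch64_ld64b (src);
 __builtin_aarch64_st64b (dst, data);
*/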
1667
1668static void
1669aarch64_init_data_intrinsics (void)
1670{
1671 tree uint32_fntype = build_function_type_list (uint32_type_node,
1672 uint32_type_node, NULL_TREE);
1673 tree ulong_fntype = build_function_type_list (long_unsigned_type_node,
1674 long_unsigned_type_node,
1675 NULL_TREE);
1676 tree uint64_fntype = build_function_type_list (uint64_type_node,
1677 uint64_type_node, NULL_TREE);
1678 aarch64_builtin_decls[AARCH64_REV16]
1679 = aarch64_general_add_builtin ("__builtin_aarch64_rev16", uint32_fntype,
1680 AARCH64_REV16);
1681 aarch64_builtin_decls[AARCH64_REV16L]
1682 = aarch64_general_add_builtin ("__builtin_aarch64_rev16l", ulong_fntype,
1683 AARCH64_REV16L);
1684 aarch64_builtin_decls[AARCH64_REV16LL]
1685 = aarch64_general_add_builtin ("__builtin_aarch64_rev16ll", uint64_fntype,
1686 AARCH64_REV16LL);
1687 aarch64_builtin_decls[AARCH64_RBIT]
1688 = aarch64_general_add_builtin ("__builtin_aarch64_rbit", uint32_fntype,
1689 AARCH64_RBIT);
1690 aarch64_builtin_decls[AARCH64_RBITL]
1691 = aarch64_general_add_builtin ("__builtin_aarch64_rbitl", ulong_fntype,
1692 AARCH64_RBITL);
1693 aarch64_builtin_decls[AARCH64_RBITLL]
1694 = aarch64_general_add_builtin ("__builtin_aarch64_rbitll", uint64_fntype,
1695 AARCH64_RBITLL);
1696}
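/* For example, __builtin_aarch64_rev16 takes and returns a uint32_t,
 swapping the bytes within each 16-bit halfword; the "l" and "ll" variants
 declared above do the same on unsigned long and uint64_t operands. */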
1697
1698/* Implement #pragma GCC aarch64 "arm_acle.h". */
1699void
1700handle_arm_acle_h (void)
1701{
1702 if (TARGET_LS64)
1703 aarch64_init_ls64_builtins ();
1704}
1705
0d7e5fa6 1706/* Initialize the FPSR and FPCR getters and setters. */
c5dc215d 1707
1708static void
1709aarch64_init_fpsr_fpcr_builtins (void)
43e9d192 1710{
0d7e5fa6 1711 tree ftype_set
aa87aced 1712 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
0d7e5fa6 1713 tree ftype_get
1714 = build_function_type_list (unsigned_type_node, NULL);
1715
1716 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
6d4d616a 1717 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr",
0d7e5fa6 1718 ftype_get,
6d4d616a 1719 AARCH64_BUILTIN_GET_FPCR);
aa87aced 1720 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
6d4d616a 1721 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr",
0d7e5fa6 1722 ftype_set,
6d4d616a 1723 AARCH64_BUILTIN_SET_FPCR);
aa87aced 1724 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
6d4d616a 1725 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr",
0d7e5fa6 1726 ftype_get,
6d4d616a 1727 AARCH64_BUILTIN_GET_FPSR);
aa87aced 1728 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
6d4d616a 1729 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr",
0d7e5fa6 1730 ftype_set,
6d4d616a 1731 AARCH64_BUILTIN_SET_FPSR);
aa87aced 1732
1733 ftype_set
1734 = build_function_type_list (void_type_node, long_long_unsigned_type_node,
1735 NULL);
1736 ftype_get
1737 = build_function_type_list (long_long_unsigned_type_node, NULL);
1738
1739 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR64]
1740 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr64",
1741 ftype_get,
1742 AARCH64_BUILTIN_GET_FPCR64);
1743 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR64]
1744 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr64",
1745 ftype_set,
1746 AARCH64_BUILTIN_SET_FPCR64);
1747 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR64]
1748 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr64",
1749 ftype_get,
1750 AARCH64_BUILTIN_GET_FPSR64);
1751 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR64]
1752 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr64",
1753 ftype_set,
1754 AARCH64_BUILTIN_SET_FPSR64);
1755}
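/* A minimal round-trip sketch using the declarations above:

 unsigned int cr = __builtin_aarch64_get_fpcr ();
 __builtin_aarch64_set_fpcr (cr);

 and likewise for the FPSR accessors and the 64-bit variants. */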
1756
1757/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group. */
1758
1759void
1760aarch64_general_init_builtins (void)
1761{
1762 aarch64_init_fpsr_fpcr_builtins ();
1763
1b62ed4f 1764 aarch64_init_fp16_types ();
c2ec330c 1765
1766 aarch64_init_bf16_types ();
1767
1768 {
1769 aarch64_simd_switcher simd;
280d970b 1770 aarch64_init_simd_builtins ();
14814e20 1771 }
1772
1773 aarch64_init_crc32_builtins ();
a6fc00da 1774 aarch64_init_builtin_rsqrt ();
c5dc215d 1775 aarch64_init_rng_builtins ();
eb966d39 1776 aarch64_init_data_intrinsics ();
312492bd 1777
1778 tree ftype_jcvt
1779 = build_function_type_list (intSI_type_node, double_type_node, NULL);
1780 aarch64_builtin_decls[AARCH64_JSCVT]
1781 = aarch64_general_add_builtin ("__builtin_aarch64_jcvtzs", ftype_jcvt,
1782 AARCH64_JSCVT);
e1d5d19e 1783
1784 /* Initialize pointer authentication builtins which are backed by instructions
1785 in NOP encoding space.
1786
 1787 NOTE: these builtins are supposed to be used by the libgcc unwinder
 1788 only. Since return-address signing is not supported under ILP32, we
 1789 don't register them for that ABI. */
1790 if (!TARGET_ILP32)
1791 aarch64_init_pauth_hint_builtins ();
1792
1793 if (TARGET_TME)
1794 aarch64_init_tme_builtins ();
1795
1796 if (TARGET_MEMTAG)
1797 aarch64_init_memtag_builtins ();
1798}
1799
6d4d616a 1800/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group. */
119103ca 1801tree
6d4d616a 1802aarch64_general_builtin_decl (unsigned code, bool)
1803{
1804 if (code >= AARCH64_BUILTIN_MAX)
1805 return error_mark_node;
1806
1807 return aarch64_builtin_decls[code];
1808}
1809
1810typedef enum
1811{
1812 SIMD_ARG_COPY_TO_REG,
1813 SIMD_ARG_CONSTANT,
2a49c16d 1814 SIMD_ARG_LANE_INDEX,
4d0a0237 1815 SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX,
9d63f43b 1816 SIMD_ARG_LANE_PAIR_INDEX,
8c197c85 1817 SIMD_ARG_LANE_QUADTUP_INDEX,
1818 SIMD_ARG_STOP
1819} builtin_simd_arg;
1820
e95a988a 1821
1822static rtx
1823aarch64_simd_expand_args (rtx target, int icode, int have_retval,
4d0a0237 1824 tree exp, builtin_simd_arg *args,
b8506a8a 1825 machine_mode builtin_mode)
43e9d192 1826{
43e9d192 1827 rtx pat;
1828 rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand. */
1829 int opc = 0;
1830
1831 if (have_retval)
1832 {
1833 machine_mode tmode = insn_data[icode].operand[0].mode;
1834 if (!target
43e9d192 1835 || GET_MODE (target) != tmode
1836 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1837 target = gen_reg_rtx (tmode);
1838 op[opc++] = target;
1839 }
43e9d192 1840
1841 for (;;)
1842 {
d9e80f49 1843 builtin_simd_arg thisarg = args[opc - have_retval];
1844
1845 if (thisarg == SIMD_ARG_STOP)
1846 break;
1847 else
1848 {
d9e80f49 1849 tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
b8506a8a 1850 machine_mode mode = insn_data[icode].operand[opc].mode;
d9e80f49 1851 op[opc] = expand_normal (arg);
1852
1853 switch (thisarg)
1854 {
1855 case SIMD_ARG_COPY_TO_REG:
1856 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1857 op[opc] = convert_memory_address (Pmode, op[opc]);
1858 /*gcc_assert (GET_MODE (op[opc]) == mode); */
1859 if (!(*insn_data[icode].operand[opc].predicate)
1860 (op[opc], mode))
1861 op[opc] = copy_to_mode_reg (mode, op[opc]);
1862 break;
1863
1864 case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
1865 gcc_assert (opc > 1);
1866 if (CONST_INT_P (op[opc]))
1867 {
1868 unsigned int nunits
1869 = GET_MODE_NUNITS (builtin_mode).to_constant ();
1870 aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
4d0a0237 1871 /* Keep to GCC-vector-extension lane indices in the RTL. */
1872 op[opc] = aarch64_endian_lane_rtx (builtin_mode,
1873 INTVAL (op[opc]));
1874 }
1875 goto constant_arg;
1876
1877 case SIMD_ARG_LANE_INDEX:
1878 /* Must be a previous operand into which this is an index. */
1879 gcc_assert (opc > 0);
1880 if (CONST_INT_P (op[opc]))
2a49c16d 1881 {
d9e80f49 1882 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
1883 unsigned int nunits
1884 = GET_MODE_NUNITS (vmode).to_constant ();
1885 aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
2a49c16d 1886 /* Keep to GCC-vector-extension lane indices in the RTL. */
7ac29c0f 1887 op[opc] = aarch64_endian_lane_rtx (vmode, INTVAL (op[opc]));
2a49c16d 1888 }
1889 /* If the lane index isn't a constant then error out. */
1890 goto constant_arg;
1891
1892 case SIMD_ARG_LANE_PAIR_INDEX:
1893 /* Must be a previous operand into which this is an index and
1894 index is restricted to nunits / 2. */
1895 gcc_assert (opc > 0);
1896 if (CONST_INT_P (op[opc]))
1897 {
1898 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
1899 unsigned int nunits
1900 = GET_MODE_NUNITS (vmode).to_constant ();
1901 aarch64_simd_lane_bounds (op[opc], 0, nunits / 2, exp);
1902 /* Keep to GCC-vector-extension lane indices in the RTL. */
1903 int lane = INTVAL (op[opc]);
1904 op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 2, lane),
1905 SImode);
9d63f43b 1906 }
1907 /* If the lane index isn't a constant then error out. */
1908 goto constant_arg;
1909 case SIMD_ARG_LANE_QUADTUP_INDEX:
1910 /* Must be a previous operand into which this is an index and
1911 index is restricted to nunits / 4. */
1912 gcc_assert (opc > 0);
1913 if (CONST_INT_P (op[opc]))
1914 {
1915 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
1916 unsigned int nunits
1917 = GET_MODE_NUNITS (vmode).to_constant ();
1918 aarch64_simd_lane_bounds (op[opc], 0, nunits / 4, exp);
1919 /* Keep to GCC-vector-extension lane indices in the RTL. */
1920 int lane = INTVAL (op[opc]);
1921 op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 4, lane),
1922 SImode);
1923 }
1924 /* If the lane index isn't a constant then error out. */
1925 goto constant_arg;
43e9d192 1926 case SIMD_ARG_CONSTANT:
4d0a0237 1927constant_arg:
1928 if (!(*insn_data[icode].operand[opc].predicate)
1929 (op[opc], mode))
d5a29419 1930 {
1931 error_at (EXPR_LOCATION (exp),
1932 "argument %d must be a constant immediate",
1933 opc + 1 - have_retval);
1934 return const0_rtx;
1935 }
1936 break;
1937
1938 case SIMD_ARG_STOP:
1939 gcc_unreachable ();
1940 }
1941
d9e80f49 1942 opc++;
1943 }
1944 }
1945
1946 switch (opc)
1947 {
1948 case 1:
1949 pat = GEN_FCN (icode) (op[0]);
1950 break;
43e9d192 1951
1952 case 2:
1953 pat = GEN_FCN (icode) (op[0], op[1]);
1954 break;
43e9d192 1955
1956 case 3:
1957 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
1958 break;
43e9d192 1959
1960 case 4:
1961 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
1962 break;
43e9d192 1963
1964 case 5:
1965 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
1966 break;
43e9d192 1967
1968 case 6:
1969 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
1970 break;
43e9d192 1971
1972 default:
1973 gcc_unreachable ();
1974 }
1975
1976 if (!pat)
d5a29419 1977 return NULL_RTX;
1978
1979 emit_insn (pat);
1980
1981 return target;
1982}
1983
1984/* Expand an AArch64 AdvSIMD builtin (intrinsic). */
1985rtx
1986aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
1987{
1988 if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
1989 {
1990 rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
1991 rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
1992 if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
1993 && UINTVAL (elementsize) != 0
1994 && UINTVAL (totalsize) != 0)
1995 {
1996 rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
1997 if (CONST_INT_P (lane_idx))
1998 aarch64_simd_lane_bounds (lane_idx, 0,
1999 UINTVAL (totalsize)
2000 / UINTVAL (elementsize),
2001 exp);
2002 else
2003 error_at (EXPR_LOCATION (exp),
2004 "lane index must be a constant immediate");
9c4f25cc 2005 }
661fce82 2006 else
62e43587 2007 error_at (EXPR_LOCATION (exp),
58385f6a 2008 "total size and element size must be a nonzero "
62e43587 2009 "constant immediate");
2010 /* Don't generate any RTL. */
2011 return const0_rtx;
2012 }
342be7f7 2013 aarch64_simd_builtin_datum *d =
661fce82 2014 &aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
342be7f7 2015 enum insn_code icode = d->code;
0ff2bf46 2016 builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
2017 int num_args = insn_data[d->code].n_operands;
2018 int is_void = 0;
2019 int k;
43e9d192 2020
b5828b4b 2021 is_void = !!(d->qualifiers[0] & qualifier_void);
43e9d192 2022
2023 num_args += is_void;
2024
2025 for (k = 1; k < num_args; k++)
2026 {
2027 /* We have four arrays of data, each indexed in a different fashion.
2028 qualifiers - element 0 always describes the function return type.
 2029 operands - element 0 is either the operand for the return value (if
2030 the function has a non-void return type) or the operand for the
2031 first argument.
2032 expr_args - element 0 always holds the first argument.
2033 args - element 0 is always used for the return type. */
2034 int qualifiers_k = k;
2035 int operands_k = k - is_void;
2036 int expr_args_k = k - 1;
2037
2038 if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
2039 args[k] = SIMD_ARG_LANE_INDEX;
2040 else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
2041 args[k] = SIMD_ARG_LANE_PAIR_INDEX;
2042 else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
2043 args[k] = SIMD_ARG_LANE_QUADTUP_INDEX;
2044 else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
2045 args[k] = SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX;
2a49c16d 2046 else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
2047 args[k] = SIMD_ARG_CONSTANT;
2048 else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
2049 {
2050 rtx arg
2051 = expand_normal (CALL_EXPR_ARG (exp,
2052 (expr_args_k)));
2053 /* Handle constants only if the predicate allows it. */
2054 bool op_const_int_p =
2055 (CONST_INT_P (arg)
2056 && (*insn_data[icode].operand[operands_k].predicate)
2057 (arg, insn_data[icode].operand[operands_k].mode));
2058 args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
2059 }
2060 else
2061 args[k] = SIMD_ARG_COPY_TO_REG;
43e9d192 2062
43e9d192 2063 }
2064 args[k] = SIMD_ARG_STOP;
2065
2066 /* The interface to aarch64_simd_expand_args expects a 0 if
2067 the function is void, and a 1 if it is not. */
2068 return aarch64_simd_expand_args
4d0a0237 2069 (target, icode, !is_void, exp, &args[1], d->mode);
43e9d192 2070}
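/* Note on the lane-check path above: arm_neon.h's __AARCH64_LANE_CHECK
 macro emits calls of the form
 __builtin_aarch64_im_lane_boundsi (sizeof (vec), sizeof (vec[0]), lane)
 and, once the bounds have been checked here, no RTL is generated. */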
342be7f7 2071
2072rtx
2073aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
2074{
2075 rtx pat;
2076 aarch64_crc_builtin_datum *d
2077 = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
2078 enum insn_code icode = d->icode;
2079 tree arg0 = CALL_EXPR_ARG (exp, 0);
2080 tree arg1 = CALL_EXPR_ARG (exp, 1);
2081 rtx op0 = expand_normal (arg0);
2082 rtx op1 = expand_normal (arg1);
2083 machine_mode tmode = insn_data[icode].operand[0].mode;
2084 machine_mode mode0 = insn_data[icode].operand[1].mode;
2085 machine_mode mode1 = insn_data[icode].operand[2].mode;
2086
2087 if (! target
2088 || GET_MODE (target) != tmode
2089 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2090 target = gen_reg_rtx (tmode);
2091
2092 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2093 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
2094
2095 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2096 op0 = copy_to_mode_reg (mode0, op0);
2097 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2098 op1 = copy_to_mode_reg (mode1, op1);
2099
2100 pat = GEN_FCN (icode) (target, op0, op1);
2101 if (!pat)
2102 return NULL_RTX;
2103
2104 emit_insn (pat);
2105 return target;
2106}
2107
2108/* Function to expand reciprocal square root builtins. */
2109
2110static rtx
2111aarch64_expand_builtin_rsqrt (int fcode, tree exp, rtx target)
2112{
2113 tree arg0 = CALL_EXPR_ARG (exp, 0);
2114 rtx op0 = expand_normal (arg0);
2115
2116 rtx (*gen) (rtx, rtx);
2117
2118 switch (fcode)
2119 {
2120 case AARCH64_BUILTIN_RSQRT_DF:
ee62a5a6 2121 gen = gen_rsqrtdf2;
2122 break;
2123 case AARCH64_BUILTIN_RSQRT_SF:
ee62a5a6 2124 gen = gen_rsqrtsf2;
2125 break;
2126 case AARCH64_BUILTIN_RSQRT_V2DF:
ee62a5a6 2127 gen = gen_rsqrtv2df2;
2128 break;
2129 case AARCH64_BUILTIN_RSQRT_V2SF:
ee62a5a6 2130 gen = gen_rsqrtv2sf2;
2131 break;
2132 case AARCH64_BUILTIN_RSQRT_V4SF:
ee62a5a6 2133 gen = gen_rsqrtv4sf2;
2134 break;
2135 default: gcc_unreachable ();
2136 }
2137
2138 if (!target)
2139 target = gen_reg_rtx (GET_MODE (op0));
2140
2141 emit_insn (gen (target, op0));
2142
2143 return target;
2144}
2145
2146/* Expand a FCMLA lane expression EXP with code FCODE and
2147 result going to TARGET if that is convenient. */
2148
2149rtx
2150aarch64_expand_fcmla_builtin (tree exp, rtx target, int fcode)
2151{
2152 int bcode = fcode - AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE - 1;
2153 aarch64_fcmla_laneq_builtin_datum* d
2154 = &aarch64_fcmla_lane_builtin_data[bcode];
2155 machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
2156 rtx op0 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 0)));
2157 rtx op1 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 1)));
2158 rtx op2 = force_reg (quadmode, expand_normal (CALL_EXPR_ARG (exp, 2)));
2159 tree tmp = CALL_EXPR_ARG (exp, 3);
2160 rtx lane_idx = expand_expr (tmp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2161
2162 /* Validate that the lane index is a constant. */
2163 if (!CONST_INT_P (lane_idx))
2164 {
2165 error_at (EXPR_LOCATION (exp),
2166 "argument %d must be a constant immediate", 4);
2167 return const0_rtx;
2168 }
2169
2170 /* Validate that the index is within the expected range. */
2171 int nunits = GET_MODE_NUNITS (quadmode).to_constant ();
2172 aarch64_simd_lane_bounds (lane_idx, 0, nunits / 2, exp);
2173
2174 /* Generate the correct register and mode. */
2175 int lane = INTVAL (lane_idx);
2176
2177 if (lane < nunits / 4)
2178 op2 = simplify_gen_subreg (d->mode, op2, quadmode,
2179 subreg_lowpart_offset (d->mode, quadmode));
2180 else
2181 {
2182 /* Select the upper 64 bits, either a V2SF or V4HF, this however
2183 is quite messy, as the operation required even though simple
2184 doesn't have a simple RTL pattern, and seems it's quite hard to
2185 define using a single RTL pattern. The target generic version
2186 gen_highpart_mode generates code that isn't optimal. */
2187 rtx temp1 = gen_reg_rtx (d->mode);
2188 rtx temp2 = gen_reg_rtx (DImode);
2189 temp1 = simplify_gen_subreg (d->mode, op2, quadmode,
2190 subreg_lowpart_offset (d->mode, quadmode));
9d63f43b 2191 temp1 = simplify_gen_subreg (V2DImode, temp1, d->mode, 0);
33b5a38c
TC
2192 if (BYTES_BIG_ENDIAN)
2193 emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const0_rtx));
2194 else
2195 emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const1_rtx));
2196 op2 = simplify_gen_subreg (d->mode, temp2, GET_MODE (temp2), 0);
2197
2198 /* And recalculate the index. */
2199 lane -= nunits / 4;
2200 }
2201
 2202 /* Keep to GCC-vector-extension lane indices in the RTL; only nunits / 4
 2203 lanes (the maximum in the range check) are valid, i.e. only 0-1, so we
 2204 only need to know the order within a V2 mode. */
2205 lane_idx = aarch64_endian_lane_rtx (V2DImode, lane);
2206
2207 if (!target
2208 || !REG_P (target)
2209 || GET_MODE (target) != d->mode)
9d63f43b 2210 target = gen_reg_rtx (d->mode);
2211
2212 rtx pat = NULL_RTX;
2213
2214 if (d->lane)
33b5a38c 2215 pat = GEN_FCN (d->icode) (target, op0, op1, op2, lane_idx);
2216 else
2217 pat = GEN_FCN (d->icode) (target, op0, op1, op2);
2218
2219 if (!pat)
2220 return NULL_RTX;
2221
2222 emit_insn (pat);
2223 return target;
2224}
2225
2226/* Function to expand an expression EXP which calls one of the Transactional
2227 Memory Extension (TME) builtins FCODE with the result going to TARGET. */
2228static rtx
2229aarch64_expand_builtin_tme (int fcode, tree exp, rtx target)
2230{
2231 switch (fcode)
2232 {
2233 case AARCH64_TME_BUILTIN_TSTART:
2234 target = gen_reg_rtx (DImode);
2235 emit_insn (GEN_FCN (CODE_FOR_tstart) (target));
2236 break;
2237
2238 case AARCH64_TME_BUILTIN_TTEST:
2239 target = gen_reg_rtx (DImode);
2240 emit_insn (GEN_FCN (CODE_FOR_ttest) (target));
2241 break;
2242
2243 case AARCH64_TME_BUILTIN_TCOMMIT:
2244 emit_insn (GEN_FCN (CODE_FOR_tcommit) ());
2245 break;
2246
2247 case AARCH64_TME_BUILTIN_TCANCEL:
2248 {
2249 tree arg0 = CALL_EXPR_ARG (exp, 0);
2250 rtx op0 = expand_normal (arg0);
 2251 if (CONST_INT_P (op0) && UINTVAL (op0) <= 65535)
2252 emit_insn (GEN_FCN (CODE_FOR_tcancel) (op0));
2253 else
2254 {
62e43587
MS
2255 error_at (EXPR_LOCATION (exp),
2256 "argument must be a 16-bit constant immediate");
2257 return const0_rtx;
2258 }
2259 }
2260 break;
2261
2262 default :
2263 gcc_unreachable ();
2264 }
2265 return target;
2266}
2267
2268/* Function to expand an expression EXP which calls one of the Load/Store
2269 64 Byte extension (LS64) builtins FCODE with the result going to TARGET. */
2270static rtx
2271aarch64_expand_builtin_ls64 (int fcode, tree exp, rtx target)
2272{
2273 expand_operand ops[3];
2274
2275 switch (fcode)
2276 {
2277 case AARCH64_LS64_BUILTIN_LD64B:
2278 {
2279 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2280 create_output_operand (&ops[0], target, V8DImode);
2281 create_input_operand (&ops[1], op0, DImode);
2282 expand_insn (CODE_FOR_ld64b, 2, ops);
2283 return ops[0].value;
2284 }
2285 case AARCH64_LS64_BUILTIN_ST64B:
2286 {
2287 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2288 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2289 create_output_operand (&ops[0], op0, DImode);
2290 create_input_operand (&ops[1], op1, V8DImode);
2291 expand_insn (CODE_FOR_st64b, 2, ops);
2292 return const0_rtx;
2293 }
2294 case AARCH64_LS64_BUILTIN_ST64BV:
2295 {
2296 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2297 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2298 create_output_operand (&ops[0], target, DImode);
2299 create_input_operand (&ops[1], op0, DImode);
2300 create_input_operand (&ops[2], op1, V8DImode);
2301 expand_insn (CODE_FOR_st64bv, 3, ops);
2302 return ops[0].value;
2303 }
2304 case AARCH64_LS64_BUILTIN_ST64BV0:
2305 {
2306 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2307 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2308 create_output_operand (&ops[0], target, DImode);
2309 create_input_operand (&ops[1], op0, DImode);
2310 create_input_operand (&ops[2], op1, V8DImode);
2311 expand_insn (CODE_FOR_st64bv0, 3, ops);
2312 return ops[0].value;
2313 }
2314 }
2315
2316 gcc_unreachable ();
2317}
2318
2319/* Expand a random number builtin EXP with code FCODE, putting the result
 2320 in TARGET. If IGNORE is true the return value is ignored. */
2321
2322rtx
2323aarch64_expand_rng_builtin (tree exp, rtx target, int fcode, int ignore)
2324{
2325 rtx pat;
2326 enum insn_code icode;
2327 if (fcode == AARCH64_BUILTIN_RNG_RNDR)
2328 icode = CODE_FOR_aarch64_rndr;
2329 else if (fcode == AARCH64_BUILTIN_RNG_RNDRRS)
2330 icode = CODE_FOR_aarch64_rndrrs;
2331 else
2332 gcc_unreachable ();
2333
2334 rtx rand = gen_reg_rtx (DImode);
2335 pat = GEN_FCN (icode) (rand);
2336 if (!pat)
2337 return NULL_RTX;
2338
2339 tree arg0 = CALL_EXPR_ARG (exp, 0);
2340 rtx res_addr = expand_normal (arg0);
2341 res_addr = convert_memory_address (Pmode, res_addr);
2342 rtx res_mem = gen_rtx_MEM (DImode, res_addr);
2343 emit_insn (pat);
2344 emit_move_insn (res_mem, rand);
 2345 /* If the status result is unused, don't generate the CSET code. */
2346 if (ignore)
2347 return target;
2348
2349 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
f7581eb3 2350 rtx cmp_rtx = gen_rtx_fmt_ee (EQ, SImode, cc_reg, const0_rtx);
2351 emit_insn (gen_aarch64_cstoresi (target, cmp_rtx, cc_reg));
2352 return target;
2353}
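/* The emitted sequence is roughly

 <rndr/rndrrs> <tmp>
 str <tmp>, [res_addr]
 cset <target>, eq

 so TARGET should be 0 on success and nonzero when no random number was
 available, matching the ACLE __rndr/__rndrrs contract. */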
2354
2355/* Expand an expression EXP that calls a MEMTAG built-in FCODE
2356 with result going to TARGET. */
2357static rtx
2358aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
2359{
2360 if (TARGET_ILP32)
2361 {
2362 error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
2363 return const0_rtx;
2364 }
2365
2366 rtx pat = NULL;
2367 enum insn_code icode = aarch64_memtag_builtin_data[fcode -
2368 AARCH64_MEMTAG_BUILTIN_START - 1].icode;
2369
2370 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2371 machine_mode mode0 = GET_MODE (op0);
2372 op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
2373 op0 = convert_to_mode (DImode, op0, true);
2374
2375 switch (fcode)
2376 {
2377 case AARCH64_MEMTAG_BUILTIN_IRG:
2378 case AARCH64_MEMTAG_BUILTIN_GMI:
2379 case AARCH64_MEMTAG_BUILTIN_SUBP:
2380 case AARCH64_MEMTAG_BUILTIN_INC_TAG:
2381 {
2382 if (! target
2383 || GET_MODE (target) != DImode
2384 || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
2385 target = gen_reg_rtx (DImode);
2386
2387 if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
2388 {
2389 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2390
2391 if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
2392 {
2393 pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
2394 break;
2395 }
2396 error_at (EXPR_LOCATION (exp),
2397 "argument %d must be a constant immediate "
2398 "in range [0,15]", 2);
2399 return const0_rtx;
2400 }
2401 else
2402 {
2403 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2404 machine_mode mode1 = GET_MODE (op1);
2405 op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
2406 op1 = convert_to_mode (DImode, op1, true);
2407 pat = GEN_FCN (icode) (target, op0, op1);
2408 }
2409 break;
2410 }
2411 case AARCH64_MEMTAG_BUILTIN_GET_TAG:
2412 target = op0;
2413 pat = GEN_FCN (icode) (target, op0, const0_rtx);
2414 break;
2415 case AARCH64_MEMTAG_BUILTIN_SET_TAG:
2416 pat = GEN_FCN (icode) (op0, op0, const0_rtx);
2417 break;
2418 default:
 2419 gcc_unreachable ();
2420 }
2421
2422 if (!pat)
2423 return NULL_RTX;
2424
2425 emit_insn (pat);
2426 return target;
2427}
2428
2429/* Function to expand an expression EXP which calls one of the ACLE Data
2430 Intrinsic builtins FCODE with the result going to TARGET. */
2431static rtx
2432aarch64_expand_builtin_data_intrinsic (unsigned int fcode, tree exp, rtx target)
2433{
2434 expand_operand ops[2];
2435 machine_mode mode = GET_MODE (target);
2436 create_output_operand (&ops[0], target, mode);
2437 create_input_operand (&ops[1], expand_normal (CALL_EXPR_ARG (exp, 0)), mode);
2438 enum insn_code icode;
2439
2440 switch (fcode)
2441 {
2442 case AARCH64_REV16:
2443 case AARCH64_REV16L:
2444 case AARCH64_REV16LL:
2445 icode = code_for_aarch64_rev16 (mode);
2446 break;
2447 case AARCH64_RBIT:
2448 case AARCH64_RBITL:
2449 case AARCH64_RBITLL:
2450 icode = code_for_aarch64_rbit (mode);
2451 break;
2452 default:
2453 gcc_unreachable ();
2454 }
2455
2456 expand_insn (icode, 2, ops);
2457 return ops[0].value;
2458}
2459
f5e73de0 2460/* Expand an expression EXP as an FPSR or FPCR setter (depending on
2461 UNSPEC) using MODE. */
2462static void
2463aarch64_expand_fpsr_fpcr_setter (int unspec, machine_mode mode, tree exp)
2464{
2465 tree arg = CALL_EXPR_ARG (exp, 0);
2466 rtx op = force_reg (mode, expand_normal (arg));
2467 emit_insn (gen_aarch64_set (unspec, mode, op));
2468}
2469
2470/* Expand an FPSR or FPCR getter (depending on UNSPEC) using MODE.
2471 Return the target. */
2472static rtx
2473aarch64_expand_fpsr_fpcr_getter (enum insn_code icode, machine_mode mode,
2474 rtx target)
2475{
2476 expand_operand op;
2477 create_output_operand (&op, target, mode);
2478 expand_insn (icode, 1, &op);
2479 return op.value;
2480}
2481
6d4d616a 2482/* Expand an expression EXP that calls built-in function FCODE,
2483 with result going to TARGET if that's convenient. IGNORE is true
2484 if the result of the builtin is ignored. */
342be7f7 2485rtx
2486aarch64_general_expand_builtin (unsigned int fcode, tree exp, rtx target,
2487 int ignore)
342be7f7 2488{
aa87aced 2489 int icode;
0d7e5fa6 2490 rtx op0;
2491 tree arg0;
2492
2493 switch (fcode)
2494 {
2495 case AARCH64_BUILTIN_GET_FPCR:
2496 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrsi,
2497 SImode, target);
aa87aced 2498 case AARCH64_BUILTIN_SET_FPCR:
2499 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, SImode, exp);
2500 return target;
aa87aced 2501 case AARCH64_BUILTIN_GET_FPSR:
2502 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrsi,
2503 SImode, target);
aa87aced 2504 case AARCH64_BUILTIN_SET_FPSR:
2505 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, SImode, exp);
2506 return target;
2507 case AARCH64_BUILTIN_GET_FPCR64:
2508 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrdi,
2509 DImode, target);
2510 case AARCH64_BUILTIN_SET_FPCR64:
2511 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, DImode, exp);
2512 return target;
2513 case AARCH64_BUILTIN_GET_FPSR64:
2514 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrdi,
2515 DImode, target);
2516 case AARCH64_BUILTIN_SET_FPSR64:
2517 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, DImode, exp);
aa87aced 2518 return target;
2519 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
2520 case AARCH64_PAUTH_BUILTIN_PACIA1716:
2521 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
2522 case AARCH64_PAUTH_BUILTIN_PACIB1716:
2523 case AARCH64_PAUTH_BUILTIN_XPACLRI:
2524 arg0 = CALL_EXPR_ARG (exp, 0);
2525 op0 = force_reg (Pmode, expand_normal (arg0));
2526
2527 if (fcode == AARCH64_PAUTH_BUILTIN_XPACLRI)
2528 {
2529 rtx lr = gen_rtx_REG (Pmode, R30_REGNUM);
2530 icode = CODE_FOR_xpaclri;
2531 emit_move_insn (lr, op0);
2532 emit_insn (GEN_FCN (icode) ());
92f0d3d0 2533 return lr;
2534 }
2535 else
2536 {
2537 tree arg1 = CALL_EXPR_ARG (exp, 1);
2538 rtx op1 = force_reg (Pmode, expand_normal (arg1));
2539 switch (fcode)
2540 {
2541 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
2542 icode = CODE_FOR_autia1716;
2543 break;
2544 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
2545 icode = CODE_FOR_autib1716;
2546 break;
2547 case AARCH64_PAUTH_BUILTIN_PACIA1716:
2548 icode = CODE_FOR_pacia1716;
2549 break;
2550 case AARCH64_PAUTH_BUILTIN_PACIB1716:
2551 icode = CODE_FOR_pacib1716;
2552 break;
2553 default:
2554 icode = 0;
2555 gcc_unreachable ();
2556 }
2557
2558 rtx x16_reg = gen_rtx_REG (Pmode, R16_REGNUM);
2559 rtx x17_reg = gen_rtx_REG (Pmode, R17_REGNUM);
2560 emit_move_insn (x17_reg, op0);
2561 emit_move_insn (x16_reg, op1);
2562 emit_insn (GEN_FCN (icode) ());
92f0d3d0 2563 return x17_reg;
2564 }
2565
e1d5d19e 2566 case AARCH64_JSCVT:
2567 {
2568 expand_operand ops[2];
2569 create_output_operand (&ops[0], target, SImode);
2570 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2571 create_input_operand (&ops[1], op0, DFmode);
2572 expand_insn (CODE_FOR_aarch64_fjcvtzs, 2, ops);
2573 return ops[0].value;
2574 }
e1d5d19e 2575
2576 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V2SF:
2577 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V2SF:
2578 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V2SF:
2579 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V2SF:
2580 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V4HF:
2581 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V4HF:
2582 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V4HF:
2583 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V4HF:
2584 return aarch64_expand_fcmla_builtin (exp, target, fcode);
2585 case AARCH64_BUILTIN_RNG_RNDR:
2586 case AARCH64_BUILTIN_RNG_RNDRRS:
2587 return aarch64_expand_rng_builtin (exp, target, fcode, ignore);
aa87aced 2588 }
342be7f7 2589
5d357f26 2590 if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
342be7f7 2591 return aarch64_simd_expand_builtin (fcode, exp, target);
2592 else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
2593 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7 2594
2595 if (fcode == AARCH64_BUILTIN_RSQRT_DF
2596 || fcode == AARCH64_BUILTIN_RSQRT_SF
2597 || fcode == AARCH64_BUILTIN_RSQRT_V2DF
2598 || fcode == AARCH64_BUILTIN_RSQRT_V2SF
2599 || fcode == AARCH64_BUILTIN_RSQRT_V4SF)
2600 return aarch64_expand_builtin_rsqrt (fcode, exp, target);
2601
2602 if (fcode == AARCH64_TME_BUILTIN_TSTART
2603 || fcode == AARCH64_TME_BUILTIN_TCOMMIT
2604 || fcode == AARCH64_TME_BUILTIN_TTEST
2605 || fcode == AARCH64_TME_BUILTIN_TCANCEL)
2606 return aarch64_expand_builtin_tme (fcode, exp, target);
2607
2608 if (fcode == AARCH64_LS64_BUILTIN_LD64B
2609 || fcode == AARCH64_LS64_BUILTIN_ST64B
2610 || fcode == AARCH64_LS64_BUILTIN_ST64BV
2611 || fcode == AARCH64_LS64_BUILTIN_ST64BV0)
2612 return aarch64_expand_builtin_ls64 (fcode, exp, target);
2613
2614 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
2615 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
2616 return aarch64_expand_builtin_memtag (fcode, exp, target);
2617 if (fcode >= AARCH64_REV16
2618 && fcode <= AARCH64_RBITLL)
2619 return aarch64_expand_builtin_data_intrinsic (fcode, exp, target);
ef01e6bb 2620
d5a29419 2621 gcc_unreachable ();
342be7f7 2622}
42fc9a7f 2623
2624/* Return builtin for reciprocal square root. */
2625
2626tree
6d4d616a 2627aarch64_general_builtin_rsqrt (unsigned int fn)
a6fc00da 2628{
2629 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2df)
2630 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2DF];
2631 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2sf)
2632 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2SF];
2633 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv4sf)
2634 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V4SF];
2635 return NULL_TREE;
2636}
2637
2638/* Return true if the lane check can be removed, as no error is going
2639 to be emitted. */
2640static bool
2641aarch64_fold_builtin_lane_check (tree arg0, tree arg1, tree arg2)
2642{
2643 if (TREE_CODE (arg0) != INTEGER_CST)
2644 return false;
2645 if (TREE_CODE (arg1) != INTEGER_CST)
2646 return false;
2647 if (TREE_CODE (arg2) != INTEGER_CST)
2648 return false;
2649
2650 auto totalsize = wi::to_widest (arg0);
2651 auto elementsize = wi::to_widest (arg1);
2652 if (totalsize == 0 || elementsize == 0)
2653 return false;
2654 auto lane = wi::to_widest (arg2);
2655 auto high = wi::udiv_trunc (totalsize, elementsize);
2656 return wi::ltu_p (lane, high);
2657}
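/* For instance, arguments (16, 4, 2) -- a 16-byte vector of 4-byte
 elements, lane 2 -- give HIGH == 4 and 2 < 4, so the check folds away;
 for lane 4 this returns false and the check is kept so the out-of-range
 error is still reported. */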
2658
0ac198d3 2659#undef VAR1
bf592b2f 2660#define VAR1(T, N, MAP, FLAG, A) \
e993fea1 2661 case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 2662
2663/* Try to fold a call to the built-in function with subcode FCODE. The
2664 function is passed the N_ARGS arguments in ARGS and it returns a value
2665 of type TYPE. Return the new expression on success and NULL_TREE on
2666 failure. */
9697e620 2667tree
2668aarch64_general_fold_builtin (unsigned int fcode, tree type,
2669 unsigned int n_args ATTRIBUTE_UNUSED, tree *args)
9697e620 2670{
2671 switch (fcode)
2672 {
bf592b2f 2673 BUILTIN_VDQF (UNOP, abs, 2, ALL)
9697e620 2674 return fold_build1 (ABS_EXPR, type, args[0]);
bf592b2f 2675 VAR1 (UNOP, floatv2si, 2, ALL, v2sf)
2676 VAR1 (UNOP, floatv4si, 2, ALL, v4sf)
2677 VAR1 (UNOP, floatv2di, 2, ALL, v2df)
1709ff9b 2678 return fold_build1 (FLOAT_EXPR, type, args[0]);
2679 case AARCH64_SIMD_BUILTIN_LANE_CHECK:
2680 gcc_assert (n_args == 3);
2681 if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
2682 return void_node;
2683 break;
2684 default:
2685 break;
2686 }
2687
2688 return NULL_TREE;
2689}
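/* As an example of the folds above: a call to __builtin_aarch64_absv2sf
 (the builtin behind vabs_f32, under the naming convention used here)
 becomes a plain ABS_EXPR of its argument. */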
2690
2691enum aarch64_simd_type
2692get_mem_type_for_load_store (unsigned int fcode)
2693{
2694 switch (fcode)
2695 {
2696 VAR1 (LOAD1, ld1, 0, LOAD, v8qi)
2697 VAR1 (STORE1, st1, 0, STORE, v8qi)
ad44c6a5 2698 return Int8x8_t;
2699 VAR1 (LOAD1, ld1, 0, LOAD, v16qi)
2700 VAR1 (STORE1, st1, 0, STORE, v16qi)
ad44c6a5 2701 return Int8x16_t;
2702 VAR1 (LOAD1, ld1, 0, LOAD, v4hi)
2703 VAR1 (STORE1, st1, 0, STORE, v4hi)
ad44c6a5 2704 return Int16x4_t;
2705 VAR1 (LOAD1, ld1, 0, LOAD, v8hi)
2706 VAR1 (STORE1, st1, 0, STORE, v8hi)
ad44c6a5 2707 return Int16x8_t;
2708 VAR1 (LOAD1, ld1, 0, LOAD, v2si)
2709 VAR1 (STORE1, st1, 0, STORE, v2si)
ad44c6a5 2710 return Int32x2_t;
2711 VAR1 (LOAD1, ld1, 0, LOAD, v4si)
2712 VAR1 (STORE1, st1, 0, STORE, v4si)
ad44c6a5 2713 return Int32x4_t;
2714 VAR1 (LOAD1, ld1, 0, LOAD, v2di)
2715 VAR1 (STORE1, st1, 0, STORE, v2di)
ad44c6a5 2716 return Int64x2_t;
2717 VAR1 (LOAD1_U, ld1, 0, LOAD, v8qi)
2718 VAR1 (STORE1_U, st1, 0, STORE, v8qi)
2719 return Uint8x8_t;
2720 VAR1 (LOAD1_U, ld1, 0, LOAD, v16qi)
2721 VAR1 (STORE1_U, st1, 0, STORE, v16qi)
2722 return Uint8x16_t;
2723 VAR1 (LOAD1_U, ld1, 0, LOAD, v4hi)
2724 VAR1 (STORE1_U, st1, 0, STORE, v4hi)
2725 return Uint16x4_t;
2726 VAR1 (LOAD1_U, ld1, 0, LOAD, v8hi)
2727 VAR1 (STORE1_U, st1, 0, STORE, v8hi)
2728 return Uint16x8_t;
2729 VAR1 (LOAD1_U, ld1, 0, LOAD, v2si)
2730 VAR1 (STORE1_U, st1, 0, STORE, v2si)
2731 return Uint32x2_t;
2732 VAR1 (LOAD1_U, ld1, 0, LOAD, v4si)
2733 VAR1 (STORE1_U, st1, 0, STORE, v4si)
2734 return Uint32x4_t;
2735 VAR1 (LOAD1_U, ld1, 0, LOAD, v2di)
2736 VAR1 (STORE1_U, st1, 0, STORE, v2di)
2737 return Uint64x2_t;
2738 VAR1 (LOAD1_P, ld1, 0, LOAD, v8qi)
2739 VAR1 (STORE1_P, st1, 0, STORE, v8qi)
2740 return Poly8x8_t;
2741 VAR1 (LOAD1_P, ld1, 0, LOAD, v16qi)
2742 VAR1 (STORE1_P, st1, 0, STORE, v16qi)
2743 return Poly8x16_t;
2744 VAR1 (LOAD1_P, ld1, 0, LOAD, v4hi)
2745 VAR1 (STORE1_P, st1, 0, STORE, v4hi)
2746 return Poly16x4_t;
2747 VAR1 (LOAD1_P, ld1, 0, LOAD, v8hi)
2748 VAR1 (STORE1_P, st1, 0, STORE, v8hi)
2749 return Poly16x8_t;
2750 VAR1 (LOAD1_P, ld1, 0, LOAD, v2di)
2751 VAR1 (STORE1_P, st1, 0, STORE, v2di)
2752 return Poly64x2_t;
2753 VAR1 (LOAD1, ld1, 0, LOAD, v4hf)
2754 VAR1 (STORE1, st1, 0, STORE, v4hf)
ad44c6a5 2755 return Float16x4_t;
2756 VAR1 (LOAD1, ld1, 0, LOAD, v8hf)
2757 VAR1 (STORE1, st1, 0, STORE, v8hf)
ad44c6a5 2758 return Float16x8_t;
2759 VAR1 (LOAD1, ld1, 0, LOAD, v4bf)
2760 VAR1 (STORE1, st1, 0, STORE, v4bf)
ad44c6a5 2761 return Bfloat16x4_t;
2762 VAR1 (LOAD1, ld1, 0, LOAD, v8bf)
2763 VAR1 (STORE1, st1, 0, STORE, v8bf)
ad44c6a5 2764 return Bfloat16x8_t;
2765 VAR1 (LOAD1, ld1, 0, LOAD, v2sf)
2766 VAR1 (STORE1, st1, 0, STORE, v2sf)
ad44c6a5 2767 return Float32x2_t;
2768 VAR1 (LOAD1, ld1, 0, LOAD, v4sf)
2769 VAR1 (STORE1, st1, 0, STORE, v4sf)
ad44c6a5 2770 return Float32x4_t;
2771 VAR1 (LOAD1, ld1, 0, LOAD, v2df)
2772 VAR1 (STORE1, st1, 0, STORE, v2df)
2773 return Float64x2_t;
2774 default:
2775 gcc_unreachable ();
2776 break;
2777 }
2778}
2779
2780/* Try to fold STMT, given that it's a call to the built-in function with
2781 subcode FCODE. Return the new statement on success and null on
2782 failure. */
2783gimple *
ad44c6a5 2784aarch64_general_gimple_fold_builtin (unsigned int fcode, gcall *stmt,
03f7843c 2785 gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
0ac198d3 2786{
355fe088 2787 gimple *new_stmt = NULL;
2788 unsigned nargs = gimple_call_num_args (stmt);
2789 tree *args = (nargs > 0
2790 ? gimple_call_arg_ptr (stmt, 0)
2791 : &error_mark_node);
2792
 2793 /* We use GIMPLE's IFN_REDUC_(PLUS|MIN|MAX) internal functions for
 2794 float, signed int and unsigned int; they distinguish according to
 2795 the types of the arguments to the __builtin. */
2796 switch (fcode)
0ac198d3 2797 {
bf592b2f 2798 BUILTIN_VALL (UNOP, reduc_plus_scal_, 10, ALL)
2799 new_stmt = gimple_build_call_internal (IFN_REDUC_PLUS,
2800 1, args[0]);
2801 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2802 break;
ad44c6a5 2803
2804 /* Lower sqrt builtins to gimple/internal function sqrt. */
2805 BUILTIN_VHSDF_DF (UNOP, sqrt, 2, FP)
2806 new_stmt = gimple_build_call_internal (IFN_SQRT,
2807 1, args[0]);
2808 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2809 break;
2810
2811 BUILTIN_VDC (BINOP, combine, 0, AUTO_FP)
2812 BUILTIN_VD_I (BINOPU, combine, 0, NONE)
2813 BUILTIN_VDC_P (BINOPP, combine, 0, NONE)
2814 {
2815 tree first_part, second_part;
2816 if (BYTES_BIG_ENDIAN)
2817 {
2818 second_part = args[0];
2819 first_part = args[1];
2820 }
2821 else
2822 {
2823 first_part = args[0];
2824 second_part = args[1];
2825 }
2826 tree ret_type = gimple_call_return_type (stmt);
2827 tree ctor = build_constructor_va (ret_type, 2, NULL_TREE, first_part,
2828 NULL_TREE, second_part);
2829 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), ctor);
2830 }
2831 break;
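 /* E.g. the combine builtins behind vcombine_* fold here to a plain
 vector CONSTRUCTOR {lo, hi} (halves swapped on big-endian) instead
 of being expanded to RTL. */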
2832
 2833 /* Lower store and load Neon builtins to gimple. */
2834 BUILTIN_VALL_F16 (LOAD1, ld1, 0, LOAD)
2835 BUILTIN_VDQ_I (LOAD1_U, ld1, 0, LOAD)
2836 BUILTIN_VALLP_NO_DI (LOAD1_P, ld1, 0, LOAD)
2837 if (!BYTES_BIG_ENDIAN)
2838 {
2839 enum aarch64_simd_type mem_type
 2840 = get_mem_type_for_load_store (fcode);
2841 aarch64_simd_type_info simd_type
2842 = aarch64_simd_types[mem_type];
2843 tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
2844 VOIDmode, true);
ad44c6a5 2845 tree zero = build_zero_cst (elt_ptr_type);
2846 /* Use element type alignment. */
2847 tree access_type
2848 = build_aligned_type (simd_type.itype,
2849 TYPE_ALIGN (simd_type.eltype));
2850 new_stmt
2851 = gimple_build_assign (gimple_get_lhs (stmt),
2852 fold_build2 (MEM_REF,
2853 access_type,
2854 args[0], zero));
2855 }
2856 break;
2857
2858 BUILTIN_VALL_F16 (STORE1, st1, 0, STORE)
2859 BUILTIN_VDQ_I (STORE1_U, st1, 0, STORE)
2860 BUILTIN_VALLP_NO_DI (STORE1_P, st1, 0, STORE)
2861 if (!BYTES_BIG_ENDIAN)
2862 {
2863 enum aarch64_simd_type mem_type
 2864 = get_mem_type_for_load_store (fcode);
2865 aarch64_simd_type_info simd_type
2866 = aarch64_simd_types[mem_type];
2867 tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
2868 VOIDmode, true);
ad44c6a5 2869 tree zero = build_zero_cst (elt_ptr_type);
2870 /* Use element type alignment. */
2871 tree access_type
2872 = build_aligned_type (simd_type.itype,
2873 TYPE_ALIGN (simd_type.eltype));
ad44c6a5 2874 new_stmt
2875 = gimple_build_assign (fold_build2 (MEM_REF, access_type,
2876 args[0], zero),
2877 args[1]);
2878 }
2879 break;
2880
bf592b2f 2881 BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10, ALL)
2882 BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10, ALL)
2883 new_stmt = gimple_build_call_internal (IFN_REDUC_MAX,
2884 1, args[0]);
2885 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2886 break;
bf592b2f 2887 BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10, ALL)
2888 BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10, ALL)
2889 new_stmt = gimple_build_call_internal (IFN_REDUC_MIN,
2890 1, args[0]);
2891 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2892 break;
2893 BUILTIN_VSDQ_I_DI (BINOP, ashl, 3, NONE)
2894 if (TREE_CODE (args[1]) == INTEGER_CST
2895 && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
2896 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2897 LSHIFT_EXPR, args[0], args[1]);
2898 break;
2899 BUILTIN_VSDQ_I_DI (BINOP, sshl, 0, NONE)
2900 BUILTIN_VSDQ_I_DI (BINOP_UUS, ushl, 0, NONE)
2901 {
2902 tree cst = args[1];
2903 tree ctype = TREE_TYPE (cst);
 2904 /* Left shifts can be either scalar or vector, e.g. uint64x1_t is
 2905 treated as a scalar type, not a vector one. */
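 /* Illustrative folds (intrinsic mapping per arm_neon.h): an sshl or
 ushl call whose shift amount is the uniform constant -3 becomes
 RSHIFT_EXPR by 3 below, while an in-range non-negative constant
 becomes LSHIFT_EXPR. */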
2906 if ((cst = uniform_integer_cst_p (cst)) != NULL_TREE)
2907 {
2908 wide_int wcst = wi::to_wide (cst);
2909 tree unit_ty = TREE_TYPE (cst);
2910
2911 wide_int abs_cst = wi::abs (wcst);
2912 if (wi::geu_p (abs_cst, element_precision (args[0])))
2913 break;
2914
2915 if (wi::neg_p (wcst, TYPE_SIGN (ctype)))
2916 {
2917 tree final_cst;
2918 final_cst = wide_int_to_tree (unit_ty, abs_cst);
2919 if (TREE_CODE (cst) != INTEGER_CST)
2920 final_cst = build_uniform_cst (ctype, final_cst);
2921
2922 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2923 RSHIFT_EXPR, args[0],
2924 final_cst);
2925 }
2926 else
2927 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2928 LSHIFT_EXPR, args[0], args[1]);
2929 }
2930 }
2931 break;
    BUILTIN_VDQ_I (SHIFTIMM, ashr, 3, NONE)
    VAR1 (SHIFTIMM, ashr_simd, 0, NONE, di)
    BUILTIN_VDQ_I (USHIFTIMM, lshr, 3, NONE)
    VAR1 (USHIFTIMM, lshr_simd, 0, NONE, di)
      if (TREE_CODE (args[1]) == INTEGER_CST
	  && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
	new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					RSHIFT_EXPR, args[0], args[1]);
      break;
    BUILTIN_GPF (BINOP, fmulx, 0, ALL)
      {
	gcc_assert (nargs == 2);
	bool a0_cst_p = TREE_CODE (args[0]) == REAL_CST;
	bool a1_cst_p = TREE_CODE (args[1]) == REAL_CST;
	if (a0_cst_p || a1_cst_p)
	  {
	    if (a0_cst_p && a1_cst_p)
	      {
		tree t0 = TREE_TYPE (args[0]);
		real_value a0 = TREE_REAL_CST (args[0]);
		real_value a1 = TREE_REAL_CST (args[1]);
		if (real_equal (&a1, &dconst0))
		  std::swap (a0, a1);
		/* According to real_equal (), +0 equals -0.  */
		if (real_equal (&a0, &dconst0) && real_isinf (&a1))
		  {
		    real_value res = dconst2;
		    res.sign = a0.sign ^ a1.sign;
		    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						    REAL_CST,
						    build_real (t0, res));
		  }
		else
		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						  MULT_EXPR,
						  args[0], args[1]);
	      }
	    else /* a0_cst_p ^ a1_cst_p.  */
	      {
		real_value const_part = a0_cst_p
		  ? TREE_REAL_CST (args[0]) : TREE_REAL_CST (args[1]);
		if (!real_equal (&const_part, &dconst0)
		    && !real_isinf (&const_part))
		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						  MULT_EXPR, args[0],
						  args[1]);
	      }
	  }
	if (new_stmt)
	  {
	    gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	    gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  }
	break;
      }
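      /* Worked example (not in the original source) of the special case
	 fmulx exists for: where a plain multiply would produce a NaN,
	 fmulx (+/-0.0, +/-Inf) returns 2.0 with the XOR of the operand
	 signs, so

	   vmulxs_f32 (0.0f, -HUGE_VALF)  ->  -2.0f

	 is folded to a REAL_CST, whereas a single constant operand is
	 only safe to fold to MULT_EXPR when it is neither zero nor
	 infinite.  */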
    case AARCH64_SIMD_BUILTIN_LANE_CHECK:
      if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	  new_stmt = gimple_build_nop ();
	}
      break;
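      /* For illustration (not in the original source): once the constant
	 lane argument of e.g. vgetq_lane_s32 (v, 3) has passed the bounds
	 check, the check call conveys no further information, so it is
	 replaced by a no-op statement and its virtual operands are
	 released.  */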
    default:
      break;
    }

  /* GIMPLE assign statements (unlike calls) require a non-null lhs.  If we
     created an assign statement with a null lhs, then fix this by assigning
     to a new (and subsequently unused) variable.  */
  if (new_stmt && is_gimple_assign (new_stmt) && !gimple_assign_lhs (new_stmt))
    {
      tree new_lhs = make_ssa_name (gimple_call_return_type (stmt));
      gimple_assign_set_lhs (new_stmt, new_lhs);
    }

  return new_stmt;
}

void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv;
  tree get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalent of:
     unsigned int fenv_cr;
     fenv_cr = __builtin_aarch64_get_fpcr ();

     unsigned int fenv_sr;
     fenv_sr = __builtin_aarch64_get_fpsr ();

     Now set all exceptions to non-stop:
     unsigned int mask_cr
       = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
     unsigned int masked_cr;
     masked_cr = fenv_cr & mask_cr;

     And clear all exception flags:
     unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
     unsigned int masked_sr;
     masked_sr = fenv_sr & mask_sr;

     __builtin_aarch64_set_fpcr (masked_cr);
     __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var_raw (unsigned_type_node);
  fenv_sr = create_tmp_var_raw (unsigned_type_node);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0),
		       NULL_TREE, NULL_TREE);
  ld_fenv_sr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0),
		       NULL_TREE, NULL_TREE);

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
     __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of:
     unsigned int new_fenv_var;
     new_fenv_var = __builtin_aarch64_get_fpsr ();

     __builtin_aarch64_set_fpsr (fenv_sr);

     __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var_raw (unsigned_type_node);
  reload_fenv = build4 (TARGET_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0),
			NULL_TREE, NULL_TREE);
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}

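/* Usage sketch, not part of the original source: the function above
   implements the TARGET_ATOMIC_ASSIGN_EXPAND_FENV hook, so that C11 code
   such as

     _Atomic double d;
     void f (double x) { d += x; }

   has its compare-and-swap retry loop wrapped in the *hold, *clear and
   *update sequences built here, keeping the FP exception flags correct
   even when the loop retries.  */
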
/* Resolve overloaded MEMTAG built-in functions.  */
#define AARCH64_BUILTIN_SUBCODE(F) \
  (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)

static tree
aarch64_resolve_overloaded_memtag (location_t loc,
				   tree fndecl, void *pass_params)
{
  vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
  unsigned param_num = params ? params->length () : 0;
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
  tree inittype = aarch64_memtag_builtin_data[
		    fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
  unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;

  if (param_num != arg_num)
    {
      TREE_TYPE (fndecl) = inittype;
      return NULL_TREE;
    }
  tree retype = NULL;

  if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
    {
      tree t0 = TREE_TYPE ((*params)[0]);
      tree t1 = TREE_TYPE ((*params)[1]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	t0 = ptr_type_node;
      if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
	t1 = ptr_type_node;

      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));

      if (TYPE_MODE (t1) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
		    (int) tree_to_shwi (DECL_SIZE ((*params)[1])));

      retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
    }
  else
    {
      tree t0 = TREE_TYPE ((*params)[0]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	{
	  TREE_TYPE (fndecl) = inittype;
	  return NULL_TREE;
	}

      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));

      switch (fcode)
	{
	case AARCH64_MEMTAG_BUILTIN_IRG:
	  retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GMI:
	  retype = build_function_type_list (uint64_type_node, t0,
					     uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_INC_TAG:
	  retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_SET_TAG:
	  retype = build_function_type_list (void_type_node, t0, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GET_TAG:
	  retype = build_function_type_list (t0, t0, NULL);
	  break;
	default:
	  return NULL_TREE;
	}
    }

  if (!retype || retype == error_mark_node)
    TREE_TYPE (fndecl) = inittype;
  else
    TREE_TYPE (fndecl) = retype;

  return NULL_TREE;
}
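
/* Example of the resolution above (assumed builtin spelling, not in the
   original source): given

     int *p = ...;
     p = __builtin_aarch64_memtag_inc_tag (p, 1);

   the INC_TAG case retypes the builtin as int *(int *, unsigned), so the
   call preserves the user's pointer type instead of forcing void *.  */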

/* Called from aarch64_resolve_overloaded_builtin in aarch64-c.cc.  */
tree
aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
					    void *pass_params)
{
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);

  if (fcode >= AARCH64_MEMTAG_BUILTIN_START
      && fcode <= AARCH64_MEMTAG_BUILTIN_END)
    return aarch64_resolve_overloaded_memtag (loc, function, pass_params);

  return NULL_TREE;
}

#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11

#include "gt-aarch64-builtins.h"