]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/aarch64/aarch64-builtins.cc
aarch64: Use error_n for plural text [PR104897]
[thirdparty/gcc.git] / gcc / config / aarch64 / aarch64-builtins.cc
CommitLineData
43e9d192 1/* Builtins' description for AArch64 SIMD architecture.
7adcbafe 2 Copyright (C) 2011-2022 Free Software Foundation, Inc.
43e9d192
IB
3 Contributed by ARM Ltd.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
8fcc61f8
RS
21#define IN_TARGET_CODE 1
22
43e9d192
IB
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
c7131fb2 27#include "function.h"
c7131fb2 28#include "basic-block.h"
e11c4407 29#include "rtl.h"
c7131fb2
AM
30#include "tree.h"
31#include "gimple.h"
03312cbd 32#include "ssa.h"
4d0cdd0c 33#include "memmodel.h"
e11c4407
AM
34#include "tm_p.h"
35#include "expmed.h"
36#include "optabs.h"
37#include "recog.h"
38#include "diagnostic-core.h"
40e23961 39#include "fold-const.h"
d8a2d370 40#include "stor-layout.h"
36566b39 41#include "explow.h"
43e9d192 42#include "expr.h"
43e9d192 43#include "langhooks.h"
5be5c238 44#include "gimple-iterator.h"
10766209 45#include "case-cfn-macros.h"
9d63f43b 46#include "emit-rtl.h"
31427b97
RS
47#include "stringpool.h"
48#include "attribs.h"
ad44c6a5 49#include "gimple-fold.h"
43e9d192 50
/* Map the lowercase mode suffixes used by aarch64-simd-builtins.def
   (via the UP macro below) onto machine_mode enumerators.  */
#define v8qi_UP E_V8QImode
#define v8di_UP E_V8DImode
#define v4hi_UP E_V4HImode
#define v4hf_UP E_V4HFmode
#define v2si_UP E_V2SImode
#define v2sf_UP E_V2SFmode
#define v1df_UP E_V1DFmode
#define di_UP E_DImode
#define df_UP E_DFmode
#define v16qi_UP E_V16QImode
#define v8hi_UP E_V8HImode
#define v8hf_UP E_V8HFmode
#define v4si_UP E_V4SImode
#define v4sf_UP E_V4SFmode
#define v2di_UP E_V2DImode
#define v2df_UP E_V2DFmode
#define ti_UP E_TImode
#define oi_UP E_OImode
#define ci_UP E_CImode
#define xi_UP E_XImode
#define si_UP E_SImode
#define sf_UP E_SFmode
#define hi_UP E_HImode
#define hf_UP E_HFmode
#define qi_UP E_QImode
#define bf_UP E_BFmode
#define v4bf_UP E_V4BFmode
#define v8bf_UP E_V8BFmode
/* Structure ("tuple") modes: vNxM... is N registers of M elements each.  */
#define v2x8qi_UP E_V2x8QImode
#define v2x4hi_UP E_V2x4HImode
#define v2x4hf_UP E_V2x4HFmode
#define v2x4bf_UP E_V2x4BFmode
#define v2x2si_UP E_V2x2SImode
#define v2x2sf_UP E_V2x2SFmode
#define v2x1di_UP E_V2x1DImode
#define v2x1df_UP E_V2x1DFmode
#define v2x16qi_UP E_V2x16QImode
#define v2x8hi_UP E_V2x8HImode
#define v2x8hf_UP E_V2x8HFmode
#define v2x8bf_UP E_V2x8BFmode
#define v2x4si_UP E_V2x4SImode
#define v2x4sf_UP E_V2x4SFmode
#define v2x2di_UP E_V2x2DImode
#define v2x2df_UP E_V2x2DFmode
#define v3x8qi_UP E_V3x8QImode
#define v3x4hi_UP E_V3x4HImode
#define v3x4hf_UP E_V3x4HFmode
#define v3x4bf_UP E_V3x4BFmode
#define v3x2si_UP E_V3x2SImode
#define v3x2sf_UP E_V3x2SFmode
#define v3x1di_UP E_V3x1DImode
#define v3x1df_UP E_V3x1DFmode
#define v3x16qi_UP E_V3x16QImode
#define v3x8hi_UP E_V3x8HImode
#define v3x8hf_UP E_V3x8HFmode
#define v3x8bf_UP E_V3x8BFmode
#define v3x4si_UP E_V3x4SImode
#define v3x4sf_UP E_V3x4SFmode
#define v3x2di_UP E_V3x2DImode
#define v3x2df_UP E_V3x2DFmode
#define v4x8qi_UP E_V4x8QImode
#define v4x4hi_UP E_V4x4HImode
#define v4x4hf_UP E_V4x4HFmode
#define v4x4bf_UP E_V4x4BFmode
#define v4x2si_UP E_V4x2SImode
#define v4x2sf_UP E_V4x2SFmode
#define v4x1di_UP E_V4x1DImode
#define v4x1df_UP E_V4x1DFmode
#define v4x16qi_UP E_V4x16QImode
#define v4x8hi_UP E_V4x8HImode
#define v4x8hf_UP E_V4x8HFmode
#define v4x8bf_UP E_V4x8BFmode
#define v4x4si_UP E_V4x4SImode
#define v4x4sf_UP E_V4x4SFmode
#define v4x2di_UP E_V4x2DImode
#define v4x2df_UP E_V4x2DFmode
/* Expand a lowercase mode suffix X to its machine_mode enumerator.  */
#define UP(X) X##_UP
128
b5828b4b
JG
/* Maximum number of arguments (including the return slot) described by a
   qualifier array below.  */
#define SIMD_MAX_BUILTIN_ARGS 5

/* Bitmask of properties attached to each argument (and the return value)
   of a SIMD builtin; arrays of these drive type creation and expansion.  */
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for bigendian.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400,
  /* Lane indices selected in pairs. - must be in range, and flipped for
     bigendian.  */
  qualifier_lane_pair_index = 0x800,
  /* Lane indices selected in quadtuplets. - must be in range, and flipped for
     bigendian.  */
  qualifier_lane_quadtup_index = 0x1000,
};
43e9d192 170
/* Flags that describe what a function might do.  These feed the "flags"
   member of aarch64_simd_builtin_datum.  */
const unsigned int FLAG_NONE = 0U;
const unsigned int FLAG_READ_FPCR = 1U << 0;
const unsigned int FLAG_RAISE_FP_EXCEPTIONS = 1U << 1;
const unsigned int FLAG_READ_MEMORY = 1U << 2;
const unsigned int FLAG_PREFETCH_MEMORY = 1U << 3;
const unsigned int FLAG_WRITE_MEMORY = 1U << 4;

/* Not all FP intrinsics raise FP exceptions or read FPCR register,
   use this flag to suppress it.  */
const unsigned int FLAG_AUTO_FP = 1U << 5;

/* Common combinations of the flags above.  */
const unsigned int FLAG_FP = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS;
const unsigned int FLAG_ALL = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS
  | FLAG_READ_MEMORY | FLAG_PREFETCH_MEMORY | FLAG_WRITE_MEMORY;
const unsigned int FLAG_STORE = FLAG_WRITE_MEMORY | FLAG_AUTO_FP;
const unsigned int FLAG_LOAD = FLAG_READ_MEMORY | FLAG_AUTO_FP;
bf592b2f 188
43e9d192
IB
/* Description of a single SIMD builtin; aarch64_simd_builtin_data below is
   an array of these, one entry per VARn expansion.  */
typedef struct
{
  /* Builtin name (function-name suffix plus mode suffix).  */
  const char *name;
  /* Machine mode the builtin operates in.  */
  machine_mode mode;
  /* Instruction pattern used to expand the builtin.  */
  const enum insn_code code;
  /* Builtin function code (index into aarch64_builtin_decls).  */
  unsigned int fcode;
  /* Qualifier array (SIMD_MAX_BUILTIN_ARGS entries) describing the
     return value and arguments.  */
  enum aarch64_type_qualifiers *qualifiers;
  /* FLAG_* bits describing possible side effects.  */
  unsigned int flags;
} aarch64_simd_builtin_datum;
198
b5828b4b
JG
/* Qualifier arrays for the common builtin shapes.  Entry 0 describes the
   return value; subsequent entries describe the arguments in order.  The
   TYPES_* macros are the names used by aarch64-simd-builtins.def.  */
static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_UNOPUS (aarch64_types_unopus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_BINOP_USS (aarch64_types_binop_uss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_unsigned };
#define TYPES_BINOP_PPU (aarch64_types_binop_ppu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_TERNOPU_LANE (aarch64_types_ternopu_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_sssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_TERNOP_SSSU (aarch64_types_ternop_sssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
#define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_suss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_TERNOP_SUSS (aarch64_types_ternop_suss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_pppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_unsigned };
#define TYPES_TERNOP_PPPU (aarch64_types_binop_pppu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_pair_index };
#define TYPES_QUADOP_LANE_PAIR (aarch64_types_quadop_lane_pair_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_index };
#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_QUADOPU_LANE (aarch64_types_quadopu_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopssus_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSUS_LANE_QUADTUP \
	(aarch64_types_quadopssus_lane_quadtup_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopsssu_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSSU_LANE_QUADTUP \
	(aarch64_types_quadopsssu_lane_quadtup_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_QUADOPUI (aarch64_types_quadopu_imm_qualifiers)
314
/* Qualifier arrays for builtins that take an immediate operand (lane
   numbers, shift amounts, register selectors).  */
static enum aarch64_type_qualifiers
aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_fcvt_from_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate };
#define TYPES_FCVTIMM_SUS (aarch64_types_fcvt_from_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)
#define TYPES_USHIFT2IMM (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift2_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFT2IMM_UUSS (aarch64_types_shift2_to_unsigned_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_s_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate};
#define TYPES_SETREG (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFT2IMM (aarch64_types_ternop_s_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_p_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_immediate};
#define TYPES_SHIFTINSERTP (aarch64_types_ternop_p_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)
356
b5828b4b
JG
/* Qualifier arrays for loads: the return value is the loaded data and the
   single operand is a const pointer whose pointee type is derived from the
   builtin's mode (qualifier_const_pointer_map_mode).  */
static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_load1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1_U (aarch64_types_load1_u_qualifiers)
#define TYPES_LOADSTRUCT_U (aarch64_types_load1_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_load1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1_P (aarch64_types_load1_p_qualifiers)
#define TYPES_LOADSTRUCT_P (aarch64_types_load1_p_qualifiers)

/* Single-lane structure loads: pointer, old contents, then lane index.  */
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_const_pointer_map_mode,
      qualifier_unsigned, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE_U (aarch64_types_loadstruct_lane_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_const_pointer_map_mode,
      qualifier_poly, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE_P (aarch64_types_loadstruct_lane_p_qualifiers)

/* Bitwise-select (BSL) shapes: the selector operand is always unsigned.  */
static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_unsigned };
#define TYPES_STORE1_U (aarch64_types_store1_u_qualifiers)
#define TYPES_STORESTRUCT_U (aarch64_types_store1_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_poly };
#define TYPES_STORE1_P (aarch64_types_store1_p_qualifiers)
#define TYPES_STORESTRUCT_P (aarch64_types_store1_p_qualifiers)

/* Single-lane structure stores: pointer, data, then lane index.  */
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_unsigned, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE_U (aarch64_types_storestruct_lane_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_poly, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE_P (aarch64_types_storestruct_lane_p_qualifiers)
b5828b4b 441
0ddec79f
JG
/* CFn builds a CODE_FOR_* insn-code name from a builtin name N and mode
   suffix X; the MAP argument of the VARn macros selects which variant.  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

/* VARn expands to n aarch64_simd_builtin_datum initialisers, one per mode
   suffix, for builtin N with qualifier set T and side-effect flags FLAG.
   Each VARn is defined in terms of VAR(n-1) plus one more VAR1.  */
#define VAR1(T, N, MAP, FLAG, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T, FLAG_##FLAG},
#define VAR2(T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, A) \
  VAR1 (T, N, MAP, FLAG, B)
#define VAR3(T, N, MAP, FLAG, A, B, C) \
  VAR2 (T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, C)
#define VAR4(T, N, MAP, FLAG, A, B, C, D) \
  VAR3 (T, N, MAP, FLAG, A, B, C) \
  VAR1 (T, N, MAP, FLAG, D)
#define VAR5(T, N, MAP, FLAG, A, B, C, D, E) \
  VAR4 (T, N, MAP, FLAG, A, B, C, D) \
  VAR1 (T, N, MAP, FLAG, E)
#define VAR6(T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, FLAG, A, B, C, D, E) \
  VAR1 (T, N, MAP, FLAG, F)
#define VAR7(T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, FLAG, G)
#define VAR8(T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, FLAG, H)
#define VAR9(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, FLAG, I)
#define VAR10(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, FLAG, J)
#define VAR11(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, FLAG, K)
#define VAR12(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, FLAG, L)
#define VAR13(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR12 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR1 (T, N, MAP, FLAG, M)
#define VAR14(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR13 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR1 (T, X, MAP, FLAG, N)
#define VAR15(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR14 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR1 (T, X, MAP, FLAG, O)
#define VAR16(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) \
  VAR15 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR1 (T, X, MAP, FLAG, P)
342be7f7 496
f421c516 497#include "aarch64-builtin-iterators.h"
43e9d192
IB
498
/* Table describing every SIMD builtin, generated by expanding the VARn
   macros above over aarch64-simd-builtins.def.  */
static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};
502
5d357f26
KT
/* There's only 8 CRC32 builtins.  Probably not worth their own .def file.
   Each entry is (name, operand mode); CRC32_BUILTIN is redefined before
   every expansion of this list.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)
513
9d63f43b
TC
/* The next 8 FCMLA intrinsics require some special handling compared to the
   normal simd intrinsics.  Each entry is (rotation, mode suffix, pattern
   name, machine mode, lane-variant flag).  */
#define AARCH64_SIMD_FCMLA_LANEQ_BUILTINS \
  FCMLA_LANEQ_BUILTIN (0, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (90, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (180, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (270, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (0, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (90, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (180, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (270, v4hf, fcmla_laneq, V4HF, true) \

5d357f26
KT
/* Description of a single CRC32 builtin.  */
typedef struct
{
  /* Full builtin name ("__builtin_aarch64_crc32...").  */
  const char *name;
  /* Mode of the data operand.  */
  machine_mode mode;
  /* Instruction pattern used to expand the builtin.  */
  const enum insn_code icode;
  /* Builtin function code.  */
  unsigned int fcode;
} aarch64_crc_builtin_datum;
533
9d63f43b
TC
/* Hold information about how to expand the FCMLA_LANEQ builtins.  */
typedef struct
{
  /* Full builtin name.  */
  const char *name;
  /* Machine mode of the vector operands.  */
  machine_mode mode;
  /* Instruction pattern used to expand the builtin.  */
  const enum insn_code icode;
  /* Builtin function code.  */
  unsigned int fcode;
  /* True for the per-lane (_laneq) variants.  */
  bool lane;
} aarch64_fcmla_laneq_builtin_datum;
543
5d357f26
KT
/* Expand each CRC32 / FCMLA / VAR1 list entry to just its enumerator name
   so the lists can populate enum aarch64_builtins below.  */
#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M,

#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

/* Function codes for every builtin handled by this file.  */
enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  /* FPCR/FPSR system-register accessors.  */
  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_BUILTIN_GET_FPCR64,
  AARCH64_BUILTIN_SET_FPCR64,
  AARCH64_BUILTIN_GET_FPSR64,
  AARCH64_BUILTIN_SET_FPSR64,

  /* Reciprocal square-root estimate builtins.  */
  AARCH64_BUILTIN_RSQRT_DF,
  AARCH64_BUILTIN_RSQRT_SF,
  AARCH64_BUILTIN_RSQRT_V2DF,
  AARCH64_BUILTIN_RSQRT_V2SF,
  AARCH64_BUILTIN_RSQRT_V4SF,
  AARCH64_SIMD_BUILTIN_BASE,
  AARCH64_SIMD_BUILTIN_LANE_CHECK,
#include "aarch64-simd-builtins.def"
  /* The first enum element which is based on an insn_data pattern.  */
  AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
			      + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  /* ARMv8.3-A Pointer Authentication Builtins.  */
  AARCH64_PAUTH_BUILTIN_AUTIA1716,
  AARCH64_PAUTH_BUILTIN_PACIA1716,
  AARCH64_PAUTH_BUILTIN_AUTIB1716,
  AARCH64_PAUTH_BUILTIN_PACIB1716,
  AARCH64_PAUTH_BUILTIN_XPACLRI,
  /* Special cased Armv8.3-A Complex FMA by Lane quad Builtins.  */
  AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE,
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
  /* Builtin for Arm8.3-a Javascript conversion instruction.  */
  AARCH64_JSCVT,
  /* TME builtins.  */
  AARCH64_TME_BUILTIN_TSTART,
  AARCH64_TME_BUILTIN_TCOMMIT,
  AARCH64_TME_BUILTIN_TTEST,
  AARCH64_TME_BUILTIN_TCANCEL,
  /* Armv8.5-a RNG instruction builtins.  */
  AARCH64_BUILTIN_RNG_RNDR,
  AARCH64_BUILTIN_RNG_RNDRRS,
  /* MEMTAG builtins.  */
  AARCH64_MEMTAG_BUILTIN_START,
  AARCH64_MEMTAG_BUILTIN_IRG,
  AARCH64_MEMTAG_BUILTIN_GMI,
  AARCH64_MEMTAG_BUILTIN_SUBP,
  AARCH64_MEMTAG_BUILTIN_INC_TAG,
  AARCH64_MEMTAG_BUILTIN_SET_TAG,
  AARCH64_MEMTAG_BUILTIN_GET_TAG,
  AARCH64_MEMTAG_BUILTIN_END,
  /* LS64 builtins.  */
  AARCH64_LS64_BUILTIN_LD64B,
  AARCH64_LS64_BUILTIN_ST64B,
  AARCH64_LS64_BUILTIN_ST64BV,
  AARCH64_LS64_BUILTIN_ST64BV0,
  AARCH64_BUILTIN_MAX
};
618
5d357f26
KT
/* Re-expand the CRC32 list as full aarch64_crc_builtin_datum entries.  */
#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, E_##M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};


#undef FCMLA_LANEQ_BUILTIN
#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  {"__builtin_aarch64_fcmla_laneq"#I#N, E_##M##mode, CODE_FOR_aarch64_##X##I##N, \
   AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M, T},

/* This structure contains how to manage the mapping from the builtin to the
   instruction to generate in the backend and how to invoke the instruction.  */
static aarch64_fcmla_laneq_builtin_datum aarch64_fcmla_lane_builtin_data[] = {
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
};

#undef CRC32_BUILTIN
640
119103ca
JG
/* FUNCTION_DECL for each builtin, indexed by its aarch64_builtins code.  */
static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6
645
f9d53c27
TB
/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types are implementation defined.  The list is
   NULL-terminated.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_hf",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  "__builtin_aarch64_simd_bf",
  NULL
};
b5828b4b 673
f9d53c27
TB
/* Enumerate every AdvSIMD vector type listed in
   aarch64-simd-builtin-types.def.  */
#define ENTRY(E, M, Q, G) E,
enum aarch64_simd_type
{
#include "aarch64-simd-builtin-types.def"
  ARM_NEON_H_TYPES_LAST
};
#undef ENTRY
b5828b4b 681
98f1dd02 682struct GTY(()) aarch64_simd_type_info
b5828b4b 683{
f9d53c27
TB
684 enum aarch64_simd_type type;
685
686 /* Internal type name. */
687 const char *name;
688
689 /* Internal type name(mangled). The mangled names conform to the
690 AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
691 Appendix A). To qualify for emission with the mangled names defined in
692 that document, a vector type must not only be of the correct mode but also
693 be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t); these
694 types are registered by aarch64_init_simd_builtin_types (). In other
695 words, vector types defined in other ways e.g. via vector_size attribute
696 will get default mangled names. */
697 const char *mangle;
698
699 /* Internal type. */
700 tree itype;
701
702 /* Element type. */
b5828b4b
JG
703 tree eltype;
704
f9d53c27
TB
705 /* Machine mode the internal type maps to. */
706 enum machine_mode mode;
b5828b4b 707
f9d53c27
TB
708 /* Qualifiers. */
709 enum aarch64_type_qualifiers q;
710};
711
/* Table of aarch64_simd_type_info records, one per .def entry; itype and
   eltype are initialised to NULL_TREE here and filled in elsewhere.  */
#define ENTRY(E, M, Q, G)  \
  {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, E_##M##mode, qualifier_##Q},
static GTY(()) struct aarch64_simd_type_info aarch64_simd_types [] = {
#include "aarch64-simd-builtin-types.def"
};
#undef ENTRY
718
66f206b8
JW
/* Cached trees for the tuple variants of each AdvSIMD type.  The inner
   dimension is 3 — presumably one slot per x2/x3/x4 tuple size; TODO
   confirm against the initialisation code.  */
static GTY(()) tree aarch64_simd_tuple_types[ARM_NEON_H_TYPES_LAST][3];

/* Internal integer types wide enough for 2-, 3- and 4-register tuples.  */
static GTY(()) tree aarch64_simd_intOI_type_node = NULL_TREE;
static GTY(()) tree aarch64_simd_intCI_type_node = NULL_TREE;
static GTY(()) tree aarch64_simd_intXI_type_node = NULL_TREE;

/* The user-visible __fp16 type, and a pointer to that type.  Used
   across the back-end.  */
tree aarch64_fp16_type_node = NULL_TREE;
tree aarch64_fp16_ptr_type_node = NULL_TREE;

/* Back-end node type for brain float (bfloat) types.  */
tree aarch64_bf16_type_node = NULL_TREE;
tree aarch64_bf16_ptr_type_node = NULL_TREE;
733
/* Wrapper around add_builtin_function.  NAME is the name of the built-in
   function, TYPE is the function type, CODE is the function subcode
   (relative to AARCH64_BUILTIN_GENERAL), and ATTRS is the function
   attributes.  Returns the new function decl.  */
static tree
aarch64_general_add_builtin (const char *name, tree type, unsigned int code,
			     tree attrs = NULL_TREE)
{
  /* Fold the GENERAL builtin class into the low bits of the code so it
     can be recovered when the builtin is later looked up or expanded.  */
  code = (code << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
  return add_builtin_function (name, type, code, BUILT_IN_MD,
			       NULL, attrs);
}
746
f9d53c27
TB
747static const char *
748aarch64_mangle_builtin_scalar_type (const_tree type)
749{
750 int i = 0;
751
752 while (aarch64_scalar_builtin_types[i] != NULL)
b5828b4b 753 {
f9d53c27
TB
754 const char *name = aarch64_scalar_builtin_types[i];
755
756 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
757 && DECL_NAME (TYPE_NAME (type))
758 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
759 return aarch64_scalar_builtin_types[i];
760 i++;
761 }
762 return NULL;
b5828b4b
JG
763}
764
/* If TYPE carries the "Advanced SIMD type" attribute (attached when the
   internal vector types are built), return the mangled name recorded in
   that attribute; otherwise return NULL.  */
static const char *
aarch64_mangle_builtin_vector_type (const_tree type)
{
  tree attrs = TYPE_ATTRIBUTES (type);
  if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
    {
      /* The attribute value is a one-element list whose value is the
	 mangled-name identifier.  */
      tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
      return IDENTIFIER_POINTER (mangled_name);
    }

  return NULL;
}
777
f9d53c27 778const char *
6d4d616a 779aarch64_general_mangle_builtin_type (const_tree type)
6db1ec94 780{
f9d53c27
TB
781 const char *mangle;
782 /* Walk through all the AArch64 builtins types tables to filter out the
783 incoming type. */
784 if ((mangle = aarch64_mangle_builtin_vector_type (type))
785 || (mangle = aarch64_mangle_builtin_scalar_type (type)))
786 return mangle;
787
788 return NULL;
6db1ec94
JG
789}
790
/* Return the standard scalar type for MODE under qualifier set Q: a signed
   or unsigned integer node for integer modes (unsigned unless Q is
   qualifier_none), the wide OI/CI/XI nodes for structure modes, or the
   matching floating-point node.  Aborts on unsupported modes.  */
static tree
aarch64_simd_builtin_std_type (machine_mode mode,
			       enum aarch64_type_qualifiers q)
{
#define QUAL_TYPE(M)  \
  ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
  switch (mode)
    {
    case E_QImode:
      return QUAL_TYPE (QI);
    case E_HImode:
      return QUAL_TYPE (HI);
    case E_SImode:
      return QUAL_TYPE (SI);
    case E_DImode:
      return QUAL_TYPE (DI);
    case E_TImode:
      return QUAL_TYPE (TI);
    case E_OImode:
      return aarch64_simd_intOI_type_node;
    case E_CImode:
      return aarch64_simd_intCI_type_node;
    case E_XImode:
      return aarch64_simd_intXI_type_node;
    case E_HFmode:
      return aarch64_fp16_type_node;
    case E_SFmode:
      return float_type_node;
    case E_DFmode:
      return double_type_node;
    case E_BFmode:
      return aarch64_bf16_type_node;
    default:
      gcc_unreachable ();
    }
#undef QUAL_TYPE
}
828
f9d53c27 829static tree
b8506a8a 830aarch64_lookup_simd_builtin_type (machine_mode mode,
f9d53c27 831 enum aarch64_type_qualifiers q)
6db1ec94 832{
f9d53c27
TB
833 int i;
834 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
835
836 /* Non-poly scalar modes map to standard types not in the table. */
837 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
838 return aarch64_simd_builtin_std_type (mode, q);
839
840 for (i = 0; i < nelts; i++)
66f206b8
JW
841 {
842 if (aarch64_simd_types[i].mode == mode
843 && aarch64_simd_types[i].q == q)
844 return aarch64_simd_types[i].itype;
845 if (aarch64_simd_tuple_types[i][0] != NULL_TREE)
846 for (int j = 0; j < 3; j++)
847 if (TYPE_MODE (aarch64_simd_tuple_types[i][j]) == mode
848 && aarch64_simd_types[i].q == q)
849 return aarch64_simd_tuple_types[i][j];
850 }
f9d53c27
TB
851
852 return NULL_TREE;
b5828b4b
JG
853}
854
f9d53c27 855static tree
b8506a8a 856aarch64_simd_builtin_type (machine_mode mode,
f9d53c27
TB
857 bool unsigned_p, bool poly_p)
858{
859 if (poly_p)
860 return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
861 else if (unsigned_p)
862 return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
863 else
864 return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
865}
866
/* Fill in the element types of aarch64_simd_types, build and register the
   internal AdvSIMD vector types (attaching their AAPCS64 mangled names),
   and create the wide OI/CI/XI integer types.  Must run after the scalar
   __fp16/__bf16 nodes exist, since they are used as element types here.  */
static void
aarch64_init_simd_builtin_types (void)
{
  int i;
  int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
  tree tdecl;

  /* Init all the element types built by the front-end.  */
  aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
  aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
  aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
  aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
  aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
  aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
  aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
  aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
  aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
  aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;

  /* Poly types are a world of their own.  They get distinct copies of the
     unsigned integer nodes so they mangle differently.  */
  aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
    build_distinct_type_copy (unsigned_intQI_type_node);
  /* Prevent front-ends from transforming Poly8_t arrays into string
     literals.  */
  TYPE_STRING_FLAG (aarch64_simd_types[Poly8_t].eltype) = false;

  aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
    build_distinct_type_copy (unsigned_intHI_type_node);
  aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
    build_distinct_type_copy (unsigned_intDI_type_node);
  aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
    build_distinct_type_copy (unsigned_intTI_type_node);
  /* Init poly vector element types with scalar poly types.  */
  aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
  aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;

  /* Continue with standard types.  */
  aarch64_simd_types[Float16x4_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float16x8_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float32x2_t].eltype = float_type_node;
  aarch64_simd_types[Float32x4_t].eltype = float_type_node;
  aarch64_simd_types[Float64x1_t].eltype = double_type_node;
  aarch64_simd_types[Float64x2_t].eltype = double_type_node;

  /* Init Bfloat vector types with underlying __bf16 type.  */
  aarch64_simd_types[Bfloat16x4_t].eltype = aarch64_bf16_type_node;
  aarch64_simd_types[Bfloat16x8_t].eltype = aarch64_bf16_type_node;

  for (i = 0; i < nelts; i++)
    {
      tree eltype = aarch64_simd_types[i].eltype;
      machine_mode mode = aarch64_simd_types[i].mode;

      /* Entries whose itype was not set above (the vector types) get a
	 freshly-built vector type tagged with its mangled name.  */
      if (aarch64_simd_types[i].itype == NULL)
	{
	  tree type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
	  type = build_distinct_type_copy (type);
	  SET_TYPE_STRUCTURAL_EQUALITY (type);

	  tree mangled_name = get_identifier (aarch64_simd_types[i].mangle);
	  tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("Advanced SIMD type"), value,
			 TYPE_ATTRIBUTES (type));
	  aarch64_simd_types[i].itype = type;
	}

      tdecl = add_builtin_type (aarch64_simd_types[i].name,
				aarch64_simd_types[i].itype);
      TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
    }

#define AARCH64_BUILD_SIGNED_TYPE(mode)  \
  make_signed_type (GET_MODE_PRECISION (mode));
  aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
  aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
  aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
#undef AARCH64_BUILD_SIGNED_TYPE

  tdecl = add_builtin_type
	("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
  TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
  tdecl = add_builtin_type
	("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
  TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
  tdecl = add_builtin_type
	("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
  TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
}
966
/* Register the legacy "__builtin_aarch64_simd_*" scalar typedef names with
   the front end.  These are kept for intrinsics that are not yet strongly
   typed through the qualifier system.  */
static void
aarch64_init_simd_builtin_scalar_types (void)
{
  /* Define typedefs for all the standard scalar types.  */
  (*lang_hooks.types.register_builtin_type) (intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node,
					     "__builtin_aarch64_simd_hf");
  (*lang_hooks.types.register_builtin_type) (intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (aarch64_bf16_type_node,
					     "__builtin_aarch64_simd_bf");
  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_udi");
}
1007
/* Return a set of FLAG_* flags derived from FLAGS
   that describe what a function with result MODE could do,
   taking the command-line flags into account.  */
static unsigned int
aarch64_call_properties (unsigned int flags, machine_mode mode)
{
  /* Unless the builtin opted out with FLAG_AUTO_FP, a floating-point
     result mode implies the FP state is involved.  */
  if (!(flags & FLAG_AUTO_FP) && FLOAT_MODE_P (mode))
    flags |= FLAG_FP;

  /* -fno-trapping-math means that we can assume any FP exceptions
     are not user-visible.  */
  if (!flag_trapping_math)
    flags &= ~FLAG_RAISE_FP_EXCEPTIONS;

  return flags;
}
1024
079c23cf
KT
1025/* Return true if calls to a function with flags F and mode MODE
1026 could modify some form of global state. */
072a8b8f 1027static bool
079c23cf 1028aarch64_modifies_global_state_p (unsigned int f, machine_mode mode)
072a8b8f 1029{
079c23cf 1030 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1031
1032 if (flags & FLAG_RAISE_FP_EXCEPTIONS)
1033 return true;
1034
1035 if (flags & FLAG_PREFETCH_MEMORY)
1036 return true;
1037
1038 return flags & FLAG_WRITE_MEMORY;
1039}
1040
079c23cf
KT
1041/* Return true if calls to a function with flags F and mode MODE
1042 could read some form of global state. */
072a8b8f 1043static bool
079c23cf 1044aarch64_reads_global_state_p (unsigned int f, machine_mode mode)
072a8b8f 1045{
079c23cf 1046 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1047
1048 if (flags & FLAG_READ_FPCR)
1049 return true;
1050
1051 return flags & FLAG_READ_MEMORY;
1052}
1053
079c23cf
KT
1054/* Return true if calls to a function with flags F and mode MODE
1055 could raise a signal. */
072a8b8f 1056static bool
079c23cf 1057aarch64_could_trap_p (unsigned int f, machine_mode mode)
072a8b8f 1058{
079c23cf 1059 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1060
1061 if (flags & FLAG_RAISE_FP_EXCEPTIONS)
1062 return true;
1063
1064 if (flags & (FLAG_READ_MEMORY | FLAG_WRITE_MEMORY))
1065 return true;
1066
1067 return false;
1068}
1069
/* Add attribute NAME to ATTRS.  Returns the extended attribute list; the
   new (argument-less) attribute is prepended.  */
static tree
aarch64_add_attribute (const char *name, tree attrs)
{
  return tree_cons (get_identifier (name), NULL_TREE, attrs);
}
1076
/* Return the appropriate attributes for a function that has
   flags F and mode MODE.  Builtins that do not modify global state are
   "pure" (if they read it) or "const"; "nothrow" is added unless the
   builtin could trap under -fnon-call-exceptions; "leaf" is always added.  */
static tree
aarch64_get_attributes (unsigned int f, machine_mode mode)
{
  tree attrs = NULL_TREE;

  if (!aarch64_modifies_global_state_p (f, mode))
    {
      if (aarch64_reads_global_state_p (f, mode))
	attrs = aarch64_add_attribute ("pure", attrs);
      else
	attrs = aarch64_add_attribute ("const", attrs);
    }

  if (!flag_non_call_exceptions || !aarch64_could_trap_p (f, mode))
    attrs = aarch64_add_attribute ("nothrow", attrs);

  return aarch64_add_attribute ("leaf", attrs);
}
1097
/* True once aarch64_init_simd_builtins has run; guards against
   registering the AdvSIMD builtins twice.  */
static bool aarch64_simd_builtins_initialized_p = false;
1099
/* Due to the architecture not providing lane variant of the lane instructions
   for fcmla we can't use the standard simd builtin expansion code, but we
   still want the majority of the validation that would normally be done.  */

void
aarch64_init_fcmla_laneq_builtins (void)
{
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_fcmla_lane_builtin_data); ++i)
    {
      aarch64_fcmla_laneq_builtin_datum* d
	= &aarch64_fcmla_lane_builtin_data[i];
      tree argtype = aarch64_lookup_simd_builtin_type (d->mode, qualifier_none);
      /* The lane argument indexes into the type twice as wide as the
	 accumulator/multiplicand type.  */
      machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
      tree quadtype
	= aarch64_lookup_simd_builtin_type (quadmode, qualifier_none);
      tree lanetype
	= aarch64_simd_builtin_std_type (SImode, qualifier_lane_pair_index);
      /* Signature: (acc, arg, quad-wide arg, lane index) -> acc type.  */
      tree ftype = build_function_type_list (argtype, argtype, argtype,
					     quadtype, lanetype, NULL_TREE);
      tree attrs = aarch64_get_attributes (FLAG_FP, d->mode);
      tree fndecl
	= aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
1128
/* Register the table-driven AdvSIMD builtins.  When CALLED_FROM_PRAGMA is
   true, only builtins with structure-mode operands are registered (via
   simulate_builtin_function_decl); otherwise only those without, plus the
   lane-bounds-check builtin.  */
void
aarch64_init_simd_builtin_functions (bool called_from_pragma)
{
  unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;

  if (!called_from_pragma)
    {
      tree lane_check_fpr = build_function_type_list (void_type_node,
						      size_type_node,
						      size_type_node,
						      intSI_type_node,
						      NULL);
      aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK]
	= aarch64_general_add_builtin ("__builtin_aarch64_im_lane_boundsi",
				       lane_check_fpr,
				       AARCH64_SIMD_BUILTIN_LANE_CHECK);
    }

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS + 1] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Count structure-mode operands so we can honour the
	 called_from_pragma split described above.  */
      int struct_mode_args = 0;
      for (int j = op_num; j >= 0; j--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[j].mode;
	  if (aarch64_advsimd_struct_mode_p (op_mode))
	    struct_mode_args++;
	}

      if ((called_from_pragma && struct_mode_args == 0)
	  || (!called_from_pragma && struct_mode_args > 0))
	continue;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[op_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[op_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[op_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      /* Builtins with any unsigned/poly operand carry a per-operand type
	 signature suffix (e.g. "_uuu") to disambiguate the name.  */
      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      tree attrs = aarch64_get_attributes (d->flags, d->mode);

      if (called_from_pragma)
	{
	  unsigned int raw_code
	    = (fcode << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
	  fndecl = simulate_builtin_function_decl (input_location, namebuf,
						   ftype, raw_code, NULL,
						   attrs);
	}
      else
	fndecl = aarch64_general_add_builtin (namebuf, ftype, fcode, attrs);

      aarch64_builtin_decls[fcode] = fndecl;
    }
}
1271
/* Register the tuple type that contains NUM_VECTORS of the AdvSIMD type
   indexed by TYPE_INDEX.  */
static void
register_tuple_type (unsigned int num_vectors, unsigned int type_index)
{
  aarch64_simd_type_info *type = &aarch64_simd_types[type_index];

  /* Synthesize the name of the user-visible vector tuple type.  The table
     names have the form "__Int8x8_t" (see the ENTRY macro), so dropping the
     leading "__" (name + 2) and the trailing "_t" (strlen - 4 characters)
     and lower-casing the first letter yields e.g. "int8x8x2_t".  The buffer
     is sized for the longest possible result.  */
  const char *vector_type_name = type->name;
  char tuple_type_name[sizeof ("bfloat16x4x2_t")];
  snprintf (tuple_type_name, sizeof (tuple_type_name), "%.*sx%d_t",
	    (int) strlen (vector_type_name) - 4, vector_type_name + 2,
	    num_vectors);
  tuple_type_name[0] = TOLOWER (tuple_type_name[0]);

  tree vector_type = type->itype;
  tree array_type = build_array_type_nelts (vector_type, num_vectors);
  /* 64-bit scalar (DImode) tuples use the dedicated V{2,3,4}x1DI modes
     instead of a plain array mode.  */
  if (type->mode == DImode)
    {
      if (num_vectors == 2)
	SET_TYPE_MODE (array_type, V2x1DImode);
      else if (num_vectors == 3)
	SET_TYPE_MODE (array_type, V3x1DImode);
      else if (num_vectors == 4)
	SET_TYPE_MODE (array_type, V4x1DImode);
    }

  unsigned int alignment
    = (known_eq (GET_MODE_SIZE (type->mode), 16) ? 128 : 64);
  gcc_assert (TYPE_MODE_RAW (array_type) == TYPE_MODE (array_type)
	      && TYPE_ALIGN (array_type) == alignment);

  /* The tuple is a record with a single "val" array member, matching the
     arm_neon.h definition.  */
  tree field = build_decl (input_location, FIELD_DECL,
			   get_identifier ("val"), array_type);

  tree t = lang_hooks.types.simulate_record_decl (input_location,
						  tuple_type_name,
						  make_array_slice (&field,
								    1));
  gcc_assert (TYPE_MODE_RAW (t) == TYPE_MODE (t)
	      && TYPE_ALIGN (t) == alignment);

  if (num_vectors == 2)
    aarch64_simd_tuple_types[type_index][0] = t;
  else if (num_vectors == 3)
    aarch64_simd_tuple_types[type_index][1] = t;
  else if (num_vectors == 4)
    aarch64_simd_tuple_types[type_index][2] = t;
}
1321
1322static bool
1323aarch64_scalar_builtin_type_p (aarch64_simd_type t)
1324{
1325 return (t == Poly8_t || t == Poly16_t || t == Poly64_t || t == Poly128_t);
1326}
1327
1328/* Implement #pragma GCC aarch64 "arm_neon.h". */
1329void
1330handle_arm_neon_h (void)
1331{
1332 /* Register the AdvSIMD vector tuple types. */
1333 for (unsigned int i = 0; i < ARM_NEON_H_TYPES_LAST; i++)
1334 for (unsigned int count = 2; count <= 4; ++count)
1335 if (!aarch64_scalar_builtin_type_p (aarch64_simd_types[i].type))
1336 register_tuple_type (count, i);
1337
1338 aarch64_init_simd_builtin_functions (true);
1339}
1340
/* Register all AdvSIMD types and builtins.  Idempotent: subsequent calls
   return immediately.  */
void
aarch64_init_simd_builtins (void)
{
  if (aarch64_simd_builtins_initialized_p)
    return;

  aarch64_simd_builtins_initialized_p = true;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  It can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();

  aarch64_init_simd_builtin_functions (false);
  /* In LTO there is no #pragma to trigger the tuple-type registration,
     so do it here.  */
  if (in_lto_p)
    handle_arm_neon_h ();

  /* Initialize the remaining fcmla_laneq intrinsics.  */
  aarch64_init_fcmla_laneq_builtins ();
}
1364
/* Register the CRC32 builtins, one per entry in aarch64_crc_builtin_data.
   Each takes an unsigned SI accumulator plus a data value of the entry's
   mode and returns an unsigned SI.  */
static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_simd_builtin_std_type (d->mode,
						    qualifier_unsigned);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
      tree attrs = aarch64_get_attributes (FLAG_NONE, d->mode);
      tree fndecl
	= aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
1384
a6fc00da
BH
1385/* Add builtins for reciprocal square root. */
1386
1387void
1388aarch64_init_builtin_rsqrt (void)
1389{
1390 tree fndecl = NULL;
1391 tree ftype = NULL;
1392
1393 tree V2SF_type_node = build_vector_type (float_type_node, 2);
1394 tree V2DF_type_node = build_vector_type (double_type_node, 2);
1395 tree V4SF_type_node = build_vector_type (float_type_node, 4);
1396
1397 struct builtin_decls_data
1398 {
1399 tree type_node;
1400 const char *builtin_name;
1401 int function_code;
1402 };
1403
1404 builtin_decls_data bdda[] =
1405 {
1406 { double_type_node, "__builtin_aarch64_rsqrt_df", AARCH64_BUILTIN_RSQRT_DF },
1407 { float_type_node, "__builtin_aarch64_rsqrt_sf", AARCH64_BUILTIN_RSQRT_SF },
1408 { V2DF_type_node, "__builtin_aarch64_rsqrt_v2df", AARCH64_BUILTIN_RSQRT_V2DF },
1409 { V2SF_type_node, "__builtin_aarch64_rsqrt_v2sf", AARCH64_BUILTIN_RSQRT_V2SF },
1410 { V4SF_type_node, "__builtin_aarch64_rsqrt_v4sf", AARCH64_BUILTIN_RSQRT_V4SF }
1411 };
1412
1413 builtin_decls_data *bdd = bdda;
1414 builtin_decls_data *bdd_end = bdd + (sizeof (bdda) / sizeof (builtin_decls_data));
1415
1416 for (; bdd < bdd_end; bdd++)
1417 {
1418 ftype = build_function_type_list (bdd->type_node, bdd->type_node, NULL_TREE);
079c23cf 1419 tree attrs = aarch64_get_attributes (FLAG_FP, TYPE_MODE (bdd->type_node));
6d4d616a 1420 fndecl = aarch64_general_add_builtin (bdd->builtin_name,
079c23cf 1421 ftype, bdd->function_code, attrs);
a6fc00da
BH
1422 aarch64_builtin_decls[bdd->function_code] = fndecl;
1423 }
1424}
1425
/* Initialize the backend types that support the user-visible __fp16
   type, also initialize a pointer to that type, to be used when
   forming HFAs.  */

static void
aarch64_init_fp16_types (void)
{
  aarch64_fp16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_fp16_type_node) = 16;
  /* NOTE(review): unlike __bf16 below, no explicit SET_TYPE_MODE here —
     presumably layout_type derives the mode from the 16-bit precision;
     confirm before relying on it.  */
  layout_type (aarch64_fp16_type_node);

  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node, "__fp16");
  aarch64_fp16_ptr_type_node = build_pointer_type (aarch64_fp16_type_node);
}
1440
/* Initialize the backend REAL_TYPE type supporting bfloat types.  */
static void
aarch64_init_bf16_types (void)
{
  aarch64_bf16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_bf16_type_node) = 16;
  /* __bf16 shares the 16-bit precision of __fp16, so the mode must be
     forced to BFmode explicitly.  */
  SET_TYPE_MODE (aarch64_bf16_type_node, BFmode);
  layout_type (aarch64_bf16_type_node);

  lang_hooks.types.register_builtin_type (aarch64_bf16_type_node, "__bf16");
  aarch64_bf16_ptr_type_node = build_pointer_type (aarch64_bf16_type_node);
}
1453
/* Pointer authentication builtins that will become NOP on legacy platform.
   Currently, these builtins are for internal use only (libgcc EH unwinder).  */

void
aarch64_init_pauth_hint_builtins (void)
{
  /* Pointer Authentication builtins.  The auth/pac builtins take a pointer
     and a 64-bit modifier; the strip builtin takes just a pointer.  */
  tree ftype_pointer_auth
    = build_function_type_list (ptr_type_node, ptr_type_node,
				unsigned_intDI_type_node, NULL_TREE);
  tree ftype_pointer_strip
    = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);

  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_XPACLRI]
    = aarch64_general_add_builtin ("__builtin_aarch64_xpaclri",
				   ftype_pointer_strip,
				   AARCH64_PAUTH_BUILTIN_XPACLRI);
}
1488
/* Initialize the transactional memory extension (TME) builtins: tstart and
   ttest return a 64-bit status, tcommit takes and returns nothing, and
   tcancel takes a 64-bit cancellation reason.  */
static void
aarch64_init_tme_builtins (void)
{
  tree ftype_uint64_void
    = build_function_type_list (uint64_type_node, NULL);
  tree ftype_void_void
    = build_function_type_list (void_type_node, NULL);
  tree ftype_void_uint64
    = build_function_type_list (void_type_node, uint64_type_node, NULL);

  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TSTART]
    = aarch64_general_add_builtin ("__builtin_aarch64_tstart",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TSTART);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TTEST]
    = aarch64_general_add_builtin ("__builtin_aarch64_ttest",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TTEST);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCOMMIT]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcommit",
				   ftype_void_void,
				   AARCH64_TME_BUILTIN_TCOMMIT);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCANCEL]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcancel",
				   ftype_void_uint64,
				   AARCH64_TME_BUILTIN_TCANCEL);
}
1517
/* Add builtins for Random Number instructions.  Both take a pointer to a
   64-bit destination and return an int status.  */

static void
aarch64_init_rng_builtins (void)
{
  tree unsigned_ptr_type = build_pointer_type (unsigned_intDI_type_node);
  tree ftype
    = build_function_type_list (integer_type_node, unsigned_ptr_type, NULL);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDR]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndr", ftype,
				   AARCH64_BUILTIN_RNG_RNDR);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDRRS]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndrrs", ftype,
				   AARCH64_BUILTIN_RNG_RNDRRS);
}
1533
/* Initialize the memory tagging extension (MTE) builtins.  */
/* Per-builtin function type and insn code, indexed by builtin code relative
   to AARCH64_MEMTAG_BUILTIN_START; used when expanding the builtins.  */
struct
{
  tree ftype;
  enum insn_code icode;
} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
			      AARCH64_MEMTAG_BUILTIN_START - 1];

static void
aarch64_init_memtag_builtins (void)
{
  tree fntype = NULL;

  /* Register builtin F with name N, insn I and type T, and record the
     (type, insn code) pair for later expansion.  */
#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
  aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
    = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
				   T, AARCH64_MEMTAG_BUILTIN_##F); \
  aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
			      AARCH64_MEMTAG_BUILTIN_START - 1] = \
    {T, CODE_FOR_##I};

  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);

  fntype = build_function_type_list (uint64_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);

  fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
				     ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     unsigned_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);

  fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);

#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
}
c5dc215d 1579
fdcddba8
PW
1580/* Add builtins for Load/store 64 Byte instructions. */
1581
1582typedef struct
1583{
1584 const char *name;
1585 unsigned int code;
1586 tree type;
1587} ls64_builtins_data;
1588
1589static GTY(()) tree ls64_arm_data_t = NULL_TREE;
1590
1591static void
1592aarch64_init_ls64_builtins_types (void)
1593{
1594 /* Synthesize:
1595
1596 typedef struct {
1597 uint64_t val[8];
1598 } __arm_data512_t; */
1599 const char *tuple_type_name = "__arm_data512_t";
1600 tree node_type = get_typenode_from_name (UINT64_TYPE);
1601 tree array_type = build_array_type_nelts (node_type, 8);
1602 SET_TYPE_MODE (array_type, V8DImode);
1603
1604 gcc_assert (TYPE_MODE_RAW (array_type) == TYPE_MODE (array_type));
1605 gcc_assert (TYPE_ALIGN (array_type) == 64);
1606
1607 tree field = build_decl (input_location, FIELD_DECL,
1608 get_identifier ("val"), array_type);
1609
1610 ls64_arm_data_t = lang_hooks.types.simulate_record_decl (input_location,
1611 tuple_type_name,
1612 make_array_slice (&field, 1));
1613
1614 gcc_assert (TYPE_MODE (ls64_arm_data_t) == V8DImode);
1615 gcc_assert (TYPE_MODE_RAW (ls64_arm_data_t) == TYPE_MODE (ls64_arm_data_t));
1616 gcc_assert (TYPE_ALIGN (ls64_arm_data_t) == 64);
1617}
1618
1619static void
1620aarch64_init_ls64_builtins (void)
1621{
1622 aarch64_init_ls64_builtins_types ();
1623
1624 ls64_builtins_data data[4] = {
1625 {"__builtin_aarch64_ld64b", AARCH64_LS64_BUILTIN_LD64B,
1626 build_function_type_list (ls64_arm_data_t,
1627 const_ptr_type_node, NULL_TREE)},
1628 {"__builtin_aarch64_st64b", AARCH64_LS64_BUILTIN_ST64B,
1629 build_function_type_list (void_type_node, ptr_type_node,
1630 ls64_arm_data_t, NULL_TREE)},
1631 {"__builtin_aarch64_st64bv", AARCH64_LS64_BUILTIN_ST64BV,
1632 build_function_type_list (uint64_type_node, ptr_type_node,
1633 ls64_arm_data_t, NULL_TREE)},
1634 {"__builtin_aarch64_st64bv0", AARCH64_LS64_BUILTIN_ST64BV0,
1635 build_function_type_list (uint64_type_node, ptr_type_node,
1636 ls64_arm_data_t, NULL_TREE)},
1637 };
1638
1639 for (size_t i = 0; i < ARRAY_SIZE (data); ++i)
1640 aarch64_builtin_decls[data[i].code]
1641 = aarch64_general_add_builtin (data[i].name, data[i].type, data[i].code);
1642}
1643
0d7e5fa6 1644/* Initialize fpsr fpcr getters and setters. */
c5dc215d 1645
0d7e5fa6
AC
1646static void
1647aarch64_init_fpsr_fpcr_builtins (void)
43e9d192 1648{
0d7e5fa6 1649 tree ftype_set
aa87aced 1650 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
0d7e5fa6 1651 tree ftype_get
aa87aced
KV
1652 = build_function_type_list (unsigned_type_node, NULL);
1653
1654 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
6d4d616a 1655 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr",
0d7e5fa6 1656 ftype_get,
6d4d616a 1657 AARCH64_BUILTIN_GET_FPCR);
aa87aced 1658 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
6d4d616a 1659 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr",
0d7e5fa6 1660 ftype_set,
6d4d616a 1661 AARCH64_BUILTIN_SET_FPCR);
aa87aced 1662 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
6d4d616a 1663 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr",
0d7e5fa6 1664 ftype_get,
6d4d616a 1665 AARCH64_BUILTIN_GET_FPSR);
aa87aced 1666 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
6d4d616a 1667 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr",
0d7e5fa6 1668 ftype_set,
6d4d616a 1669 AARCH64_BUILTIN_SET_FPSR);
aa87aced 1670
0d7e5fa6
AC
1671 ftype_set
1672 = build_function_type_list (void_type_node, long_long_unsigned_type_node,
1673 NULL);
1674 ftype_get
1675 = build_function_type_list (long_long_unsigned_type_node, NULL);
1676
1677 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR64]
1678 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr64",
1679 ftype_get,
1680 AARCH64_BUILTIN_GET_FPCR64);
1681 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR64]
1682 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr64",
1683 ftype_set,
1684 AARCH64_BUILTIN_SET_FPCR64);
1685 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR64]
1686 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr64",
1687 ftype_get,
1688 AARCH64_BUILTIN_GET_FPSR64);
1689 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR64]
1690 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr64",
1691 ftype_set,
1692 AARCH64_BUILTIN_SET_FPSR64);
1693}
1694
1695/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group. */
1696
1697void
1698aarch64_general_init_builtins (void)
1699{
1700 aarch64_init_fpsr_fpcr_builtins ();
1701
1b62ed4f 1702 aarch64_init_fp16_types ();
c2ec330c 1703
abbe1ed2
SMW
1704 aarch64_init_bf16_types ();
1705
342be7f7 1706 if (TARGET_SIMD)
280d970b 1707 aarch64_init_simd_builtins ();
e95a988a
KT
1708
1709 aarch64_init_crc32_builtins ();
a6fc00da 1710 aarch64_init_builtin_rsqrt ();
c5dc215d 1711 aarch64_init_rng_builtins ();
312492bd 1712
e1d5d19e
KT
1713 tree ftype_jcvt
1714 = build_function_type_list (intSI_type_node, double_type_node, NULL);
1715 aarch64_builtin_decls[AARCH64_JSCVT]
6d4d616a
RS
1716 = aarch64_general_add_builtin ("__builtin_aarch64_jcvtzs", ftype_jcvt,
1717 AARCH64_JSCVT);
e1d5d19e 1718
a876231c
JW
1719 /* Initialize pointer authentication builtins which are backed by instructions
1720 in NOP encoding space.
1721
1722 NOTE: these builtins are supposed to be used by libgcc unwinder only, as
1723 there is no support on return address signing under ILP32, we don't
1724 register them. */
1725 if (!TARGET_ILP32)
1726 aarch64_init_pauth_hint_builtins ();
89626179
SD
1727
1728 if (TARGET_TME)
1729 aarch64_init_tme_builtins ();
ef01e6bb
DZ
1730
1731 if (TARGET_MEMTAG)
1732 aarch64_init_memtag_builtins ();
fdcddba8
PW
1733
1734 if (TARGET_LS64)
1735 aarch64_init_ls64_builtins ();
43e9d192
IB
1736}
1737
6d4d616a 1738/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group. */
119103ca 1739tree
6d4d616a 1740aarch64_general_builtin_decl (unsigned code, bool)
119103ca
JG
1741{
1742 if (code >= AARCH64_BUILTIN_MAX)
1743 return error_mark_node;
1744
1745 return aarch64_builtin_decls[code];
1746}
1747
/* Classification of one argument of a SIMD builtin, used to drive
   aarch64_simd_expand_args.  A SIMD_ARG_STOP entry terminates the
   argument list.  */
typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_LANE_INDEX,
  SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX,
  SIMD_ARG_LANE_PAIR_INDEX,
  SIMD_ARG_LANE_QUADTUP_INDEX,
  SIMD_ARG_STOP
} builtin_simd_arg;
1758
e95a988a 1759
43e9d192
IB
1760static rtx
1761aarch64_simd_expand_args (rtx target, int icode, int have_retval,
4d0a0237 1762 tree exp, builtin_simd_arg *args,
b8506a8a 1763 machine_mode builtin_mode)
43e9d192 1764{
43e9d192 1765 rtx pat;
d9e80f49
AL
1766 rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand. */
1767 int opc = 0;
1768
1769 if (have_retval)
1770 {
1771 machine_mode tmode = insn_data[icode].operand[0].mode;
1772 if (!target
43e9d192 1773 || GET_MODE (target) != tmode
d9e80f49
AL
1774 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1775 target = gen_reg_rtx (tmode);
1776 op[opc++] = target;
1777 }
43e9d192 1778
43e9d192
IB
1779 for (;;)
1780 {
d9e80f49 1781 builtin_simd_arg thisarg = args[opc - have_retval];
43e9d192
IB
1782
1783 if (thisarg == SIMD_ARG_STOP)
1784 break;
1785 else
1786 {
d9e80f49 1787 tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
b8506a8a 1788 machine_mode mode = insn_data[icode].operand[opc].mode;
d9e80f49 1789 op[opc] = expand_normal (arg);
43e9d192
IB
1790
1791 switch (thisarg)
1792 {
1793 case SIMD_ARG_COPY_TO_REG:
d9e80f49
AL
1794 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1795 op[opc] = convert_memory_address (Pmode, op[opc]);
1796 /*gcc_assert (GET_MODE (op[opc]) == mode); */
1797 if (!(*insn_data[icode].operand[opc].predicate)
1798 (op[opc], mode))
1799 op[opc] = copy_to_mode_reg (mode, op[opc]);
43e9d192
IB
1800 break;
1801
4d0a0237
CB
1802 case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
1803 gcc_assert (opc > 1);
1804 if (CONST_INT_P (op[opc]))
1805 {
6a70badb
RS
1806 unsigned int nunits
1807 = GET_MODE_NUNITS (builtin_mode).to_constant ();
1808 aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
4d0a0237 1809 /* Keep to GCC-vector-extension lane indices in the RTL. */
7ac29c0f
RS
1810 op[opc] = aarch64_endian_lane_rtx (builtin_mode,
1811 INTVAL (op[opc]));
4d0a0237
CB
1812 }
1813 goto constant_arg;
1814
2a49c16d
AL
1815 case SIMD_ARG_LANE_INDEX:
1816 /* Must be a previous operand into which this is an index. */
d9e80f49
AL
1817 gcc_assert (opc > 0);
1818 if (CONST_INT_P (op[opc]))
2a49c16d 1819 {
d9e80f49 1820 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
6a70badb
RS
1821 unsigned int nunits
1822 = GET_MODE_NUNITS (vmode).to_constant ();
1823 aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
2a49c16d 1824 /* Keep to GCC-vector-extension lane indices in the RTL. */
7ac29c0f 1825 op[opc] = aarch64_endian_lane_rtx (vmode, INTVAL (op[opc]));
2a49c16d 1826 }
9d63f43b
TC
1827 /* If the lane index isn't a constant then error out. */
1828 goto constant_arg;
1829
1830 case SIMD_ARG_LANE_PAIR_INDEX:
1831 /* Must be a previous operand into which this is an index and
1832 index is restricted to nunits / 2. */
1833 gcc_assert (opc > 0);
1834 if (CONST_INT_P (op[opc]))
1835 {
1836 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
1837 unsigned int nunits
1838 = GET_MODE_NUNITS (vmode).to_constant ();
1839 aarch64_simd_lane_bounds (op[opc], 0, nunits / 2, exp);
1840 /* Keep to GCC-vector-extension lane indices in the RTL. */
33b5a38c
TC
1841 int lane = INTVAL (op[opc]);
1842 op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 2, lane),
1843 SImode);
9d63f43b 1844 }
8c197c85
SMW
1845 /* If the lane index isn't a constant then error out. */
1846 goto constant_arg;
1847 case SIMD_ARG_LANE_QUADTUP_INDEX:
1848 /* Must be a previous operand into which this is an index and
1849 index is restricted to nunits / 4. */
1850 gcc_assert (opc > 0);
1851 if (CONST_INT_P (op[opc]))
1852 {
1853 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
1854 unsigned int nunits
1855 = GET_MODE_NUNITS (vmode).to_constant ();
1856 aarch64_simd_lane_bounds (op[opc], 0, nunits / 4, exp);
1857 /* Keep to GCC-vector-extension lane indices in the RTL. */
1858 int lane = INTVAL (op[opc]);
1859 op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 4, lane),
1860 SImode);
1861 }
1862 /* If the lane index isn't a constant then error out. */
1863 goto constant_arg;
43e9d192 1864 case SIMD_ARG_CONSTANT:
4d0a0237 1865constant_arg:
d9e80f49
AL
1866 if (!(*insn_data[icode].operand[opc].predicate)
1867 (op[opc], mode))
d5a29419 1868 {
62e43587
MS
1869 error_at (EXPR_LOCATION (exp),
1870 "argument %d must be a constant immediate",
1871 opc + 1 - have_retval);
d5a29419
KT
1872 return const0_rtx;
1873 }
43e9d192
IB
1874 break;
1875
1876 case SIMD_ARG_STOP:
1877 gcc_unreachable ();
1878 }
1879
d9e80f49 1880 opc++;
43e9d192
IB
1881 }
1882 }
1883
d9e80f49
AL
1884 switch (opc)
1885 {
1886 case 1:
1887 pat = GEN_FCN (icode) (op[0]);
1888 break;
43e9d192 1889
d9e80f49
AL
1890 case 2:
1891 pat = GEN_FCN (icode) (op[0], op[1]);
1892 break;
43e9d192 1893
d9e80f49
AL
1894 case 3:
1895 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
1896 break;
43e9d192 1897
d9e80f49
AL
1898 case 4:
1899 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
1900 break;
43e9d192 1901
d9e80f49
AL
1902 case 5:
1903 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
1904 break;
43e9d192 1905
d9e80f49
AL
1906 case 6:
1907 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
1908 break;
43e9d192 1909
d9e80f49
AL
1910 default:
1911 gcc_unreachable ();
1912 }
43e9d192
IB
1913
1914 if (!pat)
d5a29419 1915 return NULL_RTX;
43e9d192
IB
1916
1917 emit_insn (pat);
1918
1919 return target;
1920}
1921
1922/* Expand an AArch64 AdvSIMD builtin(intrinsic). */
1923rtx
1924aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
1925{
661fce82
AL
1926 if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
1927 {
9c4f25cc
AP
1928 rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
1929 rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
1930 if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
1931 && UINTVAL (elementsize) != 0
1932 && UINTVAL (totalsize) != 0)
1933 {
1934 rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
1935 if (CONST_INT_P (lane_idx))
1936 aarch64_simd_lane_bounds (lane_idx, 0,
1937 UINTVAL (totalsize)
1938 / UINTVAL (elementsize),
1939 exp);
1940 else
62e43587
MS
1941 error_at (EXPR_LOCATION (exp),
1942 "lane index must be a constant immediate");
9c4f25cc 1943 }
661fce82 1944 else
62e43587 1945 error_at (EXPR_LOCATION (exp),
58385f6a 1946 "total size and element size must be a nonzero "
62e43587 1947 "constant immediate");
661fce82
AL
1948 /* Don't generate any RTL. */
1949 return const0_rtx;
1950 }
342be7f7 1951 aarch64_simd_builtin_datum *d =
661fce82 1952 &aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
342be7f7 1953 enum insn_code icode = d->code;
0ff2bf46 1954 builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
b5828b4b
JG
1955 int num_args = insn_data[d->code].n_operands;
1956 int is_void = 0;
1957 int k;
43e9d192 1958
b5828b4b 1959 is_void = !!(d->qualifiers[0] & qualifier_void);
43e9d192 1960
b5828b4b
JG
1961 num_args += is_void;
1962
1963 for (k = 1; k < num_args; k++)
1964 {
1965 /* We have four arrays of data, each indexed in a different fashion.
1966 qualifiers - element 0 always describes the function return type.
1967 operands - element 0 is either the operand for return value (if
1968 the function has a non-void return type) or the operand for the
1969 first argument.
1970 expr_args - element 0 always holds the first argument.
1971 args - element 0 is always used for the return type. */
1972 int qualifiers_k = k;
1973 int operands_k = k - is_void;
1974 int expr_args_k = k - 1;
1975
2a49c16d
AL
1976 if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
1977 args[k] = SIMD_ARG_LANE_INDEX;
9d63f43b
TC
1978 else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
1979 args[k] = SIMD_ARG_LANE_PAIR_INDEX;
8c197c85
SMW
1980 else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
1981 args[k] = SIMD_ARG_LANE_QUADTUP_INDEX;
4d0a0237
CB
1982 else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
1983 args[k] = SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX;
2a49c16d 1984 else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
b5828b4b
JG
1985 args[k] = SIMD_ARG_CONSTANT;
1986 else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
1987 {
1988 rtx arg
1989 = expand_normal (CALL_EXPR_ARG (exp,
1990 (expr_args_k)));
1991 /* Handle constants only if the predicate allows it. */
1992 bool op_const_int_p =
1993 (CONST_INT_P (arg)
1994 && (*insn_data[icode].operand[operands_k].predicate)
1995 (arg, insn_data[icode].operand[operands_k].mode));
1996 args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
1997 }
1998 else
1999 args[k] = SIMD_ARG_COPY_TO_REG;
43e9d192 2000
43e9d192 2001 }
b5828b4b
JG
2002 args[k] = SIMD_ARG_STOP;
2003
2004 /* The interface to aarch64_simd_expand_args expects a 0 if
2005 the function is void, and a 1 if it is not. */
2006 return aarch64_simd_expand_args
4d0a0237 2007 (target, icode, !is_void, exp, &args[1], d->mode);
43e9d192 2008}
342be7f7 2009
5d357f26
KT
2010rtx
2011aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
2012{
2013 rtx pat;
2014 aarch64_crc_builtin_datum *d
2015 = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
2016 enum insn_code icode = d->icode;
2017 tree arg0 = CALL_EXPR_ARG (exp, 0);
2018 tree arg1 = CALL_EXPR_ARG (exp, 1);
2019 rtx op0 = expand_normal (arg0);
2020 rtx op1 = expand_normal (arg1);
ef4bddc2
RS
2021 machine_mode tmode = insn_data[icode].operand[0].mode;
2022 machine_mode mode0 = insn_data[icode].operand[1].mode;
2023 machine_mode mode1 = insn_data[icode].operand[2].mode;
5d357f26
KT
2024
2025 if (! target
2026 || GET_MODE (target) != tmode
2027 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2028 target = gen_reg_rtx (tmode);
2029
2030 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2031 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
2032
2033 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2034 op0 = copy_to_mode_reg (mode0, op0);
2035 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2036 op1 = copy_to_mode_reg (mode1, op1);
2037
2038 pat = GEN_FCN (icode) (target, op0, op1);
d5a29419
KT
2039 if (!pat)
2040 return NULL_RTX;
2041
5d357f26
KT
2042 emit_insn (pat);
2043 return target;
2044}
2045
a6fc00da
BH
2046/* Function to expand reciprocal square root builtins. */
2047
2048static rtx
2049aarch64_expand_builtin_rsqrt (int fcode, tree exp, rtx target)
2050{
2051 tree arg0 = CALL_EXPR_ARG (exp, 0);
2052 rtx op0 = expand_normal (arg0);
2053
2054 rtx (*gen) (rtx, rtx);
2055
2056 switch (fcode)
2057 {
2058 case AARCH64_BUILTIN_RSQRT_DF:
ee62a5a6 2059 gen = gen_rsqrtdf2;
a6fc00da
BH
2060 break;
2061 case AARCH64_BUILTIN_RSQRT_SF:
ee62a5a6 2062 gen = gen_rsqrtsf2;
a6fc00da
BH
2063 break;
2064 case AARCH64_BUILTIN_RSQRT_V2DF:
ee62a5a6 2065 gen = gen_rsqrtv2df2;
a6fc00da
BH
2066 break;
2067 case AARCH64_BUILTIN_RSQRT_V2SF:
ee62a5a6 2068 gen = gen_rsqrtv2sf2;
a6fc00da
BH
2069 break;
2070 case AARCH64_BUILTIN_RSQRT_V4SF:
ee62a5a6 2071 gen = gen_rsqrtv4sf2;
a6fc00da
BH
2072 break;
2073 default: gcc_unreachable ();
2074 }
2075
2076 if (!target)
2077 target = gen_reg_rtx (GET_MODE (op0));
2078
2079 emit_insn (gen (target, op0));
2080
2081 return target;
2082}
2083
9d63f43b
TC
2084/* Expand a FCMLA lane expression EXP with code FCODE and
2085 result going to TARGET if that is convenient. */
2086
2087rtx
2088aarch64_expand_fcmla_builtin (tree exp, rtx target, int fcode)
2089{
2090 int bcode = fcode - AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE - 1;
2091 aarch64_fcmla_laneq_builtin_datum* d
2092 = &aarch64_fcmla_lane_builtin_data[bcode];
2093 machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
2094 rtx op0 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 0)));
2095 rtx op1 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 1)));
2096 rtx op2 = force_reg (quadmode, expand_normal (CALL_EXPR_ARG (exp, 2)));
2097 tree tmp = CALL_EXPR_ARG (exp, 3);
2098 rtx lane_idx = expand_expr (tmp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
2099
2100 /* Validate that the lane index is a constant. */
2101 if (!CONST_INT_P (lane_idx))
2102 {
62e43587
MS
2103 error_at (EXPR_LOCATION (exp),
2104 "argument %d must be a constant immediate", 4);
9d63f43b
TC
2105 return const0_rtx;
2106 }
2107
2108 /* Validate that the index is within the expected range. */
2109 int nunits = GET_MODE_NUNITS (quadmode).to_constant ();
2110 aarch64_simd_lane_bounds (lane_idx, 0, nunits / 2, exp);
2111
9d63f43b
TC
2112 /* Generate the correct register and mode. */
2113 int lane = INTVAL (lane_idx);
2114
2115 if (lane < nunits / 4)
33b5a38c
TC
2116 op2 = simplify_gen_subreg (d->mode, op2, quadmode,
2117 subreg_lowpart_offset (d->mode, quadmode));
9d63f43b
TC
2118 else
2119 {
2120 /* Select the upper 64 bits, either a V2SF or V4HF, this however
2121 is quite messy, as the operation required even though simple
2122 doesn't have a simple RTL pattern, and seems it's quite hard to
2123 define using a single RTL pattern. The target generic version
2124 gen_highpart_mode generates code that isn't optimal. */
2125 rtx temp1 = gen_reg_rtx (d->mode);
2126 rtx temp2 = gen_reg_rtx (DImode);
33b5a38c
TC
2127 temp1 = simplify_gen_subreg (d->mode, op2, quadmode,
2128 subreg_lowpart_offset (d->mode, quadmode));
9d63f43b 2129 temp1 = simplify_gen_subreg (V2DImode, temp1, d->mode, 0);
33b5a38c
TC
2130 if (BYTES_BIG_ENDIAN)
2131 emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const0_rtx));
2132 else
2133 emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const1_rtx));
9d63f43b
TC
2134 op2 = simplify_gen_subreg (d->mode, temp2, GET_MODE (temp2), 0);
2135
2136 /* And recalculate the index. */
2137 lane -= nunits / 4;
2138 }
2139
33b5a38c
TC
2140 /* Keep to GCC-vector-extension lane indices in the RTL, only nunits / 4
2141 (max nunits in range check) are valid. Which means only 0-1, so we
2142 only need to know the order in a V2mode. */
2143 lane_idx = aarch64_endian_lane_rtx (V2DImode, lane);
2144
fa59c8dc
AC
2145 if (!target
2146 || !REG_P (target)
2147 || GET_MODE (target) != d->mode)
9d63f43b 2148 target = gen_reg_rtx (d->mode);
9d63f43b
TC
2149
2150 rtx pat = NULL_RTX;
2151
2152 if (d->lane)
33b5a38c 2153 pat = GEN_FCN (d->icode) (target, op0, op1, op2, lane_idx);
9d63f43b
TC
2154 else
2155 pat = GEN_FCN (d->icode) (target, op0, op1, op2);
2156
2157 if (!pat)
2158 return NULL_RTX;
2159
2160 emit_insn (pat);
2161 return target;
2162}
2163
89626179
SD
2164/* Function to expand an expression EXP which calls one of the Transactional
2165 Memory Extension (TME) builtins FCODE with the result going to TARGET. */
2166static rtx
2167aarch64_expand_builtin_tme (int fcode, tree exp, rtx target)
2168{
2169 switch (fcode)
2170 {
2171 case AARCH64_TME_BUILTIN_TSTART:
2172 target = gen_reg_rtx (DImode);
2173 emit_insn (GEN_FCN (CODE_FOR_tstart) (target));
2174 break;
2175
2176 case AARCH64_TME_BUILTIN_TTEST:
2177 target = gen_reg_rtx (DImode);
2178 emit_insn (GEN_FCN (CODE_FOR_ttest) (target));
2179 break;
2180
2181 case AARCH64_TME_BUILTIN_TCOMMIT:
2182 emit_insn (GEN_FCN (CODE_FOR_tcommit) ());
2183 break;
2184
2185 case AARCH64_TME_BUILTIN_TCANCEL:
2186 {
2187 tree arg0 = CALL_EXPR_ARG (exp, 0);
2188 rtx op0 = expand_normal (arg0);
2189 if (CONST_INT_P (op0) && UINTVAL (op0) <= 65536)
2190 emit_insn (GEN_FCN (CODE_FOR_tcancel) (op0));
2191 else
2192 {
62e43587
MS
2193 error_at (EXPR_LOCATION (exp),
2194 "argument must be a 16-bit constant immediate");
89626179
SD
2195 return const0_rtx;
2196 }
2197 }
2198 break;
2199
2200 default :
2201 gcc_unreachable ();
2202 }
2203 return target;
2204}
2205
fdcddba8
PW
2206/* Function to expand an expression EXP which calls one of the Load/Store
2207 64 Byte extension (LS64) builtins FCODE with the result going to TARGET. */
2208static rtx
2209aarch64_expand_builtin_ls64 (int fcode, tree exp, rtx target)
2210{
2211 expand_operand ops[3];
2212
2213 switch (fcode)
2214 {
2215 case AARCH64_LS64_BUILTIN_LD64B:
2216 {
2217 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2218 create_output_operand (&ops[0], target, V8DImode);
2219 create_input_operand (&ops[1], op0, DImode);
2220 expand_insn (CODE_FOR_ld64b, 2, ops);
2221 return ops[0].value;
2222 }
2223 case AARCH64_LS64_BUILTIN_ST64B:
2224 {
2225 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2226 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2227 create_output_operand (&ops[0], op0, DImode);
2228 create_input_operand (&ops[1], op1, V8DImode);
2229 expand_insn (CODE_FOR_st64b, 2, ops);
2230 return const0_rtx;
2231 }
2232 case AARCH64_LS64_BUILTIN_ST64BV:
2233 {
2234 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2235 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2236 create_output_operand (&ops[0], target, DImode);
2237 create_input_operand (&ops[1], op0, DImode);
2238 create_input_operand (&ops[2], op1, V8DImode);
2239 expand_insn (CODE_FOR_st64bv, 3, ops);
2240 return ops[0].value;
2241 }
2242 case AARCH64_LS64_BUILTIN_ST64BV0:
2243 {
2244 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2245 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2246 create_output_operand (&ops[0], target, DImode);
2247 create_input_operand (&ops[1], op0, DImode);
2248 create_input_operand (&ops[2], op1, V8DImode);
2249 expand_insn (CODE_FOR_st64bv0, 3, ops);
2250 return ops[0].value;
2251 }
2252 }
2253
2254 gcc_unreachable ();
2255}
2256
c5dc215d
KT
2257/* Expand a random number builtin EXP with code FCODE, putting the result
2258 int TARGET. If IGNORE is true the return value is ignored. */
2259
2260rtx
2261aarch64_expand_rng_builtin (tree exp, rtx target, int fcode, int ignore)
2262{
2263 rtx pat;
2264 enum insn_code icode;
2265 if (fcode == AARCH64_BUILTIN_RNG_RNDR)
2266 icode = CODE_FOR_aarch64_rndr;
2267 else if (fcode == AARCH64_BUILTIN_RNG_RNDRRS)
2268 icode = CODE_FOR_aarch64_rndrrs;
2269 else
2270 gcc_unreachable ();
2271
2272 rtx rand = gen_reg_rtx (DImode);
2273 pat = GEN_FCN (icode) (rand);
2274 if (!pat)
2275 return NULL_RTX;
2276
2277 tree arg0 = CALL_EXPR_ARG (exp, 0);
2278 rtx res_addr = expand_normal (arg0);
2279 res_addr = convert_memory_address (Pmode, res_addr);
2280 rtx res_mem = gen_rtx_MEM (DImode, res_addr);
2281 emit_insn (pat);
2282 emit_move_insn (res_mem, rand);
2283 /* If the status result is unused don't generate the CSET code. */
2284 if (ignore)
2285 return target;
2286
2287 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
f7581eb3 2288 rtx cmp_rtx = gen_rtx_fmt_ee (EQ, SImode, cc_reg, const0_rtx);
c5dc215d
KT
2289 emit_insn (gen_aarch64_cstoresi (target, cmp_rtx, cc_reg));
2290 return target;
2291}
2292
ef01e6bb
DZ
2293/* Expand an expression EXP that calls a MEMTAG built-in FCODE
2294 with result going to TARGET. */
2295static rtx
2296aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
2297{
2298 if (TARGET_ILP32)
2299 {
2300 error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
2301 return const0_rtx;
2302 }
2303
2304 rtx pat = NULL;
2305 enum insn_code icode = aarch64_memtag_builtin_data[fcode -
2306 AARCH64_MEMTAG_BUILTIN_START - 1].icode;
2307
2308 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2309 machine_mode mode0 = GET_MODE (op0);
2310 op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
2311 op0 = convert_to_mode (DImode, op0, true);
2312
2313 switch (fcode)
2314 {
2315 case AARCH64_MEMTAG_BUILTIN_IRG:
2316 case AARCH64_MEMTAG_BUILTIN_GMI:
2317 case AARCH64_MEMTAG_BUILTIN_SUBP:
2318 case AARCH64_MEMTAG_BUILTIN_INC_TAG:
2319 {
2320 if (! target
2321 || GET_MODE (target) != DImode
2322 || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
2323 target = gen_reg_rtx (DImode);
2324
2325 if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
2326 {
2327 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2328
2329 if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
2330 {
2331 pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
2332 break;
2333 }
62e43587
MS
2334 error_at (EXPR_LOCATION (exp),
2335 "argument %d must be a constant immediate "
2336 "in range [0,15]", 2);
ef01e6bb
DZ
2337 return const0_rtx;
2338 }
2339 else
2340 {
2341 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2342 machine_mode mode1 = GET_MODE (op1);
2343 op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
2344 op1 = convert_to_mode (DImode, op1, true);
2345 pat = GEN_FCN (icode) (target, op0, op1);
2346 }
2347 break;
2348 }
2349 case AARCH64_MEMTAG_BUILTIN_GET_TAG:
2350 target = op0;
2351 pat = GEN_FCN (icode) (target, op0, const0_rtx);
2352 break;
2353 case AARCH64_MEMTAG_BUILTIN_SET_TAG:
2354 pat = GEN_FCN (icode) (op0, op0, const0_rtx);
2355 break;
2356 default:
2357 gcc_unreachable();
2358 }
2359
2360 if (!pat)
2361 return NULL_RTX;
2362
2363 emit_insn (pat);
2364 return target;
2365}
2366
f5e73de0 2367/* Expand an expression EXP as fpsr or fpcr setter (depending on
0d7e5fa6
AC
2368 UNSPEC) using MODE. */
2369static void
2370aarch64_expand_fpsr_fpcr_setter (int unspec, machine_mode mode, tree exp)
2371{
2372 tree arg = CALL_EXPR_ARG (exp, 0);
2373 rtx op = force_reg (mode, expand_normal (arg));
2374 emit_insn (gen_aarch64_set (unspec, mode, op));
2375}
2376
f5e73de0
AC
2377/* Expand a fpsr or fpcr getter (depending on UNSPEC) using MODE.
2378 Return the target. */
2379static rtx
2380aarch64_expand_fpsr_fpcr_getter (enum insn_code icode, machine_mode mode,
2381 rtx target)
2382{
2383 expand_operand op;
2384 create_output_operand (&op, target, mode);
2385 expand_insn (icode, 1, &op);
2386 return op.value;
2387}
2388
6d4d616a 2389/* Expand an expression EXP that calls built-in function FCODE,
c5dc215d
KT
2390 with result going to TARGET if that's convenient. IGNORE is true
2391 if the result of the builtin is ignored. */
342be7f7 2392rtx
c5dc215d
KT
2393aarch64_general_expand_builtin (unsigned int fcode, tree exp, rtx target,
2394 int ignore)
342be7f7 2395{
aa87aced 2396 int icode;
0d7e5fa6 2397 rtx op0;
aa87aced
KV
2398 tree arg0;
2399
2400 switch (fcode)
2401 {
2402 case AARCH64_BUILTIN_GET_FPCR:
f5e73de0
AC
2403 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrsi,
2404 SImode, target);
aa87aced 2405 case AARCH64_BUILTIN_SET_FPCR:
0d7e5fa6
AC
2406 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, SImode, exp);
2407 return target;
aa87aced 2408 case AARCH64_BUILTIN_GET_FPSR:
f5e73de0
AC
2409 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrsi,
2410 SImode, target);
aa87aced 2411 case AARCH64_BUILTIN_SET_FPSR:
0d7e5fa6
AC
2412 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, SImode, exp);
2413 return target;
2414 case AARCH64_BUILTIN_GET_FPCR64:
f5e73de0
AC
2415 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrdi,
2416 DImode, target);
0d7e5fa6
AC
2417 case AARCH64_BUILTIN_SET_FPCR64:
2418 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, DImode, exp);
2419 return target;
2420 case AARCH64_BUILTIN_GET_FPSR64:
f5e73de0
AC
2421 return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrdi,
2422 DImode, target);
0d7e5fa6
AC
2423 case AARCH64_BUILTIN_SET_FPSR64:
2424 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, DImode, exp);
aa87aced 2425 return target;
312492bd
JW
2426 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
2427 case AARCH64_PAUTH_BUILTIN_PACIA1716:
8fc16d72
ST
2428 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
2429 case AARCH64_PAUTH_BUILTIN_PACIB1716:
312492bd
JW
2430 case AARCH64_PAUTH_BUILTIN_XPACLRI:
2431 arg0 = CALL_EXPR_ARG (exp, 0);
2432 op0 = force_reg (Pmode, expand_normal (arg0));
2433
312492bd
JW
2434 if (fcode == AARCH64_PAUTH_BUILTIN_XPACLRI)
2435 {
2436 rtx lr = gen_rtx_REG (Pmode, R30_REGNUM);
2437 icode = CODE_FOR_xpaclri;
2438 emit_move_insn (lr, op0);
2439 emit_insn (GEN_FCN (icode) ());
92f0d3d0 2440 return lr;
312492bd
JW
2441 }
2442 else
2443 {
2444 tree arg1 = CALL_EXPR_ARG (exp, 1);
2445 rtx op1 = force_reg (Pmode, expand_normal (arg1));
8fc16d72
ST
2446 switch (fcode)
2447 {
2448 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
2449 icode = CODE_FOR_autia1716;
2450 break;
2451 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
2452 icode = CODE_FOR_autib1716;
2453 break;
2454 case AARCH64_PAUTH_BUILTIN_PACIA1716:
2455 icode = CODE_FOR_pacia1716;
2456 break;
2457 case AARCH64_PAUTH_BUILTIN_PACIB1716:
2458 icode = CODE_FOR_pacib1716;
2459 break;
2460 default:
2461 icode = 0;
2462 gcc_unreachable ();
2463 }
312492bd
JW
2464
2465 rtx x16_reg = gen_rtx_REG (Pmode, R16_REGNUM);
2466 rtx x17_reg = gen_rtx_REG (Pmode, R17_REGNUM);
2467 emit_move_insn (x17_reg, op0);
2468 emit_move_insn (x16_reg, op1);
2469 emit_insn (GEN_FCN (icode) ());
92f0d3d0 2470 return x17_reg;
312492bd
JW
2471 }
2472
e1d5d19e 2473 case AARCH64_JSCVT:
2c62952f
AC
2474 {
2475 expand_operand ops[2];
2476 create_output_operand (&ops[0], target, SImode);
2477 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2478 create_input_operand (&ops[1], op0, DFmode);
2479 expand_insn (CODE_FOR_aarch64_fjcvtzs, 2, ops);
2480 return ops[0].value;
2481 }
e1d5d19e 2482
9d63f43b
TC
2483 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V2SF:
2484 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V2SF:
2485 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V2SF:
2486 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V2SF:
2487 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V4HF:
2488 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V4HF:
2489 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V4HF:
2490 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V4HF:
2491 return aarch64_expand_fcmla_builtin (exp, target, fcode);
c5dc215d
KT
2492 case AARCH64_BUILTIN_RNG_RNDR:
2493 case AARCH64_BUILTIN_RNG_RNDRRS:
2494 return aarch64_expand_rng_builtin (exp, target, fcode, ignore);
aa87aced 2495 }
342be7f7 2496
5d357f26 2497 if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
342be7f7 2498 return aarch64_simd_expand_builtin (fcode, exp, target);
5d357f26
KT
2499 else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
2500 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7 2501
a6fc00da
BH
2502 if (fcode == AARCH64_BUILTIN_RSQRT_DF
2503 || fcode == AARCH64_BUILTIN_RSQRT_SF
2504 || fcode == AARCH64_BUILTIN_RSQRT_V2DF
2505 || fcode == AARCH64_BUILTIN_RSQRT_V2SF
2506 || fcode == AARCH64_BUILTIN_RSQRT_V4SF)
2507 return aarch64_expand_builtin_rsqrt (fcode, exp, target);
2508
89626179
SD
2509 if (fcode == AARCH64_TME_BUILTIN_TSTART
2510 || fcode == AARCH64_TME_BUILTIN_TCOMMIT
2511 || fcode == AARCH64_TME_BUILTIN_TTEST
2512 || fcode == AARCH64_TME_BUILTIN_TCANCEL)
2513 return aarch64_expand_builtin_tme (fcode, exp, target);
2514
fdcddba8
PW
2515 if (fcode == AARCH64_LS64_BUILTIN_LD64B
2516 || fcode == AARCH64_LS64_BUILTIN_ST64B
2517 || fcode == AARCH64_LS64_BUILTIN_ST64BV
2518 || fcode == AARCH64_LS64_BUILTIN_ST64BV0)
2519 return aarch64_expand_builtin_ls64 (fcode, exp, target);
2520
ef01e6bb
DZ
2521 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
2522 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
2523 return aarch64_expand_builtin_memtag (fcode, exp, target);
2524
d5a29419 2525 gcc_unreachable ();
342be7f7 2526}
42fc9a7f
JG
2527
/* Try to vectorize combined function FN (one of the CFN_* codes switched
   on below) for vector result type TYPE_OUT and vector argument type
   TYPE_IN.  Return the decl of a matching AArch64 SIMD builtin, or
   NULL_TREE if there is no suitable variant.  */
tree
aarch64_builtin_vectorized_function (unsigned int fn, tree type_out,
				     tree type_in)
{
  machine_mode in_mode, out_mode;

  /* Only vector-to-vector mappings are handled here.  */
  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (type_out);
  in_mode = TYPE_MODE (type_in);

  /* AARCH64_CHECK_BUILTIN_MODE is redefined before each group of cases
     below; this initial dummy definition only exists so that
     AARCH64_FIND_FRINT_VARIANT is well-formed at its point of
     definition.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
  /* Select the v2df, v4sf or v2sf unary builtin variant of N according
     to which mode check succeeds, or NULL_TREE if none does.  */
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
   : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
      ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
      : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	 : NULL_TREE)))
  switch (fn)
    {
      /* Float rounding operations: input and output element modes must
	 agree (V<C><N>Fmode on both sides).  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##N##Fmode && in_mode == V##C##N##Fmode)
    CASE_CFN_FLOOR:
      return AARCH64_FIND_FRINT_VARIANT (floor);
    CASE_CFN_CEIL:
      return AARCH64_FIND_FRINT_VARIANT (ceil);
    CASE_CFN_TRUNC:
      return AARCH64_FIND_FRINT_VARIANT (btrunc);
    CASE_CFN_ROUND:
      return AARCH64_FIND_FRINT_VARIANT (round);
    CASE_CFN_NEARBYINT:
      return AARCH64_FIND_FRINT_VARIANT (nearbyint);
    CASE_CFN_SQRT:
      return AARCH64_FIND_FRINT_VARIANT (sqrt);
      /* Bit-counting operations: output is a vector of SImode
	 elements.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##SImode && in_mode == V##C##N##Imode)
    CASE_CFN_CLZ:
      {
	if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	return NULL_TREE;
      }
    CASE_CFN_CTZ:
      {
	if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
	return NULL_TREE;
      }
      /* Float-to-integer conversions: integer output whose element
	 width matches the float input's element width.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##N##Imode && in_mode == V##C##N##Fmode)
    CASE_CFN_IFLOOR:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLFLOOR:
      {
	enum aarch64_builtins builtin;
	if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	else
	  return NULL_TREE;

	return aarch64_builtin_decls[builtin];
      }
    CASE_CFN_ICEIL:
    CASE_CFN_LCEIL:
    CASE_CFN_LLCEIL:
      {
	enum aarch64_builtins builtin;
	if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	else
	  return NULL_TREE;

	return aarch64_builtin_decls[builtin];
      }
    CASE_CFN_IROUND:
    CASE_CFN_LROUND:
    CASE_CFN_LLROUND:
      {
	enum aarch64_builtins builtin;
	if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	else
	  return NULL_TREE;

	return aarch64_builtin_decls[builtin];
      }
    default:
      return NULL_TREE;
    }

  return NULL_TREE;
}
0ac198d3 2642
a6fc00da
BH
2643/* Return builtin for reciprocal square root. */
2644
2645tree
6d4d616a 2646aarch64_general_builtin_rsqrt (unsigned int fn)
a6fc00da 2647{
ee62a5a6
RS
2648 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2df)
2649 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2DF];
2650 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2sf)
2651 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2SF];
2652 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv4sf)
2653 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V4SF];
a6fc00da
BH
2654 return NULL_TREE;
2655}
2656
03312cbd
AP
2657/* Return true if the lane check can be removed as there is no
2658 error going to be emitted. */
2659static bool
2660aarch64_fold_builtin_lane_check (tree arg0, tree arg1, tree arg2)
2661{
2662 if (TREE_CODE (arg0) != INTEGER_CST)
2663 return false;
2664 if (TREE_CODE (arg1) != INTEGER_CST)
2665 return false;
2666 if (TREE_CODE (arg2) != INTEGER_CST)
2667 return false;
2668
2669 auto totalsize = wi::to_widest (arg0);
2670 auto elementsize = wi::to_widest (arg1);
2671 if (totalsize == 0 || elementsize == 0)
2672 return false;
2673 auto lane = wi::to_widest (arg2);
2674 auto high = wi::udiv_trunc (totalsize, elementsize);
2675 return wi::ltu_p (lane, high);
2676}
2677
/* From here on, VAR1 expands to a "case" label, so the BUILTIN_* macro
   lists can be used directly inside the switch statements of the
   folding routines below.  */
#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 2681
/* Try to fold a call to the built-in function with subcode FCODE.  The
   function is passed the N_ARGS arguments in ARGS and it returns a value
   of type TYPE.  Return the new expression on success and NULL_TREE on
   failure.  */
tree
aarch64_general_fold_builtin (unsigned int fcode, tree type,
			      unsigned int n_args ATTRIBUTE_UNUSED, tree *args)
{
  switch (fcode)
    {
      /* Vector absolute value folds directly to ABS_EXPR.  */
      BUILTIN_VDQF (UNOP, abs, 2, ALL)
	return fold_build1 (ABS_EXPR, type, args[0]);
      /* Integer-to-float conversions fold to FLOAT_EXPR.  */
      VAR1 (UNOP, floatv2si, 2, ALL, v2sf)
      VAR1 (UNOP, floatv4si, 2, ALL, v4sf)
      VAR1 (UNOP, floatv2di, 2, ALL, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
    case AARCH64_SIMD_BUILTIN_LANE_CHECK:
      gcc_assert (n_args == 3);
      /* Drop the lane check entirely when the lane index is known to be
	 in range, since no diagnostic will ever be emitted for it.  */
      if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
	return void_node;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
2709
/* Map FCODE, the function code of a __builtin_aarch64_{ld1,st1}*
   load/store builtin, to the aarch64_simd_type describing the vector
   type it loads or stores.  Aborts on any other code.  */
enum aarch64_simd_type
get_mem_type_for_load_store (unsigned int fcode)
{
  switch (fcode)
    {
      /* Signed integer vectors.  */
      VAR1 (LOAD1, ld1, 0, LOAD, v8qi)
      VAR1 (STORE1, st1, 0, STORE, v8qi)
	return Int8x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v16qi)
      VAR1 (STORE1, st1, 0, STORE, v16qi)
	return Int8x16_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4hi)
      VAR1 (STORE1, st1, 0, STORE, v4hi)
	return Int16x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v8hi)
      VAR1 (STORE1, st1, 0, STORE, v8hi)
	return Int16x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2si)
      VAR1 (STORE1, st1, 0, STORE, v2si)
	return Int32x2_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4si)
      VAR1 (STORE1, st1, 0, STORE, v4si)
	return Int32x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2di)
      VAR1 (STORE1, st1, 0, STORE, v2di)
	return Int64x2_t;
      /* Unsigned integer vectors.  */
      VAR1 (LOAD1_U, ld1, 0, LOAD, v8qi)
      VAR1 (STORE1_U, st1, 0, STORE, v8qi)
	return Uint8x8_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v16qi)
      VAR1 (STORE1_U, st1, 0, STORE, v16qi)
	return Uint8x16_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v4hi)
      VAR1 (STORE1_U, st1, 0, STORE, v4hi)
	return Uint16x4_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v8hi)
      VAR1 (STORE1_U, st1, 0, STORE, v8hi)
	return Uint16x8_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v2si)
      VAR1 (STORE1_U, st1, 0, STORE, v2si)
	return Uint32x2_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v4si)
      VAR1 (STORE1_U, st1, 0, STORE, v4si)
	return Uint32x4_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v2di)
      VAR1 (STORE1_U, st1, 0, STORE, v2di)
	return Uint64x2_t;
      /* Polynomial vectors.  */
      VAR1 (LOAD1_P, ld1, 0, LOAD, v8qi)
      VAR1 (STORE1_P, st1, 0, STORE, v8qi)
	return Poly8x8_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v16qi)
      VAR1 (STORE1_P, st1, 0, STORE, v16qi)
	return Poly8x16_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v4hi)
      VAR1 (STORE1_P, st1, 0, STORE, v4hi)
	return Poly16x4_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v8hi)
      VAR1 (STORE1_P, st1, 0, STORE, v8hi)
	return Poly16x8_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v2di)
      VAR1 (STORE1_P, st1, 0, STORE, v2di)
	return Poly64x2_t;
      /* Floating-point and brain-float vectors.  */
      VAR1 (LOAD1, ld1, 0, LOAD, v4hf)
      VAR1 (STORE1, st1, 0, STORE, v4hf)
	return Float16x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v8hf)
      VAR1 (STORE1, st1, 0, STORE, v8hf)
	return Float16x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4bf)
      VAR1 (STORE1, st1, 0, STORE, v4bf)
	return Bfloat16x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v8bf)
      VAR1 (STORE1, st1, 0, STORE, v8bf)
	return Bfloat16x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2sf)
      VAR1 (STORE1, st1, 0, STORE, v2sf)
	return Float32x2_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4sf)
      VAR1 (STORE1, st1, 0, STORE, v4sf)
	return Float32x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2df)
      VAR1 (STORE1, st1, 0, STORE, v2df)
	return Float64x2_t;
    default:
      gcc_unreachable ();
      break;
    }
}
2798
/* Try to fold STMT, given that it's a call to the built-in function with
   subcode FCODE.  Return the new statement on success and null on
   failure.  */
gimple *
aarch64_general_gimple_fold_builtin (unsigned int fcode, gcall *stmt,
				     gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  gimple *new_stmt = NULL;
  unsigned nargs = gimple_call_num_args (stmt);
  /* Point at the call's argument vector; error_mark_node stands in as a
     harmless placeholder for zero-argument calls.  */
  tree *args = (nargs > 0
		? gimple_call_arg_ptr (stmt, 0)
		: &error_mark_node);

  /* We use gimple's IFN_REDUC_(PLUS|MIN|MAX)s for float, signed int
     and unsigned int; it will distinguish according to the types of
     the arguments to the __builtin.  */
  switch (fcode)
    {
      BUILTIN_VALL (UNOP, reduc_plus_scal_, 10, ALL)
	new_stmt = gimple_build_call_internal (IFN_REDUC_PLUS,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;

      /* Lower sqrt builtins to gimple/internal function sqrt.  */
      BUILTIN_VHSDF_DF (UNOP, sqrt, 2, FP)
	new_stmt = gimple_build_call_internal (IFN_SQRT,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;

      /* Lower store and load neon builtins to gimple.  */
      BUILTIN_VALL_F16 (LOAD1, ld1, 0, LOAD)
      BUILTIN_VDQ_I (LOAD1_U, ld1, 0, LOAD)
      BUILTIN_VALLP_NO_DI (LOAD1_P, ld1, 0, LOAD)
	/* Only folded for little endian; the big-endian case is left to
	   the expander.  */
	if (!BYTES_BIG_ENDIAN)
	  {
	    enum aarch64_simd_type mem_type
	      = get_mem_type_for_load_store(fcode);
	    aarch64_simd_type_info simd_type
	      = aarch64_simd_types[mem_type];
	    tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
							     VOIDmode, true);
	    tree zero = build_zero_cst (elt_ptr_type);
	    /* Use element type alignment.  */
	    tree access_type
	      = build_aligned_type (simd_type.itype,
				    TYPE_ALIGN (simd_type.eltype));
	    new_stmt
	      = gimple_build_assign (gimple_get_lhs (stmt),
				     fold_build2 (MEM_REF,
						  access_type,
						  args[0], zero));
	  }
	break;

      BUILTIN_VALL_F16 (STORE1, st1, 0, STORE)
      BUILTIN_VDQ_I (STORE1_U, st1, 0, STORE)
      BUILTIN_VALLP_NO_DI (STORE1_P, st1, 0, STORE)
	if (!BYTES_BIG_ENDIAN)
	  {
	    enum aarch64_simd_type mem_type
	      = get_mem_type_for_load_store(fcode);
	    aarch64_simd_type_info simd_type
	      = aarch64_simd_types[mem_type];
	    tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
							     VOIDmode, true);
	    tree zero = build_zero_cst (elt_ptr_type);
	    /* Use element type alignment.  */
	    tree access_type
	      = build_aligned_type (simd_type.itype,
				    TYPE_ALIGN (simd_type.eltype));
	    new_stmt
	      = gimple_build_assign (fold_build2 (MEM_REF, access_type,
						  args[0], zero),
				     args[1]);
	  }
	break;

      BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10, ALL)
      BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10, ALL)
	new_stmt = gimple_build_call_internal (IFN_REDUC_MAX,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;
      BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10, ALL)
      BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10, ALL)
	new_stmt = gimple_build_call_internal (IFN_REDUC_MIN,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;
      /* Immediate shift left: fold to LSHIFT_EXPR when the shift amount
	 is a constant known to be in range.  */
      BUILTIN_VSDQ_I_DI (BINOP, ashl, 3, NONE)
	if (TREE_CODE (args[1]) == INTEGER_CST
	    && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
	  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					  LSHIFT_EXPR, args[0], args[1]);
	break;
      BUILTIN_VSDQ_I_DI (BINOP, sshl, 0, NONE)
      BUILTIN_VSDQ_I_DI (BINOP_UUS, ushl, 0, NONE)
	{
	  tree cst = args[1];
	  tree ctype = TREE_TYPE (cst);
	  /* Left shifts can be both scalar or vector, e.g. uint64x1_t is
	     treated as a scalar type not a vector one.  */
	  if ((cst = uniform_integer_cst_p (cst)) != NULL_TREE)
	    {
	      wide_int wcst = wi::to_wide (cst);
	      tree unit_ty = TREE_TYPE (cst);

	      wide_int abs_cst = wi::abs (wcst);
	      /* Leave out-of-range shift amounts to the builtin's own
		 semantics.  */
	      if (wi::geu_p (abs_cst, element_precision (args[0])))
		break;

	      /* A negative amount means a shift right by its absolute
		 value.  */
	      if (wi::neg_p (wcst, TYPE_SIGN (ctype)))
		{
		  tree final_cst;
		  final_cst = wide_int_to_tree (unit_ty, abs_cst);
		  if (TREE_CODE (cst) != INTEGER_CST)
		    final_cst = build_uniform_cst (ctype, final_cst);

		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						  RSHIFT_EXPR, args[0],
						  final_cst);
		}
	      else
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						LSHIFT_EXPR, args[0], args[1]);
	    }
	}
	break;
      /* Immediate shifts right, likewise folded only for in-range
	 constant amounts.  */
      BUILTIN_VDQ_I (SHIFTIMM, ashr, 3, NONE)
      VAR1 (SHIFTIMM, ashr_simd, 0, NONE, di)
      BUILTIN_VDQ_I (USHIFTIMM, lshr, 3, NONE)
      VAR1 (USHIFTIMM, lshr_simd, 0, NONE, di)
	if (TREE_CODE (args[1]) == INTEGER_CST
	    && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
	  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					  RSHIFT_EXPR, args[0], args[1]);
	break;
      BUILTIN_GPF (BINOP, fmulx, 0, ALL)
	{
	  gcc_assert (nargs == 2);
	  bool a0_cst_p = TREE_CODE (args[0]) == REAL_CST;
	  bool a1_cst_p = TREE_CODE (args[1]) == REAL_CST;
	  if (a0_cst_p || a1_cst_p)
	    {
	      if (a0_cst_p && a1_cst_p)
		{
		  tree t0 = TREE_TYPE (args[0]);
		  real_value a0 = (TREE_REAL_CST (args[0]));
		  real_value a1 = (TREE_REAL_CST (args[1]));
		  /* Normalize so that a zero operand, if any, is in
		     A0.  */
		  if (real_equal (&a1, &dconst0))
		    std::swap (a0, a1);
		  /* According to real_equal (), +0 equals -0.  */
		  if (real_equal (&a0, &dconst0) && real_isinf (&a1))
		    {
		      /* 0 * inf yields 2.0 with the XOR of the operand
			 signs.  */
		      real_value res = dconst2;
		      res.sign = a0.sign ^ a1.sign;
		      new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						      REAL_CST,
						      build_real (t0, res));
		    }
		  else
		    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						    MULT_EXPR,
						    args[0], args[1]);
		}
	      else /* a0_cst_p ^ a1_cst_p.  */
		{
		  real_value const_part = a0_cst_p
		    ? TREE_REAL_CST (args[0]) : TREE_REAL_CST (args[1]);
		  /* With a constant operand that is neither zero nor
		     infinity, fmulx behaves exactly like an ordinary
		     multiplication.  */
		  if (!real_equal (&const_part, &dconst0)
		      && !real_isinf (&const_part))
		    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						    MULT_EXPR, args[0],
						    args[1]);
		}
	    }
	  if (new_stmt)
	    {
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	    }
	  break;
	}
    case AARCH64_SIMD_BUILTIN_LANE_CHECK:
      /* A lane check proven in range can never raise a diagnostic;
	 delete the call outright.  */
      if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	  new_stmt = gimple_build_nop ();
	}
      break;
    default:
      break;
    }
  return new_stmt;
}
2997
/* Build the three expression sequences used around code that must run
   with floating-point exceptions held: *HOLD saves FPCR/FPSR and masks
   all exception traps and flags, *CLEAR re-clears the exception flags,
   and *UPDATE restores the saved FPSR and re-raises (via
   __atomic_feraiseexcept) any exceptions that occurred meanwhile.  */
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  /* FPSR flag bit layout; the trap-enable bits in the FPCR are the same
     values shifted left by AARCH64_FE_EXCEPT_SHIFT.  */
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalence of :
       unsigned int fenv_cr;
       fenv_cr = __builtin_aarch64_get_fpcr ();

       unsigned int fenv_sr;
       fenv_sr = __builtin_aarch64_get_fpsr ();

       Now set all exceptions to non-stop
       unsigned int mask_cr
	 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
       unsigned int masked_cr;
       masked_cr = fenv_cr & mask_cr;

       And clear all exception flags
       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
       unsigned int masked_sr;
       masked_sr = fenv_sr & mask_sr;

       __builtin_aarch64_set_fpcr (masked_cr);
       __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var_raw (unsigned_type_node);
  fenv_sr = create_tmp_var_raw (unsigned_type_node);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  /* TARGET_EXPRs capture the read of each register into its temporary
     exactly once.  */
  ld_fenv_cr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0),
		       NULL_TREE, NULL_TREE);
  ld_fenv_sr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0),
		       NULL_TREE, NULL_TREE);

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
     __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of :
       unsigned int new_fenv_var;
       new_fenv_var = __builtin_aarch64_get_fpsr ();

       __builtin_aarch64_set_fpsr (fenv_sr);

       __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var_raw (unsigned_type_node);
  reload_fenv = build4 (TARGET_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0),
			NULL_TREE, NULL_TREE);
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}
3099
/* Resolve overloaded MEMTAG built-in functions.  */

/* Extract the target-specific subcode from builtin decl F.  */
#define AARCH64_BUILTIN_SUBCODE(F) \
  (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)

/* Retype the overloaded MEMTAG builtin FNDECL to match the pointer
   types of the actual arguments in PASS_PARAMS (a vec<tree, va_gc> *).
   On any mismatch the builtin's registered generic type is restored so
   the front end diagnoses the call normally.  Always returns NULL_TREE:
   the call itself is never replaced, only FNDECL's type is adjusted.
   LOC is the call's location for diagnostics.  */
static tree
aarch64_resolve_overloaded_memtag (location_t loc,
				   tree fndecl, void *pass_params)
{
  vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
  unsigned param_num = params ? params->length() : 0;
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
  /* The builtin's default prototype, recorded at registration time.  */
  tree inittype = aarch64_memtag_builtin_data[
		    fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
  unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;

  if (param_num != arg_num)
    {
      /* Wrong arity: restore the default type and let the front end
	 report the error.  */
      TREE_TYPE (fndecl) = inittype;
      return NULL_TREE;
    }
  tree retype = NULL;

  if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
    {
      /* SUBP takes two pointers and returns their tag-aware
	 difference.  */
      tree t0 = TREE_TYPE ((*params)[0]);
      tree t1 = TREE_TYPE ((*params)[1]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	t0 = ptr_type_node;
      if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
	t1 = ptr_type_node;

      /* NOTE(review): the warning option index is a literal 1 here —
	 confirm this selects the intended warning class.  */
      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int)tree_to_shwi (DECL_SIZE ((*params)[0])));

      if (TYPE_MODE (t1) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
		    (int)tree_to_shwi (DECL_SIZE ((*params)[1])));

      retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
    }
  else
    {
      tree t0 = TREE_TYPE ((*params)[0]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	{
	  TREE_TYPE (fndecl) = inittype;
	  return NULL_TREE;
	}

      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int)tree_to_shwi (DECL_SIZE ((*params)[0])));

      /* Propagate the first argument's pointer type into the builtin's
	 signature, per builtin.  */
      switch (fcode)
	{
	case AARCH64_MEMTAG_BUILTIN_IRG:
	  retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GMI:
	  retype = build_function_type_list (uint64_type_node, t0,
					     uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_INC_TAG:
	  retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_SET_TAG:
	  retype = build_function_type_list (void_type_node, t0, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GET_TAG:
	  retype = build_function_type_list (t0, t0, NULL);
	  break;
	default:
	  return NULL_TREE;
	}
    }

  if (!retype || retype == error_mark_node)
    TREE_TYPE (fndecl) = inittype;
  else
    TREE_TYPE (fndecl) = retype;

  return NULL_TREE;
}
3186
e53b6e56 3187/* Called at aarch64_resolve_overloaded_builtin in aarch64-c.cc. */
ef01e6bb
DZ
3188tree
3189aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
3190 void *pass_params)
3191{
3192 unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);
3193
3194 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
3195 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
3196 return aarch64_resolve_overloaded_memtag(loc, function, pass_params);
3197
3198 return NULL_TREE;
3199}
aa87aced 3200
/* Tidy up: drop the helper macros used to stamp out the builtin tables
   above so they cannot be picked up accidentally by later code.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11

/* Generated garbage-collector root tables for this file.  */
#include "gt-aarch64-builtins.h"