]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/aarch64/aarch64-builtins.cc
OpenMP, C++: Add template support for the has_device_addr clause.
[thirdparty/gcc.git] / gcc / config / aarch64 / aarch64-builtins.cc
CommitLineData
43e9d192 1/* Builtins' description for AArch64 SIMD architecture.
7adcbafe 2 Copyright (C) 2011-2022 Free Software Foundation, Inc.
43e9d192
IB
3 Contributed by ARM Ltd.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
8fcc61f8
RS
21#define IN_TARGET_CODE 1
22
43e9d192
IB
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
26#include "tm.h"
c7131fb2 27#include "function.h"
c7131fb2 28#include "basic-block.h"
e11c4407 29#include "rtl.h"
c7131fb2
AM
30#include "tree.h"
31#include "gimple.h"
03312cbd 32#include "ssa.h"
4d0cdd0c 33#include "memmodel.h"
e11c4407
AM
34#include "tm_p.h"
35#include "expmed.h"
36#include "optabs.h"
37#include "recog.h"
38#include "diagnostic-core.h"
40e23961 39#include "fold-const.h"
d8a2d370 40#include "stor-layout.h"
36566b39 41#include "explow.h"
43e9d192 42#include "expr.h"
43e9d192 43#include "langhooks.h"
5be5c238 44#include "gimple-iterator.h"
10766209 45#include "case-cfn-macros.h"
9d63f43b 46#include "emit-rtl.h"
31427b97
RS
47#include "stringpool.h"
48#include "attribs.h"
ad44c6a5 49#include "gimple-fold.h"
43e9d192 50
/* Map the lower-case mode suffixes used in aarch64-simd-builtins.def
   (via the UP macro below) onto the corresponding machine_mode
   enumerators.  The vNxM_UP entries name the tuple modes used for
   multi-vector (LD2/LD3/LD4-style) types.  */
#define v8qi_UP E_V8QImode
#define v8di_UP E_V8DImode
#define v4hi_UP E_V4HImode
#define v4hf_UP E_V4HFmode
#define v2si_UP E_V2SImode
#define v2sf_UP E_V2SFmode
#define v1df_UP E_V1DFmode
#define di_UP E_DImode
#define df_UP E_DFmode
#define v16qi_UP E_V16QImode
#define v8hi_UP E_V8HImode
#define v8hf_UP E_V8HFmode
#define v4si_UP E_V4SImode
#define v4sf_UP E_V4SFmode
#define v2di_UP E_V2DImode
#define v2df_UP E_V2DFmode
#define ti_UP E_TImode
#define oi_UP E_OImode
#define ci_UP E_CImode
#define xi_UP E_XImode
#define si_UP E_SImode
#define sf_UP E_SFmode
#define hi_UP E_HImode
#define hf_UP E_HFmode
#define qi_UP E_QImode
#define bf_UP E_BFmode
#define v4bf_UP E_V4BFmode
#define v8bf_UP E_V8BFmode
#define v2x8qi_UP E_V2x8QImode
#define v2x4hi_UP E_V2x4HImode
#define v2x4hf_UP E_V2x4HFmode
#define v2x4bf_UP E_V2x4BFmode
#define v2x2si_UP E_V2x2SImode
#define v2x2sf_UP E_V2x2SFmode
#define v2x1di_UP E_V2x1DImode
#define v2x1df_UP E_V2x1DFmode
#define v2x16qi_UP E_V2x16QImode
#define v2x8hi_UP E_V2x8HImode
#define v2x8hf_UP E_V2x8HFmode
#define v2x8bf_UP E_V2x8BFmode
#define v2x4si_UP E_V2x4SImode
#define v2x4sf_UP E_V2x4SFmode
#define v2x2di_UP E_V2x2DImode
#define v2x2df_UP E_V2x2DFmode
#define v3x8qi_UP E_V3x8QImode
#define v3x4hi_UP E_V3x4HImode
#define v3x4hf_UP E_V3x4HFmode
#define v3x4bf_UP E_V3x4BFmode
#define v3x2si_UP E_V3x2SImode
#define v3x2sf_UP E_V3x2SFmode
#define v3x1di_UP E_V3x1DImode
#define v3x1df_UP E_V3x1DFmode
#define v3x16qi_UP E_V3x16QImode
#define v3x8hi_UP E_V3x8HImode
#define v3x8hf_UP E_V3x8HFmode
#define v3x8bf_UP E_V3x8BFmode
#define v3x4si_UP E_V3x4SImode
#define v3x4sf_UP E_V3x4SFmode
#define v3x2di_UP E_V3x2DImode
#define v3x2df_UP E_V3x2DFmode
#define v4x8qi_UP E_V4x8QImode
#define v4x4hi_UP E_V4x4HImode
#define v4x4hf_UP E_V4x4HFmode
#define v4x4bf_UP E_V4x4BFmode
#define v4x2si_UP E_V4x2SImode
#define v4x2sf_UP E_V4x2SFmode
#define v4x1di_UP E_V4x1DImode
#define v4x1df_UP E_V4x1DFmode
#define v4x16qi_UP E_V4x16QImode
#define v4x8hi_UP E_V4x8HImode
#define v4x8hf_UP E_V4x8HFmode
#define v4x8bf_UP E_V4x8BFmode
#define v4x4si_UP E_V4x4SImode
#define v4x4sf_UP E_V4x4SFmode
#define v4x2di_UP E_V4x2DImode
#define v4x2df_UP E_V4x2DFmode
/* Paste a mode suffix onto _UP to select one of the macros above.  */
#define UP(X) X##_UP

/* Largest number of operands (including the result) that a builtin
   description below may have; the qualifier arrays are all sized to
   this.  */
#define SIMD_MAX_BUILTIN_ARGS 5

/* Bitmask qualifiers describing how each operand of a builtin is typed.
   Element 0 of a qualifier array describes the return value; the
   remaining elements describe the arguments in order.  */
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for bigendian.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400,
  /* Lane indices selected in pairs. - must be in range, and flipped for
     bigendian.  */
  qualifier_lane_pair_index = 0x800,
  /* Lane indices selected in quadtuplets. - must be in range, and flipped for
     bigendian.  */
  qualifier_lane_quadtup_index = 0x1000,
};
43e9d192 170
/* Flags that describe what a function might do.  These feed the function
   attributes (e.g. const/pure-ness) attached to each builtin.  */
const unsigned int FLAG_NONE = 0U;
const unsigned int FLAG_READ_FPCR = 1U << 0;
const unsigned int FLAG_RAISE_FP_EXCEPTIONS = 1U << 1;
const unsigned int FLAG_READ_MEMORY = 1U << 2;
const unsigned int FLAG_PREFETCH_MEMORY = 1U << 3;
const unsigned int FLAG_WRITE_MEMORY = 1U << 4;

/* Not all FP intrinsics raise FP exceptions or read FPCR register,
   use this flag to suppress it.  */
const unsigned int FLAG_AUTO_FP = 1U << 5;

/* Common combinations of the flags above.  */
const unsigned int FLAG_FP = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS;
const unsigned int FLAG_ALL = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS
  | FLAG_READ_MEMORY | FLAG_PREFETCH_MEMORY | FLAG_WRITE_MEMORY;
const unsigned int FLAG_STORE = FLAG_WRITE_MEMORY | FLAG_AUTO_FP;
const unsigned int FLAG_LOAD = FLAG_READ_MEMORY | FLAG_AUTO_FP;
bf592b2f 188
/* Static description of one SIMD builtin, instantiated once per entry in
   aarch64-simd-builtins.def via the VAR* macros below.  */
typedef struct
{
  /* Builtin function name (mode suffix included).  */
  const char *name;
  /* Machine mode the builtin operates in.  */
  machine_mode mode;
  /* Instruction pattern used to expand the builtin.  */
  const enum insn_code code;
  /* Function code; filled in at initialisation time (starts as 0).  */
  unsigned int fcode;
  /* Operand qualifier array (see aarch64_type_qualifiers).  */
  enum aarch64_type_qualifiers *qualifiers;
  /* FLAG_* bits describing side effects.  */
  unsigned int flags;
} aarch64_simd_builtin_datum;
198
/* Qualifier tables for unary and binary builtin signatures.  Element 0
   is the return value, the rest are the arguments.  The suffix letters
   in the TYPES_* names encode signedness: S = signed, U = unsigned,
   P = polynomial.  */
static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_UNOPUS (aarch64_types_unopus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_BINOP_USS (aarch64_types_binop_uss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_unsigned };
#define TYPES_BINOP_PPU (aarch64_types_binop_ppu_qualifiers)

7baa225d 239
/* Qualifier tables for three- and four-argument builtin signatures.
   *_LANE variants take a lane index as their final argument.  */
static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_TERNOPU_LANE (aarch64_types_ternopu_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_sssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_TERNOP_SSSU (aarch64_types_ternop_sssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
#define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_suss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_TERNOP_SUSS (aarch64_types_ternop_suss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_pppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_unsigned };
#define TYPES_TERNOP_PPPU (aarch64_types_binop_pppu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_pair_index };
#define TYPES_QUADOP_LANE_PAIR (aarch64_types_quadop_lane_pair_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_index };
#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_QUADOPU_LANE (aarch64_types_quadopu_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopssus_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSUS_LANE_QUADTUP \
	(aarch64_types_quadopssus_lane_quadtup_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopsssu_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSSU_LANE_QUADTUP \
	(aarch64_types_quadopsssu_lane_quadtup_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_QUADOPUI (aarch64_types_quadopu_imm_qualifiers)

314
/* Qualifier tables for builtins whose last argument must be a
   compile-time immediate (shift amounts, lane get/set indices, ...).  */
static enum aarch64_type_qualifiers
aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_fcvt_from_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate };
#define TYPES_FCVTIMM_SUS (aarch64_types_fcvt_from_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)
#define TYPES_USHIFT2IMM (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift2_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFT2IMM_UUSS (aarch64_types_shift2_to_unsigned_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_s_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate};
#define TYPES_SETREG (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFT2IMM (aarch64_types_ternop_s_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_p_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_immediate};
#define TYPES_SHIFTINSERTP (aarch64_types_ternop_p_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)

356
/* Qualifier tables for load builtins: the result is a vector (or vector
   tuple) and the first operand is a const pointer whose pointee type is
   derived from the builtin's mode (qualifier_const_pointer_map_mode).  */
static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_load1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1_U (aarch64_types_load1_u_qualifiers)
#define TYPES_LOADSTRUCT_U (aarch64_types_load1_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_load1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1_P (aarch64_types_load1_p_qualifiers)
#define TYPES_LOADSTRUCT_P (aarch64_types_load1_p_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_const_pointer_map_mode,
      qualifier_unsigned, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE_U (aarch64_types_loadstruct_lane_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_const_pointer_map_mode,
      qualifier_poly, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE_P (aarch64_types_loadstruct_lane_p_qualifiers)

/* Bitwise-select (BSL) signatures: the selector is always unsigned.  */
static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_unsigned };
#define TYPES_STORE1_U (aarch64_types_store1_u_qualifiers)
#define TYPES_STORESTRUCT_U (aarch64_types_store1_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_poly };
#define TYPES_STORE1_P (aarch64_types_store1_p_qualifiers)
#define TYPES_STORESTRUCT_P (aarch64_types_store1_p_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_unsigned, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE_U (aarch64_types_storestruct_lane_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_poly, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE_P (aarch64_types_storestruct_lane_p_qualifiers)

b5828b4b 441
/* CF<MAP> macros map a builtin name and mode suffix onto the insn_code
   of the pattern that implements it; the MAP digit selects the naming
   scheme used by the pattern (aarch64_ prefix, numeric suffix, ...).  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

/* VARn expands to n aarch64_simd_builtin_datum initialisers, one per
   mode suffix A..P; aarch64-simd-builtins.def is written in terms of
   these.  VAR1 is redefined below to generate the enum values too.  */
#define VAR1(T, N, MAP, FLAG, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T, FLAG_##FLAG},
#define VAR2(T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, A) \
  VAR1 (T, N, MAP, FLAG, B)
#define VAR3(T, N, MAP, FLAG, A, B, C) \
  VAR2 (T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, C)
#define VAR4(T, N, MAP, FLAG, A, B, C, D) \
  VAR3 (T, N, MAP, FLAG, A, B, C) \
  VAR1 (T, N, MAP, FLAG, D)
#define VAR5(T, N, MAP, FLAG, A, B, C, D, E) \
  VAR4 (T, N, MAP, FLAG, A, B, C, D) \
  VAR1 (T, N, MAP, FLAG, E)
#define VAR6(T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, FLAG, A, B, C, D, E) \
  VAR1 (T, N, MAP, FLAG, F)
#define VAR7(T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, FLAG, G)
#define VAR8(T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, FLAG, H)
#define VAR9(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, FLAG, I)
#define VAR10(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, FLAG, J)
#define VAR11(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, FLAG, K)
#define VAR12(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, FLAG, L)
#define VAR13(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR12 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR1 (T, N, MAP, FLAG, M)
#define VAR14(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR13 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR1 (T, X, MAP, FLAG, N)
#define VAR15(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR14 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR1 (T, X, MAP, FLAG, O)
#define VAR16(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) \
  VAR15 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR1 (T, X, MAP, FLAG, P)

342be7f7 496
#include "aarch64-builtin-iterators.h"

/* Table of all SIMD builtin descriptions, one entry per VAR1 expansion
   in aarch64-simd-builtins.def (using the initialiser form of VAR1
   defined above).  */
static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};

502
/* There's only 8 CRC32 builtins.  Probably not worth their own .def file.
   Each entry supplies the builtin name and the scalar mode it operates
   on; the list is expanded several times below with different
   CRC32_BUILTIN definitions.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)

/* The next 8 FCMLA instrinsics require some special handling compared the
   normal simd intrinsics.  Arguments are rotation (degrees), mode suffix,
   pattern base name, mode, and whether the pattern takes a lane
   argument.  */
#define AARCH64_SIMD_FCMLA_LANEQ_BUILTINS \
  FCMLA_LANEQ_BUILTIN (0, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (90, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (180, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (270, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (0, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (90, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (180, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (270, v4hf, fcmla_laneq, V4HF, true) \

5d357f26
KT
/* Static description of one CRC32 builtin.  */
typedef struct
{
  const char *name;
  /* Scalar mode the builtin operates on.  */
  machine_mode mode;
  /* Instruction pattern used to expand the builtin.  */
  const enum insn_code icode;
  unsigned int fcode;
} aarch64_crc_builtin_datum;

/* Hold information about how to expand the FCMLA_LANEQ builtins.  */
typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
  /* True if the pattern takes a lane argument.  */
  bool lane;
} aarch64_fcmla_laneq_builtin_datum;

543
/* Expand the CRC32 and FCMLA lists into enum values for the
   aarch64_builtins enumeration below.  */
#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M,

/* Redefine VAR1 so that including aarch64-simd-builtins.def inside the
   enum generates one enumerator per SIMD builtin.  */
#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

/* Function codes for every builtin this file registers.  The order here
   is significant: ranges such as AARCH64_SIMD_BUILTIN_BASE ..
   AARCH64_SIMD_BUILTIN_MAX are used to classify codes at expand time.  */
enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_BUILTIN_GET_FPCR64,
  AARCH64_BUILTIN_SET_FPCR64,
  AARCH64_BUILTIN_GET_FPSR64,
  AARCH64_BUILTIN_SET_FPSR64,

  AARCH64_BUILTIN_RSQRT_DF,
  AARCH64_BUILTIN_RSQRT_SF,
  AARCH64_BUILTIN_RSQRT_V2DF,
  AARCH64_BUILTIN_RSQRT_V2SF,
  AARCH64_BUILTIN_RSQRT_V4SF,
  AARCH64_SIMD_BUILTIN_BASE,
  AARCH64_SIMD_BUILTIN_LANE_CHECK,
#include "aarch64-simd-builtins.def"
  /* The first enum element which is based on an insn_data pattern.  */
  AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
			      + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  /* ARMv8.3-A Pointer Authentication Builtins.  */
  AARCH64_PAUTH_BUILTIN_AUTIA1716,
  AARCH64_PAUTH_BUILTIN_PACIA1716,
  AARCH64_PAUTH_BUILTIN_AUTIB1716,
  AARCH64_PAUTH_BUILTIN_PACIB1716,
  AARCH64_PAUTH_BUILTIN_XPACLRI,
  /* Special cased Armv8.3-A Complex FMA by Lane quad Builtins.  */
  AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE,
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
  /* Builtin for Arm8.3-a Javascript conversion instruction.  */
  AARCH64_JSCVT,
  /* TME builtins.  */
  AARCH64_TME_BUILTIN_TSTART,
  AARCH64_TME_BUILTIN_TCOMMIT,
  AARCH64_TME_BUILTIN_TTEST,
  AARCH64_TME_BUILTIN_TCANCEL,
  /* Armv8.5-a RNG instruction builtins.  */
  AARCH64_BUILTIN_RNG_RNDR,
  AARCH64_BUILTIN_RNG_RNDRRS,
  /* MEMTAG builtins.  */
  AARCH64_MEMTAG_BUILTIN_START,
  AARCH64_MEMTAG_BUILTIN_IRG,
  AARCH64_MEMTAG_BUILTIN_GMI,
  AARCH64_MEMTAG_BUILTIN_SUBP,
  AARCH64_MEMTAG_BUILTIN_INC_TAG,
  AARCH64_MEMTAG_BUILTIN_SET_TAG,
  AARCH64_MEMTAG_BUILTIN_GET_TAG,
  AARCH64_MEMTAG_BUILTIN_END,
  /* LS64 builtins.  */
  AARCH64_LS64_BUILTIN_LD64B,
  AARCH64_LS64_BUILTIN_ST64B,
  AARCH64_LS64_BUILTIN_ST64BV,
  AARCH64_LS64_BUILTIN_ST64BV0,
  AARCH64_BUILTIN_MAX
};

618
/* Re-expand the CRC32 list, this time into full table entries.  */
#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, E_##M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};


#undef FCMLA_LANEQ_BUILTIN
#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  {"__builtin_aarch64_fcmla_laneq"#I#N, E_##M##mode, CODE_FOR_aarch64_##X##I##N, \
   AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M, T},

/* This structure contains how to manage the mapping from the builtin to the
   instruction to generate in the backend and how to invoke the instruction.  */
static aarch64_fcmla_laneq_builtin_datum aarch64_fcmla_lane_builtin_data[] = {
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
};

#undef CRC32_BUILTIN

/* FUNCTION_DECL nodes for every builtin, indexed by function code; GC
   roots via GTY.  */
static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

642
#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types are implementation defined.  The list is
   NULL-terminated so it can be walked without a separate count.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_hf",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  "__builtin_aarch64_simd_bf",
  NULL
};

b5828b4b 673
f9d53c27
TB
/* Enumerate every AdvSIMD vector type listed in
   aarch64-simd-builtin-types.def; ARM_NEON_H_TYPES_LAST doubles as the
   count and is used to size the tables below.  */
#define ENTRY(E, M, Q, G) E,
enum aarch64_simd_type
{
#include "aarch64-simd-builtin-types.def"
  ARM_NEON_H_TYPES_LAST
};
#undef ENTRY
b5828b4b 681
/* Per-type record for each AdvSIMD vector type; one row per entry in
   aarch64-simd-builtin-types.def.  */
struct GTY(()) aarch64_simd_type_info
{
  enum aarch64_simd_type type;

  /* Internal type name.  */
  const char *name;

  /* Internal type name(mangled).  The mangled names conform to the
     AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
     Appendix A).  To qualify for emission with the mangled names defined in
     that document, a vector type must not only be of the correct mode but also
     be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t); these
     types are registered by aarch64_init_simd_builtin_types ().  In other
     words, vector types defined in other ways e.g. via vector_size attribute
     will get default mangled names.  */
  const char *mangle;

  /* Internal type.  */
  tree itype;

  /* Element type.  */
  tree eltype;

  /* Machine mode the internal type maps to.  */
  enum machine_mode mode;

  /* Qualifiers.  */
  enum aarch64_type_qualifiers q;
};

/* The itype and eltype fields start NULL and are filled in during
   builtin-type initialisation.  */
#define ENTRY(E, M, Q, G) \
  {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, E_##M##mode, qualifier_##Q},
static GTY(()) struct aarch64_simd_type_info aarch64_simd_types [] = {
#include "aarch64-simd-builtin-types.def"
};
#undef ENTRY
718
/* Modes and type trees of the user-visible AdvSIMD tuple types (x2/x3/x4),
   indexed by the base vector type and by tuple size minus two.  Filled in
   by register_tuple_type.  */
static machine_mode aarch64_simd_tuple_modes[ARM_NEON_H_TYPES_LAST][3];
static GTY(()) tree aarch64_simd_tuple_types[ARM_NEON_H_TYPES_LAST][3];

/* Opaque signed integer types for the OI/CI/XI register-tuple modes,
   exposed as __builtin_aarch64_simd_{oi,ci,xi}.  Created by
   aarch64_init_simd_builtin_types.  */
static GTY(()) tree aarch64_simd_intOI_type_node = NULL_TREE;
static GTY(()) tree aarch64_simd_intCI_type_node = NULL_TREE;
static GTY(()) tree aarch64_simd_intXI_type_node = NULL_TREE;
f9d53c27 725
/* The user-visible __fp16 type, and a pointer to that type.  Used
   across the back-end.  */
tree aarch64_fp16_type_node = NULL_TREE;
tree aarch64_fp16_ptr_type_node = NULL_TREE;

/* Back-end node type for brain float (bfloat) types, and a pointer to
   that type.  */
tree aarch64_bf16_type_node = NULL_TREE;
tree aarch64_bf16_ptr_type_node = NULL_TREE;
734
6d4d616a 735/* Wrapper around add_builtin_function. NAME is the name of the built-in
072a8b8f 736 function, TYPE is the function type, CODE is the function subcode
737 (relative to AARCH64_BUILTIN_GENERAL), and ATTRS is the function
738 attributes. */
6d4d616a 739static tree
072a8b8f 740aarch64_general_add_builtin (const char *name, tree type, unsigned int code,
741 tree attrs = NULL_TREE)
6d4d616a
RS
742{
743 code = (code << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
744 return add_builtin_function (name, type, code, BUILT_IN_MD,
072a8b8f 745 NULL, attrs);
6d4d616a
RS
746}
747
f9d53c27
TB
748static const char *
749aarch64_mangle_builtin_scalar_type (const_tree type)
750{
751 int i = 0;
752
753 while (aarch64_scalar_builtin_types[i] != NULL)
b5828b4b 754 {
f9d53c27
TB
755 const char *name = aarch64_scalar_builtin_types[i];
756
757 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
758 && DECL_NAME (TYPE_NAME (type))
759 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
760 return aarch64_scalar_builtin_types[i];
761 i++;
762 }
763 return NULL;
b5828b4b
JG
764}
765
f9d53c27
TB
766static const char *
767aarch64_mangle_builtin_vector_type (const_tree type)
b5828b4b 768{
31427b97
RS
769 tree attrs = TYPE_ATTRIBUTES (type);
770 if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
771 {
772 tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
773 return IDENTIFIER_POINTER (mangled_name);
774 }
f9d53c27
TB
775
776 return NULL;
6db1ec94
JG
777}
778
f9d53c27 779const char *
6d4d616a 780aarch64_general_mangle_builtin_type (const_tree type)
6db1ec94 781{
f9d53c27
TB
782 const char *mangle;
783 /* Walk through all the AArch64 builtins types tables to filter out the
784 incoming type. */
785 if ((mangle = aarch64_mangle_builtin_vector_type (type))
786 || (mangle = aarch64_mangle_builtin_scalar_type (type)))
787 return mangle;
788
789 return NULL;
6db1ec94
JG
790}
791
/* Return the standard (non-vector, non-polynomial) builtin type for a
   scalar of mode MODE with qualifiers Q.  Only the signed/unsigned
   distinction in Q matters for the integer modes; the remaining modes
   map to a single type each.  Aborts on modes with no standard type.  */
static tree
aarch64_simd_builtin_std_type (machine_mode mode,
			       enum aarch64_type_qualifiers q)
{
  /* Pick the signed or unsigned variant of an integer type node.  */
#define QUAL_TYPE(M)  \
  ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
  switch (mode)
    {
    case E_QImode:
      return QUAL_TYPE (QI);
    case E_HImode:
      return QUAL_TYPE (HI);
    case E_SImode:
      return QUAL_TYPE (SI);
    case E_DImode:
      return QUAL_TYPE (DI);
    case E_TImode:
      return QUAL_TYPE (TI);
    /* The OI/CI/XI register-tuple modes use the opaque internal types.  */
    case E_OImode:
      return aarch64_simd_intOI_type_node;
    case E_CImode:
      return aarch64_simd_intCI_type_node;
    case E_XImode:
      return aarch64_simd_intXI_type_node;
    case E_HFmode:
      return aarch64_fp16_type_node;
    case E_SFmode:
      return float_type_node;
    case E_DFmode:
      return double_type_node;
    case E_BFmode:
      return aarch64_bf16_type_node;
    default:
      gcc_unreachable ();
    }
#undef QUAL_TYPE
}
829
f9d53c27 830static tree
b8506a8a 831aarch64_lookup_simd_builtin_type (machine_mode mode,
f9d53c27 832 enum aarch64_type_qualifiers q)
6db1ec94 833{
f9d53c27
TB
834 int i;
835 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
836
837 /* Non-poly scalar modes map to standard types not in the table. */
838 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
839 return aarch64_simd_builtin_std_type (mode, q);
840
841 for (i = 0; i < nelts; i++)
66f206b8
JW
842 {
843 if (aarch64_simd_types[i].mode == mode
844 && aarch64_simd_types[i].q == q)
845 return aarch64_simd_types[i].itype;
846 if (aarch64_simd_tuple_types[i][0] != NULL_TREE)
847 for (int j = 0; j < 3; j++)
14814e20 848 if (aarch64_simd_tuple_modes[i][j] == mode
66f206b8
JW
849 && aarch64_simd_types[i].q == q)
850 return aarch64_simd_tuple_types[i][j];
851 }
f9d53c27
TB
852
853 return NULL_TREE;
b5828b4b
JG
854}
855
f9d53c27 856static tree
b8506a8a 857aarch64_simd_builtin_type (machine_mode mode,
f9d53c27
TB
858 bool unsigned_p, bool poly_p)
859{
860 if (poly_p)
861 return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
862 else if (unsigned_p)
863 return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
864 else
865 return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
866}
867
/* Create and register all the distinct types used by the AdvSIMD builtins:
   fill in the element types of aarch64_simd_types, build the scalar poly
   types and all the vector types, and create the opaque OI/CI/XI
   register-tuple types.  */
static void
aarch64_init_simd_builtin_types (void)
{
  int i;
  int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
  tree tdecl;

  /* Init all the element types built by the front-end.  */
  aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
  aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
  aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
  aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
  aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
  aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
  aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
  aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
  aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
  aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;

  /* Poly types are a world of their own.  The scalar poly types are
     distinct copies of the unsigned integer types, created here rather
     than taken from the front-end.  */
  aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
    build_distinct_type_copy (unsigned_intQI_type_node);
  /* Prevent front-ends from transforming Poly8_t arrays into string
     literals.  */
  TYPE_STRING_FLAG (aarch64_simd_types[Poly8_t].eltype) = false;

  aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
    build_distinct_type_copy (unsigned_intHI_type_node);
  aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
    build_distinct_type_copy (unsigned_intDI_type_node);
  aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
    build_distinct_type_copy (unsigned_intTI_type_node);
  /* Init poly vector element types with scalar poly types.  */
  aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
  aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;

  /* Continue with standard types.  */
  aarch64_simd_types[Float16x4_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float16x8_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float32x2_t].eltype = float_type_node;
  aarch64_simd_types[Float32x4_t].eltype = float_type_node;
  aarch64_simd_types[Float64x1_t].eltype = double_type_node;
  aarch64_simd_types[Float64x2_t].eltype = double_type_node;

  /* Init Bfloat vector types with underlying __bf16 type.  */
  aarch64_simd_types[Bfloat16x4_t].eltype = aarch64_bf16_type_node;
  aarch64_simd_types[Bfloat16x8_t].eltype = aarch64_bf16_type_node;

  /* Build and register each vector type.  Scalar poly entries already
     have an itype from above and are only registered here.  */
  for (i = 0; i < nelts; i++)
    {
      tree eltype = aarch64_simd_types[i].eltype;
      machine_mode mode = aarch64_simd_types[i].mode;

      if (aarch64_simd_types[i].itype == NULL)
	{
	  /* Make a distinct copy so the AdvSIMD type is not shared with
	     vector_size-attribute types of the same mode.  */
	  tree type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
	  type = build_distinct_type_copy (type);
	  SET_TYPE_STRUCTURAL_EQUALITY (type);

	  /* Record the AAPCS64 mangled name in an attribute so that
	     aarch64_mangle_builtin_vector_type can recover it.  */
	  tree mangled_name = get_identifier (aarch64_simd_types[i].mangle);
	  tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("Advanced SIMD type"), value,
			 TYPE_ATTRIBUTES (type));
	  aarch64_simd_types[i].itype = type;
	}

      tdecl = add_builtin_type (aarch64_simd_types[i].name,
				aarch64_simd_types[i].itype);
      TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
    }

  /* Create opaque signed types spanning the OI/CI/XI register-tuple
     modes.  */
#define AARCH64_BUILD_SIGNED_TYPE(mode)  \
  make_signed_type (GET_MODE_PRECISION (mode));
  aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
  aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
  aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
#undef AARCH64_BUILD_SIGNED_TYPE

  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
  TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
  TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
  TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
}
967
968static void
969aarch64_init_simd_builtin_scalar_types (void)
970{
971 /* Define typedefs for all the standard scalar types. */
972 (*lang_hooks.types.register_builtin_type) (intQI_type_node,
43e9d192 973 "__builtin_aarch64_simd_qi");
f9d53c27 974 (*lang_hooks.types.register_builtin_type) (intHI_type_node,
43e9d192 975 "__builtin_aarch64_simd_hi");
7c369485
AL
976 (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node,
977 "__builtin_aarch64_simd_hf");
f9d53c27 978 (*lang_hooks.types.register_builtin_type) (intSI_type_node,
43e9d192 979 "__builtin_aarch64_simd_si");
f9d53c27 980 (*lang_hooks.types.register_builtin_type) (float_type_node,
43e9d192 981 "__builtin_aarch64_simd_sf");
f9d53c27 982 (*lang_hooks.types.register_builtin_type) (intDI_type_node,
43e9d192 983 "__builtin_aarch64_simd_di");
f9d53c27 984 (*lang_hooks.types.register_builtin_type) (double_type_node,
43e9d192 985 "__builtin_aarch64_simd_df");
f9d53c27 986 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
43e9d192 987 "__builtin_aarch64_simd_poly8");
f9d53c27 988 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
43e9d192 989 "__builtin_aarch64_simd_poly16");
f9d53c27 990 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
7baa225d 991 "__builtin_aarch64_simd_poly64");
f9d53c27 992 (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
7baa225d 993 "__builtin_aarch64_simd_poly128");
f9d53c27 994 (*lang_hooks.types.register_builtin_type) (intTI_type_node,
43e9d192 995 "__builtin_aarch64_simd_ti");
e603cd43
MI
996 (*lang_hooks.types.register_builtin_type) (aarch64_bf16_type_node,
997 "__builtin_aarch64_simd_bf");
b5828b4b 998 /* Unsigned integer types for various mode sizes. */
f9d53c27 999 (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
b5828b4b 1000 "__builtin_aarch64_simd_uqi");
f9d53c27 1001 (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
b5828b4b 1002 "__builtin_aarch64_simd_uhi");
f9d53c27 1003 (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
b5828b4b 1004 "__builtin_aarch64_simd_usi");
f9d53c27 1005 (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
b5828b4b 1006 "__builtin_aarch64_simd_udi");
f9d53c27
TB
1007}
1008
079c23cf
KT
1009/* Return a set of FLAG_* flags derived from FLAGS
1010 that describe what a function with result MODE could do,
072a8b8f 1011 taking the command-line flags into account. */
1012static unsigned int
079c23cf 1013aarch64_call_properties (unsigned int flags, machine_mode mode)
072a8b8f 1014{
079c23cf 1015 if (!(flags & FLAG_AUTO_FP) && FLOAT_MODE_P (mode))
35ffd4d1 1016 flags |= FLAG_FP;
072a8b8f 1017
1018 /* -fno-trapping-math means that we can assume any FP exceptions
1019 are not user-visible. */
1020 if (!flag_trapping_math)
1021 flags &= ~FLAG_RAISE_FP_EXCEPTIONS;
1022
1023 return flags;
1024}
1025
079c23cf
KT
1026/* Return true if calls to a function with flags F and mode MODE
1027 could modify some form of global state. */
072a8b8f 1028static bool
079c23cf 1029aarch64_modifies_global_state_p (unsigned int f, machine_mode mode)
072a8b8f 1030{
079c23cf 1031 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1032
1033 if (flags & FLAG_RAISE_FP_EXCEPTIONS)
1034 return true;
1035
1036 if (flags & FLAG_PREFETCH_MEMORY)
1037 return true;
1038
1039 return flags & FLAG_WRITE_MEMORY;
1040}
1041
079c23cf
KT
1042/* Return true if calls to a function with flags F and mode MODE
1043 could read some form of global state. */
072a8b8f 1044static bool
079c23cf 1045aarch64_reads_global_state_p (unsigned int f, machine_mode mode)
072a8b8f 1046{
079c23cf 1047 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1048
1049 if (flags & FLAG_READ_FPCR)
1050 return true;
1051
1052 return flags & FLAG_READ_MEMORY;
1053}
1054
079c23cf
KT
1055/* Return true if calls to a function with flags F and mode MODE
1056 could raise a signal. */
072a8b8f 1057static bool
079c23cf 1058aarch64_could_trap_p (unsigned int f, machine_mode mode)
072a8b8f 1059{
079c23cf 1060 unsigned int flags = aarch64_call_properties (f, mode);
072a8b8f 1061
1062 if (flags & FLAG_RAISE_FP_EXCEPTIONS)
1063 return true;
1064
1065 if (flags & (FLAG_READ_MEMORY | FLAG_WRITE_MEMORY))
1066 return true;
1067
1068 return false;
1069}
1070
1071/* Add attribute NAME to ATTRS. */
1072static tree
1073aarch64_add_attribute (const char *name, tree attrs)
1074{
1075 return tree_cons (get_identifier (name), NULL_TREE, attrs);
1076}
1077
079c23cf
KT
1078/* Return the appropriate attributes for a function that has
1079 flags F and mode MODE. */
072a8b8f 1080static tree
079c23cf 1081aarch64_get_attributes (unsigned int f, machine_mode mode)
072a8b8f 1082{
1083 tree attrs = NULL_TREE;
1084
079c23cf 1085 if (!aarch64_modifies_global_state_p (f, mode))
072a8b8f 1086 {
079c23cf 1087 if (aarch64_reads_global_state_p (f, mode))
072a8b8f 1088 attrs = aarch64_add_attribute ("pure", attrs);
1089 else
1090 attrs = aarch64_add_attribute ("const", attrs);
1091 }
1092
079c23cf 1093 if (!flag_non_call_exceptions || !aarch64_could_trap_p (f, mode))
072a8b8f 1094 attrs = aarch64_add_attribute ("nothrow", attrs);
1095
1096 return aarch64_add_attribute ("leaf", attrs);
1097}
1098
/* True once aarch64_init_simd_builtins has run; makes it idempotent.  */
static bool aarch64_simd_builtins_initialized_p = false;
1100
/* Due to the architecture not providing lane variant of the lane instructions
   for fcmla we can't use the standard simd builtin expansion code, but we
   still want the majority of the validation that would normally be done.  */

void
aarch64_init_fcmla_laneq_builtins (void)
{
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_fcmla_lane_builtin_data); ++i)
    {
      aarch64_fcmla_laneq_builtin_datum* d
	= &aarch64_fcmla_lane_builtin_data[i];
      /* Accumulator/multiplicand operands use the instruction's own mode.  */
      tree argtype = aarch64_lookup_simd_builtin_type (d->mode, qualifier_none);
      /* The "q" (laneq) operand lives in the mode twice as wide.  */
      machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
      tree quadtype
	= aarch64_lookup_simd_builtin_type (quadmode, qualifier_none);
      /* Lane index operand, validated as a lane-pair index.  */
      tree lanetype
	= aarch64_simd_builtin_std_type (SImode, qualifier_lane_pair_index);
      tree ftype = build_function_type_list (argtype, argtype, argtype,
					     quadtype, lanetype, NULL_TREE);
      tree attrs = aarch64_get_attributes (FLAG_FP, d->mode);
      tree fndecl
	= aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
1129
/* Register the table-driven AdvSIMD builtin functions.  When
   CALLED_FROM_PRAGMA is true, only the builtins involving structure
   (multi-register) modes are registered, via
   simulate_builtin_function_decl; otherwise only the non-structure ones
   are, via aarch64_general_add_builtin.  The two calls together cover
   the whole table, with matching fcodes in both passes.  */
void
aarch64_init_simd_builtin_functions (bool called_from_pragma)
{
  unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;

  if (!called_from_pragma)
    {
      /* Register the lane-bounds-checking helper used by arm_neon.h.  */
      tree lane_check_fpr = build_function_type_list (void_type_node,
						      size_type_node,
						      size_type_node,
						      intSI_type_node,
						      NULL);
      aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK]
	= aarch64_general_add_builtin ("__builtin_aarch64_im_lane_boundsi",
				       lane_check_fpr,
				       AARCH64_SIMD_BUILTIN_LANE_CHECK);
    }

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS + 1] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Count operands with structure (multi-register) modes, to decide
	 which registration pass this builtin belongs to.  */
      int struct_mode_args = 0;
      for (int j = op_num; j >= 0; j--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[j].mode;
	  if (aarch64_advsimd_struct_mode_p (op_mode))
	    struct_mode_args++;
	}

      /* Structure-mode builtins are registered from the pragma pass only;
	 all others from the non-pragma pass only.  */
      if ((called_from_pragma && struct_mode_args == 0)
	  || (!called_from_pragma && struct_mode_args > 0))
	continue;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  /* Record a per-operand signature character ('u'nsigned,
	     'p'oly, 's'igned); only non-default signatures are printed
	     in the builtin's name.  */
	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[op_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[op_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[op_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      /* Builtin name, optionally suffixed with the per-operand type
	 signature (e.g. "_uus").  */
      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      tree attrs = aarch64_get_attributes (d->flags, d->mode);

      if (called_from_pragma)
	{
	  /* Pack the class tag by hand, as aarch64_general_add_builtin
	     would, since simulate_builtin_function_decl takes the raw
	     function code.  */
	  unsigned int raw_code
	    = (fcode << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
	  fndecl = simulate_builtin_function_decl (input_location, namebuf,
						   ftype, raw_code, NULL,
						   attrs);
	}
      else
	fndecl = aarch64_general_add_builtin (namebuf, ftype, fcode, attrs);

      aarch64_builtin_decls[fcode] = fndecl;
    }
}
1272
/* Register the tuple type that contains NUM_VECTORS of the AdvSIMD type
   indexed by TYPE_INDEX.  */
static void
register_tuple_type (unsigned int num_vectors, unsigned int type_index)
{
  aarch64_simd_type_info *type = &aarch64_simd_types[type_index];

  /* Synthesize the name of the user-visible vector tuple type: strip the
     leading "__" and trailing "_t" from the internal name and insert the
     "x<NUM_VECTORS>" infix, e.g. "__Int8x8_t" + 2 -> "int8x8x2_t".  */
  const char *vector_type_name = type->name;
  char tuple_type_name[sizeof ("bfloat16x4x2_t")];
  snprintf (tuple_type_name, sizeof (tuple_type_name), "%.*sx%d_t",
	    (int) strlen (vector_type_name) - 4, vector_type_name + 2,
	    num_vectors);
  tuple_type_name[0] = TOLOWER (tuple_type_name[0]);

  tree vector_type = type->itype;
  tree array_type = build_array_type_nelts (vector_type, num_vectors);
  /* 64-bit DImode vectors need their tuple modes forced to the VNx1DI
     family explicitly.  */
  if (type->mode == DImode)
    {
      if (num_vectors == 2)
	SET_TYPE_MODE (array_type, V2x1DImode);
      else if (num_vectors == 3)
	SET_TYPE_MODE (array_type, V3x1DImode);
      else if (num_vectors == 4)
	SET_TYPE_MODE (array_type, V4x1DImode);
    }

  /* 128-bit base vectors give 128-bit-aligned tuples, 64-bit ones 64.  */
  unsigned int alignment
    = known_eq (GET_MODE_SIZE (type->mode), 16) ? 128 : 64;
  machine_mode tuple_mode = TYPE_MODE_RAW (array_type);
  gcc_assert (VECTOR_MODE_P (tuple_mode)
	      && TYPE_MODE (array_type) == tuple_mode
	      && TYPE_ALIGN (array_type) == alignment);

  /* The tuple is a struct with a single "val" array member, matching the
     arm_neon.h definition.  */
  tree field = build_decl (input_location, FIELD_DECL,
			   get_identifier ("val"), array_type);

  tree t = lang_hooks.types.simulate_record_decl (input_location,
						  tuple_type_name,
						  make_array_slice (&field,
								    1));
  /* -fpack-struct and #pragma pack can legitimately change the layout;
     otherwise the struct must keep the tuple mode and alignment.  */
  gcc_assert (TYPE_MODE_RAW (t) == TYPE_MODE (t)
	      && (flag_pack_struct
		  || maximum_field_alignment
		  || (TYPE_MODE_RAW (t) == tuple_mode
		      && TYPE_ALIGN (t) == alignment)));

  aarch64_simd_tuple_modes[type_index][num_vectors - 2] = tuple_mode;
  aarch64_simd_tuple_types[type_index][num_vectors - 2] = t;
}
1323
1324static bool
1325aarch64_scalar_builtin_type_p (aarch64_simd_type t)
1326{
1327 return (t == Poly8_t || t == Poly16_t || t == Poly64_t || t == Poly128_t);
1328}
1329
/* Enable AARCH64_FL_* flags EXTRA_FLAGS on top of the base Advanced SIMD
   set.  */
aarch64_simd_switcher::aarch64_simd_switcher (unsigned int extra_flags)
  : m_old_isa_flags (aarch64_isa_flags),
    m_old_general_regs_only (TARGET_GENERAL_REGS_ONLY)
{
  /* Changing the ISA flags should be enough here.  We shouldn't need to
     pay the compile-time cost of a full target switch.  */
  aarch64_isa_flags = AARCH64_FL_FP | AARCH64_FL_SIMD | extra_flags;
  global_options.x_target_flags &= ~MASK_GENERAL_REGS_ONLY;
}

/* Restore the ISA flags and the -mgeneral-regs-only setting that were in
   effect when the switcher was constructed.  */
aarch64_simd_switcher::~aarch64_simd_switcher ()
{
  if (m_old_general_regs_only)
    global_options.x_target_flags |= MASK_GENERAL_REGS_ONLY;
  aarch64_isa_flags = m_old_isa_flags;
}
1348
8197ab94
JW
1349/* Implement #pragma GCC aarch64 "arm_neon.h". */
1350void
1351handle_arm_neon_h (void)
1352{
14814e20
RS
1353 aarch64_simd_switcher simd;
1354
8197ab94
JW
1355 /* Register the AdvSIMD vector tuple types. */
1356 for (unsigned int i = 0; i < ARM_NEON_H_TYPES_LAST; i++)
1357 for (unsigned int count = 2; count <= 4; ++count)
1358 if (!aarch64_scalar_builtin_type_p (aarch64_simd_types[i].type))
1359 register_tuple_type (count, i);
1360
1361 aarch64_init_simd_builtin_functions (true);
1362}
1363
/* Set up all the AdvSIMD builtin types, scalar typedefs and builtin
   functions.  Idempotent: subsequent calls return immediately.  */
void
aarch64_init_simd_builtins (void)
{
  if (aarch64_simd_builtins_initialized_p)
    return;

  aarch64_simd_builtins_initialized_p = true;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  It can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();

  aarch64_init_simd_builtin_functions (false);
  /* Under LTO there is no arm_neon.h pragma to trigger the second
     registration pass, so run it here.  */
  if (in_lto_p)
    handle_arm_neon_h ();

  /* Initialize the remaining fcmla_laneq intrinsics.  */
  aarch64_init_fcmla_laneq_builtins ();
}
1387
/* Register the CRC32 builtins from aarch64_crc_builtin_data.  Each takes
   an unsigned SI accumulator plus a data operand of the table's mode and
   returns an unsigned SI.  */
static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_simd_builtin_std_type (d->mode,
						    qualifier_unsigned);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
      tree attrs = aarch64_get_attributes (FLAG_NONE, d->mode);
      tree fndecl
	= aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
1407
a6fc00da
BH
1408/* Add builtins for reciprocal square root. */
1409
1410void
1411aarch64_init_builtin_rsqrt (void)
1412{
1413 tree fndecl = NULL;
1414 tree ftype = NULL;
1415
1416 tree V2SF_type_node = build_vector_type (float_type_node, 2);
1417 tree V2DF_type_node = build_vector_type (double_type_node, 2);
1418 tree V4SF_type_node = build_vector_type (float_type_node, 4);
1419
1420 struct builtin_decls_data
1421 {
1422 tree type_node;
1423 const char *builtin_name;
1424 int function_code;
1425 };
1426
1427 builtin_decls_data bdda[] =
1428 {
1429 { double_type_node, "__builtin_aarch64_rsqrt_df", AARCH64_BUILTIN_RSQRT_DF },
1430 { float_type_node, "__builtin_aarch64_rsqrt_sf", AARCH64_BUILTIN_RSQRT_SF },
1431 { V2DF_type_node, "__builtin_aarch64_rsqrt_v2df", AARCH64_BUILTIN_RSQRT_V2DF },
1432 { V2SF_type_node, "__builtin_aarch64_rsqrt_v2sf", AARCH64_BUILTIN_RSQRT_V2SF },
1433 { V4SF_type_node, "__builtin_aarch64_rsqrt_v4sf", AARCH64_BUILTIN_RSQRT_V4SF }
1434 };
1435
1436 builtin_decls_data *bdd = bdda;
1437 builtin_decls_data *bdd_end = bdd + (sizeof (bdda) / sizeof (builtin_decls_data));
1438
1439 for (; bdd < bdd_end; bdd++)
1440 {
1441 ftype = build_function_type_list (bdd->type_node, bdd->type_node, NULL_TREE);
079c23cf 1442 tree attrs = aarch64_get_attributes (FLAG_FP, TYPE_MODE (bdd->type_node));
6d4d616a 1443 fndecl = aarch64_general_add_builtin (bdd->builtin_name,
079c23cf 1444 ftype, bdd->function_code, attrs);
a6fc00da
BH
1445 aarch64_builtin_decls[bdd->function_code] = fndecl;
1446 }
1447}
1448
/* Initialize the backend types that support the user-visible __fp16
   type, also initialize a pointer to that type, to be used when
   forming HFAs.  */

static void
aarch64_init_fp16_types (void)
{
  aarch64_fp16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_fp16_type_node) = 16;
  layout_type (aarch64_fp16_type_node);

  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node, "__fp16");
  aarch64_fp16_ptr_type_node = build_pointer_type (aarch64_fp16_type_node);
}
1463
/* Initialize the backend REAL_TYPE type supporting bfloat types.  */
static void
aarch64_init_bf16_types (void)
{
  aarch64_bf16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_bf16_type_node) = 16;
  /* Unlike __fp16, the mode is set explicitly to BFmode before layout,
     since __bf16 shares its 16-bit precision with the half-float type.  */
  SET_TYPE_MODE (aarch64_bf16_type_node, BFmode);
  layout_type (aarch64_bf16_type_node);

  lang_hooks.types.register_builtin_type (aarch64_bf16_type_node, "__bf16");
  aarch64_bf16_ptr_type_node = build_pointer_type (aarch64_bf16_type_node);
}
1476
/* Pointer authentication builtins that will become NOP on legacy platform.
   Currently, these builtins are for internal use only (libgcc EH unwinder).  */

void
aarch64_init_pauth_hint_builtins (void)
{
  /* Pointer Authentication builtins.  The sign/authenticate builtins take
     a pointer plus a 64-bit modifier ("salt"); the strip builtin takes
     just the pointer.  All return the transformed pointer.  */
  tree ftype_pointer_auth
    = build_function_type_list (ptr_type_node, ptr_type_node,
				unsigned_intDI_type_node, NULL_TREE);
  tree ftype_pointer_strip
    = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);

  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_XPACLRI]
    = aarch64_general_add_builtin ("__builtin_aarch64_xpaclri",
				   ftype_pointer_strip,
				   AARCH64_PAUTH_BUILTIN_XPACLRI);
}
1511
/* Initialize the transactional memory extension (TME) builtins.  */
static void
aarch64_init_tme_builtins (void)
{
  /* tstart/ttest return a status value; tcommit takes and returns
     nothing; tcancel takes a 64-bit cancellation reason.  */
  tree ftype_uint64_void
    = build_function_type_list (uint64_type_node, NULL);
  tree ftype_void_void
    = build_function_type_list (void_type_node, NULL);
  tree ftype_void_uint64
    = build_function_type_list (void_type_node, uint64_type_node, NULL);

  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TSTART]
    = aarch64_general_add_builtin ("__builtin_aarch64_tstart",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TSTART);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TTEST]
    = aarch64_general_add_builtin ("__builtin_aarch64_ttest",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TTEST);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCOMMIT]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcommit",
				   ftype_void_void,
				   AARCH64_TME_BUILTIN_TCOMMIT);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCANCEL]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcancel",
				   ftype_void_uint64,
				   AARCH64_TME_BUILTIN_TCANCEL);
}
1540
c5dc215d
KT
1541/* Add builtins for Random Number instructions. */
1542
1543static void
1544aarch64_init_rng_builtins (void)
1545{
1546 tree unsigned_ptr_type = build_pointer_type (unsigned_intDI_type_node);
1547 tree ftype
1548 = build_function_type_list (integer_type_node, unsigned_ptr_type, NULL);
1549 aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDR]
1550 = aarch64_general_add_builtin ("__builtin_aarch64_rndr", ftype,
1551 AARCH64_BUILTIN_RNG_RNDR);
1552 aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDRRS]
1553 = aarch64_general_add_builtin ("__builtin_aarch64_rndrrs", ftype,
1554 AARCH64_BUILTIN_RNG_RNDRRS);
1555}
/* Initialize the memory tagging extension (MTE) builtins.  */

/* Expansion data for each MTE builtin, indexed by
   (fcode - AARCH64_MEMTAG_BUILTIN_START - 1); filled in by
   aarch64_init_memtag_builtins.  */
struct
{
  tree ftype;		/* Prototype of the builtin.  */
  enum insn_code icode;	/* Insn used to expand the builtin.  */
} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
			      AARCH64_MEMTAG_BUILTIN_START - 1];
/* Register the MTE builtins and record their types and expansion insns
   in aarch64_memtag_builtin_data.  */
static void
aarch64_init_memtag_builtins (void)
{
  tree fntype = NULL;

  /* Declare builtin __builtin_aarch64_memtag_<N> with type T and record
     {T, CODE_FOR_<I>} so the expander can look the insn up by fcode F.  */
#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
  aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
    = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
				   T, AARCH64_MEMTAG_BUILTIN_##F); \
  aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
			      AARCH64_MEMTAG_BUILTIN_START - 1] = \
    {T, CODE_FOR_##I};

  /* void *irg (void *, uint64_t).  */
  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);

  /* uint64_t gmi (void *, uint64_t).  */
  fntype = build_function_type_list (uint64_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);

  /* ptrdiff_t subp (void *, void *).  */
  fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
				     ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);

  /* void *inc_tag (void *, unsigned) — expands to ADDG.  */
  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     unsigned_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);

  /* void set_tag (void *) — expands to STG.  */
  fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);

  /* void *get_tag (void *) — expands to LDG.  */
  fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);

#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
}
/* Add builtins for Load/store 64 Byte instructions.  */

/* Name, function code and prototype of one LS64 builtin.  */
typedef struct
{
  const char *name;
  unsigned int code;
  tree type;
} ls64_builtins_data;

/* The __arm_data512_t record type used by the LS64 builtins; created by
   aarch64_init_ls64_builtins_types.  */
static GTY(()) tree ls64_arm_data_t = NULL_TREE;
1614static void
1615aarch64_init_ls64_builtins_types (void)
1616{
1617 /* Synthesize:
1618
1619 typedef struct {
1620 uint64_t val[8];
1621 } __arm_data512_t; */
1622 const char *tuple_type_name = "__arm_data512_t";
1623 tree node_type = get_typenode_from_name (UINT64_TYPE);
1624 tree array_type = build_array_type_nelts (node_type, 8);
1625 SET_TYPE_MODE (array_type, V8DImode);
1626
1627 gcc_assert (TYPE_MODE_RAW (array_type) == TYPE_MODE (array_type));
1628 gcc_assert (TYPE_ALIGN (array_type) == 64);
1629
1630 tree field = build_decl (input_location, FIELD_DECL,
1631 get_identifier ("val"), array_type);
1632
1633 ls64_arm_data_t = lang_hooks.types.simulate_record_decl (input_location,
1634 tuple_type_name,
1635 make_array_slice (&field, 1));
1636
1637 gcc_assert (TYPE_MODE (ls64_arm_data_t) == V8DImode);
1638 gcc_assert (TYPE_MODE_RAW (ls64_arm_data_t) == TYPE_MODE (ls64_arm_data_t));
1639 gcc_assert (TYPE_ALIGN (ls64_arm_data_t) == 64);
1640}
1641
1642static void
1643aarch64_init_ls64_builtins (void)
1644{
1645 aarch64_init_ls64_builtins_types ();
1646
1647 ls64_builtins_data data[4] = {
1648 {"__builtin_aarch64_ld64b", AARCH64_LS64_BUILTIN_LD64B,
1649 build_function_type_list (ls64_arm_data_t,
1650 const_ptr_type_node, NULL_TREE)},
1651 {"__builtin_aarch64_st64b", AARCH64_LS64_BUILTIN_ST64B,
1652 build_function_type_list (void_type_node, ptr_type_node,
1653 ls64_arm_data_t, NULL_TREE)},
1654 {"__builtin_aarch64_st64bv", AARCH64_LS64_BUILTIN_ST64BV,
1655 build_function_type_list (uint64_type_node, ptr_type_node,
1656 ls64_arm_data_t, NULL_TREE)},
1657 {"__builtin_aarch64_st64bv0", AARCH64_LS64_BUILTIN_ST64BV0,
1658 build_function_type_list (uint64_type_node, ptr_type_node,
1659 ls64_arm_data_t, NULL_TREE)},
1660 };
1661
1662 for (size_t i = 0; i < ARRAY_SIZE (data); ++i)
1663 aarch64_builtin_decls[data[i].code]
1664 = aarch64_general_add_builtin (data[i].name, data[i].type, data[i].code);
1665}
1666
/* Implement #pragma GCC aarch64 "arm_acle.h".  Registers the builtins
   that arm_acle.h exposes lazily (currently only the LS64 ones).  */
void
handle_arm_acle_h (void)
{
  if (TARGET_LS64)
    aarch64_init_ls64_builtins ();
}
0d7e5fa6 1675/* Initialize fpsr fpcr getters and setters. */
c5dc215d 1676
0d7e5fa6
AC
1677static void
1678aarch64_init_fpsr_fpcr_builtins (void)
43e9d192 1679{
0d7e5fa6 1680 tree ftype_set
aa87aced 1681 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
0d7e5fa6 1682 tree ftype_get
aa87aced
KV
1683 = build_function_type_list (unsigned_type_node, NULL);
1684
1685 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
6d4d616a 1686 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr",
0d7e5fa6 1687 ftype_get,
6d4d616a 1688 AARCH64_BUILTIN_GET_FPCR);
aa87aced 1689 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
6d4d616a 1690 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr",
0d7e5fa6 1691 ftype_set,
6d4d616a 1692 AARCH64_BUILTIN_SET_FPCR);
aa87aced 1693 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
6d4d616a 1694 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr",
0d7e5fa6 1695 ftype_get,
6d4d616a 1696 AARCH64_BUILTIN_GET_FPSR);
aa87aced 1697 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
6d4d616a 1698 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr",
0d7e5fa6 1699 ftype_set,
6d4d616a 1700 AARCH64_BUILTIN_SET_FPSR);
aa87aced 1701
0d7e5fa6
AC
1702 ftype_set
1703 = build_function_type_list (void_type_node, long_long_unsigned_type_node,
1704 NULL);
1705 ftype_get
1706 = build_function_type_list (long_long_unsigned_type_node, NULL);
1707
1708 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR64]
1709 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr64",
1710 ftype_get,
1711 AARCH64_BUILTIN_GET_FPCR64);
1712 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR64]
1713 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr64",
1714 ftype_set,
1715 AARCH64_BUILTIN_SET_FPCR64);
1716 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR64]
1717 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr64",
1718 ftype_get,
1719 AARCH64_BUILTIN_GET_FPSR64);
1720 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR64]
1721 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr64",
1722 ftype_set,
1723 AARCH64_BUILTIN_SET_FPSR64);
1724}
/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group.  */

void
aarch64_general_init_builtins (void)
{
  aarch64_init_fpsr_fpcr_builtins ();

  /* Scalar FP16/BF16 types must exist before the SIMD builtins that
     use them are registered.  */
  aarch64_init_fp16_types ();

  aarch64_init_bf16_types ();

  /* Temporarily enable SIMD while registering the SIMD builtins.  */
  {
    aarch64_simd_switcher simd;
    aarch64_init_simd_builtins ();
  }

  aarch64_init_crc32_builtins ();
  aarch64_init_builtin_rsqrt ();
  aarch64_init_rng_builtins ();

  /* int32_t __builtin_aarch64_jcvtzs (double).  */
  tree ftype_jcvt
    = build_function_type_list (intSI_type_node, double_type_node, NULL);
  aarch64_builtin_decls[AARCH64_JSCVT]
    = aarch64_general_add_builtin ("__builtin_aarch64_jcvtzs", ftype_jcvt,
				   AARCH64_JSCVT);

  /* Initialize pointer authentication builtins which are backed by instructions
     in NOP encoding space.

     NOTE: these builtins are supposed to be used by libgcc unwinder only, as
     there is no support on return address signing under ILP32, we don't
     register them.  */
  if (!TARGET_ILP32)
    aarch64_init_pauth_hint_builtins ();

  if (TARGET_TME)
    aarch64_init_tme_builtins ();

  if (TARGET_MEMTAG)
    aarch64_init_memtag_builtins ();
}
1767
6d4d616a 1768/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group. */
119103ca 1769tree
6d4d616a 1770aarch64_general_builtin_decl (unsigned code, bool)
119103ca
JG
1771{
1772 if (code >= AARCH64_BUILTIN_MAX)
1773 return error_mark_node;
1774
1775 return aarch64_builtin_decls[code];
1776}
/* Classification of a SIMD builtin operand; drives how
   aarch64_simd_expand_args expands each argument.  */
typedef enum
{
  SIMD_ARG_COPY_TO_REG,		/* Force the operand into a register.  */
  SIMD_ARG_CONSTANT,		/* Operand must be a constant immediate.  */
  SIMD_ARG_LANE_INDEX,		/* Lane index into the previous operand.  */
  SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX, /* Lane for struct load/store.  */
  SIMD_ARG_LANE_PAIR_INDEX,	/* Lane pair index, bounded by nunits / 2.  */
  SIMD_ARG_LANE_QUADTUP_INDEX,	/* Lane quadtup index, bounded by nunits / 4.  */
  SIMD_ARG_STOP			/* Terminates the argument list.  */
} builtin_simd_arg;
/* Expand a call EXP to the SIMD builtin with insn code ICODE.  ARGS
   describes the kind of each argument and is terminated by SIMD_ARG_STOP;
   HAVE_RETVAL is 1 if the insn produces a value, 0 if it is void.
   BUILTIN_MODE is the mode the builtin was registered with (used for
   struct load/store lane bounds checking).  Returns TARGET (or a fresh
   register) on success, const0_rtx after reporting an error, or NULL_RTX
   if the insn could not be generated.  */
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args,
			  machine_mode builtin_mode)
{
  rtx pat;
  rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand.  */
  int opc = 0;

  if (have_retval)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      op[opc++] = target;
    }

  for (;;)
    {
      builtin_simd_arg thisarg = args[opc - have_retval];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
	  machine_mode mode = insn_data[icode].operand[opc].mode;
	  op[opc] = expand_normal (arg);

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		op[opc] = convert_memory_address (Pmode, op[opc]);
	      /*gcc_assert (GET_MODE (op[opc]) == mode); */
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
		op[opc] = copy_to_mode_reg (mode, op[opc]);
	      break;

	    case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
	      gcc_assert (opc > 1);
	      if (CONST_INT_P (op[opc]))
		{
		  unsigned int nunits
		    = GET_MODE_NUNITS (builtin_mode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[opc] = aarch64_endian_lane_rtx (builtin_mode,
						     INTVAL (op[opc]));
		}
	      goto constant_arg;

	    case SIMD_ARG_LANE_INDEX:
	      /* Must be a previous operand into which this is an index.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  unsigned int nunits
		    = GET_MODE_NUNITS (vmode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[opc] = aarch64_endian_lane_rtx (vmode, INTVAL (op[opc]));
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;

	    case SIMD_ARG_LANE_PAIR_INDEX:
	      /* Must be a previous operand into which this is an index and
		 index is restricted to nunits / 2.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  unsigned int nunits
		    = GET_MODE_NUNITS (vmode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits / 2, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  int lane = INTVAL (op[opc]);
		  op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 2, lane),
					  SImode);
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;
	    case SIMD_ARG_LANE_QUADTUP_INDEX:
	      /* Must be a previous operand into which this is an index and
		 index is restricted to nunits / 4.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  unsigned int nunits
		    = GET_MODE_NUNITS (vmode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits / 4, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  int lane = INTVAL (op[opc]);
		  op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 4, lane),
					  SImode);
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;
	    case SIMD_ARG_CONSTANT:
constant_arg:
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
		{
		  error_at (EXPR_LOCATION (exp),
			    "argument %d must be a constant immediate",
			    opc + 1 - have_retval);
		  return const0_rtx;
		}
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  opc++;
	}
    }

  /* opc now counts the result operand (if any) plus all arguments.  */
  switch (opc)
    {
    case 1:
      pat = GEN_FCN (icode) (op[0]);
      break;

    case 2:
      pat = GEN_FCN (icode) (op[0], op[1]);
      break;

    case 3:
      pat = GEN_FCN (icode) (op[0], op[1], op[2]);
      break;

    case 4:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
      break;

    case 5:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
      break;

    case 6:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
      break;

    default:
      gcc_unreachable ();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
/* Expand an AArch64 AdvSIMD builtin(intrinsic).  FCODE selects the builtin,
   EXP is the call expression and TARGET a convenient place for the result
   (may be NULL).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  /* The lane-check pseudo-builtin only diagnoses; it emits no RTL.  */
  if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
    {
      rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
      rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
      if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
	  && UINTVAL (elementsize) != 0
	  && UINTVAL (totalsize) != 0)
	{
	  rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
	  if (CONST_INT_P (lane_idx))
	    aarch64_simd_lane_bounds (lane_idx, 0,
				      UINTVAL (totalsize)
				      / UINTVAL (elementsize),
				      exp);
	  else
	    error_at (EXPR_LOCATION (exp),
		      "lane index must be a constant immediate");
	}
      else
	error_at (EXPR_LOCATION (exp),
		  "total size and element size must be a nonzero "
		  "constant immediate");
      /* Don't generate any RTL.  */
      return const0_rtx;
    }
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  /* Translate each argument's qualifier into a builtin_simd_arg kind.  */
  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
	args[k] = SIMD_ARG_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
	args[k] = SIMD_ARG_LANE_PAIR_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
	args[k] = SIMD_ARG_LANE_QUADTUP_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
	args[k] = SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
    (target, icode, !is_void, exp, &args[1], d->mode);
}
342be7f7 2039
5d357f26
KT
2040rtx
2041aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
2042{
2043 rtx pat;
2044 aarch64_crc_builtin_datum *d
2045 = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
2046 enum insn_code icode = d->icode;
2047 tree arg0 = CALL_EXPR_ARG (exp, 0);
2048 tree arg1 = CALL_EXPR_ARG (exp, 1);
2049 rtx op0 = expand_normal (arg0);
2050 rtx op1 = expand_normal (arg1);
ef4bddc2
RS
2051 machine_mode tmode = insn_data[icode].operand[0].mode;
2052 machine_mode mode0 = insn_data[icode].operand[1].mode;
2053 machine_mode mode1 = insn_data[icode].operand[2].mode;
5d357f26
KT
2054
2055 if (! target
2056 || GET_MODE (target) != tmode
2057 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
2058 target = gen_reg_rtx (tmode);
2059
2060 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
2061 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
2062
2063 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
2064 op0 = copy_to_mode_reg (mode0, op0);
2065 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
2066 op1 = copy_to_mode_reg (mode1, op1);
2067
2068 pat = GEN_FCN (icode) (target, op0, op1);
d5a29419
KT
2069 if (!pat)
2070 return NULL_RTX;
2071
5d357f26
KT
2072 emit_insn (pat);
2073 return target;
2074}
2075
a6fc00da
BH
2076/* Function to expand reciprocal square root builtins. */
2077
2078static rtx
2079aarch64_expand_builtin_rsqrt (int fcode, tree exp, rtx target)
2080{
2081 tree arg0 = CALL_EXPR_ARG (exp, 0);
2082 rtx op0 = expand_normal (arg0);
2083
2084 rtx (*gen) (rtx, rtx);
2085
2086 switch (fcode)
2087 {
2088 case AARCH64_BUILTIN_RSQRT_DF:
ee62a5a6 2089 gen = gen_rsqrtdf2;
a6fc00da
BH
2090 break;
2091 case AARCH64_BUILTIN_RSQRT_SF:
ee62a5a6 2092 gen = gen_rsqrtsf2;
a6fc00da
BH
2093 break;
2094 case AARCH64_BUILTIN_RSQRT_V2DF:
ee62a5a6 2095 gen = gen_rsqrtv2df2;
a6fc00da
BH
2096 break;
2097 case AARCH64_BUILTIN_RSQRT_V2SF:
ee62a5a6 2098 gen = gen_rsqrtv2sf2;
a6fc00da
BH
2099 break;
2100 case AARCH64_BUILTIN_RSQRT_V4SF:
ee62a5a6 2101 gen = gen_rsqrtv4sf2;
a6fc00da
BH
2102 break;
2103 default: gcc_unreachable ();
2104 }
2105
2106 if (!target)
2107 target = gen_reg_rtx (GET_MODE (op0));
2108
2109 emit_insn (gen (target, op0));
2110
2111 return target;
2112}
/* Expand a FCMLA lane expression EXP with code FCODE and
   result going to TARGET if that is convenient.  The last call argument
   is the lane index into the 128-bit third operand and must be a
   constant; const0_rtx is returned after diagnosing a non-constant.  */

rtx
aarch64_expand_fcmla_builtin (tree exp, rtx target, int fcode)
{
  int bcode = fcode - AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE - 1;
  aarch64_fcmla_laneq_builtin_datum* d
    = &aarch64_fcmla_lane_builtin_data[bcode];
  machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
  rtx op0 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 0)));
  rtx op1 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 1)));
  rtx op2 = force_reg (quadmode, expand_normal (CALL_EXPR_ARG (exp, 2)));
  tree tmp = CALL_EXPR_ARG (exp, 3);
  rtx lane_idx = expand_expr (tmp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

  /* Validate that the lane index is a constant.  */
  if (!CONST_INT_P (lane_idx))
    {
      error_at (EXPR_LOCATION (exp),
		"argument %d must be a constant immediate", 4);
      return const0_rtx;
    }

  /* Validate that the index is within the expected range.  */
  int nunits = GET_MODE_NUNITS (quadmode).to_constant ();
  aarch64_simd_lane_bounds (lane_idx, 0, nunits / 2, exp);

  /* Generate the correct register and mode.  */
  int lane = INTVAL (lane_idx);

  if (lane < nunits / 4)
    /* Lane lies in the low half: take the low 64 bits of op2.  */
    op2 = simplify_gen_subreg (d->mode, op2, quadmode,
			       subreg_lowpart_offset (d->mode, quadmode));
  else
    {
      /* Select the upper 64 bits, either a V2SF or V4HF, this however
	 is quite messy, as the operation required even though simple
	 doesn't have a simple RTL pattern, and seems it's quite hard to
	 define using a single RTL pattern.  The target generic version
	 gen_highpart_mode generates code that isn't optimal.  */
      rtx temp1 = gen_reg_rtx (d->mode);
      rtx temp2 = gen_reg_rtx (DImode);
      temp1 = simplify_gen_subreg (d->mode, op2, quadmode,
				   subreg_lowpart_offset (d->mode, quadmode));
      temp1 = simplify_gen_subreg (V2DImode, temp1, d->mode, 0);
      if (BYTES_BIG_ENDIAN)
	emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const0_rtx));
      else
	emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const1_rtx));
      op2 = simplify_gen_subreg (d->mode, temp2, GET_MODE (temp2), 0);

      /* And recalculate the index.  */
      lane -= nunits / 4;
    }

  /* Keep to GCC-vector-extension lane indices in the RTL, only nunits / 4
     (max nunits in range check) are valid.  Which means only 0-1, so we
     only need to know the order in a V2mode.  */
  lane_idx = aarch64_endian_lane_rtx (V2DImode, lane);

  if (!target
      || !REG_P (target)
      || GET_MODE (target) != d->mode)
    target = gen_reg_rtx (d->mode);

  rtx pat = NULL_RTX;

  if (d->lane)
    pat = GEN_FCN (d->icode) (target, op0, op1, op2, lane_idx);
  else
    pat = GEN_FCN (d->icode) (target, op0, op1, op2);

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
2193
89626179
SD
2194/* Function to expand an expression EXP which calls one of the Transactional
2195 Memory Extension (TME) builtins FCODE with the result going to TARGET. */
2196static rtx
2197aarch64_expand_builtin_tme (int fcode, tree exp, rtx target)
2198{
2199 switch (fcode)
2200 {
2201 case AARCH64_TME_BUILTIN_TSTART:
2202 target = gen_reg_rtx (DImode);
2203 emit_insn (GEN_FCN (CODE_FOR_tstart) (target));
2204 break;
2205
2206 case AARCH64_TME_BUILTIN_TTEST:
2207 target = gen_reg_rtx (DImode);
2208 emit_insn (GEN_FCN (CODE_FOR_ttest) (target));
2209 break;
2210
2211 case AARCH64_TME_BUILTIN_TCOMMIT:
2212 emit_insn (GEN_FCN (CODE_FOR_tcommit) ());
2213 break;
2214
2215 case AARCH64_TME_BUILTIN_TCANCEL:
2216 {
2217 tree arg0 = CALL_EXPR_ARG (exp, 0);
2218 rtx op0 = expand_normal (arg0);
2219 if (CONST_INT_P (op0) && UINTVAL (op0) <= 65536)
2220 emit_insn (GEN_FCN (CODE_FOR_tcancel) (op0));
2221 else
2222 {
62e43587
MS
2223 error_at (EXPR_LOCATION (exp),
2224 "argument must be a 16-bit constant immediate");
89626179
SD
2225 return const0_rtx;
2226 }
2227 }
2228 break;
2229
2230 default :
2231 gcc_unreachable ();
2232 }
2233 return target;
2234}
2235
fdcddba8
PW
2236/* Function to expand an expression EXP which calls one of the Load/Store
2237 64 Byte extension (LS64) builtins FCODE with the result going to TARGET. */
2238static rtx
2239aarch64_expand_builtin_ls64 (int fcode, tree exp, rtx target)
2240{
2241 expand_operand ops[3];
2242
2243 switch (fcode)
2244 {
2245 case AARCH64_LS64_BUILTIN_LD64B:
2246 {
2247 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2248 create_output_operand (&ops[0], target, V8DImode);
2249 create_input_operand (&ops[1], op0, DImode);
2250 expand_insn (CODE_FOR_ld64b, 2, ops);
2251 return ops[0].value;
2252 }
2253 case AARCH64_LS64_BUILTIN_ST64B:
2254 {
2255 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2256 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2257 create_output_operand (&ops[0], op0, DImode);
2258 create_input_operand (&ops[1], op1, V8DImode);
2259 expand_insn (CODE_FOR_st64b, 2, ops);
2260 return const0_rtx;
2261 }
2262 case AARCH64_LS64_BUILTIN_ST64BV:
2263 {
2264 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2265 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2266 create_output_operand (&ops[0], target, DImode);
2267 create_input_operand (&ops[1], op0, DImode);
2268 create_input_operand (&ops[2], op1, V8DImode);
2269 expand_insn (CODE_FOR_st64bv, 3, ops);
2270 return ops[0].value;
2271 }
2272 case AARCH64_LS64_BUILTIN_ST64BV0:
2273 {
2274 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2275 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
2276 create_output_operand (&ops[0], target, DImode);
2277 create_input_operand (&ops[1], op0, DImode);
2278 create_input_operand (&ops[2], op1, V8DImode);
2279 expand_insn (CODE_FOR_st64bv0, 3, ops);
2280 return ops[0].value;
2281 }
2282 }
2283
2284 gcc_unreachable ();
2285}
2286
c5dc215d
KT
2287/* Expand a random number builtin EXP with code FCODE, putting the result
2288 int TARGET. If IGNORE is true the return value is ignored. */
2289
2290rtx
2291aarch64_expand_rng_builtin (tree exp, rtx target, int fcode, int ignore)
2292{
2293 rtx pat;
2294 enum insn_code icode;
2295 if (fcode == AARCH64_BUILTIN_RNG_RNDR)
2296 icode = CODE_FOR_aarch64_rndr;
2297 else if (fcode == AARCH64_BUILTIN_RNG_RNDRRS)
2298 icode = CODE_FOR_aarch64_rndrrs;
2299 else
2300 gcc_unreachable ();
2301
2302 rtx rand = gen_reg_rtx (DImode);
2303 pat = GEN_FCN (icode) (rand);
2304 if (!pat)
2305 return NULL_RTX;
2306
2307 tree arg0 = CALL_EXPR_ARG (exp, 0);
2308 rtx res_addr = expand_normal (arg0);
2309 res_addr = convert_memory_address (Pmode, res_addr);
2310 rtx res_mem = gen_rtx_MEM (DImode, res_addr);
2311 emit_insn (pat);
2312 emit_move_insn (res_mem, rand);
2313 /* If the status result is unused don't generate the CSET code. */
2314 if (ignore)
2315 return target;
2316
2317 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
f7581eb3 2318 rtx cmp_rtx = gen_rtx_fmt_ee (EQ, SImode, cc_reg, const0_rtx);
c5dc215d
KT
2319 emit_insn (gen_aarch64_cstoresi (target, cmp_rtx, cc_reg));
2320 return target;
2321}
/* Expand an expression EXP that calls a MEMTAG built-in FCODE
   with result going to TARGET.  All pointer/integer operands are widened
   to DImode before being handed to the insn.  Returns const0_rtx after
   diagnosing an error, NULL_RTX if the insn could not be generated.  */
static rtx
aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
{
  if (TARGET_ILP32)
    {
      error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
      return const0_rtx;
    }

  rtx pat = NULL;
  enum insn_code icode = aarch64_memtag_builtin_data[fcode -
			   AARCH64_MEMTAG_BUILTIN_START - 1].icode;

  rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
  machine_mode mode0 = GET_MODE (op0);
  op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
  op0 = convert_to_mode (DImode, op0, true);

  switch (fcode)
    {
    case AARCH64_MEMTAG_BUILTIN_IRG:
    case AARCH64_MEMTAG_BUILTIN_GMI:
    case AARCH64_MEMTAG_BUILTIN_SUBP:
    case AARCH64_MEMTAG_BUILTIN_INC_TAG:
      {
	if (! target
	    || GET_MODE (target) != DImode
	    || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
	  target = gen_reg_rtx (DImode);

	if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
	  {
	    rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));

	    /* ADDG's tag-offset operand must satisfy the insn predicate
	       (a [0,15] immediate); anything else is an error.  */
	    if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
	      {
		pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
		break;
	      }
	    error_at (EXPR_LOCATION (exp),
		      "argument %d must be a constant immediate "
		      "in range [0,15]", 2);
	    return const0_rtx;
	  }
	else
	  {
	    rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
	    machine_mode mode1 = GET_MODE (op1);
	    op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
	    op1 = convert_to_mode (DImode, op1, true);
	    pat = GEN_FCN (icode) (target, op0, op1);
	  }
	break;
      }
    case AARCH64_MEMTAG_BUILTIN_GET_TAG:
      target = op0;
      pat = GEN_FCN (icode) (target, op0, const0_rtx);
      break;
    case AARCH64_MEMTAG_BUILTIN_SET_TAG:
      pat = GEN_FCN (icode) (op0, op0, const0_rtx);
      break;
    default:
      gcc_unreachable();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
2396
f5e73de0 2397/* Expand an expression EXP as fpsr or fpcr setter (depending on
0d7e5fa6
AC
2398 UNSPEC) using MODE. */
2399static void
2400aarch64_expand_fpsr_fpcr_setter (int unspec, machine_mode mode, tree exp)
2401{
2402 tree arg = CALL_EXPR_ARG (exp, 0);
2403 rtx op = force_reg (mode, expand_normal (arg));
2404 emit_insn (gen_aarch64_set (unspec, mode, op));
2405}
2406
f5e73de0
AC
2407/* Expand a fpsr or fpcr getter (depending on UNSPEC) using MODE.
2408 Return the target. */
2409static rtx
2410aarch64_expand_fpsr_fpcr_getter (enum insn_code icode, machine_mode mode,
2411 rtx target)
2412{
2413 expand_operand op;
2414 create_output_operand (&op, target, mode);
2415 expand_insn (icode, 1, &op);
2416 return op.value;
2417}
2418
/* Expand an expression EXP that calls built-in function FCODE,
   with result going to TARGET if that's convenient.  IGNORE is true
   if the result of the builtin is ignored.  */
rtx
aarch64_general_expand_builtin (unsigned int fcode, tree exp, rtx target,
				int ignore)
{
  int icode;
  rtx op0;
  tree arg0;

  switch (fcode)
    {
    /* FPCR/FPSR getters and setters, in 32-bit and 64-bit variants.
       Setters have no result; getters return the register value.  */
    case AARCH64_BUILTIN_GET_FPCR:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrsi,
					      SImode, target);
    case AARCH64_BUILTIN_SET_FPCR:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, SImode, exp);
      return target;
    case AARCH64_BUILTIN_GET_FPSR:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrsi,
					      SImode, target);
    case AARCH64_BUILTIN_SET_FPSR:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, SImode, exp);
      return target;
    case AARCH64_BUILTIN_GET_FPCR64:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrdi,
					      DImode, target);
    case AARCH64_BUILTIN_SET_FPCR64:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, DImode, exp);
      return target;
    case AARCH64_BUILTIN_GET_FPSR64:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrdi,
					      DImode, target);
    case AARCH64_BUILTIN_SET_FPSR64:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, DImode, exp);
      return target;

    /* Pointer-authentication builtins.  These expand to insns with
       fixed register operands, so the argument values are moved into
       the hard registers the patterns expect.  */
    case AARCH64_PAUTH_BUILTIN_AUTIA1716:
    case AARCH64_PAUTH_BUILTIN_PACIA1716:
    case AARCH64_PAUTH_BUILTIN_AUTIB1716:
    case AARCH64_PAUTH_BUILTIN_PACIB1716:
    case AARCH64_PAUTH_BUILTIN_XPACLRI:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = force_reg (Pmode, expand_normal (arg0));

      if (fcode == AARCH64_PAUTH_BUILTIN_XPACLRI)
	{
	  /* XPACLRI operates on LR (x30) implicitly.  */
	  rtx lr = gen_rtx_REG (Pmode, R30_REGNUM);
	  icode = CODE_FOR_xpaclri;
	  emit_move_insn (lr, op0);
	  emit_insn (GEN_FCN (icode) ());
	  return lr;
	}
      else
	{
	  tree arg1 = CALL_EXPR_ARG (exp, 1);
	  rtx op1 = force_reg (Pmode, expand_normal (arg1));
	  switch (fcode)
	    {
	    case AARCH64_PAUTH_BUILTIN_AUTIA1716:
	      icode = CODE_FOR_autia1716;
	      break;
	    case AARCH64_PAUTH_BUILTIN_AUTIB1716:
	      icode = CODE_FOR_autib1716;
	      break;
	    case AARCH64_PAUTH_BUILTIN_PACIA1716:
	      icode = CODE_FOR_pacia1716;
	      break;
	    case AARCH64_PAUTH_BUILTIN_PACIB1716:
	      icode = CODE_FOR_pacib1716;
	      break;
	    default:
	      icode = 0;
	      gcc_unreachable ();
	    }

	  /* The *1716 forms take the pointer in x17 and the modifier
	     in x16, and leave the result in x17.  */
	  rtx x16_reg = gen_rtx_REG (Pmode, R16_REGNUM);
	  rtx x17_reg = gen_rtx_REG (Pmode, R17_REGNUM);
	  emit_move_insn (x17_reg, op0);
	  emit_move_insn (x16_reg, op1);
	  emit_insn (GEN_FCN (icode) ());
	  return x17_reg;
	}

    /* FJCVTZS: double input, 32-bit integer result.  */
    case AARCH64_JSCVT:
      {
	expand_operand ops[2];
	create_output_operand (&ops[0], target, SImode);
	op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
	create_input_operand (&ops[1], op0, DFmode);
	expand_insn (CODE_FOR_aarch64_fjcvtzs, 2, ops);
	return ops[0].value;
      }

    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V4HF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V4HF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V4HF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V4HF:
      return aarch64_expand_fcmla_builtin (exp, target, fcode);
    case AARCH64_BUILTIN_RNG_RNDR:
    case AARCH64_BUILTIN_RNG_RNDRRS:
      return aarch64_expand_rng_builtin (exp, target, fcode, ignore);
    }

  /* Builtins not handled above are dispatched by subcode range to the
     per-family expanders.  */
  if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
    return aarch64_simd_expand_builtin (fcode, exp, target);
  else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
    return aarch64_crc32_expand_builtin (fcode, exp, target);

  if (fcode == AARCH64_BUILTIN_RSQRT_DF
      || fcode == AARCH64_BUILTIN_RSQRT_SF
      || fcode == AARCH64_BUILTIN_RSQRT_V2DF
      || fcode == AARCH64_BUILTIN_RSQRT_V2SF
      || fcode == AARCH64_BUILTIN_RSQRT_V4SF)
    return aarch64_expand_builtin_rsqrt (fcode, exp, target);

  if (fcode == AARCH64_TME_BUILTIN_TSTART
      || fcode == AARCH64_TME_BUILTIN_TCOMMIT
      || fcode == AARCH64_TME_BUILTIN_TTEST
      || fcode == AARCH64_TME_BUILTIN_TCANCEL)
    return aarch64_expand_builtin_tme (fcode, exp, target);

  if (fcode == AARCH64_LS64_BUILTIN_LD64B
      || fcode == AARCH64_LS64_BUILTIN_ST64B
      || fcode == AARCH64_LS64_BUILTIN_ST64BV
      || fcode == AARCH64_LS64_BUILTIN_ST64BV0)
    return aarch64_expand_builtin_ls64 (fcode, exp, target);

  if (fcode >= AARCH64_MEMTAG_BUILTIN_START
      && fcode <= AARCH64_MEMTAG_BUILTIN_END)
    return aarch64_expand_builtin_memtag (fcode, exp, target);

  /* Every general builtin subcode must be handled by one of the
     paths above.  */
  gcc_unreachable ();
}
/* Return the AArch64 builtin decl that implements the vectorized form
   of the combined function FN for input vector type TYPE_IN and output
   vector type TYPE_OUT, or NULL_TREE if there is no suitable builtin.
   The AARCH64_CHECK_BUILTIN_MODE macro is redefined before each group
   of cases to encode that group's mode constraints.  */
tree
aarch64_builtin_vectorized_function (unsigned int fn, tree type_out,
				     tree type_in)
{
  machine_mode in_mode, out_mode;

  /* Only vector-to-vector mappings are provided here.  */
  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (type_out);
  in_mode = TYPE_MODE (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
/* Pick the frint-style unary builtin matching the float vector mode.  */
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
   : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
      ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
      : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	 : NULL_TREE)))
  switch (fn)
    {
      /* Float -> float of the same mode.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##N##Fmode && in_mode == V##C##N##Fmode)
    CASE_CFN_FLOOR:
      return AARCH64_FIND_FRINT_VARIANT (floor);
    CASE_CFN_CEIL:
      return AARCH64_FIND_FRINT_VARIANT (ceil);
    CASE_CFN_TRUNC:
      return AARCH64_FIND_FRINT_VARIANT (btrunc);
    CASE_CFN_ROUND:
      return AARCH64_FIND_FRINT_VARIANT (round);
    CASE_CFN_NEARBYINT:
      return AARCH64_FIND_FRINT_VARIANT (nearbyint);
    CASE_CFN_SQRT:
      return AARCH64_FIND_FRINT_VARIANT (sqrt);
      /* Integer -> SI vector of the same element count.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##SImode && in_mode == V##C##N##Imode)
    CASE_CFN_CLZ:
      {
	if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	return NULL_TREE;
      }
    CASE_CFN_CTZ:
      {
	if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
	return NULL_TREE;
      }
      /* Float -> integer of the same mode and element count.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##N##Imode && in_mode == V##C##N##Fmode)
    CASE_CFN_IFLOOR:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLFLOOR:
      {
	enum aarch64_builtins builtin;
	if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	else
	  return NULL_TREE;

	return aarch64_builtin_decls[builtin];
      }
    CASE_CFN_ICEIL:
    CASE_CFN_LCEIL:
    CASE_CFN_LLCEIL:
      {
	enum aarch64_builtins builtin;
	if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	else
	  return NULL_TREE;

	return aarch64_builtin_decls[builtin];
      }
    CASE_CFN_IROUND:
    CASE_CFN_LROUND:
    CASE_CFN_LLROUND:
      {
	enum aarch64_builtins builtin;
	if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	  builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	else
	  return NULL_TREE;

	return aarch64_builtin_decls[builtin];
      }
    default:
      return NULL_TREE;
    }

  return NULL_TREE;
}
0ac198d3 2672
a6fc00da
BH
2673/* Return builtin for reciprocal square root. */
2674
2675tree
6d4d616a 2676aarch64_general_builtin_rsqrt (unsigned int fn)
a6fc00da 2677{
ee62a5a6
RS
2678 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2df)
2679 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2DF];
2680 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2sf)
2681 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2SF];
2682 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv4sf)
2683 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V4SF];
a6fc00da
BH
2684 return NULL_TREE;
2685}
2686
03312cbd
AP
2687/* Return true if the lane check can be removed as there is no
2688 error going to be emitted. */
2689static bool
2690aarch64_fold_builtin_lane_check (tree arg0, tree arg1, tree arg2)
2691{
2692 if (TREE_CODE (arg0) != INTEGER_CST)
2693 return false;
2694 if (TREE_CODE (arg1) != INTEGER_CST)
2695 return false;
2696 if (TREE_CODE (arg2) != INTEGER_CST)
2697 return false;
2698
2699 auto totalsize = wi::to_widest (arg0);
2700 auto elementsize = wi::to_widest (arg1);
2701 if (totalsize == 0 || elementsize == 0)
2702 return false;
2703 auto lane = wi::to_widest (arg2);
2704 auto high = wi::udiv_trunc (totalsize, elementsize);
2705 return wi::ltu_p (lane, high);
2706}
2707
0ac198d3 2708#undef VAR1
bf592b2f 2709#define VAR1(T, N, MAP, FLAG, A) \
e993fea1 2710 case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 2711
/* Try to fold a call to the built-in function with subcode FCODE.  The
   function is passed the N_ARGS arguments in ARGS and it returns a value
   of type TYPE.  Return the new expression on success and NULL_TREE on
   failure.  */
tree
aarch64_general_fold_builtin (unsigned int fcode, tree type,
			      unsigned int n_args ATTRIBUTE_UNUSED, tree *args)
{
  switch (fcode)
    {
      /* The BUILTIN_*/VAR1 macros expand to the case labels for each
	 mode variant of the named builtin.  */
      BUILTIN_VDQF (UNOP, abs, 2, ALL)
	return fold_build1 (ABS_EXPR, type, args[0]);
      VAR1 (UNOP, floatv2si, 2, ALL, v2sf)
      VAR1 (UNOP, floatv4si, 2, ALL, v4sf)
      VAR1 (UNOP, floatv2di, 2, ALL, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
    case AARCH64_SIMD_BUILTIN_LANE_CHECK:
      gcc_assert (n_args == 3);
      /* A provably in-range lane check folds away to nothing.  */
      if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
	return void_node;
      break;
    default:
      break;
    }

  return NULL_TREE;
}
2739
/* Return the aarch64_simd_type accessed by the Advanced SIMD ld1/st1
   builtin with subcode FCODE.  Used when lowering those builtins to
   plain gimple memory accesses; unknown subcodes are a bug.  */
enum aarch64_simd_type
get_mem_type_for_load_store (unsigned int fcode)
{
  switch (fcode)
    {
    VAR1 (LOAD1, ld1, 0, LOAD, v8qi)
    VAR1 (STORE1, st1, 0, STORE, v8qi)
      return Int8x8_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v16qi)
    VAR1 (STORE1, st1, 0, STORE, v16qi)
      return Int8x16_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v4hi)
    VAR1 (STORE1, st1, 0, STORE, v4hi)
      return Int16x4_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v8hi)
    VAR1 (STORE1, st1, 0, STORE, v8hi)
      return Int16x8_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v2si)
    VAR1 (STORE1, st1, 0, STORE, v2si)
      return Int32x2_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v4si)
    VAR1 (STORE1, st1, 0, STORE, v4si)
      return Int32x4_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v2di)
    VAR1 (STORE1, st1, 0, STORE, v2di)
      return Int64x2_t;
    VAR1 (LOAD1_U, ld1, 0, LOAD, v8qi)
    VAR1 (STORE1_U, st1, 0, STORE, v8qi)
      return Uint8x8_t;
    VAR1 (LOAD1_U, ld1, 0, LOAD, v16qi)
    VAR1 (STORE1_U, st1, 0, STORE, v16qi)
      return Uint8x16_t;
    VAR1 (LOAD1_U, ld1, 0, LOAD, v4hi)
    VAR1 (STORE1_U, st1, 0, STORE, v4hi)
      return Uint16x4_t;
    VAR1 (LOAD1_U, ld1, 0, LOAD, v8hi)
    VAR1 (STORE1_U, st1, 0, STORE, v8hi)
      return Uint16x8_t;
    VAR1 (LOAD1_U, ld1, 0, LOAD, v2si)
    VAR1 (STORE1_U, st1, 0, STORE, v2si)
      return Uint32x2_t;
    VAR1 (LOAD1_U, ld1, 0, LOAD, v4si)
    VAR1 (STORE1_U, st1, 0, STORE, v4si)
      return Uint32x4_t;
    VAR1 (LOAD1_U, ld1, 0, LOAD, v2di)
    VAR1 (STORE1_U, st1, 0, STORE, v2di)
      return Uint64x2_t;
    VAR1 (LOAD1_P, ld1, 0, LOAD, v8qi)
    VAR1 (STORE1_P, st1, 0, STORE, v8qi)
      return Poly8x8_t;
    VAR1 (LOAD1_P, ld1, 0, LOAD, v16qi)
    VAR1 (STORE1_P, st1, 0, STORE, v16qi)
      return Poly8x16_t;
    VAR1 (LOAD1_P, ld1, 0, LOAD, v4hi)
    VAR1 (STORE1_P, st1, 0, STORE, v4hi)
      return Poly16x4_t;
    VAR1 (LOAD1_P, ld1, 0, LOAD, v8hi)
    VAR1 (STORE1_P, st1, 0, STORE, v8hi)
      return Poly16x8_t;
    VAR1 (LOAD1_P, ld1, 0, LOAD, v2di)
    VAR1 (STORE1_P, st1, 0, STORE, v2di)
      return Poly64x2_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v4hf)
    VAR1 (STORE1, st1, 0, STORE, v4hf)
      return Float16x4_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v8hf)
    VAR1 (STORE1, st1, 0, STORE, v8hf)
      return Float16x8_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v4bf)
    VAR1 (STORE1, st1, 0, STORE, v4bf)
      return Bfloat16x4_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v8bf)
    VAR1 (STORE1, st1, 0, STORE, v8bf)
      return Bfloat16x8_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v2sf)
    VAR1 (STORE1, st1, 0, STORE, v2sf)
      return Float32x2_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v4sf)
    VAR1 (STORE1, st1, 0, STORE, v4sf)
      return Float32x4_t;
    VAR1 (LOAD1, ld1, 0, LOAD, v2df)
    VAR1 (STORE1, st1, 0, STORE, v2df)
      return Float64x2_t;
    default:
      gcc_unreachable ();
      break;
    }
}
2828
/* Try to fold STMT, given that it's a call to the built-in function with
   subcode FCODE.  Return the new statement on success and null on
   failure.  */
gimple *
aarch64_general_gimple_fold_builtin (unsigned int fcode, gcall *stmt,
				     gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  gimple *new_stmt = NULL;
  unsigned nargs = gimple_call_num_args (stmt);
  tree *args = (nargs > 0
		? gimple_call_arg_ptr (stmt, 0)
		: &error_mark_node);

  /* We use gimple's IFN_REDUC_(PLUS|MIN|MAX)s for float, signed int
     and unsigned int; it will distinguish according to the types of
     the arguments to the __builtin.  */
  switch (fcode)
    {
    BUILTIN_VALL (UNOP, reduc_plus_scal_, 10, ALL)
      new_stmt = gimple_build_call_internal (IFN_REDUC_PLUS,
					     1, args[0]);
      gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
      break;

    /* Lower sqrt builtins to gimple/internal function sqrt.  */
    BUILTIN_VHSDF_DF (UNOP, sqrt, 2, FP)
      new_stmt = gimple_build_call_internal (IFN_SQRT,
					     1, args[0]);
      gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
      break;

    /* Lower store and load neon builtins to gimple.  Only done for
       little-endian, where lane numbering matches memory order.  */
    BUILTIN_VALL_F16 (LOAD1, ld1, 0, LOAD)
    BUILTIN_VDQ_I (LOAD1_U, ld1, 0, LOAD)
    BUILTIN_VALLP_NO_DI (LOAD1_P, ld1, 0, LOAD)
      if (!BYTES_BIG_ENDIAN)
	{
	  enum aarch64_simd_type mem_type
	    = get_mem_type_for_load_store(fcode);
	  aarch64_simd_type_info simd_type
	    = aarch64_simd_types[mem_type];
	  tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
							   VOIDmode, true);
	  tree zero = build_zero_cst (elt_ptr_type);
	  /* Use element type alignment.  */
	  tree access_type
	    = build_aligned_type (simd_type.itype,
				  TYPE_ALIGN (simd_type.eltype));
	  new_stmt
	    = gimple_build_assign (gimple_get_lhs (stmt),
				   fold_build2 (MEM_REF,
						access_type,
						args[0], zero));
	}
      break;

    BUILTIN_VALL_F16 (STORE1, st1, 0, STORE)
    BUILTIN_VDQ_I (STORE1_U, st1, 0, STORE)
    BUILTIN_VALLP_NO_DI (STORE1_P, st1, 0, STORE)
      if (!BYTES_BIG_ENDIAN)
	{
	  enum aarch64_simd_type mem_type
	    = get_mem_type_for_load_store(fcode);
	  aarch64_simd_type_info simd_type
	    = aarch64_simd_types[mem_type];
	  tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
							   VOIDmode, true);
	  tree zero = build_zero_cst (elt_ptr_type);
	  /* Use element type alignment.  */
	  tree access_type
	    = build_aligned_type (simd_type.itype,
				  TYPE_ALIGN (simd_type.eltype));
	  new_stmt
	    = gimple_build_assign (fold_build2 (MEM_REF, access_type,
						args[0], zero),
				   args[1]);
	}
      break;

    BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10, ALL)
    BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10, ALL)
      new_stmt = gimple_build_call_internal (IFN_REDUC_MAX,
					     1, args[0]);
      gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
      break;
    BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10, ALL)
    BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10, ALL)
      new_stmt = gimple_build_call_internal (IFN_REDUC_MIN,
					     1, args[0]);
      gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
      break;
    /* Immediate left shift: fold to LSHIFT_EXPR when the count is a
       constant smaller than the element width.  */
    BUILTIN_VSDQ_I_DI (BINOP, ashl, 3, NONE)
      if (TREE_CODE (args[1]) == INTEGER_CST
	  && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
	new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					LSHIFT_EXPR, args[0], args[1]);
      break;
    /* Register shifts: a negative constant count means a right shift
       by the absolute value.  */
    BUILTIN_VSDQ_I_DI (BINOP, sshl, 0, NONE)
    BUILTIN_VSDQ_I_DI (BINOP_UUS, ushl, 0, NONE)
      {
	tree cst = args[1];
	tree ctype = TREE_TYPE (cst);
	/* Left shifts can be both scalar or vector, e.g. uint64x1_t is
	   treated as a scalar type not a vector one.  */
	if ((cst = uniform_integer_cst_p (cst)) != NULL_TREE)
	  {
	    wide_int wcst = wi::to_wide (cst);
	    tree unit_ty = TREE_TYPE (cst);

	    wide_int abs_cst = wi::abs (wcst);
	    /* Counts >= the element width keep the builtin's own
	       (target-defined) semantics.  */
	    if (wi::geu_p (abs_cst, element_precision (args[0])))
	      break;

	    if (wi::neg_p (wcst, TYPE_SIGN (ctype)))
	      {
		tree final_cst;
		final_cst = wide_int_to_tree (unit_ty, abs_cst);
		if (TREE_CODE (cst) != INTEGER_CST)
		  final_cst = build_uniform_cst (ctype, final_cst);

		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						RSHIFT_EXPR, args[0],
						final_cst);
	      }
	    else
	      new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      LSHIFT_EXPR, args[0], args[1]);
	  }
      }
      break;
    /* Immediate right shifts (arithmetic and logical).  */
    BUILTIN_VDQ_I (SHIFTIMM, ashr, 3, NONE)
    VAR1 (SHIFTIMM, ashr_simd, 0, NONE, di)
    BUILTIN_VDQ_I (USHIFTIMM, lshr, 3, NONE)
    VAR1 (USHIFTIMM, lshr_simd, 0, NONE, di)
      if (TREE_CODE (args[1]) == INTEGER_CST
	  && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
	new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					RSHIFT_EXPR, args[0], args[1]);
      break;
    /* fmulx differs from a plain multiply only for 0 * inf (which it
       defines as +/-2.0), so fold to MULT_EXPR whenever a constant
       operand rules that combination out.  */
    BUILTIN_GPF (BINOP, fmulx, 0, ALL)
      {
	gcc_assert (nargs == 2);
	bool a0_cst_p = TREE_CODE (args[0]) == REAL_CST;
	bool a1_cst_p = TREE_CODE (args[1]) == REAL_CST;
	if (a0_cst_p || a1_cst_p)
	  {
	    if (a0_cst_p && a1_cst_p)
	      {
		tree t0 = TREE_TYPE (args[0]);
		real_value a0 = (TREE_REAL_CST (args[0]));
		real_value a1 = (TREE_REAL_CST (args[1]));
		if (real_equal (&a1, &dconst0))
		  std::swap (a0, a1);
		/* According to real_equal (), +0 equals -0.  */
		if (real_equal (&a0, &dconst0) && real_isinf (&a1))
		  {
		    real_value res = dconst2;
		    res.sign = a0.sign ^ a1.sign;
		    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						    REAL_CST,
						    build_real (t0, res));
		  }
		else
		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						  MULT_EXPR,
						  args[0], args[1]);
	      }
	    else /* a0_cst_p ^ a1_cst_p.  */
	      {
		real_value const_part = a0_cst_p
		  ? TREE_REAL_CST (args[0]) : TREE_REAL_CST (args[1]);
		if (!real_equal (&const_part, &dconst0)
		    && !real_isinf (&const_part))
		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						  MULT_EXPR, args[0],
						  args[1]);
	      }
	  }
	if (new_stmt)
	  {
	    gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	    gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  }
	break;
      }
    case AARCH64_SIMD_BUILTIN_LANE_CHECK:
      /* A provably in-range lane check is dead code; drop it.  */
      if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
	{
	  unlink_stmt_vdef (stmt);
	  release_defs (stmt);
	  new_stmt = gimple_build_nop ();
	}
      break;
    default:
      break;
    }
  return new_stmt;
}
3027
/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV: build the trees for the
   HOLD, CLEAR and UPDATE steps used when expanding an atomic
   compound assignment on a floating-point operand.  */
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  /* FPSR/FPCR exception bits; trap-enable bits in the FPCR sit
     AARCH64_FE_EXCEPT_SHIFT bits above the FPSR flag bits.  */
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalence of :
     unsigned int fenv_cr;
     fenv_cr = __builtin_aarch64_get_fpcr ();

     unsigned int fenv_sr;
     fenv_sr = __builtin_aarch64_get_fpsr ();

     Now set all exceptions to non-stop
     unsigned int mask_cr
       = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
     unsigned int masked_cr;
     masked_cr = fenv_cr & mask_cr;

     And clear all exception flags
     unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
     unsigned int masked_sr;
     masked_sr = fenv_sr & mask_sr;

     __builtin_aarch64_set_fpcr (masked_cr);
     __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var_raw (unsigned_type_node);
  fenv_sr = create_tmp_var_raw (unsigned_type_node);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0),
		       NULL_TREE, NULL_TREE);
  ld_fenv_sr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0),
		       NULL_TREE, NULL_TREE);

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
     __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of :
     unsigned int new_fenv_var;
     new_fenv_var = __builtin_aarch64_get_fpsr ();

     __builtin_aarch64_set_fpsr (fenv_sr);

     __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var_raw (unsigned_type_node);
  reload_fenv = build4 (TARGET_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0),
			NULL_TREE, NULL_TREE);
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}
3129
/* Resolve overloaded MEMTAG built-in functions.  */
#define AARCH64_BUILTIN_SUBCODE(F) \
  (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)

/* Rewrite TREE_TYPE (FNDECL) so that the MEMTAG builtin accepts and
   returns the caller's pointer type.  PASS_PARAMS is the argument
   vector from the front end.  On any mismatch the declared (init)
   type is restored so normal diagnostics fire.  Always returns
   NULL_TREE (resolution happens through the type rewrite).  */
static tree
aarch64_resolve_overloaded_memtag (location_t loc,
				   tree fndecl, void *pass_params)
{
  vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
  unsigned param_num = params ? params->length() : 0;
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
  tree inittype = aarch64_memtag_builtin_data[
		    fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
  unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;

  /* Wrong argument count: fall back to the declared type.  */
  if (param_num != arg_num)
    {
      TREE_TYPE (fndecl) = inittype;
      return NULL_TREE;
    }
  tree retype = NULL;

  if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
    {
      /* SUBP takes two pointers and returns a ptrdiff.  */
      tree t0 = TREE_TYPE ((*params)[0]);
      tree t1 = TREE_TYPE ((*params)[1]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	t0 = ptr_type_node;
      if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
	t1 = ptr_type_node;

      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int)tree_to_shwi (DECL_SIZE ((*params)[0])));

      if (TYPE_MODE (t1) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
		    (int)tree_to_shwi (DECL_SIZE ((*params)[1])));

      retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
    }
  else
    {
      /* The remaining builtins take a pointer first argument.  */
      tree t0 = TREE_TYPE ((*params)[0]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	{
	  TREE_TYPE (fndecl) = inittype;
	  return NULL_TREE;
	}

      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int)tree_to_shwi (DECL_SIZE ((*params)[0])));

      switch (fcode)
	{
	case AARCH64_MEMTAG_BUILTIN_IRG:
	  retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GMI:
	  retype = build_function_type_list (uint64_type_node, t0,
					     uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_INC_TAG:
	  retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_SET_TAG:
	  retype = build_function_type_list (void_type_node, t0, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GET_TAG:
	  retype = build_function_type_list (t0, t0, NULL);
	  break;
	default:
	  return NULL_TREE;
	}
    }

  if (!retype || retype == error_mark_node)
    TREE_TYPE (fndecl) = inittype;
  else
    TREE_TYPE (fndecl) = retype;

  return NULL_TREE;
}
3216
e53b6e56 3217/* Called at aarch64_resolve_overloaded_builtin in aarch64-c.cc. */
ef01e6bb
DZ
3218tree
3219aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
3220 void *pass_params)
3221{
3222 unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);
3223
3224 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
3225 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
3226 return aarch64_resolve_overloaded_memtag(loc, function, pass_params);
3227
3228 return NULL_TREE;
3229}
aa87aced 3230
42fc9a7f
JG
3231#undef AARCH64_CHECK_BUILTIN_MODE
3232#undef AARCH64_FIND_FRINT_VARIANT
0ddec79f
JG
3233#undef CF0
3234#undef CF1
3235#undef CF2
3236#undef CF3
3237#undef CF4
3238#undef CF10
3239#undef VAR1
3240#undef VAR2
3241#undef VAR3
3242#undef VAR4
3243#undef VAR5
3244#undef VAR6
3245#undef VAR7
3246#undef VAR8
3247#undef VAR9
3248#undef VAR10
3249#undef VAR11
3250
3c03d39d 3251#include "gt-aarch64-builtins.h"