/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2023 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "basic-block.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "memmodel.h"
#include "tm_p.h"
#include "expmed.h"
#include "optabs.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "expr.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "case-cfn-macros.h"
#include "emit-rtl.h"
#include "stringpool.h"
#include "attribs.h"
#include "gimple-fold.h"
#include "builtins.h"

#define v8qi_UP E_V8QImode
#define v8di_UP E_V8DImode
#define v4hi_UP E_V4HImode
#define v4hf_UP E_V4HFmode
#define v2si_UP E_V2SImode
#define v2sf_UP E_V2SFmode
#define v1df_UP E_V1DFmode
#define v1di_UP E_V1DImode
#define di_UP E_DImode
#define df_UP E_DFmode
#define v16qi_UP E_V16QImode
#define v8hi_UP E_V8HImode
#define v8hf_UP E_V8HFmode
#define v4si_UP E_V4SImode
#define v4sf_UP E_V4SFmode
#define v2di_UP E_V2DImode
#define v2df_UP E_V2DFmode
#define ti_UP E_TImode
#define oi_UP E_OImode
#define ci_UP E_CImode
#define xi_UP E_XImode
#define si_UP E_SImode
#define sf_UP E_SFmode
#define hi_UP E_HImode
#define hf_UP E_HFmode
#define qi_UP E_QImode
#define bf_UP E_BFmode
#define v4bf_UP E_V4BFmode
#define v8bf_UP E_V8BFmode
#define v2x8qi_UP E_V2x8QImode
#define v2x4hi_UP E_V2x4HImode
#define v2x4hf_UP E_V2x4HFmode
#define v2x4bf_UP E_V2x4BFmode
#define v2x2si_UP E_V2x2SImode
#define v2x2sf_UP E_V2x2SFmode
#define v2x1di_UP E_V2x1DImode
#define v2x1df_UP E_V2x1DFmode
#define v2x16qi_UP E_V2x16QImode
#define v2x8hi_UP E_V2x8HImode
#define v2x8hf_UP E_V2x8HFmode
#define v2x8bf_UP E_V2x8BFmode
#define v2x4si_UP E_V2x4SImode
#define v2x4sf_UP E_V2x4SFmode
#define v2x2di_UP E_V2x2DImode
#define v2x2df_UP E_V2x2DFmode
#define v3x8qi_UP E_V3x8QImode
#define v3x4hi_UP E_V3x4HImode
#define v3x4hf_UP E_V3x4HFmode
#define v3x4bf_UP E_V3x4BFmode
#define v3x2si_UP E_V3x2SImode
#define v3x2sf_UP E_V3x2SFmode
#define v3x1di_UP E_V3x1DImode
#define v3x1df_UP E_V3x1DFmode
#define v3x16qi_UP E_V3x16QImode
#define v3x8hi_UP E_V3x8HImode
#define v3x8hf_UP E_V3x8HFmode
#define v3x8bf_UP E_V3x8BFmode
#define v3x4si_UP E_V3x4SImode
#define v3x4sf_UP E_V3x4SFmode
#define v3x2di_UP E_V3x2DImode
#define v3x2df_UP E_V3x2DFmode
#define v4x8qi_UP E_V4x8QImode
#define v4x4hi_UP E_V4x4HImode
#define v4x4hf_UP E_V4x4HFmode
#define v4x4bf_UP E_V4x4BFmode
#define v4x2si_UP E_V4x2SImode
#define v4x2sf_UP E_V4x2SFmode
#define v4x1di_UP E_V4x1DImode
#define v4x1df_UP E_V4x1DFmode
#define v4x16qi_UP E_V4x16QImode
#define v4x8hi_UP E_V4x8HImode
#define v4x8hf_UP E_V4x8HFmode
#define v4x8bf_UP E_V4x8BFmode
#define v4x4si_UP E_V4x4SImode
#define v4x4sf_UP E_V4x4SFmode
#define v4x2di_UP E_V4x2DImode
#define v4x2df_UP E_V4x2DFmode
#define UP(X) X##_UP
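
/* Illustration (not part of the build): UP (v8qi) token-pastes to v8qi_UP,
   which the table above maps to E_V8QImode.  This is how table entries can
   refer to machine modes by their lower-case names.  */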

#define MODE_d_bf16 E_V4BFmode
#define MODE_d_f16 E_V4HFmode
#define MODE_d_f32 E_V2SFmode
#define MODE_d_f64 E_V1DFmode
#define MODE_d_s8 E_V8QImode
#define MODE_d_s16 E_V4HImode
#define MODE_d_s32 E_V2SImode
#define MODE_d_s64 E_V1DImode
#define MODE_d_u8 E_V8QImode
#define MODE_d_u16 E_V4HImode
#define MODE_d_u32 E_V2SImode
#define MODE_d_u64 E_V1DImode
#define MODE_d_p8 E_V8QImode
#define MODE_d_p16 E_V4HImode
#define MODE_d_p64 E_V1DImode
#define MODE_q_bf16 E_V8BFmode
#define MODE_q_f16 E_V8HFmode
#define MODE_q_f32 E_V4SFmode
#define MODE_q_f64 E_V2DFmode
#define MODE_q_s8 E_V16QImode
#define MODE_q_s16 E_V8HImode
#define MODE_q_s32 E_V4SImode
#define MODE_q_s64 E_V2DImode
#define MODE_q_u8 E_V16QImode
#define MODE_q_u16 E_V8HImode
#define MODE_q_u32 E_V4SImode
#define MODE_q_u64 E_V2DImode
#define MODE_q_p8 E_V16QImode
#define MODE_q_p16 E_V8HImode
#define MODE_q_p64 E_V2DImode
#define MODE_q_p128 E_TImode

#define QUAL_bf16 qualifier_none
#define QUAL_f16 qualifier_none
#define QUAL_f32 qualifier_none
#define QUAL_f64 qualifier_none
#define QUAL_s8 qualifier_none
#define QUAL_s16 qualifier_none
#define QUAL_s32 qualifier_none
#define QUAL_s64 qualifier_none
#define QUAL_u8 qualifier_unsigned
#define QUAL_u16 qualifier_unsigned
#define QUAL_u32 qualifier_unsigned
#define QUAL_u64 qualifier_unsigned
#define QUAL_p8 qualifier_poly
#define QUAL_p16 qualifier_poly
#define QUAL_p64 qualifier_poly
#define QUAL_p128 qualifier_poly

#define LENGTH_d ""
#define LENGTH_q "q"

#define SIMD_INTR_MODE(suffix, length) MODE_##length##_##suffix
#define SIMD_INTR_QUAL(suffix) QUAL_##suffix
#define SIMD_INTR_LENGTH_CHAR(length) LENGTH_##length
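
/* Illustration (not part of the build): SIMD_INTR_MODE (f32, q) expands to
   MODE_q_f32, i.e. E_V4SFmode, while SIMD_INTR_QUAL (u8) expands to QUAL_u8,
   i.e. qualifier_unsigned.  */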


#define SIMD_MAX_BUILTIN_ARGS 5

enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0 */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1 */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2 */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3 */
  qualifier_maybe_immediate = 0x10, /* 1 << 4 */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5 */
  /* 1 << 6 is now unused.  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7 */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for big-endian.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single-lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400,
  /* Lane indices selected in pairs - must be in range, and flipped for
     big-endian.  */
  qualifier_lane_pair_index = 0x800,
  /* Lane indices selected in quadtuplets - must be in range, and flipped
     for big-endian.  */
  qualifier_lane_quadtup_index = 0x1000,
};
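
/* Illustration (not part of the build): the composite values above are just
   ORs of the single-bit qualifiers, e.g.
     qualifier_const_pointer_map_mode (0x86)
       == qualifier_const (0x2) | qualifier_pointer (0x4)
	  | qualifier_map_mode (0x80),
   describing a "const T *" operand whose element type T is derived from the
   instruction's machine mode rather than from the operand itself.  */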

/* Flags that describe what a function might do.  */
const unsigned int FLAG_NONE = 0U;
const unsigned int FLAG_READ_FPCR = 1U << 0;
const unsigned int FLAG_RAISE_FP_EXCEPTIONS = 1U << 1;
const unsigned int FLAG_READ_MEMORY = 1U << 2;
const unsigned int FLAG_PREFETCH_MEMORY = 1U << 3;
const unsigned int FLAG_WRITE_MEMORY = 1U << 4;

/* Not all FP intrinsics raise FP exceptions or read the FPCR register;
   use this flag to suppress those properties.  */
const unsigned int FLAG_AUTO_FP = 1U << 5;

const unsigned int FLAG_FP = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS;
const unsigned int FLAG_ALL = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS
  | FLAG_READ_MEMORY | FLAG_PREFETCH_MEMORY | FLAG_WRITE_MEMORY;
const unsigned int FLAG_STORE = FLAG_WRITE_MEMORY | FLAG_AUTO_FP;
const unsigned int FLAG_LOAD = FLAG_READ_MEMORY | FLAG_AUTO_FP;

typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code code;
  unsigned int fcode;
  enum aarch64_type_qualifiers *qualifiers;
  unsigned int flags;
} aarch64_simd_builtin_datum;

static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_UNOPUS (aarch64_types_unopus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_BINOP_USS (aarch64_types_binop_uss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_unsigned };
#define TYPES_BINOP_PPU (aarch64_types_binop_ppu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_TERNOPU_LANE (aarch64_types_ternopu_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_sssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_TERNOP_SSSU (aarch64_types_ternop_sssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
#define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_suss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_TERNOP_SUSS (aarch64_types_ternop_suss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_pppu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_unsigned };
#define TYPES_TERNOP_PPPU (aarch64_types_binop_pppu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_pair_index };
#define TYPES_QUADOP_LANE_PAIR (aarch64_types_quadop_lane_pair_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_index };
#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_QUADOPU_LANE (aarch64_types_quadopu_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopssus_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSUS_LANE_QUADTUP \
	(aarch64_types_quadopssus_lane_quadtup_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopsssu_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSSU_LANE_QUADTUP \
	(aarch64_types_quadopsssu_lane_quadtup_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_QUADOPUI (aarch64_types_quadopu_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_fcvt_from_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate };
#define TYPES_FCVTIMM_SUS (aarch64_types_fcvt_from_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)
#define TYPES_USHIFT2IMM (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift2_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none,
      qualifier_immediate };
#define TYPES_SHIFT2IMM_UUSS (aarch64_types_shift2_to_unsigned_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_s_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate};
#define TYPES_SETREG (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFT2IMM (aarch64_types_ternop_s_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_p_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_immediate};
#define TYPES_SHIFTINSERTP (aarch64_types_ternop_p_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_load1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1_U (aarch64_types_load1_u_qualifiers)
#define TYPES_LOADSTRUCT_U (aarch64_types_load1_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_load1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1_P (aarch64_types_load1_p_qualifiers)
#define TYPES_LOADSTRUCT_P (aarch64_types_load1_p_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_const_pointer_map_mode,
      qualifier_unsigned, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE_U (aarch64_types_loadstruct_lane_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_const_pointer_map_mode,
      qualifier_poly, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE_P (aarch64_types_loadstruct_lane_p_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The return type of a store should be void, which we represent with
   qualifier_void.  The first operand will be a DImode pointer to the
   location to store to, so we must use qualifier_map_mode
   | qualifier_pointer to build a pointer to the element type of the
   vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_unsigned };
#define TYPES_STORE1_U (aarch64_types_store1_u_qualifiers)
#define TYPES_STORESTRUCT_U (aarch64_types_store1_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_poly };
#define TYPES_STORE1_P (aarch64_types_store1_p_qualifiers)
#define TYPES_STORESTRUCT_P (aarch64_types_store1_p_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_unsigned, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE_U (aarch64_types_storestruct_lane_u_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_poly, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE_P (aarch64_types_storestruct_lane_p_qualifiers)

#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X
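
/* Illustration (not part of the build): the MAP field of a .def entry picks
   one of these, e.g. CF0 (addp, v8qi) yields CODE_FOR_aarch64_addpv8qi,
   while CF10 pastes no prefix or numeric suffix at all.  The "addp" name is
   only a hypothetical example.  */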

/* Define cascading VAR<N> macros that are used from
   aarch64-builtin-iterators.h to iterate over modes.  These definitions
   will end up generating a number of VAR1 expansions and code later on in
   the file should redefine VAR1 to whatever it needs to process on a
   per-mode basis.  */
#define VAR2(T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, A) \
  VAR1 (T, N, MAP, FLAG, B)
#define VAR3(T, N, MAP, FLAG, A, B, C) \
  VAR2 (T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, C)
#define VAR4(T, N, MAP, FLAG, A, B, C, D) \
  VAR3 (T, N, MAP, FLAG, A, B, C) \
  VAR1 (T, N, MAP, FLAG, D)
#define VAR5(T, N, MAP, FLAG, A, B, C, D, E) \
  VAR4 (T, N, MAP, FLAG, A, B, C, D) \
  VAR1 (T, N, MAP, FLAG, E)
#define VAR6(T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, FLAG, A, B, C, D, E) \
  VAR1 (T, N, MAP, FLAG, F)
#define VAR7(T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, FLAG, G)
#define VAR8(T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, FLAG, H)
#define VAR9(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, FLAG, I)
#define VAR10(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, FLAG, J)
#define VAR11(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, FLAG, K)
#define VAR12(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, FLAG, L)
#define VAR13(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR12 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR1 (T, N, MAP, FLAG, M)
#define VAR14(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR13 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR1 (T, X, MAP, FLAG, N)
#define VAR15(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR14 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR1 (T, X, MAP, FLAG, O)
#define VAR16(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) \
  VAR15 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR1 (T, X, MAP, FLAG, P)
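
/* Illustration (not part of the build): each VAR<N> peels off its last mode
   and recurses, so
     VAR3 (BINOP, add, 0, NONE, v8qi, v4hi, v2si)
   expands to
     VAR1 (BINOP, add, 0, NONE, v8qi)
     VAR1 (BINOP, add, 0, NONE, v4hi)
     VAR1 (BINOP, add, 0, NONE, v2si)
   with the meaning of VAR1 chosen by whatever definition is live at the
   point of use ("add" is a hypothetical entry name).  */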

#include "aarch64-builtin-iterators.h"

/* The builtins below should be expanded through the standard optabs
   CODE_FOR_[u]avg<mode>3_[floor,ceil].  However the mapping scheme in
   aarch64-simd-builtins.def does not easily allow us to have a pre-mode
   ("uavg") and post-mode string ("_ceil") in the CODE_FOR_* construction.
   So the builtins use a name that is natural for AArch64 instructions
   e.g. "aarch64_srhadd<mode>" and we re-map these to the optab-related
   CODE_FOR_ here.  */
#undef VAR1
#define VAR1(F,T1,T2,I,M) \
constexpr insn_code CODE_FOR_aarch64_##F##M = CODE_FOR_##T1##M##3##T2;

BUILTIN_VDQ_BHSI (srhadd, avg, _ceil, 0)
BUILTIN_VDQ_BHSI (urhadd, uavg, _ceil, 0)
BUILTIN_VDQ_BHSI (shadd, avg, _floor, 0)
BUILTIN_VDQ_BHSI (uhadd, uavg, _floor, 0)
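
/* Illustration (not part of the build): for the V8QI mode, the first line
   above defines
     constexpr insn_code CODE_FOR_aarch64_srhaddv8qi = CODE_FOR_avgv8qi3_ceil;
   and likewise for every mode covered by BUILTIN_VDQ_BHSI.  */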

#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T, FLAG_##FLAG},
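
/* Illustration (not part of the build): with this definition of VAR1, a
   hypothetical .def line VAR1 (BINOP, add, 0, NONE, v8qi) becomes the table
   entry
     {"addv8qi", E_V8QImode, CODE_FOR_aarch64_addv8qi, 0, TYPES_BINOP,
      FLAG_NONE},
   i.e. the name and mode are pasted together and the CF<MAP> macro selects
   the insn code.  */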

static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};

/* There are only 8 CRC32 builtins.  Probably not worth their own .def
   file.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)

/* The next 8 FCMLA intrinsics require some special handling compared to the
   normal SIMD intrinsics.  */
#define AARCH64_SIMD_FCMLA_LANEQ_BUILTINS \
  FCMLA_LANEQ_BUILTIN (0, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (90, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (180, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (270, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (0, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (90, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (180, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (270, v4hf, fcmla_laneq, V4HF, true) \


/* vreinterpret intrinsics are defined for any pair of element types.
   { _bf16              }   { _bf16              }
   { _f16 _f32 _f64     }   { _f16 _f32 _f64     }
   { _s8 _s16 _s32 _s64 } x { _s8 _s16 _s32 _s64 }
   { _u8 _u16 _u32 _u64 }   { _u8 _u16 _u32 _u64 }
   { _p8 _p16 _p64      }   { _p8 _p16 _p64      }.  */
#define VREINTERPRET_BUILTIN2(A, B) \
  VREINTERPRET_BUILTIN (A, B, d)

#define VREINTERPRET_BUILTINS1(A) \
  VREINTERPRET_BUILTIN2 (A, bf16) \
  VREINTERPRET_BUILTIN2 (A, f16) \
  VREINTERPRET_BUILTIN2 (A, f32) \
  VREINTERPRET_BUILTIN2 (A, f64) \
  VREINTERPRET_BUILTIN2 (A, s8) \
  VREINTERPRET_BUILTIN2 (A, s16) \
  VREINTERPRET_BUILTIN2 (A, s32) \
  VREINTERPRET_BUILTIN2 (A, s64) \
  VREINTERPRET_BUILTIN2 (A, u8) \
  VREINTERPRET_BUILTIN2 (A, u16) \
  VREINTERPRET_BUILTIN2 (A, u32) \
  VREINTERPRET_BUILTIN2 (A, u64) \
  VREINTERPRET_BUILTIN2 (A, p8) \
  VREINTERPRET_BUILTIN2 (A, p16) \
  VREINTERPRET_BUILTIN2 (A, p64)

#define VREINTERPRET_BUILTINS \
  VREINTERPRET_BUILTINS1 (bf16) \
  VREINTERPRET_BUILTINS1 (f16) \
  VREINTERPRET_BUILTINS1 (f32) \
  VREINTERPRET_BUILTINS1 (f64) \
  VREINTERPRET_BUILTINS1 (s8) \
  VREINTERPRET_BUILTINS1 (s16) \
  VREINTERPRET_BUILTINS1 (s32) \
  VREINTERPRET_BUILTINS1 (s64) \
  VREINTERPRET_BUILTINS1 (u8) \
  VREINTERPRET_BUILTINS1 (u16) \
  VREINTERPRET_BUILTINS1 (u32) \
  VREINTERPRET_BUILTINS1 (u64) \
  VREINTERPRET_BUILTINS1 (p8) \
  VREINTERPRET_BUILTINS1 (p16) \
  VREINTERPRET_BUILTINS1 (p64)

/* vreinterpretq intrinsics are additionally defined for p128.
   { _bf16                    }   { _bf16                    }
   { _f16 _f32 _f64           }   { _f16 _f32 _f64           }
   { _s8 _s16 _s32 _s64       } x { _s8 _s16 _s32 _s64       }
   { _u8 _u16 _u32 _u64       }   { _u8 _u16 _u32 _u64       }
   { _p8 _p16 _p64 _p128      }   { _p8 _p16 _p64 _p128      }.  */
#define VREINTERPRETQ_BUILTIN2(A, B) \
  VREINTERPRET_BUILTIN (A, B, q)

#define VREINTERPRETQ_BUILTINS1(A) \
  VREINTERPRETQ_BUILTIN2 (A, bf16) \
  VREINTERPRETQ_BUILTIN2 (A, f16) \
  VREINTERPRETQ_BUILTIN2 (A, f32) \
  VREINTERPRETQ_BUILTIN2 (A, f64) \
  VREINTERPRETQ_BUILTIN2 (A, s8) \
  VREINTERPRETQ_BUILTIN2 (A, s16) \
  VREINTERPRETQ_BUILTIN2 (A, s32) \
  VREINTERPRETQ_BUILTIN2 (A, s64) \
  VREINTERPRETQ_BUILTIN2 (A, u8) \
  VREINTERPRETQ_BUILTIN2 (A, u16) \
  VREINTERPRETQ_BUILTIN2 (A, u32) \
  VREINTERPRETQ_BUILTIN2 (A, u64) \
  VREINTERPRETQ_BUILTIN2 (A, p8) \
  VREINTERPRETQ_BUILTIN2 (A, p16) \
  VREINTERPRETQ_BUILTIN2 (A, p64) \
  VREINTERPRETQ_BUILTIN2 (A, p128)

#define VREINTERPRETQ_BUILTINS \
  VREINTERPRETQ_BUILTINS1 (bf16) \
  VREINTERPRETQ_BUILTINS1 (f16) \
  VREINTERPRETQ_BUILTINS1 (f32) \
  VREINTERPRETQ_BUILTINS1 (f64) \
  VREINTERPRETQ_BUILTINS1 (s8) \
  VREINTERPRETQ_BUILTINS1 (s16) \
  VREINTERPRETQ_BUILTINS1 (s32) \
  VREINTERPRETQ_BUILTINS1 (s64) \
  VREINTERPRETQ_BUILTINS1 (u8) \
  VREINTERPRETQ_BUILTINS1 (u16) \
  VREINTERPRETQ_BUILTINS1 (u32) \
  VREINTERPRETQ_BUILTINS1 (u64) \
  VREINTERPRETQ_BUILTINS1 (p8) \
  VREINTERPRETQ_BUILTINS1 (p16) \
  VREINTERPRETQ_BUILTINS1 (p64) \
  VREINTERPRETQ_BUILTINS1 (p128)

#define AARCH64_SIMD_VREINTERPRET_BUILTINS \
  VREINTERPRET_BUILTINS \
  VREINTERPRETQ_BUILTINS

typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
} aarch64_crc_builtin_datum;

/* Hold information about how to expand the FCMLA_LANEQ builtins.  */
typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
  bool lane;
} aarch64_fcmla_laneq_builtin_datum;

/* Hold information about how to declare SIMD intrinsics.  */
typedef struct
{
  const char *name;
  unsigned int fcode;
  unsigned int op_count;
  machine_mode op_modes[SIMD_MAX_BUILTIN_ARGS];
  enum aarch64_type_qualifiers qualifiers[SIMD_MAX_BUILTIN_ARGS];
  unsigned int flags;
  bool skip;
} aarch64_simd_intrinsic_datum;

#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M,

#define VREINTERPRET_BUILTIN(A, B, L) \
  AARCH64_SIMD_BUILTIN_VREINTERPRET##L##_##A##_##B,

#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_BUILTIN_GET_FPCR64,
  AARCH64_BUILTIN_SET_FPCR64,
  AARCH64_BUILTIN_GET_FPSR64,
  AARCH64_BUILTIN_SET_FPSR64,

  AARCH64_BUILTIN_RSQRT_DF,
  AARCH64_BUILTIN_RSQRT_SF,
  AARCH64_BUILTIN_RSQRT_V2DF,
  AARCH64_BUILTIN_RSQRT_V2SF,
  AARCH64_BUILTIN_RSQRT_V4SF,
  AARCH64_SIMD_BUILTIN_BASE,
  AARCH64_SIMD_BUILTIN_LANE_CHECK,
#include "aarch64-simd-builtins.def"
  /* The first enum element which is based on an insn_data pattern.  */
  AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
			     + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  /* SIMD intrinsic builtins.  */
  AARCH64_SIMD_VREINTERPRET_BUILTINS
  /* Armv8.3-A Pointer Authentication builtins.  */
  AARCH64_PAUTH_BUILTIN_AUTIA1716,
  AARCH64_PAUTH_BUILTIN_PACIA1716,
  AARCH64_PAUTH_BUILTIN_AUTIB1716,
  AARCH64_PAUTH_BUILTIN_PACIB1716,
  AARCH64_PAUTH_BUILTIN_XPACLRI,
  /* Special-cased Armv8.3-A complex FMA by lane quad builtins.  */
  AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE,
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
  /* Builtin for the Armv8.3-A JavaScript conversion instruction.  */
  AARCH64_JSCVT,
  /* TME builtins.  */
  AARCH64_TME_BUILTIN_TSTART,
  AARCH64_TME_BUILTIN_TCOMMIT,
  AARCH64_TME_BUILTIN_TTEST,
  AARCH64_TME_BUILTIN_TCANCEL,
  /* Armv8.5-A RNG instruction builtins.  */
  AARCH64_BUILTIN_RNG_RNDR,
  AARCH64_BUILTIN_RNG_RNDRRS,
  /* MEMTAG builtins.  */
  AARCH64_MEMTAG_BUILTIN_START,
  AARCH64_MEMTAG_BUILTIN_IRG,
  AARCH64_MEMTAG_BUILTIN_GMI,
  AARCH64_MEMTAG_BUILTIN_SUBP,
  AARCH64_MEMTAG_BUILTIN_INC_TAG,
  AARCH64_MEMTAG_BUILTIN_SET_TAG,
  AARCH64_MEMTAG_BUILTIN_GET_TAG,
  AARCH64_MEMTAG_BUILTIN_END,
  /* LS64 builtins.  */
  AARCH64_LS64_BUILTIN_LD64B,
  AARCH64_LS64_BUILTIN_ST64B,
  AARCH64_LS64_BUILTIN_ST64BV,
  AARCH64_LS64_BUILTIN_ST64BV0,
  AARCH64_REV16,
  AARCH64_REV16L,
  AARCH64_REV16LL,
  AARCH64_RBIT,
  AARCH64_RBITL,
  AARCH64_RBITLL,
  /* System register builtins.  */
  AARCH64_RSR,
  AARCH64_RSRP,
  AARCH64_RSR64,
  AARCH64_RSRF,
  AARCH64_RSRF64,
  AARCH64_WSR,
  AARCH64_WSRP,
  AARCH64_WSR64,
  AARCH64_WSRF,
  AARCH64_WSRF64,
  AARCH64_BUILTIN_MAX
};

#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, E_##M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};


#undef FCMLA_LANEQ_BUILTIN
#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  {"__builtin_aarch64_fcmla_laneq"#I#N, E_##M##mode, CODE_FOR_aarch64_##X##I##N, \
   AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M, T},

/* This structure describes how to map the builtin to the instruction to
   generate in the backend and how to invoke the instruction.  */
static aarch64_fcmla_laneq_builtin_datum aarch64_fcmla_lane_builtin_data[] = {
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
};

#undef VREINTERPRET_BUILTIN
#define VREINTERPRET_BUILTIN(A, B, L) \
  {"vreinterpret" SIMD_INTR_LENGTH_CHAR(L) "_" #A "_" #B, \
   AARCH64_SIMD_BUILTIN_VREINTERPRET##L##_##A##_##B, \
   2, \
   { SIMD_INTR_MODE(A, L), SIMD_INTR_MODE(B, L) }, \
   { SIMD_INTR_QUAL(A), SIMD_INTR_QUAL(B) }, \
   FLAG_AUTO_FP, \
   SIMD_INTR_MODE(A, L) == SIMD_INTR_MODE(B, L) \
     && SIMD_INTR_QUAL(A) == SIMD_INTR_QUAL(B) \
  },
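
/* Illustration (not part of the build): VREINTERPRET_BUILTIN (f32, s8, q)
   produces the datum
     {"vreinterpretq_f32_s8", AARCH64_SIMD_BUILTIN_VREINTERPRETq_f32_s8,
      2, { E_V4SFmode, E_V16QImode }, { qualifier_none, qualifier_none },
      FLAG_AUTO_FP, false},
   where the final field marks no-op reinterprets (same mode and qualifier)
   to be skipped.  */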

static const aarch64_simd_intrinsic_datum aarch64_simd_intrinsic_data[] = {
  AARCH64_SIMD_VREINTERPRET_BUILTINS
};


#undef CRC32_BUILTIN

static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types is implementation defined.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_hf",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  "__builtin_aarch64_simd_bf",
  NULL
};

#define ENTRY(E, M, Q, G) E,
enum aarch64_simd_type
{
#include "aarch64-simd-builtin-types.def"
  ARM_NEON_H_TYPES_LAST
};
#undef ENTRY

struct GTY(()) aarch64_simd_type_info
{
  enum aarch64_simd_type type;

  /* Internal type name.  */
  const char *name;

  /* Internal type name (mangled).  The mangled names conform to the
     AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
     Appendix A).  To qualify for emission with the mangled names defined in
     that document, a vector type must not only be of the correct mode but
     also be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t);
     these types are registered by aarch64_init_simd_builtin_types ().  In
     other words, vector types defined in other ways e.g. via vector_size
     attribute will get default mangled names.  */
  const char *mangle;

  /* Internal type.  */
  tree itype;

  /* Element type.  */
  tree eltype;

  /* Machine mode the internal type maps to.  */
  enum machine_mode mode;

  /* Qualifiers.  */
  enum aarch64_type_qualifiers q;
};

#define ENTRY(E, M, Q, G) \
  {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, E_##M##mode, qualifier_##Q},
static GTY(()) struct aarch64_simd_type_info aarch64_simd_types [] = {
#include "aarch64-simd-builtin-types.def"
};
#undef ENTRY

static machine_mode aarch64_simd_tuple_modes[ARM_NEON_H_TYPES_LAST][3];
static GTY(()) tree aarch64_simd_tuple_types[ARM_NEON_H_TYPES_LAST][3];

static GTY(()) tree aarch64_simd_intOI_type_node = NULL_TREE;
static GTY(()) tree aarch64_simd_intCI_type_node = NULL_TREE;
static GTY(()) tree aarch64_simd_intXI_type_node = NULL_TREE;

/* The user-visible __fp16 type, and a pointer to that type.  Used
   across the back-end.  */
tree aarch64_fp16_type_node = NULL_TREE;
tree aarch64_fp16_ptr_type_node = NULL_TREE;

/* Back-end node type for brain float (bfloat) types.  */
tree aarch64_bf16_ptr_type_node = NULL_TREE;

/* Wrapper around add_builtin_function.  NAME is the name of the built-in
   function, TYPE is the function type, CODE is the function subcode
   (relative to AARCH64_BUILTIN_GENERAL), and ATTRS is the function
   attributes.  */
static tree
aarch64_general_add_builtin (const char *name, tree type, unsigned int code,
			     tree attrs = NULL_TREE)
{
  code = (code << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
  return add_builtin_function (name, type, code, BUILT_IN_MD,
			       NULL, attrs);
}
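
/* Illustration (not part of the build): the packed CODE value carries both
   the subcode and the builtin class.  Assuming the AARCH64_BUILTIN_SHIFT and
   AARCH64_BUILTIN_CLASS definitions from aarch64.h, the expander hooks can
   later recover the subcode with code >> AARCH64_BUILTIN_SHIFT and the class
   with code & AARCH64_BUILTIN_CLASS.  */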

static tree
aarch64_general_simulate_builtin (const char *name, tree fntype,
				  unsigned int code,
				  tree attrs = NULL_TREE)
{
  code = (code << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
  return simulate_builtin_function_decl (input_location, name, fntype,
					 code, NULL, attrs);
}

static const char *
aarch64_mangle_builtin_scalar_type (const_tree type)
{
  int i = 0;

  while (aarch64_scalar_builtin_types[i] != NULL)
    {
      const char *name = aarch64_scalar_builtin_types[i];

      if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	  && DECL_NAME (TYPE_NAME (type))
	  && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
	return aarch64_scalar_builtin_types[i];
      i++;
    }
  return NULL;
}

static const char *
aarch64_mangle_builtin_vector_type (const_tree type)
{
  tree attrs = TYPE_ATTRIBUTES (type);
  if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
    {
      tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
      return IDENTIFIER_POINTER (mangled_name);
    }

  return NULL;
}

const char *
aarch64_general_mangle_builtin_type (const_tree type)
{
  const char *mangle;
  /* Walk through all the AArch64 builtins types tables to filter out the
     incoming type.  */
  if ((mangle = aarch64_mangle_builtin_vector_type (type))
      || (mangle = aarch64_mangle_builtin_scalar_type (type)))
    return mangle;

  return NULL;
}

/* Helper function for aarch64_simd_builtin_type.  */
static tree
aarch64_int_or_fp_type (machine_mode mode,
			enum aarch64_type_qualifiers qualifiers)
{
#define QUAL_TYPE(M) ((qualifiers & qualifier_unsigned) \
		      ? unsigned_int##M##_type_node : int##M##_type_node);
  switch (mode)
    {
    case E_QImode:
      return QUAL_TYPE (QI);
    case E_HImode:
      return QUAL_TYPE (HI);
    case E_SImode:
      return QUAL_TYPE (SI);
    case E_DImode:
      return QUAL_TYPE (DI);
    case E_TImode:
      return QUAL_TYPE (TI);
    case E_OImode:
      return aarch64_simd_intOI_type_node;
    case E_CImode:
      return aarch64_simd_intCI_type_node;
    case E_XImode:
      return aarch64_simd_intXI_type_node;
    case E_HFmode:
      return aarch64_fp16_type_node;
    case E_SFmode:
      return float_type_node;
    case E_DFmode:
      return double_type_node;
    case E_BFmode:
      return bfloat16_type_node;
    default:
      gcc_unreachable ();
    }
#undef QUAL_TYPE
}

/* Helper function for aarch64_simd_builtin_type.  */
static tree
aarch64_lookup_simd_type_in_table (machine_mode mode,
				   enum aarch64_type_qualifiers qualifiers)
{
  int i;
  int nelts = ARRAY_SIZE (aarch64_simd_types);
  int q = qualifiers & (qualifier_poly | qualifier_unsigned);

  for (i = 0; i < nelts; i++)
    {
      if (aarch64_simd_types[i].mode == mode
	  && aarch64_simd_types[i].q == q)
	return aarch64_simd_types[i].itype;
      if (aarch64_simd_tuple_types[i][0] != NULL_TREE)
	for (int j = 0; j < 3; j++)
	  if (aarch64_simd_tuple_modes[i][j] == mode
	      && aarch64_simd_types[i].q == q)
	    return aarch64_simd_tuple_types[i][j];
    }

  return NULL_TREE;
}

/* Return a type for an operand with specified mode and qualifiers.  */
static tree
aarch64_simd_builtin_type (machine_mode mode,
			   enum aarch64_type_qualifiers qualifiers)
{
  tree type = NULL_TREE;

  /* For pointers, we want a pointer to the basic type of the vector.  */
  if ((qualifiers & qualifier_pointer) && VECTOR_MODE_P (mode))
    mode = GET_MODE_INNER (mode);

  /* Non-poly scalar modes map to standard types not in the table.  */
  if ((qualifiers & qualifier_poly) || VECTOR_MODE_P (mode))
    type = aarch64_lookup_simd_type_in_table (mode, qualifiers);
  else
    type = aarch64_int_or_fp_type (mode, qualifiers);

  gcc_assert (type != NULL_TREE);

  /* Add qualifiers.  */
  if (qualifiers & qualifier_const)
    type = build_qualified_type (type, TYPE_QUAL_CONST);
  if (qualifiers & qualifier_pointer)
    type = build_pointer_type (type);

  return type;
}
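
/* Illustration (not part of the build): aarch64_simd_builtin_type
   (E_V4SImode, qualifier_none) looks up the table and yields the
   __Int32x4_t node, whereas adding qualifier_const | qualifier_pointer
   first reduces E_V4SImode to its SImode element and then yields a
   "const int *" style pointer to the 32-bit element type instead.  */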

static void
aarch64_init_simd_builtin_types (void)
{
  int i;
  int nelts = ARRAY_SIZE (aarch64_simd_types);
  tree tdecl;

  /* Init all the element types built by the front-end.  */
  aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
  aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
  aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
  aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
  aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
  aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
  aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
  aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
  aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
  aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;

  /* Poly types are a world of their own.  */
  aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
    build_distinct_type_copy (unsigned_intQI_type_node);
  /* Prevent front-ends from transforming Poly8_t arrays into string
     literals.  */
  TYPE_STRING_FLAG (aarch64_simd_types[Poly8_t].eltype) = false;

  aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
    build_distinct_type_copy (unsigned_intHI_type_node);
  aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
    build_distinct_type_copy (unsigned_intDI_type_node);
  aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
    build_distinct_type_copy (unsigned_intTI_type_node);
  /* Init poly vector element types with scalar poly types.  */
  aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
  aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;

  /* Continue with standard types.  */
  aarch64_simd_types[Float16x4_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float16x8_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float32x2_t].eltype = float_type_node;
  aarch64_simd_types[Float32x4_t].eltype = float_type_node;
  aarch64_simd_types[Float64x1_t].eltype = double_type_node;
  aarch64_simd_types[Float64x2_t].eltype = double_type_node;

  /* Init Bfloat vector types with underlying __bf16 type.  */
  aarch64_simd_types[Bfloat16x4_t].eltype = bfloat16_type_node;
  aarch64_simd_types[Bfloat16x8_t].eltype = bfloat16_type_node;

  for (i = 0; i < nelts; i++)
    {
      tree eltype = aarch64_simd_types[i].eltype;
      machine_mode mode = aarch64_simd_types[i].mode;

      if (aarch64_simd_types[i].itype == NULL)
	{
	  tree type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
	  type = build_distinct_type_copy (type);
	  SET_TYPE_STRUCTURAL_EQUALITY (type);

	  tree mangled_name = get_identifier (aarch64_simd_types[i].mangle);
	  tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("Advanced SIMD type"), value,
			 TYPE_ATTRIBUTES (type));
	  aarch64_simd_types[i].itype = type;
	}

      tdecl = add_builtin_type (aarch64_simd_types[i].name,
				aarch64_simd_types[i].itype);
      TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
    }

#define AARCH64_BUILD_SIGNED_TYPE(mode) \
  make_signed_type (GET_MODE_PRECISION (mode));
  aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
  aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
  aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
#undef AARCH64_BUILD_SIGNED_TYPE

  tdecl = add_builtin_type
	("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
  TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
  tdecl = add_builtin_type
	("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
  TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
  tdecl = add_builtin_type
	("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
  TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
}

static void
aarch64_init_simd_builtin_scalar_types (void)
{
  /* Define typedefs for all the standard scalar types.  */
  (*lang_hooks.types.register_builtin_type) (intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node,
					     "__builtin_aarch64_simd_hf");
  (*lang_hooks.types.register_builtin_type) (intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (bfloat16_type_node,
					     "__builtin_aarch64_simd_bf");
  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_udi");
}

/* Return a set of FLAG_* flags derived from FLAGS
   that describe what a function with result MODE could do,
   taking the command-line flags into account.  */
static unsigned int
aarch64_call_properties (unsigned int flags, machine_mode mode)
{
  if (!(flags & FLAG_AUTO_FP) && FLOAT_MODE_P (mode))
    flags |= FLAG_FP;

  /* -fno-trapping-math means that we can assume any FP exceptions
     are not user-visible.  */
  if (!flag_trapping_math)
    flags &= ~FLAG_RAISE_FP_EXCEPTIONS;

  return flags;
}
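
/* Illustration (not part of the build): a builtin declared with FLAG_NONE
   whose result mode is DFmode is treated as if it had FLAG_FP, i.e.
   FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS, unless -fno-trapping-math is
   in effect, in which case the exception bit is cleared again.  */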

/* Return true if calls to a function with flags F and mode MODE
   could modify some form of global state.  */
static bool
aarch64_modifies_global_state_p (unsigned int f, machine_mode mode)
{
  unsigned int flags = aarch64_call_properties (f, mode);

  if (flags & FLAG_RAISE_FP_EXCEPTIONS)
    return true;

  if (flags & FLAG_PREFETCH_MEMORY)
    return true;

  return flags & FLAG_WRITE_MEMORY;
}

/* Return true if calls to a function with flags F and mode MODE
   could read some form of global state.  */
static bool
aarch64_reads_global_state_p (unsigned int f, machine_mode mode)
{
  unsigned int flags = aarch64_call_properties (f, mode);

  if (flags & FLAG_READ_FPCR)
    return true;

  return flags & FLAG_READ_MEMORY;
}

/* Return true if calls to a function with flags F and mode MODE
   could raise a signal.  */
static bool
aarch64_could_trap_p (unsigned int f, machine_mode mode)
{
  unsigned int flags = aarch64_call_properties (f, mode);

  if (flags & FLAG_RAISE_FP_EXCEPTIONS)
    return true;

  if (flags & (FLAG_READ_MEMORY | FLAG_WRITE_MEMORY))
    return true;

  return false;
}

/* Add attribute NAME to ATTRS.  */
static tree
aarch64_add_attribute (const char *name, tree attrs)
{
  return tree_cons (get_identifier (name), NULL_TREE, attrs);
}

/* Return the appropriate attributes for a function that has
   flags F and mode MODE.  */
static tree
aarch64_get_attributes (unsigned int f, machine_mode mode)
{
  tree attrs = NULL_TREE;

  if (!aarch64_modifies_global_state_p (f, mode))
    {
      if (aarch64_reads_global_state_p (f, mode))
	attrs = aarch64_add_attribute ("pure", attrs);
      else
	attrs = aarch64_add_attribute ("const", attrs);
    }

  if (!flag_non_call_exceptions || !aarch64_could_trap_p (f, mode))
    attrs = aarch64_add_attribute ("nothrow", attrs);

  return aarch64_add_attribute ("leaf", attrs);
}
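
/* Illustration (not part of the build): an integer-mode builtin with
   FLAG_NONE neither reads nor modifies global state and cannot trap, so it
   ends up with "const", "nothrow" and "leaf"; one with FLAG_READ_MEMORY
   reads global state and may trap, so it gets "pure" and "leaf", keeping
   "nothrow" only when -fnon-call-exceptions is off.  */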

/* Because the architecture does not provide lane variants of the FCMLA
   instructions, we can't use the standard SIMD builtin expansion code, but
   we still want the majority of the validation that would normally be
   done.  */

void
aarch64_init_fcmla_laneq_builtins (void)
{
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_fcmla_lane_builtin_data); ++i)
    {
      aarch64_fcmla_laneq_builtin_datum* d
	= &aarch64_fcmla_lane_builtin_data[i];
      tree argtype = aarch64_simd_builtin_type (d->mode, qualifier_none);
      machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
      tree quadtype = aarch64_simd_builtin_type (quadmode, qualifier_none);
      tree lanetype
	= aarch64_simd_builtin_type (SImode, qualifier_lane_pair_index);
      tree ftype = build_function_type_list (argtype, argtype, argtype,
					     quadtype, lanetype, NULL_TREE);
      tree attrs = aarch64_get_attributes (FLAG_FP, d->mode);
      tree fndecl
	= aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
1374
void
aarch64_init_simd_intrinsics (void)
{
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_intrinsic_data); ++i)
    {
      auto d = &aarch64_simd_intrinsic_data[i];

      if (d->skip)
	continue;

      tree return_type = void_type_node;
      tree args = void_list_node;

      for (int op_num = d->op_count - 1; op_num >= 0; op_num--)
	{
	  machine_mode op_mode = d->op_modes[op_num];
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[op_num];

	  tree eltype = aarch64_simd_builtin_type (op_mode, qualifiers);

	  if (op_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      tree ftype = build_function_type (return_type, args);
      tree attrs = aarch64_get_attributes (d->flags, d->op_modes[0]);
      unsigned int code
	= (d->fcode << AARCH64_BUILTIN_SHIFT | AARCH64_BUILTIN_GENERAL);
      tree fndecl = simulate_builtin_function_decl (input_location, d->name,
						    ftype, code, NULL, attrs);
      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}

void
aarch64_init_simd_builtin_functions (bool called_from_pragma)
{
  unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;

  if (!called_from_pragma)
    {
      tree lane_check_fpr = build_function_type_list (void_type_node,
						      size_type_node,
						      size_type_node,
						      intSI_type_node,
						      NULL);
      aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK]
	= aarch64_general_add_builtin ("__builtin_aarch64_im_lane_boundsi",
				       lane_check_fpr,
				       AARCH64_SIMD_BUILTIN_LANE_CHECK);
    }

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS + 1] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index into the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      int struct_mode_args = 0;
      for (int j = op_num; j >= 0; j--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[j].mode;
	  if (aarch64_advsimd_struct_mode_p (op_mode))
	    struct_mode_args++;
	}

      if ((called_from_pragma && struct_mode_args == 0)
	  || (!called_from_pragma && struct_mode_args > 0))
	continue;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[op_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[op_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[op_num] = 's';

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = d->mode;

	  eltype = aarch64_simd_builtin_type (op_mode, qualifiers);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      tree attrs = aarch64_get_attributes (d->flags, d->mode);

      if (called_from_pragma)
	{
	  unsigned int raw_code
	    = (fcode << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
	  fndecl = simulate_builtin_function_decl (input_location, namebuf,
						   ftype, raw_code, NULL,
						   attrs);
	}
      else
	fndecl = aarch64_general_add_builtin (namebuf, ftype, fcode, attrs);

      aarch64_builtin_decls[fcode] = fndecl;
    }
}

/* Register the tuple type that contains NUM_VECTORS of the AdvSIMD type
   indexed by TYPE_INDEX.  */
static void
register_tuple_type (unsigned int num_vectors, unsigned int type_index)
{
  aarch64_simd_type_info *type = &aarch64_simd_types[type_index];

  /* Synthesize the name of the user-visible vector tuple type.  */
  const char *vector_type_name = type->name;
  char tuple_type_name[sizeof ("bfloat16x4x2_t")];
  snprintf (tuple_type_name, sizeof (tuple_type_name), "%.*sx%d_t",
	    (int) strlen (vector_type_name) - 4, vector_type_name + 2,
	    num_vectors);
  tuple_type_name[0] = TOLOWER (tuple_type_name[0]);

  tree vector_type = type->itype;
  tree array_type = build_array_type_nelts (vector_type, num_vectors);
  if (type->mode == DImode)
    {
      if (num_vectors == 2)
	SET_TYPE_MODE (array_type, V2x1DImode);
      else if (num_vectors == 3)
	SET_TYPE_MODE (array_type, V3x1DImode);
      else if (num_vectors == 4)
	SET_TYPE_MODE (array_type, V4x1DImode);
    }

  unsigned int alignment
    = known_eq (GET_MODE_SIZE (type->mode), 16) ? 128 : 64;
  machine_mode tuple_mode = TYPE_MODE_RAW (array_type);
  gcc_assert (VECTOR_MODE_P (tuple_mode)
	      && TYPE_MODE (array_type) == tuple_mode
	      && TYPE_ALIGN (array_type) == alignment);

  tree field = build_decl (input_location, FIELD_DECL,
			   get_identifier ("val"), array_type);

  tree t = lang_hooks.types.simulate_record_decl (input_location,
						  tuple_type_name,
						  make_array_slice (&field,
								    1));
  gcc_assert (TYPE_MODE_RAW (t) == TYPE_MODE (t)
	      && (flag_pack_struct
		  || maximum_field_alignment
		  || (TYPE_MODE_RAW (t) == tuple_mode
		      && TYPE_ALIGN (t) == alignment)));

  aarch64_simd_tuple_modes[type_index][num_vectors - 2] = tuple_mode;
  aarch64_simd_tuple_types[type_index][num_vectors - 2] = t;
}
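
/* Editorial worked example: the snprintf above drops the leading "__" and
   the trailing "_t" of the internal vector type name (the precision
   argument strlen - 4 accounts for both), appends "x<count>_t", and
   lower-cases the first letter.  A standalone sketch of that string
   arithmetic, with a hypothetical input name:

     #include <ctype.h>
     #include <stdio.h>
     #include <string.h>

     int
     main (void)
     {
       const char *vector_type_name = "__Int8x8_t";  // hypothetical input
       int num_vectors = 2;
       char tuple_type_name[sizeof ("bfloat16x4x2_t")];
       snprintf (tuple_type_name, sizeof (tuple_type_name), "%.*sx%d_t",
		 (int) strlen (vector_type_name) - 4, vector_type_name + 2,
		 num_vectors);
       tuple_type_name[0] = tolower ((unsigned char) tuple_type_name[0]);
       printf ("%s\n", tuple_type_name);  // prints "int8x8x2_t"
       return 0;
     }
*/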

static bool
aarch64_scalar_builtin_type_p (aarch64_simd_type t)
{
  return (t == Poly8_t || t == Poly16_t || t == Poly64_t || t == Poly128_t);
}

/* Enable AARCH64_FL_* flags EXTRA_FLAGS on top of the base Advanced SIMD
   set.  */
aarch64_simd_switcher::aarch64_simd_switcher (aarch64_feature_flags extra_flags)
  : m_old_asm_isa_flags (aarch64_asm_isa_flags),
    m_old_general_regs_only (TARGET_GENERAL_REGS_ONLY)
{
  /* Changing the ISA flags should be enough here.  We shouldn't need to
     pay the compile-time cost of a full target switch.  */
  global_options.x_target_flags &= ~MASK_GENERAL_REGS_ONLY;
  aarch64_set_asm_isa_flags (AARCH64_FL_FP | AARCH64_FL_SIMD | extra_flags);
}

aarch64_simd_switcher::~aarch64_simd_switcher ()
{
  if (m_old_general_regs_only)
    global_options.x_target_flags |= MASK_GENERAL_REGS_ONLY;
  aarch64_set_asm_isa_flags (m_old_asm_isa_flags);
}

/* Implement #pragma GCC aarch64 "arm_neon.h".

   The types and functions defined here need to be available internally
   during LTO as well.  */
void
handle_arm_neon_h (void)
{
  aarch64_simd_switcher simd;

  /* Register the AdvSIMD vector tuple types.  */
  for (unsigned int i = 0; i < ARM_NEON_H_TYPES_LAST; i++)
    for (unsigned int count = 2; count <= 4; ++count)
      if (!aarch64_scalar_builtin_type_p (aarch64_simd_types[i].type))
	register_tuple_type (count, i);

  aarch64_init_simd_builtin_functions (true);
  aarch64_init_simd_intrinsics ();
}

static void
aarch64_init_simd_builtins (void)
{
  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  It can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();

  aarch64_init_simd_builtin_functions (false);
  if (in_lto_p)
    handle_arm_neon_h ();

  /* Initialize the remaining fcmla_laneq intrinsics.  */
  aarch64_init_fcmla_laneq_builtins ();
}

static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_simd_builtin_type (SImode, qualifier_unsigned);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_simd_builtin_type (d->mode, qualifier_unsigned);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype,
					     NULL_TREE);
      tree attrs = aarch64_get_attributes (FLAG_NONE, d->mode);
      tree fndecl
	= aarch64_general_add_builtin (d->name, ftype, d->fcode, attrs);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
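
/* Editorial example: one of the builtins registered above, as it might be
   called from user code (assumes a target with the CRC extension, e.g.
   -march=armv8-a+crc; arm_acle.h wraps these as __crc32b and friends):

     unsigned int
     crc_step (unsigned int acc, unsigned char byte)
     {
       return __builtin_aarch64_crc32b (acc, byte);
     }
*/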

/* Add builtins for reciprocal square root.  */

void
aarch64_init_builtin_rsqrt (void)
{
  tree fndecl = NULL;
  tree ftype = NULL;

  tree V2SF_type_node = build_vector_type (float_type_node, 2);
  tree V2DF_type_node = build_vector_type (double_type_node, 2);
  tree V4SF_type_node = build_vector_type (float_type_node, 4);

  struct builtin_decls_data
  {
    tree type_node;
    const char *builtin_name;
    int function_code;
  };

  builtin_decls_data bdda[] =
  {
    { double_type_node, "__builtin_aarch64_rsqrt_df", AARCH64_BUILTIN_RSQRT_DF },
    { float_type_node, "__builtin_aarch64_rsqrt_sf", AARCH64_BUILTIN_RSQRT_SF },
    { V2DF_type_node, "__builtin_aarch64_rsqrt_v2df", AARCH64_BUILTIN_RSQRT_V2DF },
    { V2SF_type_node, "__builtin_aarch64_rsqrt_v2sf", AARCH64_BUILTIN_RSQRT_V2SF },
    { V4SF_type_node, "__builtin_aarch64_rsqrt_v4sf", AARCH64_BUILTIN_RSQRT_V4SF }
  };

  builtin_decls_data *bdd = bdda;
  builtin_decls_data *bdd_end = bdd + (ARRAY_SIZE (bdda));

  for (; bdd < bdd_end; bdd++)
    {
      ftype = build_function_type_list (bdd->type_node, bdd->type_node,
					NULL_TREE);
      tree attrs = aarch64_get_attributes (FLAG_FP, TYPE_MODE (bdd->type_node));
      fndecl = aarch64_general_add_builtin (bdd->builtin_name,
					    ftype, bdd->function_code, attrs);
      aarch64_builtin_decls[bdd->function_code] = fndecl;
    }
}
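
/* Editorial sketch: a direct call to one of the internal rsqrt builtins
   registered above.  GCC normally emits these itself (via its reciprocal
   estimate machinery), so calling one by hand is illustrative only:

     double
     approx_rsqrt (double x)
     {
       return __builtin_aarch64_rsqrt_df (x);   // estimate of 1/sqrt(x)
     }
*/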

/* Initialize the backend types that support the user-visible __fp16
   type, also initialize a pointer to that type, to be used when
   forming HFAs.  */

static void
aarch64_init_fp16_types (void)
{
  aarch64_fp16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_fp16_type_node) = 16;
  layout_type (aarch64_fp16_type_node);

  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node, "__fp16");
  aarch64_fp16_ptr_type_node = build_pointer_type (aarch64_fp16_type_node);
}

/* Initialize the backend REAL_TYPE type supporting bfloat types.  */
static void
aarch64_init_bf16_types (void)
{
  lang_hooks.types.register_builtin_type (bfloat16_type_node, "__bf16");
  aarch64_bf16_ptr_type_node = build_pointer_type (bfloat16_type_node);
}

/* Pointer authentication builtins that become NOPs on legacy platforms.
   Currently, these builtins are for internal use only (libgcc EH unwinder).  */

void
aarch64_init_pauth_hint_builtins (void)
{
  /* Pointer Authentication builtins.  */
  tree ftype_pointer_auth
    = build_function_type_list (ptr_type_node, ptr_type_node,
				unsigned_intDI_type_node, NULL_TREE);
  tree ftype_pointer_strip
    = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);

  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_XPACLRI]
    = aarch64_general_add_builtin ("__builtin_aarch64_xpaclri",
				   ftype_pointer_strip,
				   AARCH64_PAUTH_BUILTIN_XPACLRI);
}
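
/* Editorial sketch of the intended (libgcc-unwinder-style) use of one of
   the builtins above: strip any pointer authentication code bits from a
   return address.  Direct user calls are unusual; this is illustrative:

     void *
     strip_pac (void *return_address)
     {
       return __builtin_aarch64_xpaclri (return_address);
     }
*/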

/* Initialize the transactional memory extension (TME) builtins.  */
static void
aarch64_init_tme_builtins (void)
{
  tree ftype_uint64_void
    = build_function_type_list (uint64_type_node, NULL);
  tree ftype_void_void
    = build_function_type_list (void_type_node, NULL);
  tree ftype_void_uint64
    = build_function_type_list (void_type_node, uint64_type_node, NULL);

  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TSTART]
    = aarch64_general_add_builtin ("__builtin_aarch64_tstart",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TSTART);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TTEST]
    = aarch64_general_add_builtin ("__builtin_aarch64_ttest",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TTEST);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCOMMIT]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcommit",
				   ftype_void_void,
				   AARCH64_TME_BUILTIN_TCOMMIT);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCANCEL]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcancel",
				   ftype_void_uint64,
				   AARCH64_TME_BUILTIN_TCANCEL);
}
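
/* Editorial sketch (assumes a TME-enabled target): TSTART returns zero
   when the transaction starts and a failure code otherwise, so a typical
   shape for a transactional fast path would be:

     extern int shared_counter;

     void
     increment_transactionally (void)
     {
       unsigned long long status = __builtin_aarch64_tstart ();
       if (status == 0)
	 {
	   shared_counter++;               // runs transactionally
	   __builtin_aarch64_tcommit ();
	 }
       // else: retry or take a fallback lock (omitted)
     }
*/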

/* Add builtins for Random Number instructions.  */

static void
aarch64_init_rng_builtins (void)
{
  tree unsigned_ptr_type = build_pointer_type (unsigned_intDI_type_node);
  tree ftype
    = build_function_type_list (integer_type_node, unsigned_ptr_type, NULL);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDR]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndr", ftype,
				   AARCH64_BUILTIN_RNG_RNDR);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDRRS]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndrrs", ftype,
				   AARCH64_BUILTIN_RNG_RNDRRS);
}
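
/* Editorial example: both builtins store the random value through their
   pointer argument and return zero on success, non-zero if the hardware
   could not deliver entropy:

     int
     get_random (unsigned long long *out)
     {
       return __builtin_aarch64_rndr (out);   // 0 => *out is valid
     }
*/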

/* Add builtins for reading and writing system registers.  */
static void
aarch64_init_rwsr_builtins (void)
{
  tree fntype = NULL;
  tree const_char_ptr_type
    = build_pointer_type (build_type_variant (char_type_node, true, false));

#define AARCH64_INIT_RWSR_BUILTINS_DECL(F, N, T) \
  aarch64_builtin_decls[AARCH64_##F] \
    = aarch64_general_add_builtin ("__builtin_aarch64_"#N, T, AARCH64_##F);

  fntype
    = build_function_type_list (uint32_type_node, const_char_ptr_type, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (RSR, rsr, fntype);

  fntype
    = build_function_type_list (ptr_type_node, const_char_ptr_type, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (RSRP, rsrp, fntype);

  fntype
    = build_function_type_list (uint64_type_node, const_char_ptr_type, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (RSR64, rsr64, fntype);

  fntype
    = build_function_type_list (float_type_node, const_char_ptr_type, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (RSRF, rsrf, fntype);

  fntype
    = build_function_type_list (double_type_node, const_char_ptr_type, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (RSRF64, rsrf64, fntype);

  fntype
    = build_function_type_list (void_type_node, const_char_ptr_type,
				uint32_type_node, NULL);

  AARCH64_INIT_RWSR_BUILTINS_DECL (WSR, wsr, fntype);

  fntype
    = build_function_type_list (void_type_node, const_char_ptr_type,
				const_ptr_type_node, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (WSRP, wsrp, fntype);

  fntype
    = build_function_type_list (void_type_node, const_char_ptr_type,
				uint64_type_node, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (WSR64, wsr64, fntype);

  fntype
    = build_function_type_list (void_type_node, const_char_ptr_type,
				float_type_node, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (WSRF, wsrf, fntype);

  fntype
    = build_function_type_list (void_type_node, const_char_ptr_type,
				double_type_node, NULL);
  AARCH64_INIT_RWSR_BUILTINS_DECL (WSRF64, wsrf64, fntype);
}
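
/* Editorial example: the first argument to each of these builtins must be
   a string literal naming the system register (validated against
   aarch64-sys-regs.def); for instance, reading the virtual counter:

     unsigned long long
     read_virtual_counter (void)
     {
       return __builtin_aarch64_rsr64 ("cntvct_el0");
     }
*/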

/* Initialize the memory tagging extension (MTE) builtins.  */
struct
{
  tree ftype;
  enum insn_code icode;
} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
			      AARCH64_MEMTAG_BUILTIN_START - 1];

static void
aarch64_init_memtag_builtins (void)
{
  tree fntype = NULL;

#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
  aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
    = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
				   T, AARCH64_MEMTAG_BUILTIN_##F); \
  aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
			      AARCH64_MEMTAG_BUILTIN_START - 1] = \
    {T, CODE_FOR_##I};

  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);

  fntype = build_function_type_list (uint64_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);

  fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
				     ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     unsigned_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);

  fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);

#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
}
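
/* Editorial sketch (assumes an MTE-enabled target): derive a new random
   tag for a pointer, excluding no tag values (the second argument is the
   exclusion mask):

     void *
     retag (void *p)
     {
       return __builtin_aarch64_memtag_irg (p, 0);
     }
*/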

/* Add builtins for Load/store 64 Byte instructions.  */

typedef struct
{
  const char *name;
  unsigned int code;
  tree type;
} ls64_builtins_data;

static GTY(()) tree ls64_arm_data_t = NULL_TREE;

static void
aarch64_init_ls64_builtins_types (void)
{
  /* Synthesize:

     typedef struct {
       uint64_t val[8];
     } __arm_data512_t;  */
  const char *tuple_type_name = "__arm_data512_t";
  tree node_type = get_typenode_from_name (UINT64_TYPE);
  tree array_type = build_array_type_nelts (node_type, 8);
  SET_TYPE_MODE (array_type, V8DImode);

  gcc_assert (TYPE_MODE_RAW (array_type) == TYPE_MODE (array_type));
  gcc_assert (TYPE_ALIGN (array_type) == 64);

  tree field = build_decl (input_location, FIELD_DECL,
			   get_identifier ("val"), array_type);

  ls64_arm_data_t = lang_hooks.types.simulate_record_decl (input_location,
							   tuple_type_name,
							   make_array_slice (&field, 1));

  gcc_assert (TYPE_MODE (ls64_arm_data_t) == V8DImode);
  gcc_assert (TYPE_MODE_RAW (ls64_arm_data_t) == TYPE_MODE (ls64_arm_data_t));
  gcc_assert (TYPE_ALIGN (ls64_arm_data_t) == 64);
}

static void
aarch64_init_ls64_builtins (void)
{
  aarch64_init_ls64_builtins_types ();

  ls64_builtins_data data[4] = {
    {"__arm_ld64b", AARCH64_LS64_BUILTIN_LD64B,
     build_function_type_list (ls64_arm_data_t,
			       const_ptr_type_node, NULL_TREE)},
    {"__arm_st64b", AARCH64_LS64_BUILTIN_ST64B,
     build_function_type_list (void_type_node, ptr_type_node,
			       ls64_arm_data_t, NULL_TREE)},
    {"__arm_st64bv", AARCH64_LS64_BUILTIN_ST64BV,
     build_function_type_list (uint64_type_node, ptr_type_node,
			       ls64_arm_data_t, NULL_TREE)},
    {"__arm_st64bv0", AARCH64_LS64_BUILTIN_ST64BV0,
     build_function_type_list (uint64_type_node, ptr_type_node,
			       ls64_arm_data_t, NULL_TREE)},
  };

  for (size_t i = 0; i < ARRAY_SIZE (data); ++i)
    aarch64_builtin_decls[data[i].code]
      = aarch64_general_simulate_builtin (data[i].name, data[i].type,
					  data[i].code);
}
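
/* Editorial sketch (assumes +ls64 and that the __arm_* names are in scope
   via the arm_acle.h pragma handled below): a 64-byte copy through the
   single-copy-atomic load/store pair:

     void
     copy_64_bytes (void *dst, const void *src)
     {
       __arm_data512_t data = __arm_ld64b (src);
       __arm_st64b (dst, data);
     }
*/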

static void
aarch64_init_data_intrinsics (void)
{
  tree uint32_fntype = build_function_type_list (uint32_type_node,
						 uint32_type_node, NULL_TREE);
  tree ulong_fntype = build_function_type_list (long_unsigned_type_node,
						long_unsigned_type_node,
						NULL_TREE);
  tree uint64_fntype = build_function_type_list (uint64_type_node,
						 uint64_type_node, NULL_TREE);
  aarch64_builtin_decls[AARCH64_REV16]
    = aarch64_general_add_builtin ("__builtin_aarch64_rev16", uint32_fntype,
				   AARCH64_REV16);
  aarch64_builtin_decls[AARCH64_REV16L]
    = aarch64_general_add_builtin ("__builtin_aarch64_rev16l", ulong_fntype,
				   AARCH64_REV16L);
  aarch64_builtin_decls[AARCH64_REV16LL]
    = aarch64_general_add_builtin ("__builtin_aarch64_rev16ll", uint64_fntype,
				   AARCH64_REV16LL);
  aarch64_builtin_decls[AARCH64_RBIT]
    = aarch64_general_add_builtin ("__builtin_aarch64_rbit", uint32_fntype,
				   AARCH64_RBIT);
  aarch64_builtin_decls[AARCH64_RBITL]
    = aarch64_general_add_builtin ("__builtin_aarch64_rbitl", ulong_fntype,
				   AARCH64_RBITL);
  aarch64_builtin_decls[AARCH64_RBITLL]
    = aarch64_general_add_builtin ("__builtin_aarch64_rbitll", uint64_fntype,
				   AARCH64_RBITLL);
}
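
/* Editorial worked example of the semantics behind these builtins: REV16
   swaps the bytes within each 16-bit halfword and RBIT mirrors the bit
   order of its operand:

     unsigned int
     demo (void)
     {
       unsigned int r = __builtin_aarch64_rev16 (0x11223344u); // 0x22114433
       unsigned int b = __builtin_aarch64_rbit (0x00000001u);  // 0x80000000
       return r ^ b;
     }
*/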

/* Implement #pragma GCC aarch64 "arm_acle.h".  */
void
handle_arm_acle_h (void)
{
  if (TARGET_LS64)
    aarch64_init_ls64_builtins ();
}

/* Initialize the fpsr and fpcr getters and setters.  */

static void
aarch64_init_fpsr_fpcr_builtins (void)
{
  tree ftype_set
    = build_function_type_list (void_type_node, unsigned_type_node, NULL);
  tree ftype_get
    = build_function_type_list (unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPCR);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPCR);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPSR);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPSR);

  ftype_set
    = build_function_type_list (void_type_node, long_long_unsigned_type_node,
				NULL);
  ftype_get
    = build_function_type_list (long_long_unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr64",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPCR64);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr64",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPCR64);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr64",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPSR64);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr64",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPSR64);
}
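
/* Editorial example: saving, overriding and restoring the FP control
   register with the 64-bit variants registered above (treating zero as
   the architectural default mode is an assumption of this sketch):

     void
     call_with_default_fpcr (void (*fn) (void))
     {
       unsigned long long saved = __builtin_aarch64_get_fpcr64 ();
       __builtin_aarch64_set_fpcr64 (0);   // assumed default mode
       fn ();
       __builtin_aarch64_set_fpcr64 (saved);
     }
*/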

/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group.  */

void
aarch64_general_init_builtins (void)
{
  aarch64_init_fpsr_fpcr_builtins ();

  aarch64_init_fp16_types ();

  aarch64_init_bf16_types ();

  {
    aarch64_simd_switcher simd;
    aarch64_init_simd_builtins ();
  }

  aarch64_init_crc32_builtins ();
  aarch64_init_builtin_rsqrt ();
  aarch64_init_rng_builtins ();
  aarch64_init_data_intrinsics ();

  aarch64_init_rwsr_builtins ();

  tree ftype_jcvt
    = build_function_type_list (intSI_type_node, double_type_node, NULL);
  aarch64_builtin_decls[AARCH64_JSCVT]
    = aarch64_general_add_builtin ("__builtin_aarch64_jcvtzs", ftype_jcvt,
				   AARCH64_JSCVT);

  /* Initialize the pointer authentication builtins, which are backed by
     instructions in the NOP encoding space.

     NOTE: these builtins are intended for use by the libgcc unwinder only;
     since there is no support for return address signing under ILP32, we
     don't register them there.  */
  if (!TARGET_ILP32)
    aarch64_init_pauth_hint_builtins ();

  if (TARGET_TME)
    aarch64_init_tme_builtins ();

  if (TARGET_MEMTAG)
    aarch64_init_memtag_builtins ();

  if (in_lto_p)
    handle_arm_acle_h ();
}

/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group.  */
tree
aarch64_general_builtin_decl (unsigned code, bool)
{
  if (code >= AARCH64_BUILTIN_MAX)
    return error_mark_node;

  return aarch64_builtin_decls[code];
}

bool
aarch64_general_check_builtin_call (location_t location, vec<location_t>,
				    unsigned int code, tree fndecl,
				    unsigned int nargs ATTRIBUTE_UNUSED,
				    tree *args)
{
  switch (code)
    {
    case AARCH64_RSR:
    case AARCH64_RSRP:
    case AARCH64_RSR64:
    case AARCH64_RSRF:
    case AARCH64_RSRF64:
    case AARCH64_WSR:
    case AARCH64_WSRP:
    case AARCH64_WSR64:
    case AARCH64_WSRF:
    case AARCH64_WSRF64:
      tree addr = STRIP_NOPS (args[0]);
      if (TREE_CODE (TREE_TYPE (addr)) != POINTER_TYPE
	  || TREE_CODE (addr) != ADDR_EXPR
	  || TREE_CODE (TREE_OPERAND (addr, 0)) != STRING_CST)
	{
	  error_at (location, "first argument to %qD must be a string literal",
		    fndecl);
	  return false;
	}
    }
  /* Default behavior.  */
  return true;
}

typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_LANE_INDEX,
  SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX,
  SIMD_ARG_LANE_PAIR_INDEX,
  SIMD_ARG_LANE_QUADTUP_INDEX,
  SIMD_ARG_STOP
} builtin_simd_arg;

static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args,
			  machine_mode builtin_mode)
{
  rtx pat;
  rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand.  */
  int opc = 0;

  if (have_retval)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode))
	target = gen_reg_rtx (tmode);
      op[opc++] = target;
    }

  for (;;)
    {
      builtin_simd_arg thisarg = args[opc - have_retval];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
	  machine_mode mode = insn_data[icode].operand[opc].mode;
	  op[opc] = expand_normal (arg);

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		op[opc] = convert_memory_address (Pmode, op[opc]);
	      /*gcc_assert (GET_MODE (op[opc]) == mode); */
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
		op[opc] = copy_to_mode_reg (mode, op[opc]);
	      break;

	    case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
	      gcc_assert (opc > 1);
	      if (CONST_INT_P (op[opc]))
		{
		  unsigned int nunits
		    = GET_MODE_NUNITS (builtin_mode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[opc] = aarch64_endian_lane_rtx (builtin_mode,
						     INTVAL (op[opc]));
		}
	      goto constant_arg;

	    case SIMD_ARG_LANE_INDEX:
	      /* Must be a previous operand into which this is an index.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  unsigned int nunits
		    = GET_MODE_NUNITS (vmode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  op[opc] = aarch64_endian_lane_rtx (vmode, INTVAL (op[opc]));
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;

	    case SIMD_ARG_LANE_PAIR_INDEX:
	      /* Must be a previous operand into which this is an index and
		 the index is restricted to nunits / 2.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  unsigned int nunits
		    = GET_MODE_NUNITS (vmode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits / 2, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  int lane = INTVAL (op[opc]);
		  op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 2, lane),
					  SImode);
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;

	    case SIMD_ARG_LANE_QUADTUP_INDEX:
	      /* Must be a previous operand into which this is an index and
		 the index is restricted to nunits / 4.  */
	      gcc_assert (opc > 0);
	      if (CONST_INT_P (op[opc]))
		{
		  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
		  unsigned int nunits
		    = GET_MODE_NUNITS (vmode).to_constant ();
		  aarch64_simd_lane_bounds (op[opc], 0, nunits / 4, exp);
		  /* Keep to GCC-vector-extension lane indices in the RTL.  */
		  int lane = INTVAL (op[opc]);
		  op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 4, lane),
					  SImode);
		}
	      /* If the lane index isn't a constant then error out.  */
	      goto constant_arg;

	    case SIMD_ARG_CONSTANT:
constant_arg:
	      if (!(*insn_data[icode].operand[opc].predicate)
		  (op[opc], mode))
		{
		  error_at (EXPR_LOCATION (exp),
			    "argument %d must be a constant immediate",
			    opc + 1 - have_retval);
		  return const0_rtx;
		}
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  opc++;
	}
    }

  switch (opc)
    {
    case 1:
      pat = GEN_FCN (icode) (op[0]);
      break;

    case 2:
      pat = GEN_FCN (icode) (op[0], op[1]);
      break;

    case 3:
      pat = GEN_FCN (icode) (op[0], op[1], op[2]);
      break;

    case 4:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
      break;

    case 5:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
      break;

    case 6:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
      break;

    default:
      gcc_unreachable ();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}

/* Expand an AArch64 AdvSIMD builtin (intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
    {
      rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
      rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
      if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
	  && UINTVAL (elementsize) != 0
	  && UINTVAL (totalsize) != 0)
	{
	  rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
	  if (CONST_INT_P (lane_idx))
	    aarch64_simd_lane_bounds (lane_idx, 0,
				      UINTVAL (totalsize)
				      / UINTVAL (elementsize),
				      exp);
	  else
	    error_at (EXPR_LOCATION (exp),
		      "lane index must be a constant immediate");
	}
      else
	error_at (EXPR_LOCATION (exp),
		  "total size and element size must be a nonzero "
		  "constant immediate");
      /* Don't generate any RTL.  */
      return const0_rtx;
    }
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	 the function has a non-void return type) or the operand for the
	 first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
	args[k] = SIMD_ARG_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
	args[k] = SIMD_ARG_LANE_PAIR_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
	args[k] = SIMD_ARG_LANE_QUADTUP_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
	args[k] = SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;
    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
    (target, icode, !is_void, exp, &args[1], d->mode);
}
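
/* Editorial sketch of the usage pattern behind the lane-check builtin
   handled above; arm_neon.h wraps it in a macro of roughly this shape
   (the macro name here is illustrative).  A constant out-of-range index
   is diagnosed at compile time, and the call expands to no RTL:

     #include <arm_neon.h>

     #define LANE_CHECK(vec, idx) \
       __builtin_aarch64_im_lane_boundsi (sizeof (vec), \
					  sizeof ((vec)[0]), (idx))

     int8_t
     third_lane (int8x8_t v)
     {
       LANE_CHECK (v, 2);          // OK: eight one-byte lanes
       return vget_lane_s8 (v, 2);
     }
*/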

rtx
aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
{
  rtx pat;
  aarch64_crc_builtin_datum *d
    = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
  enum insn_code icode = d->icode;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Function to expand reciprocal square root builtins.  */

static rtx
aarch64_expand_builtin_rsqrt (int fcode, tree exp, rtx target)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);

  rtx (*gen) (rtx, rtx);

  switch (fcode)
    {
    case AARCH64_BUILTIN_RSQRT_DF:
      gen = gen_rsqrtdf2;
      break;
    case AARCH64_BUILTIN_RSQRT_SF:
      gen = gen_rsqrtsf2;
      break;
    case AARCH64_BUILTIN_RSQRT_V2DF:
      gen = gen_rsqrtv2df2;
      break;
    case AARCH64_BUILTIN_RSQRT_V2SF:
      gen = gen_rsqrtv2sf2;
      break;
    case AARCH64_BUILTIN_RSQRT_V4SF:
      gen = gen_rsqrtv4sf2;
      break;
    default: gcc_unreachable ();
    }

  if (!target)
    target = gen_reg_rtx (GET_MODE (op0));

  emit_insn (gen (target, op0));

  return target;
}

/* Expand a FCMLA lane expression EXP with code FCODE and
   result going to TARGET if that is convenient.  */

rtx
aarch64_expand_fcmla_builtin (tree exp, rtx target, int fcode)
{
  int bcode = fcode - AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE - 1;
  aarch64_fcmla_laneq_builtin_datum* d
    = &aarch64_fcmla_lane_builtin_data[bcode];
  machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
  rtx op0 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 0)));
  rtx op1 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 1)));
  rtx op2 = force_reg (quadmode, expand_normal (CALL_EXPR_ARG (exp, 2)));
  tree tmp = CALL_EXPR_ARG (exp, 3);
  rtx lane_idx = expand_expr (tmp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

  /* Validate that the lane index is a constant.  */
  if (!CONST_INT_P (lane_idx))
    {
      error_at (EXPR_LOCATION (exp),
		"argument %d must be a constant immediate", 4);
      return const0_rtx;
    }

  /* Validate that the index is within the expected range.  */
  int nunits = GET_MODE_NUNITS (quadmode).to_constant ();
  aarch64_simd_lane_bounds (lane_idx, 0, nunits / 2, exp);

  /* Generate the correct register and mode.  */
  int lane = INTVAL (lane_idx);

  if (lane < nunits / 4)
    op2 = simplify_gen_subreg (d->mode, op2, quadmode,
			       subreg_lowpart_offset (d->mode, quadmode));
  else
    {
      /* Select the upper 64 bits, either a V2SF or V4HF.  This is quite
	 messy: the operation required, though conceptually simple, does
	 not map onto a simple RTL pattern and seems hard to express with
	 a single one.  The generic gen_highpart_mode generates code that
	 isn't optimal.  */
      rtx temp1 = gen_reg_rtx (d->mode);
      rtx temp2 = gen_reg_rtx (DImode);
      temp1 = simplify_gen_subreg (d->mode, op2, quadmode,
				   subreg_lowpart_offset (d->mode, quadmode));
      temp1 = simplify_gen_subreg (V2DImode, temp1, d->mode, 0);
      if (BYTES_BIG_ENDIAN)
	emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const0_rtx));
      else
	emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const1_rtx));
      op2 = simplify_gen_subreg (d->mode, temp2, GET_MODE (temp2), 0);

      /* And recalculate the index.  */
      lane -= nunits / 4;
    }

  /* Keep to GCC-vector-extension lane indices in the RTL; only nunits / 4
     (max nunits in range check) are valid, which means only 0-1, so we
     only need to know the order in a V2mode.  */
  lane_idx = aarch64_endian_lane_rtx (V2DImode, lane);

  if (!target
      || !REG_P (target)
      || GET_MODE (target) != d->mode)
    target = gen_reg_rtx (d->mode);

  rtx pat = NULL_RTX;

  if (d->lane)
    pat = GEN_FCN (d->icode) (target, op0, op1, op2, lane_idx);
  else
    pat = GEN_FCN (d->icode) (target, op0, op1, op2);

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Function to expand an expression EXP which calls one of the Transactional
   Memory Extension (TME) builtins FCODE with the result going to TARGET.  */
static rtx
aarch64_expand_builtin_tme (int fcode, tree exp, rtx target)
{
  switch (fcode)
    {
    case AARCH64_TME_BUILTIN_TSTART:
      target = gen_reg_rtx (DImode);
      emit_insn (GEN_FCN (CODE_FOR_tstart) (target));
      break;

    case AARCH64_TME_BUILTIN_TTEST:
      target = gen_reg_rtx (DImode);
      emit_insn (GEN_FCN (CODE_FOR_ttest) (target));
      break;

    case AARCH64_TME_BUILTIN_TCOMMIT:
      emit_insn (GEN_FCN (CODE_FOR_tcommit) ());
      break;

    case AARCH64_TME_BUILTIN_TCANCEL:
      {
	tree arg0 = CALL_EXPR_ARG (exp, 0);
	rtx op0 = expand_normal (arg0);
	if (CONST_INT_P (op0) && UINTVAL (op0) < 65536)
	  emit_insn (GEN_FCN (CODE_FOR_tcancel) (op0));
	else
	  {
	    error_at (EXPR_LOCATION (exp),
		      "argument must be a 16-bit constant immediate");
	    return const0_rtx;
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
  return target;
}

/* Function to expand an expression EXP which calls one of the Load/Store
   64 Byte extension (LS64) builtins FCODE with the result going to TARGET.  */
static rtx
aarch64_expand_builtin_ls64 (int fcode, tree exp, rtx target)
{
  expand_operand ops[3];

  switch (fcode)
    {
    case AARCH64_LS64_BUILTIN_LD64B:
      {
	rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
	create_output_operand (&ops[0], target, V8DImode);
	create_input_operand (&ops[1], op0, DImode);
	expand_insn (CODE_FOR_ld64b, 2, ops);
	return ops[0].value;
      }
    case AARCH64_LS64_BUILTIN_ST64B:
      {
	rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
	rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
	create_input_operand (&ops[0], op0, DImode);
	create_input_operand (&ops[1], op1, V8DImode);
	expand_insn (CODE_FOR_st64b, 2, ops);
	return const0_rtx;
      }
    case AARCH64_LS64_BUILTIN_ST64BV:
      {
	rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
	rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
	create_output_operand (&ops[0], target, DImode);
	create_input_operand (&ops[1], op0, DImode);
	create_input_operand (&ops[2], op1, V8DImode);
	expand_insn (CODE_FOR_st64bv, 3, ops);
	return ops[0].value;
      }
    case AARCH64_LS64_BUILTIN_ST64BV0:
      {
	rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
	rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
	create_output_operand (&ops[0], target, DImode);
	create_input_operand (&ops[1], op0, DImode);
	create_input_operand (&ops[2], op1, V8DImode);
	expand_insn (CODE_FOR_st64bv0, 3, ops);
	return ops[0].value;
      }
    }

  gcc_unreachable ();
}

/* Expand a random number builtin EXP with code FCODE, putting the result
   in TARGET.  If IGNORE is true the return value is ignored.  */

rtx
aarch64_expand_rng_builtin (tree exp, rtx target, int fcode, int ignore)
{
  rtx pat;
  enum insn_code icode;
  if (fcode == AARCH64_BUILTIN_RNG_RNDR)
    icode = CODE_FOR_aarch64_rndr;
  else if (fcode == AARCH64_BUILTIN_RNG_RNDRRS)
    icode = CODE_FOR_aarch64_rndrrs;
  else
    gcc_unreachable ();

  rtx rand = gen_reg_rtx (DImode);
  pat = GEN_FCN (icode) (rand);
  if (!pat)
    return NULL_RTX;

  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx res_addr = expand_normal (arg0);
  res_addr = convert_memory_address (Pmode, res_addr);
  rtx res_mem = gen_rtx_MEM (DImode, res_addr);
  emit_insn (pat);
  emit_move_insn (res_mem, rand);
  /* If the status result is unused don't generate the CSET code.  */
  if (ignore)
    return target;

  rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
  rtx cmp_rtx = gen_rtx_fmt_ee (EQ, SImode, cc_reg, const0_rtx);
  emit_insn (gen_aarch64_cstoresi (target, cmp_rtx, cc_reg));
  return target;
}

/* Expand a read/write system register builtin EXP.  */
rtx
aarch64_expand_rwsr_builtin (tree exp, rtx target, int fcode)
{
  tree arg0, arg1;
  rtx const_str, input_val, subreg;
  enum machine_mode mode;
  class expand_operand ops[2];

  arg0 = CALL_EXPR_ARG (exp, 0);

  bool write_op = (fcode == AARCH64_WSR
		   || fcode == AARCH64_WSRP
		   || fcode == AARCH64_WSR64
		   || fcode == AARCH64_WSRF
		   || fcode == AARCH64_WSRF64);

  /* Argument 0 (system register name) must be a string literal.  */
  gcc_assert (TREE_CODE (arg0) == ADDR_EXPR
	      && TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST);

  const char *name_input = TREE_STRING_POINTER (TREE_OPERAND (arg0, 0));

  tree len_tree = c_strlen (arg0, 1);
  if (len_tree == NULL_TREE)
    {
      error_at (EXPR_LOCATION (exp), "invalid system register name provided");
      return const0_rtx;
    }

  size_t len = TREE_INT_CST_LOW (len_tree);
  char *sysreg_name = xstrdup (name_input);

  for (unsigned pos = 0; pos <= len; pos++)
    sysreg_name[pos] = TOLOWER (sysreg_name[pos]);

  const char *name_output = aarch64_retrieve_sysreg (sysreg_name, write_op);
  if (name_output == NULL)
    {
      error_at (EXPR_LOCATION (exp), "invalid system register name %qs",
		sysreg_name);
      return const0_rtx;
    }

  /* Assign the string corresponding to the system register name to an RTX.  */
  const_str = rtx_alloc (CONST_STRING);
  PUT_CODE (const_str, CONST_STRING);
  XSTR (const_str, 0) = ggc_strdup (name_output);

  /* Set up expander operands and call instruction expansion.  */
  if (write_op)
    {
      arg1 = CALL_EXPR_ARG (exp, 1);
      mode = TYPE_MODE (TREE_TYPE (arg1));
      input_val = copy_to_mode_reg (mode, expand_normal (arg1));

      switch (fcode)
	{
	case AARCH64_WSR:
	case AARCH64_WSRP:
	case AARCH64_WSR64:
	case AARCH64_WSRF64:
	  subreg = lowpart_subreg (DImode, input_val, mode);
	  break;
	case AARCH64_WSRF:
	  subreg = gen_lowpart_SUBREG (SImode, input_val);
	  subreg = gen_lowpart_SUBREG (DImode, subreg);
	  break;
	}

      create_fixed_operand (&ops[0], const_str);
      create_input_operand (&ops[1], subreg, DImode);
      expand_insn (CODE_FOR_aarch64_write_sysregdi, 2, ops);

      return target;
    }

  /* Read operations are implied by !write_op.  */
  gcc_assert (call_expr_nargs (exp) == 1);

  /* Emit the initial read_sysregdi rtx.  */
  create_output_operand (&ops[0], target, DImode);
  create_fixed_operand (&ops[1], const_str);
  expand_insn (CODE_FOR_aarch64_read_sysregdi, 2, ops);
  target = ops[0].value;

  /* Do any necessary post-processing on the result.  */
  switch (fcode)
    {
    case AARCH64_RSR:
    case AARCH64_RSRP:
    case AARCH64_RSR64:
    case AARCH64_RSRF64:
      return lowpart_subreg (TYPE_MODE (TREE_TYPE (exp)), target, DImode);
    case AARCH64_RSRF:
      subreg = gen_lowpart_SUBREG (SImode, target);
      return gen_lowpart_SUBREG (SFmode, subreg);
    default:
      gcc_unreachable ();
    }
}

/* Expand an expression EXP that calls a MEMTAG built-in FCODE
   with result going to TARGET.  */
static rtx
aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
{
  if (TARGET_ILP32)
    {
      error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
      return const0_rtx;
    }

  rtx pat = NULL;
  enum insn_code icode = aarch64_memtag_builtin_data[fcode -
			   AARCH64_MEMTAG_BUILTIN_START - 1].icode;

  rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
  machine_mode mode0 = GET_MODE (op0);
  op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
  op0 = convert_to_mode (DImode, op0, true);

  switch (fcode)
    {
    case AARCH64_MEMTAG_BUILTIN_IRG:
    case AARCH64_MEMTAG_BUILTIN_GMI:
    case AARCH64_MEMTAG_BUILTIN_SUBP:
    case AARCH64_MEMTAG_BUILTIN_INC_TAG:
      {
	if (! target
	    || GET_MODE (target) != DImode
	    || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
	  target = gen_reg_rtx (DImode);

	if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
	  {
	    rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));

	    if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
	      {
		pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
		break;
	      }
	    error_at (EXPR_LOCATION (exp),
		      "argument %d must be a constant immediate "
		      "in range [0,15]", 2);
	    return const0_rtx;
	  }
	else
	  {
	    rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
	    machine_mode mode1 = GET_MODE (op1);
	    op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
	    op1 = convert_to_mode (DImode, op1, true);
	    pat = GEN_FCN (icode) (target, op0, op1);
	  }
	break;
      }
    case AARCH64_MEMTAG_BUILTIN_GET_TAG:
      target = op0;
      pat = GEN_FCN (icode) (target, op0, const0_rtx);
      break;
    case AARCH64_MEMTAG_BUILTIN_SET_TAG:
      pat = GEN_FCN (icode) (op0, op0, const0_rtx);
      break;
    default:
      gcc_unreachable ();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}

/* Function to expand an expression EXP which calls one of the ACLE Data
   Intrinsic builtins FCODE with the result going to TARGET.  */
static rtx
aarch64_expand_builtin_data_intrinsic (unsigned int fcode, tree exp, rtx target)
{
  expand_operand ops[2];
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  create_output_operand (&ops[0], target, mode);
  create_input_operand (&ops[1], expand_normal (CALL_EXPR_ARG (exp, 0)), mode);
  enum insn_code icode;

  switch (fcode)
    {
    case AARCH64_REV16:
    case AARCH64_REV16L:
    case AARCH64_REV16LL:
      icode = code_for_aarch64_rev16 (mode);
      break;
    case AARCH64_RBIT:
    case AARCH64_RBITL:
    case AARCH64_RBITLL:
      icode = code_for_aarch64_rbit (mode);
      break;
    default:
      gcc_unreachable ();
    }

  expand_insn (icode, 2, ops);
  return ops[0].value;
}

/* Expand an expression EXP as an fpsr or fpcr setter (depending on
   UNSPEC) using MODE.  */
static void
aarch64_expand_fpsr_fpcr_setter (int unspec, machine_mode mode, tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);
  rtx op = force_reg (mode, expand_normal (arg));
  emit_insn (gen_aarch64_set (unspec, mode, op));
}

/* Expand an fpsr or fpcr getter (depending on UNSPEC) using MODE.
   Return the target.  */
static rtx
aarch64_expand_fpsr_fpcr_getter (enum insn_code icode, machine_mode mode,
				 rtx target)
{
  expand_operand op;
  create_output_operand (&op, target, mode);
  expand_insn (icode, 1, &op);
  return op.value;
}

/* Expand an expression EXP that calls built-in function FCODE,
   with result going to TARGET if that's convenient.  IGNORE is true
   if the result of the builtin is ignored.  */
rtx
aarch64_general_expand_builtin (unsigned int fcode, tree exp, rtx target,
				int ignore)
{
  int icode;
  rtx op0;
  tree arg0;

  switch (fcode)
    {
    case AARCH64_BUILTIN_GET_FPCR:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrsi,
					      SImode, target);
    case AARCH64_BUILTIN_SET_FPCR:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, SImode, exp);
      return target;
    case AARCH64_BUILTIN_GET_FPSR:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrsi,
					      SImode, target);
    case AARCH64_BUILTIN_SET_FPSR:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, SImode, exp);
      return target;
    case AARCH64_BUILTIN_GET_FPCR64:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpcrdi,
					      DImode, target);
    case AARCH64_BUILTIN_SET_FPCR64:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, DImode, exp);
      return target;
    case AARCH64_BUILTIN_GET_FPSR64:
      return aarch64_expand_fpsr_fpcr_getter (CODE_FOR_aarch64_get_fpsrdi,
					      DImode, target);
    case AARCH64_BUILTIN_SET_FPSR64:
      aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, DImode, exp);
      return target;
    case AARCH64_PAUTH_BUILTIN_AUTIA1716:
    case AARCH64_PAUTH_BUILTIN_PACIA1716:
    case AARCH64_PAUTH_BUILTIN_AUTIB1716:
    case AARCH64_PAUTH_BUILTIN_PACIB1716:
    case AARCH64_PAUTH_BUILTIN_XPACLRI:
      arg0 = CALL_EXPR_ARG (exp, 0);
      op0 = force_reg (Pmode, expand_normal (arg0));

      if (fcode == AARCH64_PAUTH_BUILTIN_XPACLRI)
	{
	  rtx lr = gen_rtx_REG (Pmode, R30_REGNUM);
	  icode = CODE_FOR_xpaclri;
	  emit_move_insn (lr, op0);
	  emit_insn (GEN_FCN (icode) ());
	  return lr;
	}
      else
	{
	  tree arg1 = CALL_EXPR_ARG (exp, 1);
	  rtx op1 = force_reg (Pmode, expand_normal (arg1));
	  switch (fcode)
	    {
	    case AARCH64_PAUTH_BUILTIN_AUTIA1716:
	      icode = CODE_FOR_autia1716;
	      break;
	    case AARCH64_PAUTH_BUILTIN_AUTIB1716:
	      icode = CODE_FOR_autib1716;
	      break;
	    case AARCH64_PAUTH_BUILTIN_PACIA1716:
	      icode = CODE_FOR_pacia1716;
	      break;
	    case AARCH64_PAUTH_BUILTIN_PACIB1716:
	      icode = CODE_FOR_pacib1716;
	      break;
	    default:
	      icode = 0;
	      gcc_unreachable ();
	    }

	  rtx x16_reg = gen_rtx_REG (Pmode, R16_REGNUM);
	  rtx x17_reg = gen_rtx_REG (Pmode, R17_REGNUM);
	  emit_move_insn (x17_reg, op0);
	  emit_move_insn (x16_reg, op1);
	  emit_insn (GEN_FCN (icode) ());
	  return x17_reg;
	}

    case AARCH64_JSCVT:
      {
	expand_operand ops[2];
	create_output_operand (&ops[0], target, SImode);
	op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
	create_input_operand (&ops[1], op0, DFmode);
	expand_insn (CODE_FOR_aarch64_fjcvtzs, 2, ops);
	return ops[0].value;
      }

    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V2SF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V4HF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V4HF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V4HF:
    case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V4HF:
      return aarch64_expand_fcmla_builtin (exp, target, fcode);
    case AARCH64_BUILTIN_RNG_RNDR:
    case AARCH64_BUILTIN_RNG_RNDRRS:
      return aarch64_expand_rng_builtin (exp, target, fcode, ignore);
    case AARCH64_RSR:
    case AARCH64_RSRP:
    case AARCH64_RSR64:
    case AARCH64_RSRF:
    case AARCH64_RSRF64:
    case AARCH64_WSR:
    case AARCH64_WSRP:
    case AARCH64_WSR64:
    case AARCH64_WSRF:
    case AARCH64_WSRF64:
      return aarch64_expand_rwsr_builtin (exp, target, fcode);
    }

  if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
    return aarch64_simd_expand_builtin (fcode, exp, target);
  else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
3058 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7 3059
a6fc00da
BH
3060 if (fcode == AARCH64_BUILTIN_RSQRT_DF
3061 || fcode == AARCH64_BUILTIN_RSQRT_SF
3062 || fcode == AARCH64_BUILTIN_RSQRT_V2DF
3063 || fcode == AARCH64_BUILTIN_RSQRT_V2SF
3064 || fcode == AARCH64_BUILTIN_RSQRT_V4SF)
3065 return aarch64_expand_builtin_rsqrt (fcode, exp, target);
3066
89626179
SD
3067 if (fcode == AARCH64_TME_BUILTIN_TSTART
3068 || fcode == AARCH64_TME_BUILTIN_TCOMMIT
3069 || fcode == AARCH64_TME_BUILTIN_TTEST
3070 || fcode == AARCH64_TME_BUILTIN_TCANCEL)
3071 return aarch64_expand_builtin_tme (fcode, exp, target);
3072
fdcddba8
PW
3073 if (fcode == AARCH64_LS64_BUILTIN_LD64B
3074 || fcode == AARCH64_LS64_BUILTIN_ST64B
3075 || fcode == AARCH64_LS64_BUILTIN_ST64BV
3076 || fcode == AARCH64_LS64_BUILTIN_ST64BV0)
3077 return aarch64_expand_builtin_ls64 (fcode, exp, target);
3078
ef01e6bb
DZ
3079 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
3080 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
3081 return aarch64_expand_builtin_memtag (fcode, exp, target);
eb966d39
ASDV
3082 if (fcode >= AARCH64_REV16
3083 && fcode <= AARCH64_RBITLL)
3084 return aarch64_expand_builtin_data_intrinsic (fcode, exp, target);
ef01e6bb 3085
d5a29419 3086 gcc_unreachable ();
342be7f7 3087}
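
/* Worked example (added commentary): with the FEAT_JSCVT intrinsic from
   <arm_acle.h>,

     int32_t r = __jcvt (d);   // double -> int32 with JavaScript semantics

   the front end emits a call whose subcode is AARCH64_JSCVT, and the
   AARCH64_JSCVT case above expands it to a single FJCVTZS instruction.  */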

/* Return builtin for reciprocal square root.  */

tree
aarch64_general_builtin_rsqrt (unsigned int fn)
{
  if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2df)
    return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2DF];
  if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2sf)
    return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2SF];
  if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv4sf)
    return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V4SF];
  return NULL_TREE;
}
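
/* Note (added commentary): this lookup feeds the TARGET_BUILTIN_RECIPROCAL
   hook, so that under -mlow-precision-recip-sqrt (with the appropriate
   fast-math flags) a 1.0 / sqrt (x) computation can be rewritten to use the
   FRSQRTE/FRSQRTS estimate sequence instead of FSQRT plus FDIV.  */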

/* Return true if the lane check can be removed as there is no
   error going to be emitted.  */
static bool
aarch64_fold_builtin_lane_check (tree arg0, tree arg1, tree arg2)
{
  if (TREE_CODE (arg0) != INTEGER_CST)
    return false;
  if (TREE_CODE (arg1) != INTEGER_CST)
    return false;
  if (TREE_CODE (arg2) != INTEGER_CST)
    return false;

  auto totalsize = wi::to_widest (arg0);
  auto elementsize = wi::to_widest (arg1);
  if (totalsize == 0 || elementsize == 0)
    return false;
  auto lane = wi::to_widest (arg2);
  auto high = wi::udiv_trunc (totalsize, elementsize);
  return wi::ltu_p (lane, high);
}
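
/* Worked example (added commentary): for a 128-bit vector of 32-bit lanes,
   TOTALSIZE / ELEMENTSIZE == 4, so constant lane indices 0..3 return true
   and the lane check folds away, while lane index 4 keeps the check (and
   its out-of-range error) alive.  */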

#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  case AARCH64_SIMD_BUILTIN_##T##_##N##A:

#undef VREINTERPRET_BUILTIN
#define VREINTERPRET_BUILTIN(A, B, L) \
  case AARCH64_SIMD_BUILTIN_VREINTERPRET##L##_##A##_##B:


/* Try to fold a call to the built-in function with subcode FCODE.  The
   function is passed the N_ARGS arguments in ARGS and it returns a value
   of type TYPE.  Return the new expression on success and NULL_TREE on
   failure.  */
tree
aarch64_general_fold_builtin (unsigned int fcode, tree type,
			      unsigned int n_args ATTRIBUTE_UNUSED, tree *args)
{
  switch (fcode)
    {
      BUILTIN_VDQF (UNOP, abs, 2, ALL)
	return fold_build1 (ABS_EXPR, type, args[0]);
      VAR1 (UNOP, floatv2si, 2, ALL, v2sf)
      VAR1 (UNOP, floatv4si, 2, ALL, v4sf)
      VAR1 (UNOP, floatv2di, 2, ALL, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
      AARCH64_SIMD_VREINTERPRET_BUILTINS
	return fold_build1 (VIEW_CONVERT_EXPR, type, args[0]);
      case AARCH64_SIMD_BUILTIN_LANE_CHECK:
	gcc_assert (n_args == 3);
	if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
	  return void_node;
	break;
      default:
	break;
    }

  return NULL_TREE;
}
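
/* Example (added commentary): a call such as vabs_f32 (x), which lowers to
   __builtin_aarch64_absv2sf, is folded here to a plain ABS_EXPR, so the
   middle end can treat it like any other fabs-style operation.  */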

/* Return the aarch64_simd_type that a load/store builtin with subcode
   FCODE accesses in memory.  */
enum aarch64_simd_type
get_mem_type_for_load_store (unsigned int fcode)
{
  switch (fcode)
    {
      VAR1 (LOAD1, ld1, 0, LOAD, v8qi)
      VAR1 (STORE1, st1, 0, STORE, v8qi)
	return Int8x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v16qi)
      VAR1 (STORE1, st1, 0, STORE, v16qi)
	return Int8x16_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4hi)
      VAR1 (STORE1, st1, 0, STORE, v4hi)
	return Int16x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v8hi)
      VAR1 (STORE1, st1, 0, STORE, v8hi)
	return Int16x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2si)
      VAR1 (STORE1, st1, 0, STORE, v2si)
	return Int32x2_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4si)
      VAR1 (STORE1, st1, 0, STORE, v4si)
	return Int32x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2di)
      VAR1 (STORE1, st1, 0, STORE, v2di)
	return Int64x2_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v8qi)
      VAR1 (STORE1_U, st1, 0, STORE, v8qi)
	return Uint8x8_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v16qi)
      VAR1 (STORE1_U, st1, 0, STORE, v16qi)
	return Uint8x16_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v4hi)
      VAR1 (STORE1_U, st1, 0, STORE, v4hi)
	return Uint16x4_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v8hi)
      VAR1 (STORE1_U, st1, 0, STORE, v8hi)
	return Uint16x8_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v2si)
      VAR1 (STORE1_U, st1, 0, STORE, v2si)
	return Uint32x2_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v4si)
      VAR1 (STORE1_U, st1, 0, STORE, v4si)
	return Uint32x4_t;
      VAR1 (LOAD1_U, ld1, 0, LOAD, v2di)
      VAR1 (STORE1_U, st1, 0, STORE, v2di)
	return Uint64x2_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v8qi)
      VAR1 (STORE1_P, st1, 0, STORE, v8qi)
	return Poly8x8_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v16qi)
      VAR1 (STORE1_P, st1, 0, STORE, v16qi)
	return Poly8x16_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v4hi)
      VAR1 (STORE1_P, st1, 0, STORE, v4hi)
	return Poly16x4_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v8hi)
      VAR1 (STORE1_P, st1, 0, STORE, v8hi)
	return Poly16x8_t;
      VAR1 (LOAD1_P, ld1, 0, LOAD, v2di)
      VAR1 (STORE1_P, st1, 0, STORE, v2di)
	return Poly64x2_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4hf)
      VAR1 (STORE1, st1, 0, STORE, v4hf)
	return Float16x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v8hf)
      VAR1 (STORE1, st1, 0, STORE, v8hf)
	return Float16x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4bf)
      VAR1 (STORE1, st1, 0, STORE, v4bf)
	return Bfloat16x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v8bf)
      VAR1 (STORE1, st1, 0, STORE, v8bf)
	return Bfloat16x8_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2sf)
      VAR1 (STORE1, st1, 0, STORE, v2sf)
	return Float32x2_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v4sf)
      VAR1 (STORE1, st1, 0, STORE, v4sf)
	return Float32x4_t;
      VAR1 (LOAD1, ld1, 0, LOAD, v2df)
      VAR1 (STORE1, st1, 0, STORE, v2df)
	return Float64x2_t;
      default:
	gcc_unreachable ();
	break;
    }
}
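
/* Example (added commentary): vld1q_s32 lowers to the LOAD1 v4si builtin,
   for which this returns Int32x4_t; the gimple folder below then types the
   MEM_REF it builds with that vector type, re-aligned to the element.  */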

/* We've seen a vector load from address ADDR.  Record it in
   vector_load_decls, if appropriate.  */
static void
aarch64_record_vector_load_arg (tree addr)
{
  tree decl = aarch64_vector_load_decl (addr);
  if (!decl)
    return;
  if (!cfun->machine->vector_load_decls)
    cfun->machine->vector_load_decls = hash_set<tree>::create_ggc (31);
  cfun->machine->vector_load_decls->add (decl);
}

/* Try to fold STMT, given that it's a call to the built-in function with
   subcode FCODE.  Return the new statement on success and null on
   failure.  */
gimple *
aarch64_general_gimple_fold_builtin (unsigned int fcode, gcall *stmt,
				     gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED)
{
  gimple *new_stmt = NULL;
  unsigned nargs = gimple_call_num_args (stmt);
  tree *args = (nargs > 0
		? gimple_call_arg_ptr (stmt, 0)
		: &error_mark_node);

  /* We use gimple's IFN_REDUC_(PLUS|MIN|MAX)s for float, signed int
     and unsigned int; it will distinguish according to the types of
     the arguments to the __builtin.  */
  switch (fcode)
    {
      BUILTIN_VALL (UNOP, reduc_plus_scal_, 10, ALL)
      BUILTIN_VDQ_I (UNOPU, reduc_plus_scal_, 10, NONE)
	new_stmt = gimple_build_call_internal (IFN_REDUC_PLUS,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;

      /* Lower sqrt builtins to gimple/internal function sqrt.  */
      BUILTIN_VHSDF_DF (UNOP, sqrt, 2, FP)
	new_stmt = gimple_build_call_internal (IFN_SQRT,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;

      BUILTIN_VDC (BINOP, combine, 0, AUTO_FP)
      BUILTIN_VD_I (BINOPU, combine, 0, NONE)
      BUILTIN_VDC_P (BINOPP, combine, 0, NONE)
	{
	  tree first_part, second_part;
	  if (BYTES_BIG_ENDIAN)
	    {
	      second_part = args[0];
	      first_part = args[1];
	    }
	  else
	    {
	      first_part = args[0];
	      second_part = args[1];
	    }
	  tree ret_type = gimple_call_return_type (stmt);
	  tree ctor = build_constructor_va (ret_type, 2, NULL_TREE, first_part,
					    NULL_TREE, second_part);
	  new_stmt = gimple_build_assign (gimple_call_lhs (stmt), ctor);
	}
	break;

      /* Lower store and load neon builtins to gimple.  */
      BUILTIN_VALL_F16 (LOAD1, ld1, 0, LOAD)
      BUILTIN_VDQ_I (LOAD1_U, ld1, 0, LOAD)
      BUILTIN_VALLP_NO_DI (LOAD1_P, ld1, 0, LOAD)
	/* Punt until after inlining, so that we stand more chance of
	   recording something meaningful in vector_load_decls.  */
	if (!cfun->after_inlining)
	  break;
	aarch64_record_vector_load_arg (args[0]);
	if (!BYTES_BIG_ENDIAN)
	  {
	    enum aarch64_simd_type mem_type
	      = get_mem_type_for_load_store (fcode);
	    aarch64_simd_type_info simd_type
	      = aarch64_simd_types[mem_type];
	    tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
							     VOIDmode, true);
	    tree zero = build_zero_cst (elt_ptr_type);
	    /* Use element type alignment.  */
	    tree access_type
	      = build_aligned_type (simd_type.itype,
				    TYPE_ALIGN (simd_type.eltype));
	    new_stmt
	      = gimple_build_assign (gimple_get_lhs (stmt),
				     fold_build2 (MEM_REF,
						  access_type,
						  args[0], zero));
	    gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	    gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  }
	break;

      BUILTIN_VALL_F16 (STORE1, st1, 0, STORE)
      BUILTIN_VDQ_I (STORE1_U, st1, 0, STORE)
      BUILTIN_VALLP_NO_DI (STORE1_P, st1, 0, STORE)
	if (!BYTES_BIG_ENDIAN)
	  {
	    enum aarch64_simd_type mem_type
	      = get_mem_type_for_load_store (fcode);
	    aarch64_simd_type_info simd_type
	      = aarch64_simd_types[mem_type];
	    tree elt_ptr_type = build_pointer_type_for_mode (simd_type.eltype,
							     VOIDmode, true);
	    tree zero = build_zero_cst (elt_ptr_type);
	    /* Use element type alignment.  */
	    tree access_type
	      = build_aligned_type (simd_type.itype,
				    TYPE_ALIGN (simd_type.eltype));
	    new_stmt
	      = gimple_build_assign (fold_build2 (MEM_REF, access_type,
						  args[0], zero),
				     args[1]);
	    gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	    gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  }
	break;

      BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10, ALL)
      BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10, ALL)
	new_stmt = gimple_build_call_internal (IFN_REDUC_MAX,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;
      BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10, ALL)
      BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10, ALL)
	new_stmt = gimple_build_call_internal (IFN_REDUC_MIN,
					       1, args[0]);
	gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
	break;
      BUILTIN_VSDQ_I_DI (BINOP, ashl, 3, NONE)
	if (TREE_CODE (args[1]) == INTEGER_CST
	    && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
	  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					  LSHIFT_EXPR, args[0], args[1]);
	break;
      BUILTIN_VSDQ_I_DI (BINOP, sshl, 0, NONE)
      BUILTIN_VSDQ_I_DI (BINOP_UUS, ushl, 0, NONE)
	{
	  tree cst = args[1];
	  tree ctype = TREE_TYPE (cst);
	  /* Left shifts can be both scalar or vector, e.g. uint64x1_t is
	     treated as a scalar type not a vector one.  */
	  if ((cst = uniform_integer_cst_p (cst)) != NULL_TREE)
	    {
	      wide_int wcst = wi::to_wide (cst);
	      tree unit_ty = TREE_TYPE (cst);

	      wide_int abs_cst = wi::abs (wcst);
	      if (wi::geu_p (abs_cst, element_precision (args[0])))
		break;

	      if (wi::neg_p (wcst, TYPE_SIGN (ctype)))
		{
		  tree final_cst;
		  final_cst = wide_int_to_tree (unit_ty, abs_cst);
		  if (TREE_CODE (cst) != INTEGER_CST)
		    final_cst = build_uniform_cst (ctype, final_cst);

		  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						  RSHIFT_EXPR, args[0],
						  final_cst);
		}
	      else
		new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						LSHIFT_EXPR, args[0], args[1]);
	    }
	}
	break;
      BUILTIN_VDQ_I (SHIFTIMM, ashr, 3, NONE)
      VAR1 (SHIFTIMM, ashr_simd, 0, NONE, di)
      BUILTIN_VDQ_I (USHIFTIMM, lshr, 3, NONE)
      VAR1 (USHIFTIMM, lshr_simd, 0, NONE, di)
	if (TREE_CODE (args[1]) == INTEGER_CST
	    && wi::ltu_p (wi::to_wide (args[1]), element_precision (args[0])))
	  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					  RSHIFT_EXPR, args[0], args[1]);
	break;
      BUILTIN_GPF (BINOP, fmulx, 0, ALL)
	{
	  gcc_assert (nargs == 2);
	  bool a0_cst_p = TREE_CODE (args[0]) == REAL_CST;
	  bool a1_cst_p = TREE_CODE (args[1]) == REAL_CST;
	  if (a0_cst_p || a1_cst_p)
	    {
	      if (a0_cst_p && a1_cst_p)
		{
		  tree t0 = TREE_TYPE (args[0]);
		  real_value a0 = (TREE_REAL_CST (args[0]));
		  real_value a1 = (TREE_REAL_CST (args[1]));
		  if (real_equal (&a1, &dconst0))
		    std::swap (a0, a1);
		  /* According to real_equal (), +0 equals -0.  */
		  if (real_equal (&a0, &dconst0) && real_isinf (&a1))
		    {
		      real_value res = dconst2;
		      res.sign = a0.sign ^ a1.sign;
		      new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						      REAL_CST,
						      build_real (t0, res));
		    }
		  else
		    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						    MULT_EXPR,
						    args[0], args[1]);
		}
	      else /* a0_cst_p ^ a1_cst_p.  */
		{
		  real_value const_part = a0_cst_p
		    ? TREE_REAL_CST (args[0]) : TREE_REAL_CST (args[1]);
		  if (!real_equal (&const_part, &dconst0)
		      && !real_isinf (&const_part))
		    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
						    MULT_EXPR, args[0],
						    args[1]);
		}
	    }
	  if (new_stmt)
	    {
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	    }
	  break;
	}
      case AARCH64_SIMD_BUILTIN_LANE_CHECK:
	if (aarch64_fold_builtin_lane_check (args[0], args[1], args[2]))
	  {
	    unlink_stmt_vdef (stmt);
	    release_defs (stmt);
	    new_stmt = gimple_build_nop ();
	  }
	break;
      default:
	break;
    }

  /* GIMPLE assign statements (unlike calls) require a non-null lhs.  If we
     created an assign statement with a null lhs, then fix this by assigning
     to a new (and subsequently unused) variable.  */
  if (new_stmt && is_gimple_assign (new_stmt) && !gimple_assign_lhs (new_stmt))
    {
      tree new_lhs = make_ssa_name (gimple_call_return_type (stmt));
      gimple_assign_set_lhs (new_stmt, new_lhs);
    }

  return new_stmt;
}
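
/* Example (added commentary): under the reduction folds above, a call like

     int32_t s = vaddvq_s32 (v);   // __builtin_aarch64_reduc_plus_scal_v4si

   becomes the target-independent internal call s = .REDUC_PLUS (v) in
   gimple, which the middle end can then optimize like any other
   reduction.  */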

/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalent of:
       unsigned int fenv_cr;
       fenv_cr = __builtin_aarch64_get_fpcr ();

       unsigned int fenv_sr;
       fenv_sr = __builtin_aarch64_get_fpsr ();

     Now set all exceptions to non-stop:
       unsigned int mask_cr
	 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
       unsigned int masked_cr;
       masked_cr = fenv_cr & mask_cr;

     And clear all exception flags:
       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
       unsigned int masked_sr;
       masked_sr = fenv_sr & mask_sr;

       __builtin_aarch64_set_fpcr (masked_cr);
       __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var_raw (unsigned_type_node);
  fenv_sr = create_tmp_var_raw (unsigned_type_node);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0),
		       NULL_TREE, NULL_TREE);
  ld_fenv_sr = build4 (TARGET_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0),
		       NULL_TREE, NULL_TREE);

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
       __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of:
       unsigned int new_fenv_var;
       new_fenv_var = __builtin_aarch64_get_fpsr ();

       __builtin_aarch64_set_fpsr (fenv_sr);

       __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var_raw (unsigned_type_node);
  reload_fenv = build4 (TARGET_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0),
			NULL_TREE, NULL_TREE);
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}
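
/* Context (added commentary): this hook supports C11 atomic compound
   assignment on floating-point types.  For code such as

     _Atomic double d;
     d += 1.0;   // may loop on compare-and-swap

   *HOLD runs before the loop (save the environment and make exceptions
   non-stop), *CLEAR runs when an iteration fails (discard flags raised by
   the abandoned attempt), and *UPDATE runs after the loop (restore the
   environment and re-raise the exceptions of the successful attempt).  */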

/* Resolve overloaded MEMTAG built-in functions.  */
#define AARCH64_BUILTIN_SUBCODE(F) \
  (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)

static tree
aarch64_resolve_overloaded_memtag (location_t loc,
				   tree fndecl, void *pass_params)
{
  vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
  unsigned param_num = params ? params->length () : 0;
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
  tree inittype = aarch64_memtag_builtin_data[
		    fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
  unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;

  if (param_num != arg_num)
    {
      TREE_TYPE (fndecl) = inittype;
      return NULL_TREE;
    }
  tree retype = NULL;

  if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
    {
      tree t0 = TREE_TYPE ((*params)[0]);
      tree t1 = TREE_TYPE ((*params)[1]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	t0 = ptr_type_node;
      if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
	t1 = ptr_type_node;

      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));

      if (TYPE_MODE (t1) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
		    (int) tree_to_shwi (DECL_SIZE ((*params)[1])));

      retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
    }
  else
    {
      tree t0 = TREE_TYPE ((*params)[0]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
	{
	  TREE_TYPE (fndecl) = inittype;
	  return NULL_TREE;
	}

      if (TYPE_MODE (t0) != DImode)
	warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
		    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));

      switch (fcode)
	{
	case AARCH64_MEMTAG_BUILTIN_IRG:
	  retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GMI:
	  retype = build_function_type_list (uint64_type_node, t0,
					     uint64_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_INC_TAG:
	  retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_SET_TAG:
	  retype = build_function_type_list (void_type_node, t0, NULL);
	  break;
	case AARCH64_MEMTAG_BUILTIN_GET_TAG:
	  retype = build_function_type_list (t0, t0, NULL);
	  break;
	default:
	  return NULL_TREE;
	}
    }

  if (!retype || retype == error_mark_node)
    TREE_TYPE (fndecl) = inittype;
  else
    TREE_TYPE (fndecl) = retype;

  return NULL_TREE;
}
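
/* Usage sketch (added commentary; intrinsic names per ACLE <arm_acle.h>):
   the overload resolution above lets the MTE intrinsics preserve the
   caller's pointer type, e.g.

     int *q = __arm_mte_increment_tag (p, 1);   // q keeps the type of p
     ptrdiff_t d = __arm_mte_ptrdiff (q, p);

   rather than forcing everything through 'void *'.  */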

/* Called at aarch64_resolve_overloaded_builtin in aarch64-c.cc.  */
tree
aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
					    void *pass_params)
{
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);

  if (fcode >= AARCH64_MEMTAG_BUILTIN_START
      && fcode <= AARCH64_MEMTAG_BUILTIN_END)
    return aarch64_resolve_overloaded_memtag (loc, function, pass_params);

  return NULL_TREE;
}

#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11

#include "gt-aarch64-builtins.h"