/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2020 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "basic-block.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "tm_p.h"
#include "expmed.h"
#include "optabs.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "expr.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "case-cfn-macros.h"
#include "emit-rtl.h"
#include "stringpool.h"
#include "attribs.h"

#define v8qi_UP  E_V8QImode
#define v4hi_UP  E_V4HImode
#define v4hf_UP  E_V4HFmode
#define v2si_UP  E_V2SImode
#define v2sf_UP  E_V2SFmode
#define v1df_UP  E_V1DFmode
#define di_UP    E_DImode
#define df_UP    E_DFmode
#define v16qi_UP E_V16QImode
#define v8hi_UP  E_V8HImode
#define v8hf_UP  E_V8HFmode
#define v4si_UP  E_V4SImode
#define v4sf_UP  E_V4SFmode
#define v2di_UP  E_V2DImode
#define v2df_UP  E_V2DFmode
#define ti_UP    E_TImode
#define oi_UP    E_OImode
#define ci_UP    E_CImode
#define xi_UP    E_XImode
#define si_UP    E_SImode
#define sf_UP    E_SFmode
#define hi_UP    E_HImode
#define hf_UP    E_HFmode
#define qi_UP    E_QImode
#define bf_UP    E_BFmode
#define v4bf_UP  E_V4BFmode
#define v8bf_UP  E_V8BFmode
#define UP(X) X##_UP

#define SIMD_MAX_BUILTIN_ARGS 5

enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for bigendian.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400,
  /* Lane indices selected in pairs - must be in range, and flipped for
     bigendian.  */
  qualifier_lane_pair_index = 0x800,
  /* Lane indices selected in quadtuplets - must be in range, and flipped for
     bigendian.  */
  qualifier_lane_quadtup_index = 0x1000,
};

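/* For illustration only (not itself part of the qualifier machinery): a
   qualifier list such as
     { qualifier_none, qualifier_none, qualifier_maybe_immediate }
   reads as "T foo (T, T-or-immediate)"; entry 0 always describes the
   return value and each later entry one argument.  Composed values are
   plain bitwise ors, e.g. qualifier_const_pointer_map_mode is
   qualifier_const | qualifier_pointer | qualifier_map_mode
   (0x2 | 0x4 | 0x80 == 0x86).  */
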
typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code code;
  unsigned int fcode;
  enum aarch64_type_qualifiers *qualifiers;
} aarch64_simd_builtin_datum;

static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_UNOPUS (aarch64_types_unopus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_BINOP_USS (aarch64_types_binop_uss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_TERNOPU_LANE (aarch64_types_ternopu_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
#define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)


static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_pair_index };
#define TYPES_QUADOP_LANE_PAIR (aarch64_types_quadop_lane_pair_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_index };
#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_QUADOPU_LANE (aarch64_types_quadopu_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopssus_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSUS_LANE_QUADTUP \
	(aarch64_types_quadopssus_lane_quadtup_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopsssu_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSSU_LANE_QUADTUP \
	(aarch64_types_quadopsssu_lane_quadtup_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_QUADOPUI (aarch64_types_quadopu_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_binop_imm_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_none, qualifier_immediate };
#define TYPES_GETREGP (aarch64_types_binop_imm_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_fcvt_from_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate };
#define TYPES_FCVTIMM_SUS (aarch64_types_fcvt_from_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_s_imm_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_poly, qualifier_immediate };
#define TYPES_SETREGP (aarch64_types_ternop_s_imm_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_s_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETREG (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_ternop_s_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_p_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_immediate };
#define TYPES_SHIFTINSERTP (aarch64_types_ternop_p_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)


static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_combine_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_COMBINEP (aarch64_types_combine_p_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_poly };
#define TYPES_STORE1P (aarch64_types_store1_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
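/* Illustratively, TYPES_STORE1 above therefore reads as
   "void st1 (T *, T-vector)": qualifier_void selects a void return and
   qualifier_pointer_map_mode builds the pointer from the element type of
   the mode recorded in the simd_builtin_datum rather than from the vector
   operand's own type.  */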
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)

#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

#define VAR1(T, N, MAP, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
#define VAR13(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR12 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR1 (T, N, MAP, M)
#define VAR14(T, X, MAP, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR13 (T, X, MAP, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR1 (T, X, MAP, N)
#define VAR15(T, X, MAP, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR14 (T, X, MAP, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR1 (T, X, MAP, O)
#define VAR16(T, X, MAP, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) \
  VAR15 (T, X, MAP, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR1 (T, X, MAP, P)

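/* As an illustrative (hypothetical) expansion of the scheme above,
   VAR1 (BINOP, add, 0, v8qi) would become
     {"addv8qi", E_V8QImode, CODE_FOR_aarch64_addv8qi, 0, TYPES_BINOP},
   since UP (v8qi) maps to E_V8QImode and CF0 (add, v8qi) pastes together
   CODE_FOR_aarch64_addv8qi.  */
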
#include "aarch64-builtin-iterators.h"

static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};

/* There are only 8 CRC32 builtins.  Probably not worth their own .def file.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)

/* The next 8 FCMLA intrinsics require some special handling compared to the
   normal simd intrinsics.  */
#define AARCH64_SIMD_FCMLA_LANEQ_BUILTINS \
  FCMLA_LANEQ_BUILTIN (0, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (90, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (180, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (270, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (0, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (90, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (180, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (270, v4hf, fcmla_laneq, V4HF, true) \

typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
} aarch64_crc_builtin_datum;

/* Hold information about how to expand the FCMLA_LANEQ builtins.  */
typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
  bool lane;
} aarch64_fcmla_laneq_builtin_datum;

#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M,

#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_BUILTIN_GET_FPCR64,
  AARCH64_BUILTIN_SET_FPCR64,
  AARCH64_BUILTIN_GET_FPSR64,
  AARCH64_BUILTIN_SET_FPSR64,

  AARCH64_BUILTIN_RSQRT_DF,
  AARCH64_BUILTIN_RSQRT_SF,
  AARCH64_BUILTIN_RSQRT_V2DF,
  AARCH64_BUILTIN_RSQRT_V2SF,
  AARCH64_BUILTIN_RSQRT_V4SF,
  AARCH64_SIMD_BUILTIN_BASE,
  AARCH64_SIMD_BUILTIN_LANE_CHECK,
#include "aarch64-simd-builtins.def"
  /* The first enum element which is based on an insn_data pattern.  */
  AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
			      + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  /* ARMv8.3-A Pointer Authentication Builtins.  */
  AARCH64_PAUTH_BUILTIN_AUTIA1716,
  AARCH64_PAUTH_BUILTIN_PACIA1716,
  AARCH64_PAUTH_BUILTIN_AUTIB1716,
  AARCH64_PAUTH_BUILTIN_PACIB1716,
  AARCH64_PAUTH_BUILTIN_XPACLRI,
  /* Special cased Armv8.3-A Complex FMA by Lane quad Builtins.  */
  AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE,
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
  /* Builtin for Armv8.3-A Javascript conversion instruction.  */
  AARCH64_JSCVT,
  /* TME builtins.  */
  AARCH64_TME_BUILTIN_TSTART,
  AARCH64_TME_BUILTIN_TCOMMIT,
  AARCH64_TME_BUILTIN_TTEST,
  AARCH64_TME_BUILTIN_TCANCEL,
  /* Armv8.5-A RNG instruction builtins.  */
  AARCH64_BUILTIN_RNG_RNDR,
  AARCH64_BUILTIN_RNG_RNDRRS,
  /* MEMTAG builtins.  */
  AARCH64_MEMTAG_BUILTIN_START,
  AARCH64_MEMTAG_BUILTIN_IRG,
  AARCH64_MEMTAG_BUILTIN_GMI,
  AARCH64_MEMTAG_BUILTIN_SUBP,
  AARCH64_MEMTAG_BUILTIN_INC_TAG,
  AARCH64_MEMTAG_BUILTIN_SET_TAG,
  AARCH64_MEMTAG_BUILTIN_GET_TAG,
  AARCH64_MEMTAG_BUILTIN_END,
  AARCH64_BUILTIN_MAX
};

#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, E_##M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};

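/* Illustrative expansion: CRC32_BUILTIN (crc32b, QI) contributes the entry
   {"__builtin_aarch64_crc32b", E_QImode, CODE_FOR_aarch64_crc32b,
    AARCH64_BUILTIN_crc32b},
   and AARCH64_CRC32_BUILTINS stamps out one such row per builtin.  */
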
#undef FCMLA_LANEQ_BUILTIN
#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  {"__builtin_aarch64_fcmla_laneq"#I#N, E_##M##mode, CODE_FOR_aarch64_##X##I##N, \
   AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M, T},

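/* Illustrative expansion: FCMLA_LANEQ_BUILTIN (90, v2sf, fcmla, V2SF, false)
   yields the entry
   {"__builtin_aarch64_fcmla_laneq90v2sf", E_V2SFmode,
    CODE_FOR_aarch64_fcmla90v2sf, AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V2SF,
    false},  */
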
/* This structure describes how to map a builtin to the instruction to
   generate in the backend and how to invoke the instruction.  */
static aarch64_fcmla_laneq_builtin_datum aarch64_fcmla_lane_builtin_data[] = {
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
};

#undef CRC32_BUILTIN

static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types is implementation defined.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_hf",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  "__builtin_aarch64_simd_bf",
  NULL
};

#define ENTRY(E, M, Q, G) E,
enum aarch64_simd_type
{
#include "aarch64-simd-builtin-types.def"
  ARM_NEON_H_TYPES_LAST
};
#undef ENTRY

struct aarch64_simd_type_info
{
  enum aarch64_simd_type type;

  /* Internal type name.  */
  const char *name;

  /* Internal type name (mangled).  The mangled names conform to the
     AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
     Appendix A).  To qualify for emission with the mangled names defined in
     that document, a vector type must not only be of the correct mode but
     also be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t);
     these types are registered by aarch64_init_simd_builtin_types ().  In
     other words, vector types defined in other ways e.g. via vector_size
     attribute will get default mangled names.  */
  const char *mangle;

  /* Internal type.  */
  tree itype;

  /* Element type.  */
  tree eltype;

  /* Machine mode the internal type maps to.  */
  enum machine_mode mode;

  /* Qualifiers.  */
  enum aarch64_type_qualifiers q;
};

#define ENTRY(E, M, Q, G)  \
  {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, E_##M##mode, qualifier_##Q},
static struct aarch64_simd_type_info aarch64_simd_types [] = {
#include "aarch64-simd-builtin-types.def"
};
#undef ENTRY

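/* Illustrative (hypothetical) expansion: an ENTRY (Int8x8_t, V8QI, none, 10)
   line in the .def file would become
   {Int8x8_t, "__Int8x8_t", "10__Int8x8_t", NULL_TREE, NULL_TREE,
    E_V8QImode, qualifier_none},
   i.e. the AAPCS64 mangling is the length-prefixed internal name.  */
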
static tree aarch64_simd_intOI_type_node = NULL_TREE;
static tree aarch64_simd_intCI_type_node = NULL_TREE;
static tree aarch64_simd_intXI_type_node = NULL_TREE;

/* The user-visible __fp16 type, and a pointer to that type.  Used
   across the back-end.  */
tree aarch64_fp16_type_node = NULL_TREE;
tree aarch64_fp16_ptr_type_node = NULL_TREE;

/* Back-end node type for brain float (bfloat) types.  */
tree aarch64_bf16_type_node = NULL_TREE;
tree aarch64_bf16_ptr_type_node = NULL_TREE;

/* Wrapper around add_builtin_function.  NAME is the name of the built-in
   function, TYPE is the function type, and CODE is the function subcode
   (relative to AARCH64_BUILTIN_GENERAL).  */
static tree
aarch64_general_add_builtin (const char *name, tree type, unsigned int code)
{
  code = (code << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
  return add_builtin_function (name, type, code, BUILT_IN_MD,
			       NULL, NULL_TREE);
}

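/* For example (illustrative, and assuming AARCH64_BUILTIN_SHIFT == 1 and
   AARCH64_BUILTIN_GENERAL == 0 as declared in aarch64-protos.h): subcode 5
   is registered under function code (5 << 1) | 0 == 10; consumers recover
   the class from the low bits and the subcode from
   code >> AARCH64_BUILTIN_SHIFT.  */
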
static const char *
aarch64_mangle_builtin_scalar_type (const_tree type)
{
  int i = 0;

  while (aarch64_scalar_builtin_types[i] != NULL)
    {
      const char *name = aarch64_scalar_builtin_types[i];

      if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	  && DECL_NAME (TYPE_NAME (type))
	  && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
	return aarch64_scalar_builtin_types[i];
      i++;
    }
  return NULL;
}

static const char *
aarch64_mangle_builtin_vector_type (const_tree type)
{
  tree attrs = TYPE_ATTRIBUTES (type);
  if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
    {
      tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
      return IDENTIFIER_POINTER (mangled_name);
    }

  return NULL;
}

const char *
aarch64_general_mangle_builtin_type (const_tree type)
{
  const char *mangle;
  /* Walk through all the AArch64 builtins types tables to filter out the
     incoming type.  */
  if ((mangle = aarch64_mangle_builtin_vector_type (type))
      || (mangle = aarch64_mangle_builtin_scalar_type (type)))
    return mangle;

  return NULL;
}

static tree
aarch64_simd_builtin_std_type (machine_mode mode,
			       enum aarch64_type_qualifiers q)
{
#define QUAL_TYPE(M)  \
  ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
  switch (mode)
    {
    case E_QImode:
      return QUAL_TYPE (QI);
    case E_HImode:
      return QUAL_TYPE (HI);
    case E_SImode:
      return QUAL_TYPE (SI);
    case E_DImode:
      return QUAL_TYPE (DI);
    case E_TImode:
      return QUAL_TYPE (TI);
    case E_OImode:
      return aarch64_simd_intOI_type_node;
    case E_CImode:
      return aarch64_simd_intCI_type_node;
    case E_XImode:
      return aarch64_simd_intXI_type_node;
    case E_HFmode:
      return aarch64_fp16_type_node;
    case E_SFmode:
      return float_type_node;
    case E_DFmode:
      return double_type_node;
    case E_BFmode:
      return aarch64_bf16_type_node;
    default:
      gcc_unreachable ();
    }
#undef QUAL_TYPE
}

static tree
aarch64_lookup_simd_builtin_type (machine_mode mode,
				  enum aarch64_type_qualifiers q)
{
  int i;
  int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);

  /* Non-poly scalar modes map to standard types not in the table.  */
  if (q != qualifier_poly && !VECTOR_MODE_P (mode))
    return aarch64_simd_builtin_std_type (mode, q);

  for (i = 0; i < nelts; i++)
    if (aarch64_simd_types[i].mode == mode
	&& aarch64_simd_types[i].q == q)
      return aarch64_simd_types[i].itype;

  return NULL_TREE;
}

static tree
aarch64_simd_builtin_type (machine_mode mode,
			   bool unsigned_p, bool poly_p)
{
  if (poly_p)
    return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
  else if (unsigned_p)
    return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
  else
    return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
}

static void
aarch64_init_simd_builtin_types (void)
{
  int i;
  int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
  tree tdecl;

  /* Init all the element types built by the front-end.  */
  aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
  aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
  aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
  aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
  aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
  aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
  aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
  aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
  aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
  aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;

  /* Poly types are a world of their own.  */
  aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
    build_distinct_type_copy (unsigned_intQI_type_node);
  /* Prevent front-ends from transforming Poly8_t arrays into string
     literals.  */
  TYPE_STRING_FLAG (aarch64_simd_types[Poly8_t].eltype) = false;

  aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
    build_distinct_type_copy (unsigned_intHI_type_node);
  aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
    build_distinct_type_copy (unsigned_intDI_type_node);
  aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
    build_distinct_type_copy (unsigned_intTI_type_node);
  /* Init poly vector element types with scalar poly types.  */
  aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
  aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;

  /* Continue with standard types.  */
  aarch64_simd_types[Float16x4_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float16x8_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float32x2_t].eltype = float_type_node;
  aarch64_simd_types[Float32x4_t].eltype = float_type_node;
  aarch64_simd_types[Float64x1_t].eltype = double_type_node;
  aarch64_simd_types[Float64x2_t].eltype = double_type_node;

  /* Init Bfloat vector types with underlying __bf16 type.  */
  aarch64_simd_types[Bfloat16x4_t].eltype = aarch64_bf16_type_node;
  aarch64_simd_types[Bfloat16x8_t].eltype = aarch64_bf16_type_node;

  for (i = 0; i < nelts; i++)
    {
      tree eltype = aarch64_simd_types[i].eltype;
      machine_mode mode = aarch64_simd_types[i].mode;

      if (aarch64_simd_types[i].itype == NULL)
	{
	  tree type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
	  type = build_distinct_type_copy (type);
	  SET_TYPE_STRUCTURAL_EQUALITY (type);

	  tree mangled_name = get_identifier (aarch64_simd_types[i].mangle);
	  tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("Advanced SIMD type"), value,
			 TYPE_ATTRIBUTES (type));
	  aarch64_simd_types[i].itype = type;
	}

      tdecl = add_builtin_type (aarch64_simd_types[i].name,
				aarch64_simd_types[i].itype);
      TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
    }

#define AARCH64_BUILD_SIGNED_TYPE(mode)  \
  make_signed_type (GET_MODE_PRECISION (mode));
  aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
  aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
  aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
#undef AARCH64_BUILD_SIGNED_TYPE

  tdecl = add_builtin_type
	("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
  TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
  tdecl = add_builtin_type
	("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
  TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
  tdecl = add_builtin_type
	("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
  TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
}

static void
aarch64_init_simd_builtin_scalar_types (void)
{
  /* Define typedefs for all the standard scalar types.  */
  (*lang_hooks.types.register_builtin_type) (intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node,
					     "__builtin_aarch64_simd_hf");
  (*lang_hooks.types.register_builtin_type) (intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (aarch64_bf16_type_node,
					     "__builtin_aarch64_simd_bf");
  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_udi");
}

static bool aarch64_simd_builtins_initialized_p = false;

/* Due to the architecture not providing a lane variant of the lane
   instructions for fcmla we can't use the standard simd builtin expansion
   code, but we still want the majority of the validation that would
   normally be done.  */

void
aarch64_init_fcmla_laneq_builtins (void)
{
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_fcmla_lane_builtin_data); ++i)
    {
      aarch64_fcmla_laneq_builtin_datum* d
	= &aarch64_fcmla_lane_builtin_data[i];
      tree argtype = aarch64_lookup_simd_builtin_type (d->mode, qualifier_none);
      machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
      tree quadtype
	= aarch64_lookup_simd_builtin_type (quadmode, qualifier_none);
      tree lanetype
	= aarch64_simd_builtin_std_type (SImode, qualifier_lane_pair_index);
      tree ftype = build_function_type_list (argtype, argtype, argtype,
					     quadtype, lanetype, NULL_TREE);
      tree fndecl = aarch64_general_add_builtin (d->name, ftype, d->fcode);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}

void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;

  if (aarch64_simd_builtins_initialized_p)
    return;

  aarch64_simd_builtins_initialized_p = true;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  It can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();

  tree lane_check_fpr = build_function_type_list (void_type_node,
						  size_type_node,
						  size_type_node,
						  intSI_type_node,
						  NULL);
  aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK]
    = aarch64_general_add_builtin ("__builtin_aarch64_im_lane_boundsi",
				   lane_check_fpr,
				   AARCH64_SIMD_BUILTIN_LANE_CHECK);

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS + 1] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
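      /* E.g. (illustrative): a void-returning store whose RTL pattern has
	 two operands (address, value) starts with op_num == 1 and
	 arg_num == 2, because qualifiers[0] (qualifier_void) describes the
	 return value but has no RTL operand of its own.  */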
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[op_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[op_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[op_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      fndecl = aarch64_general_add_builtin (namebuf, ftype, fcode);
      aarch64_builtin_decls[fcode] = fndecl;
    }

  /* Initialize the remaining fcmla_laneq intrinsics.  */
  aarch64_init_fcmla_laneq_builtins ();
}

static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_simd_builtin_std_type (d->mode,
						    qualifier_unsigned);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype,
					     NULL_TREE);
      tree fndecl = aarch64_general_add_builtin (d->name, ftype, d->fcode);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}

/* Add builtins for reciprocal square root.  */

void
aarch64_init_builtin_rsqrt (void)
{
  tree fndecl = NULL;
  tree ftype = NULL;

  tree V2SF_type_node = build_vector_type (float_type_node, 2);
  tree V2DF_type_node = build_vector_type (double_type_node, 2);
  tree V4SF_type_node = build_vector_type (float_type_node, 4);

  struct builtin_decls_data
  {
    tree type_node;
    const char *builtin_name;
    int function_code;
  };

  builtin_decls_data bdda[] =
  {
    { double_type_node, "__builtin_aarch64_rsqrt_df", AARCH64_BUILTIN_RSQRT_DF },
    { float_type_node, "__builtin_aarch64_rsqrt_sf", AARCH64_BUILTIN_RSQRT_SF },
    { V2DF_type_node, "__builtin_aarch64_rsqrt_v2df", AARCH64_BUILTIN_RSQRT_V2DF },
    { V2SF_type_node, "__builtin_aarch64_rsqrt_v2sf", AARCH64_BUILTIN_RSQRT_V2SF },
    { V4SF_type_node, "__builtin_aarch64_rsqrt_v4sf", AARCH64_BUILTIN_RSQRT_V4SF }
  };

  builtin_decls_data *bdd = bdda;
  builtin_decls_data *bdd_end
    = bdd + (sizeof (bdda) / sizeof (builtin_decls_data));

  for (; bdd < bdd_end; bdd++)
    {
      ftype = build_function_type_list (bdd->type_node, bdd->type_node,
					NULL_TREE);
      fndecl = aarch64_general_add_builtin (bdd->builtin_name,
					    ftype, bdd->function_code);
      aarch64_builtin_decls[bdd->function_code] = fndecl;
    }
}

/* Initialize the backend types that support the user-visible __fp16
   type, also initialize a pointer to that type, to be used when
   forming HFAs.  */

static void
aarch64_init_fp16_types (void)
{
  aarch64_fp16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_fp16_type_node) = 16;
  layout_type (aarch64_fp16_type_node);

  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node, "__fp16");
  aarch64_fp16_ptr_type_node = build_pointer_type (aarch64_fp16_type_node);
}

/* Initialize the backend REAL_TYPE type supporting bfloat types.  */
static void
aarch64_init_bf16_types (void)
{
  aarch64_bf16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_bf16_type_node) = 16;
  SET_TYPE_MODE (aarch64_bf16_type_node, BFmode);
  layout_type (aarch64_bf16_type_node);

  lang_hooks.types.register_builtin_type (aarch64_bf16_type_node, "__bf16");
  aarch64_bf16_ptr_type_node = build_pointer_type (aarch64_bf16_type_node);
}

/* Pointer authentication builtins that will become NOP on legacy platforms.
   Currently, these builtins are for internal use only (libgcc EH unwinder).  */

void
aarch64_init_pauth_hint_builtins (void)
{
  /* Pointer Authentication builtins.  */
  tree ftype_pointer_auth
    = build_function_type_list (ptr_type_node, ptr_type_node,
				unsigned_intDI_type_node, NULL_TREE);
  tree ftype_pointer_strip
    = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);

  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_XPACLRI]
    = aarch64_general_add_builtin ("__builtin_aarch64_xpaclri",
				   ftype_pointer_strip,
				   AARCH64_PAUTH_BUILTIN_XPACLRI);
}

/* Initialize the transactional memory extension (TME) builtins.  */
static void
aarch64_init_tme_builtins (void)
{
  tree ftype_uint64_void
    = build_function_type_list (uint64_type_node, NULL);
  tree ftype_void_void
    = build_function_type_list (void_type_node, NULL);
  tree ftype_void_uint64
    = build_function_type_list (void_type_node, uint64_type_node, NULL);

  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TSTART]
    = aarch64_general_add_builtin ("__builtin_aarch64_tstart",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TSTART);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TTEST]
    = aarch64_general_add_builtin ("__builtin_aarch64_ttest",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TTEST);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCOMMIT]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcommit",
				   ftype_void_void,
				   AARCH64_TME_BUILTIN_TCOMMIT);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCANCEL]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcancel",
				   ftype_void_uint64,
				   AARCH64_TME_BUILTIN_TCANCEL);
}

/* Add builtins for Random Number instructions.  */

static void
aarch64_init_rng_builtins (void)
{
  tree unsigned_ptr_type = build_pointer_type (unsigned_intDI_type_node);
  tree ftype
    = build_function_type_list (integer_type_node, unsigned_ptr_type, NULL);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDR]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndr", ftype,
				   AARCH64_BUILTIN_RNG_RNDR);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDRRS]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndrrs", ftype,
				   AARCH64_BUILTIN_RNG_RNDRRS);
}

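/* Illustrative use (assuming the ACLE-style status convention, where a
   non-zero result means no random data was returned):

     unsigned long long seed;
     int failed = __builtin_aarch64_rndr (&seed);

   so callers can retry or fall back to another entropy source.  */
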
/* Initialize the memory tagging extension (MTE) builtins.  */
struct
{
  tree ftype;
  enum insn_code icode;
} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
			      AARCH64_MEMTAG_BUILTIN_START - 1];

static void
aarch64_init_memtag_builtins (void)
{
  tree fntype = NULL;

#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
  aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
    = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
				   T, AARCH64_MEMTAG_BUILTIN_##F); \
  aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
			      AARCH64_MEMTAG_BUILTIN_START - 1] = \
    {T, CODE_FOR_##I};

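  /* Illustrative expansion: AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg,
     irg, fntype) registers "__builtin_aarch64_memtag_irg" under
     AARCH64_MEMTAG_BUILTIN_IRG and records {fntype, CODE_FOR_irg} in
     aarch64_memtag_builtin_data for later expansion.  */
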
  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);

  fntype = build_function_type_list (uint64_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);

  fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
				     ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     unsigned_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);

  fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);

#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
}

/* Initialize the FPSR and FPCR getters and setters.  */

static void
aarch64_init_fpsr_fpcr_builtins (void)
{
  tree ftype_set
    = build_function_type_list (void_type_node, unsigned_type_node, NULL);
  tree ftype_get
    = build_function_type_list (unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPCR);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPCR);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPSR);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPSR);

  ftype_set
    = build_function_type_list (void_type_node, long_long_unsigned_type_node,
				NULL);
  ftype_get
    = build_function_type_list (long_long_unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr64",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPCR64);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr64",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPCR64);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr64",
				   ftype_get,
				   AARCH64_BUILTIN_GET_FPSR64);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR64]
    = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr64",
				   ftype_set,
				   AARCH64_BUILTIN_SET_FPSR64);
}

/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group.  */

void
aarch64_general_init_builtins (void)
{
  aarch64_init_fpsr_fpcr_builtins ();

  aarch64_init_fp16_types ();

  aarch64_init_bf16_types ();

  if (TARGET_SIMD)
    aarch64_init_simd_builtins ();

  aarch64_init_crc32_builtins ();
  aarch64_init_builtin_rsqrt ();
  aarch64_init_rng_builtins ();

  tree ftype_jcvt
    = build_function_type_list (intSI_type_node, double_type_node, NULL);
  aarch64_builtin_decls[AARCH64_JSCVT]
    = aarch64_general_add_builtin ("__builtin_aarch64_jcvtzs", ftype_jcvt,
				   AARCH64_JSCVT);

  /* Initialize pointer authentication builtins which are backed by
     instructions in NOP encoding space.

     NOTE: these builtins are supposed to be used by the libgcc unwinder
     only.  Since there is no support for return address signing under
     ILP32, we don't register them there.  */
  if (!TARGET_ILP32)
    aarch64_init_pauth_hint_builtins ();

  if (TARGET_TME)
    aarch64_init_tme_builtins ();

  if (TARGET_MEMTAG)
    aarch64_init_memtag_builtins ();
}

/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group.  */
tree
aarch64_general_builtin_decl (unsigned code, bool)
{
  if (code >= AARCH64_BUILTIN_MAX)
    return error_mark_node;

  return aarch64_builtin_decls[code];
}

typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_LANE_INDEX,
  SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX,
  SIMD_ARG_LANE_PAIR_INDEX,
  SIMD_ARG_LANE_QUADTUP_INDEX,
  SIMD_ARG_STOP
} builtin_simd_arg;


43e9d192
IB
1368static rtx
1369aarch64_simd_expand_args (rtx target, int icode, int have_retval,
4d0a0237 1370 tree exp, builtin_simd_arg *args,
b8506a8a 1371 machine_mode builtin_mode)
43e9d192 1372{
43e9d192 1373 rtx pat;
d9e80f49
AL
1374 rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand. */
1375 int opc = 0;
1376
1377 if (have_retval)
1378 {
1379 machine_mode tmode = insn_data[icode].operand[0].mode;
1380 if (!target
43e9d192 1381 || GET_MODE (target) != tmode
d9e80f49
AL
1382 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
1383 target = gen_reg_rtx (tmode);
1384 op[opc++] = target;
1385 }
43e9d192 1386
43e9d192
IB
1387 for (;;)
1388 {
d9e80f49 1389 builtin_simd_arg thisarg = args[opc - have_retval];
43e9d192
IB
1390
1391 if (thisarg == SIMD_ARG_STOP)
1392 break;
1393 else
1394 {
d9e80f49 1395 tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
b8506a8a 1396 machine_mode mode = insn_data[icode].operand[opc].mode;
d9e80f49 1397 op[opc] = expand_normal (arg);
43e9d192
IB
1398
1399 switch (thisarg)
1400 {
1401 case SIMD_ARG_COPY_TO_REG:
d9e80f49
AL
1402 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1403 op[opc] = convert_memory_address (Pmode, op[opc]);
 1404 /* gcc_assert (GET_MODE (op[opc]) == mode); */
1405 if (!(*insn_data[icode].operand[opc].predicate)
1406 (op[opc], mode))
1407 op[opc] = copy_to_mode_reg (mode, op[opc]);
43e9d192
IB
1408 break;
1409
4d0a0237
CB
1410 case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
1411 gcc_assert (opc > 1);
1412 if (CONST_INT_P (op[opc]))
1413 {
6a70badb
RS
1414 unsigned int nunits
1415 = GET_MODE_NUNITS (builtin_mode).to_constant ();
1416 aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
4d0a0237 1417 /* Keep to GCC-vector-extension lane indices in the RTL. */
7ac29c0f
RS
1418 op[opc] = aarch64_endian_lane_rtx (builtin_mode,
1419 INTVAL (op[opc]));
4d0a0237
CB
1420 }
1421 goto constant_arg;
1422
2a49c16d
AL
1423 case SIMD_ARG_LANE_INDEX:
 1424 /* There must be a previous operand which this one indexes. */
d9e80f49
AL
1425 gcc_assert (opc > 0);
1426 if (CONST_INT_P (op[opc]))
2a49c16d 1427 {
d9e80f49 1428 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
6a70badb
RS
1429 unsigned int nunits
1430 = GET_MODE_NUNITS (vmode).to_constant ();
1431 aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
2a49c16d 1432 /* Keep to GCC-vector-extension lane indices in the RTL. */
7ac29c0f 1433 op[opc] = aarch64_endian_lane_rtx (vmode, INTVAL (op[opc]));
2a49c16d 1434 }
9d63f43b
TC
1435 /* If the lane index isn't a constant then error out. */
1436 goto constant_arg;
1437
1438 case SIMD_ARG_LANE_PAIR_INDEX:
 1439 /* There must be a previous operand which this one indexes; the
 1440 index is restricted to nunits / 2. */
1441 gcc_assert (opc > 0);
1442 if (CONST_INT_P (op[opc]))
1443 {
1444 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
1445 unsigned int nunits
1446 = GET_MODE_NUNITS (vmode).to_constant ();
1447 aarch64_simd_lane_bounds (op[opc], 0, nunits / 2, exp);
1448 /* Keep to GCC-vector-extension lane indices in the RTL. */
33b5a38c
TC
1449 int lane = INTVAL (op[opc]);
1450 op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 2, lane),
1451 SImode);
9d63f43b 1452 }
8c197c85
SMW
1453 /* If the lane index isn't a constant then error out. */
1454 goto constant_arg;
1455 case SIMD_ARG_LANE_QUADTUP_INDEX:
 1456 /* There must be a previous operand which this one indexes; the
 1457 index is restricted to nunits / 4. */
1458 gcc_assert (opc > 0);
1459 if (CONST_INT_P (op[opc]))
1460 {
1461 machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
1462 unsigned int nunits
1463 = GET_MODE_NUNITS (vmode).to_constant ();
1464 aarch64_simd_lane_bounds (op[opc], 0, nunits / 4, exp);
1465 /* Keep to GCC-vector-extension lane indices in the RTL. */
1466 int lane = INTVAL (op[opc]);
1467 op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 4, lane),
1468 SImode);
1469 }
1470 /* If the lane index isn't a constant then error out. */
1471 goto constant_arg;
43e9d192 1472 case SIMD_ARG_CONSTANT:
4d0a0237 1473constant_arg:
d9e80f49
AL
1474 if (!(*insn_data[icode].operand[opc].predicate)
1475 (op[opc], mode))
d5a29419 1476 {
fca051af
AL
1477 error ("%Kargument %d must be a constant immediate",
1478 exp, opc + 1 - have_retval);
d5a29419
KT
1479 return const0_rtx;
1480 }
43e9d192
IB
1481 break;
1482
1483 case SIMD_ARG_STOP:
1484 gcc_unreachable ();
1485 }
1486
d9e80f49 1487 opc++;
43e9d192
IB
1488 }
1489 }
1490
d9e80f49
AL
1491 switch (opc)
1492 {
1493 case 1:
1494 pat = GEN_FCN (icode) (op[0]);
1495 break;
43e9d192 1496
d9e80f49
AL
1497 case 2:
1498 pat = GEN_FCN (icode) (op[0], op[1]);
1499 break;
43e9d192 1500
d9e80f49
AL
1501 case 3:
1502 pat = GEN_FCN (icode) (op[0], op[1], op[2]);
1503 break;
43e9d192 1504
d9e80f49
AL
1505 case 4:
1506 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
1507 break;
43e9d192 1508
d9e80f49
AL
1509 case 5:
1510 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
1511 break;
43e9d192 1512
d9e80f49
AL
1513 case 6:
1514 pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
1515 break;
43e9d192 1516
d9e80f49
AL
1517 default:
1518 gcc_unreachable ();
1519 }
43e9d192
IB
1520
1521 if (!pat)
d5a29419 1522 return NULL_RTX;
43e9d192
IB
1523
1524 emit_insn (pat);
1525
1526 return target;
1527}
1528
1529/* Expand an AArch64 AdvSIMD builtin (intrinsic). */
1530rtx
1531aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
1532{
661fce82
AL
1533 if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
1534 {
9c4f25cc
AP
1535 rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
1536 rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
1537 if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
1538 && UINTVAL (elementsize) != 0
1539 && UINTVAL (totalsize) != 0)
1540 {
1541 rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
1542 if (CONST_INT_P (lane_idx))
1543 aarch64_simd_lane_bounds (lane_idx, 0,
1544 UINTVAL (totalsize)
1545 / UINTVAL (elementsize),
1546 exp);
1547 else
1548 error ("%Klane index must be a constant immediate", exp);
1549 }
661fce82 1550 else
9c4f25cc 1551 error ("%Ktotal size and element size must be non-zero constant immediates", exp);
661fce82
AL
1552 /* Don't generate any RTL. */
1553 return const0_rtx;
1554 }
342be7f7 1555 aarch64_simd_builtin_datum *d =
661fce82 1556 &aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
342be7f7 1557 enum insn_code icode = d->code;
0ff2bf46 1558 builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
b5828b4b
JG
1559 int num_args = insn_data[d->code].n_operands;
1560 int is_void = 0;
1561 int k;
43e9d192 1562
b5828b4b 1563 is_void = !!(d->qualifiers[0] & qualifier_void);
43e9d192 1564
b5828b4b
JG
1565 num_args += is_void;
1566
1567 for (k = 1; k < num_args; k++)
1568 {
1569 /* We have four arrays of data, each indexed in a different fashion.
1570 qualifiers - element 0 always describes the function return type.
1571 operands - element 0 is either the operand for return value (if
1572 the function has a non-void return type) or the operand for the
1573 first argument.
1574 expr_args - element 0 always holds the first argument.
1575 args - element 0 is always used for the return type. */
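 /* A concrete instance of the indexing: for a non-void builtin
 (is_void == 0), k == 1 refers to qualifiers[1] (the first
 argument's qualifiers), to insn operand 1 (operand 0 being the
 result) and to CALL_EXPR argument 0; for a void builtin
 (is_void == 1), k == 1 maps to insn operand 0 instead. */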
1576 int qualifiers_k = k;
1577 int operands_k = k - is_void;
1578 int expr_args_k = k - 1;
1579
2a49c16d
AL
1580 if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
1581 args[k] = SIMD_ARG_LANE_INDEX;
9d63f43b
TC
1582 else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
1583 args[k] = SIMD_ARG_LANE_PAIR_INDEX;
8c197c85
SMW
1584 else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
1585 args[k] = SIMD_ARG_LANE_QUADTUP_INDEX;
4d0a0237
CB
1586 else if (d->qualifiers[qualifiers_k] & qualifier_struct_load_store_lane_index)
1587 args[k] = SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX;
2a49c16d 1588 else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
b5828b4b
JG
1589 args[k] = SIMD_ARG_CONSTANT;
1590 else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
1591 {
1592 rtx arg
1593 = expand_normal (CALL_EXPR_ARG (exp,
1594 (expr_args_k)));
1595 /* Handle constants only if the predicate allows it. */
1596 bool op_const_int_p =
1597 (CONST_INT_P (arg)
1598 && (*insn_data[icode].operand[operands_k].predicate)
1599 (arg, insn_data[icode].operand[operands_k].mode));
1600 args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
1601 }
1602 else
1603 args[k] = SIMD_ARG_COPY_TO_REG;
43e9d192 1604
43e9d192 1605 }
b5828b4b
JG
1606 args[k] = SIMD_ARG_STOP;
1607
1608 /* The interface to aarch64_simd_expand_args expects a 0 if
1609 the function is void, and a 1 if it is not. */
1610 return aarch64_simd_expand_args
4d0a0237 1611 (target, icode, !is_void, exp, &args[1], d->mode);
43e9d192 1612}
342be7f7 1613
5d357f26
KT
1614rtx
1615aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
1616{
1617 rtx pat;
1618 aarch64_crc_builtin_datum *d
1619 = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
1620 enum insn_code icode = d->icode;
1621 tree arg0 = CALL_EXPR_ARG (exp, 0);
1622 tree arg1 = CALL_EXPR_ARG (exp, 1);
1623 rtx op0 = expand_normal (arg0);
1624 rtx op1 = expand_normal (arg1);
ef4bddc2
RS
1625 machine_mode tmode = insn_data[icode].operand[0].mode;
1626 machine_mode mode0 = insn_data[icode].operand[1].mode;
1627 machine_mode mode1 = insn_data[icode].operand[2].mode;
5d357f26
KT
1628
1629 if (! target
1630 || GET_MODE (target) != tmode
1631 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
1632 target = gen_reg_rtx (tmode);
1633
1634 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
1635 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
1636
1637 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
1638 op0 = copy_to_mode_reg (mode0, op0);
1639 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
1640 op1 = copy_to_mode_reg (mode1, op1);
1641
1642 pat = GEN_FCN (icode) (target, op0, op1);
d5a29419
KT
1643 if (!pat)
1644 return NULL_RTX;
1645
5d357f26
KT
1646 emit_insn (pat);
1647 return target;
1648}
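/* A usage sketch for the CRC32 builtins expanded above, mirroring the
   arm_acle.h wrappers (illustrative; assumes a target with the CRC
   extension enabled):

     uint32_t crc_step (uint32_t crc, uint8_t byte)
     {
       return __builtin_aarch64_crc32b (crc, byte);
     }

   This goes through aarch64_crc32_expand_builtin and emits a single
   CRC32B instruction.  */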
1649
a6fc00da
BH
1650/* Function to expand reciprocal square root builtins. */
1651
1652static rtx
1653aarch64_expand_builtin_rsqrt (int fcode, tree exp, rtx target)
1654{
1655 tree arg0 = CALL_EXPR_ARG (exp, 0);
1656 rtx op0 = expand_normal (arg0);
1657
1658 rtx (*gen) (rtx, rtx);
1659
1660 switch (fcode)
1661 {
1662 case AARCH64_BUILTIN_RSQRT_DF:
ee62a5a6 1663 gen = gen_rsqrtdf2;
a6fc00da
BH
1664 break;
1665 case AARCH64_BUILTIN_RSQRT_SF:
ee62a5a6 1666 gen = gen_rsqrtsf2;
a6fc00da
BH
1667 break;
1668 case AARCH64_BUILTIN_RSQRT_V2DF:
ee62a5a6 1669 gen = gen_rsqrtv2df2;
a6fc00da
BH
1670 break;
1671 case AARCH64_BUILTIN_RSQRT_V2SF:
ee62a5a6 1672 gen = gen_rsqrtv2sf2;
a6fc00da
BH
1673 break;
1674 case AARCH64_BUILTIN_RSQRT_V4SF:
ee62a5a6 1675 gen = gen_rsqrtv4sf2;
a6fc00da
BH
1676 break;
1677 default: gcc_unreachable ();
1678 }
1679
1680 if (!target)
1681 target = gen_reg_rtx (GET_MODE (op0));
1682
1683 emit_insn (gen (target, op0));
1684
1685 return target;
1686}
1687
9d63f43b
TC
1688/* Expand a FCMLA lane expression EXP with code FCODE and
1689 result going to TARGET if that is convenient. */
1690
1691rtx
1692aarch64_expand_fcmla_builtin (tree exp, rtx target, int fcode)
1693{
1694 int bcode = fcode - AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE - 1;
1695 aarch64_fcmla_laneq_builtin_datum* d
1696 = &aarch64_fcmla_lane_builtin_data[bcode];
1697 machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
1698 rtx op0 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 0)));
1699 rtx op1 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 1)));
1700 rtx op2 = force_reg (quadmode, expand_normal (CALL_EXPR_ARG (exp, 2)));
1701 tree tmp = CALL_EXPR_ARG (exp, 3);
1702 rtx lane_idx = expand_expr (tmp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
1703
1704 /* Validate that the lane index is a constant. */
1705 if (!CONST_INT_P (lane_idx))
1706 {
1707 error ("%Kargument %d must be a constant immediate", exp, 4);
1708 return const0_rtx;
1709 }
1710
1711 /* Validate that the index is within the expected range. */
1712 int nunits = GET_MODE_NUNITS (quadmode).to_constant ();
1713 aarch64_simd_lane_bounds (lane_idx, 0, nunits / 2, exp);
1714
9d63f43b
TC
1715 /* Generate the correct register and mode. */
1716 int lane = INTVAL (lane_idx);
1717
1718 if (lane < nunits / 4)
33b5a38c
TC
1719 op2 = simplify_gen_subreg (d->mode, op2, quadmode,
1720 subreg_lowpart_offset (d->mode, quadmode));
9d63f43b
TC
1721 else
1722 {
 1723 /* Select the upper 64 bits, either a V2SF or V4HF. This is
 1724 quite messy: the operation required, though simple, has no
 1725 simple RTL pattern and seems quite hard to define as a single
 1726 one. The target-generic gen_highpart_mode generates code
 1727 that isn't optimal. */
1728 rtx temp1 = gen_reg_rtx (d->mode);
1729 rtx temp2 = gen_reg_rtx (DImode);
33b5a38c
TC
1730 temp1 = simplify_gen_subreg (d->mode, op2, quadmode,
1731 subreg_lowpart_offset (d->mode, quadmode));
9d63f43b 1732 temp1 = simplify_gen_subreg (V2DImode, temp1, d->mode, 0);
33b5a38c
TC
1733 if (BYTES_BIG_ENDIAN)
1734 emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const0_rtx));
1735 else
1736 emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const1_rtx));
9d63f43b
TC
1737 op2 = simplify_gen_subreg (d->mode, temp2, GET_MODE (temp2), 0);
1738
1739 /* And recalculate the index. */
1740 lane -= nunits / 4;
1741 }
1742
33b5a38c
TC
1743 /* Keep to GCC-vector-extension lane indices in the RTL, only nunits / 4
1744 (max nunits in range check) are valid. Which means only 0-1, so we
1745 only need to know the order in a V2mode. */
1746 lane_idx = aarch64_endian_lane_rtx (V2DImode, lane);
1747
9d63f43b
TC
1748 if (!target)
1749 target = gen_reg_rtx (d->mode);
1750 else
1751 target = force_reg (d->mode, target);
1752
1753 rtx pat = NULL_RTX;
1754
1755 if (d->lane)
33b5a38c 1756 pat = GEN_FCN (d->icode) (target, op0, op1, op2, lane_idx);
9d63f43b
TC
1757 else
1758 pat = GEN_FCN (d->icode) (target, op0, op1, op2);
1759
1760 if (!pat)
1761 return NULL_RTX;
1762
1763 emit_insn (pat);
1764 return target;
1765}
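/* A worked example of the lane selection above, for the V2SF variant
   (d->mode == V2SFmode, quadmode == V4SFmode): nunits is 4, so the
   lane index is checked against [0, 2).  Lane 0 satisfies
   lane < nunits / 4 and is taken from the low 64 bits of op2; lane 1
   is extracted from the high 64 bits and renumbered to 0.  */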
1766
89626179
SD
1767/* Function to expand an expression EXP which calls one of the Transactional
1768 Memory Extension (TME) builtins FCODE with the result going to TARGET. */
1769static rtx
1770aarch64_expand_builtin_tme (int fcode, tree exp, rtx target)
1771{
1772 switch (fcode)
1773 {
1774 case AARCH64_TME_BUILTIN_TSTART:
1775 target = gen_reg_rtx (DImode);
1776 emit_insn (GEN_FCN (CODE_FOR_tstart) (target));
1777 break;
1778
1779 case AARCH64_TME_BUILTIN_TTEST:
1780 target = gen_reg_rtx (DImode);
1781 emit_insn (GEN_FCN (CODE_FOR_ttest) (target));
1782 break;
1783
1784 case AARCH64_TME_BUILTIN_TCOMMIT:
1785 emit_insn (GEN_FCN (CODE_FOR_tcommit) ());
1786 break;
1787
1788 case AARCH64_TME_BUILTIN_TCANCEL:
1789 {
1790 tree arg0 = CALL_EXPR_ARG (exp, 0);
1791 rtx op0 = expand_normal (arg0);
 1792 if (CONST_INT_P (op0) && UINTVAL (op0) <= 65535)
1793 emit_insn (GEN_FCN (CODE_FOR_tcancel) (op0));
1794 else
1795 {
1796 error ("%Kargument must be a 16-bit constant immediate", exp);
1797 return const0_rtx;
1798 }
1799 }
1800 break;
1801
1802 default :
1803 gcc_unreachable ();
1804 }
1805 return target;
1806}
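/* A usage sketch, mirroring the ACLE wrappers in arm_acle.h
   (illustrative; assumes a target with the TME extension):

     unsigned long long status = __builtin_aarch64_tstart ();
     if (status == 0)
       {
         ... transactional region, ended by __builtin_aarch64_tcommit ()
             or aborted by __builtin_aarch64_tcancel (REASON), where
             REASON is a constant in [0, 65535] as checked above ...
       }

   __builtin_aarch64_ttest returns the current transactional nesting
   depth, or zero when no transaction is active.  */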
1807
c5dc215d
KT
1808/* Expand a random number builtin EXP with code FCODE, putting the result
 1809 in TARGET. If IGNORE is true the return value is ignored. */
1810
1811rtx
1812aarch64_expand_rng_builtin (tree exp, rtx target, int fcode, int ignore)
1813{
1814 rtx pat;
1815 enum insn_code icode;
1816 if (fcode == AARCH64_BUILTIN_RNG_RNDR)
1817 icode = CODE_FOR_aarch64_rndr;
1818 else if (fcode == AARCH64_BUILTIN_RNG_RNDRRS)
1819 icode = CODE_FOR_aarch64_rndrrs;
1820 else
1821 gcc_unreachable ();
1822
1823 rtx rand = gen_reg_rtx (DImode);
1824 pat = GEN_FCN (icode) (rand);
1825 if (!pat)
1826 return NULL_RTX;
1827
1828 tree arg0 = CALL_EXPR_ARG (exp, 0);
1829 rtx res_addr = expand_normal (arg0);
1830 res_addr = convert_memory_address (Pmode, res_addr);
1831 rtx res_mem = gen_rtx_MEM (DImode, res_addr);
1832 emit_insn (pat);
1833 emit_move_insn (res_mem, rand);
1834 /* If the status result is unused don't generate the CSET code. */
1835 if (ignore)
1836 return target;
1837
1838 rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
1839 rtx cmp_rtx = gen_rtx_fmt_ee (NE, SImode, cc_reg, const0_rtx);
1840 emit_insn (gen_aarch64_cstoresi (target, cmp_rtx, cc_reg));
1841 return target;
1842}
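/* A usage sketch, mirroring the ACLE __rndr wrapper (illustrative):

     uint64_t val;
     int status = __builtin_aarch64_rndr (&val);

   STATUS reflects the Z flag set by the RNDR instruction, indicating
   whether a genuine random number was returned.  When the status
   result is unused, the expansion above omits the CSET and only the
   RNDR plus the store to *VAL remain.  */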
1843
ef01e6bb
DZ
1844/* Expand an expression EXP that calls a MEMTAG built-in FCODE
1845 with result going to TARGET. */
1846static rtx
1847aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
1848{
1849 if (TARGET_ILP32)
1850 {
1851 error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
1852 return const0_rtx;
1853 }
1854
1855 rtx pat = NULL;
1856 enum insn_code icode = aarch64_memtag_builtin_data[fcode -
1857 AARCH64_MEMTAG_BUILTIN_START - 1].icode;
1858
1859 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
1860 machine_mode mode0 = GET_MODE (op0);
1861 op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
1862 op0 = convert_to_mode (DImode, op0, true);
1863
1864 switch (fcode)
1865 {
1866 case AARCH64_MEMTAG_BUILTIN_IRG:
1867 case AARCH64_MEMTAG_BUILTIN_GMI:
1868 case AARCH64_MEMTAG_BUILTIN_SUBP:
1869 case AARCH64_MEMTAG_BUILTIN_INC_TAG:
1870 {
1871 if (! target
1872 || GET_MODE (target) != DImode
1873 || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
1874 target = gen_reg_rtx (DImode);
1875
1876 if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
1877 {
1878 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
1879
1880 if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
1881 {
1882 pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
1883 break;
1884 }
1885 error ("%Kargument %d must be a constant immediate "
1886 "in range [0,15]", exp, 2);
1887 return const0_rtx;
1888 }
1889 else
1890 {
1891 rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
1892 machine_mode mode1 = GET_MODE (op1);
1893 op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
1894 op1 = convert_to_mode (DImode, op1, true);
1895 pat = GEN_FCN (icode) (target, op0, op1);
1896 }
1897 break;
1898 }
1899 case AARCH64_MEMTAG_BUILTIN_GET_TAG:
1900 target = op0;
1901 pat = GEN_FCN (icode) (target, op0, const0_rtx);
1902 break;
1903 case AARCH64_MEMTAG_BUILTIN_SET_TAG:
1904 pat = GEN_FCN (icode) (op0, op0, const0_rtx);
1905 break;
1906 default:
 1907 gcc_unreachable ();
1908 }
1909
1910 if (!pat)
1911 return NULL_RTX;
1912
1913 emit_insn (pat);
1914 return target;
1915}
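/* A usage sketch, mirroring the arm_acle.h MTE wrappers (illustrative;
   the builtin names are those registered by the MEMTAG init code
   earlier in this file, and assume a target with the MTE extension):

     void *p = __builtin_aarch64_memtag_irg (buf, 0);        IRG
     p = __builtin_aarch64_memtag_inc_tag (p, 1);            ADDG
     __builtin_aarch64_memtag_set_tag (p);                   STG
     void *q = __builtin_aarch64_memtag_get_tag (p);         LDG
     ptrdiff_t d = __builtin_aarch64_memtag_subp (q, p);     SUBP  */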
1916
0d7e5fa6
AC
1917/* Expand an expression EXP as an fpsr or fpcr setter (depending on
1918 UNSPEC) using MODE. */
1919static void
1920aarch64_expand_fpsr_fpcr_setter (int unspec, machine_mode mode, tree exp)
1921{
1922 tree arg = CALL_EXPR_ARG (exp, 0);
1923 rtx op = force_reg (mode, expand_normal (arg));
1924 emit_insn (gen_aarch64_set (unspec, mode, op));
1925}
1926
6d4d616a 1927/* Expand an expression EXP that calls built-in function FCODE,
c5dc215d
KT
1928 with result going to TARGET if that's convenient. IGNORE is true
1929 if the result of the builtin is ignored. */
342be7f7 1930rtx
c5dc215d
KT
1931aarch64_general_expand_builtin (unsigned int fcode, tree exp, rtx target,
1932 int ignore)
342be7f7 1933{
aa87aced 1934 int icode;
0d7e5fa6 1935 rtx op0;
aa87aced
KV
1936 tree arg0;
1937
1938 switch (fcode)
1939 {
1940 case AARCH64_BUILTIN_GET_FPCR:
0d7e5fa6
AC
1941 emit_insn (gen_aarch64_get (UNSPECV_GET_FPCR, SImode, target));
1942 return target;
aa87aced 1943 case AARCH64_BUILTIN_SET_FPCR:
0d7e5fa6
AC
1944 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, SImode, exp);
1945 return target;
aa87aced 1946 case AARCH64_BUILTIN_GET_FPSR:
0d7e5fa6
AC
1947 emit_insn (gen_aarch64_get (UNSPECV_GET_FPSR, SImode, target));
1948 return target;
aa87aced 1949 case AARCH64_BUILTIN_SET_FPSR:
0d7e5fa6
AC
1950 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, SImode, exp);
1951 return target;
1952 case AARCH64_BUILTIN_GET_FPCR64:
1953 emit_insn (gen_aarch64_get (UNSPECV_GET_FPCR, DImode, target));
1954 return target;
1955 case AARCH64_BUILTIN_SET_FPCR64:
1956 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, DImode, exp);
1957 return target;
1958 case AARCH64_BUILTIN_GET_FPSR64:
1959 emit_insn (gen_aarch64_get (UNSPECV_GET_FPSR, DImode, target));
1960 return target;
1961 case AARCH64_BUILTIN_SET_FPSR64:
1962 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, DImode, exp);
aa87aced 1963 return target;
312492bd
JW
1964 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
1965 case AARCH64_PAUTH_BUILTIN_PACIA1716:
8fc16d72
ST
1966 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
1967 case AARCH64_PAUTH_BUILTIN_PACIB1716:
312492bd
JW
1968 case AARCH64_PAUTH_BUILTIN_XPACLRI:
1969 arg0 = CALL_EXPR_ARG (exp, 0);
1970 op0 = force_reg (Pmode, expand_normal (arg0));
1971
1972 if (!target)
1973 target = gen_reg_rtx (Pmode);
1974 else
1975 target = force_reg (Pmode, target);
1976
1977 emit_move_insn (target, op0);
1978
1979 if (fcode == AARCH64_PAUTH_BUILTIN_XPACLRI)
1980 {
1981 rtx lr = gen_rtx_REG (Pmode, R30_REGNUM);
1982 icode = CODE_FOR_xpaclri;
1983 emit_move_insn (lr, op0);
1984 emit_insn (GEN_FCN (icode) ());
1985 emit_move_insn (target, lr);
1986 }
1987 else
1988 {
1989 tree arg1 = CALL_EXPR_ARG (exp, 1);
1990 rtx op1 = force_reg (Pmode, expand_normal (arg1));
8fc16d72
ST
1991 switch (fcode)
1992 {
1993 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
1994 icode = CODE_FOR_autia1716;
1995 break;
1996 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
1997 icode = CODE_FOR_autib1716;
1998 break;
1999 case AARCH64_PAUTH_BUILTIN_PACIA1716:
2000 icode = CODE_FOR_pacia1716;
2001 break;
2002 case AARCH64_PAUTH_BUILTIN_PACIB1716:
2003 icode = CODE_FOR_pacib1716;
2004 break;
2005 default:
2006 icode = 0;
2007 gcc_unreachable ();
2008 }
312492bd
JW
2009
2010 rtx x16_reg = gen_rtx_REG (Pmode, R16_REGNUM);
2011 rtx x17_reg = gen_rtx_REG (Pmode, R17_REGNUM);
2012 emit_move_insn (x17_reg, op0);
2013 emit_move_insn (x16_reg, op1);
2014 emit_insn (GEN_FCN (icode) ());
2015 emit_move_insn (target, x17_reg);
2016 }
2017
2018 return target;
9d63f43b 2019
e1d5d19e
KT
2020 case AARCH64_JSCVT:
2021 arg0 = CALL_EXPR_ARG (exp, 0);
2022 op0 = force_reg (DFmode, expand_normal (arg0));
2023 if (!target)
2024 target = gen_reg_rtx (SImode);
2025 else
2026 target = force_reg (SImode, target);
2027 emit_insn (GEN_FCN (CODE_FOR_aarch64_fjcvtzs) (target, op0));
2028 return target;
2029
9d63f43b
TC
2030 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V2SF:
2031 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V2SF:
2032 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V2SF:
2033 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V2SF:
2034 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V4HF:
2035 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V4HF:
2036 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V4HF:
2037 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V4HF:
2038 return aarch64_expand_fcmla_builtin (exp, target, fcode);
c5dc215d
KT
2039 case AARCH64_BUILTIN_RNG_RNDR:
2040 case AARCH64_BUILTIN_RNG_RNDRRS:
2041 return aarch64_expand_rng_builtin (exp, target, fcode, ignore);
aa87aced 2042 }
342be7f7 2043
5d357f26 2044 if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
342be7f7 2045 return aarch64_simd_expand_builtin (fcode, exp, target);
5d357f26
KT
2046 else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
2047 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7 2048
a6fc00da
BH
2049 if (fcode == AARCH64_BUILTIN_RSQRT_DF
2050 || fcode == AARCH64_BUILTIN_RSQRT_SF
2051 || fcode == AARCH64_BUILTIN_RSQRT_V2DF
2052 || fcode == AARCH64_BUILTIN_RSQRT_V2SF
2053 || fcode == AARCH64_BUILTIN_RSQRT_V4SF)
2054 return aarch64_expand_builtin_rsqrt (fcode, exp, target);
2055
89626179
SD
2056 if (fcode == AARCH64_TME_BUILTIN_TSTART
2057 || fcode == AARCH64_TME_BUILTIN_TCOMMIT
2058 || fcode == AARCH64_TME_BUILTIN_TTEST
2059 || fcode == AARCH64_TME_BUILTIN_TCANCEL)
2060 return aarch64_expand_builtin_tme (fcode, exp, target);
2061
ef01e6bb
DZ
2062 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
2063 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
2064 return aarch64_expand_builtin_memtag (fcode, exp, target);
2065
d5a29419 2066 gcc_unreachable ();
342be7f7 2067}
42fc9a7f
JG
2068
2069tree
10766209
RS
2070aarch64_builtin_vectorized_function (unsigned int fn, tree type_out,
2071 tree type_in)
42fc9a7f 2072{
ef4bddc2 2073 machine_mode in_mode, out_mode;
42fc9a7f
JG
2074
2075 if (TREE_CODE (type_out) != VECTOR_TYPE
2076 || TREE_CODE (type_in) != VECTOR_TYPE)
2077 return NULL_TREE;
2078
7cee9637
RS
2079 out_mode = TYPE_MODE (type_out);
2080 in_mode = TYPE_MODE (type_in);
42fc9a7f
JG
2081
2082#undef AARCH64_CHECK_BUILTIN_MODE
2083#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
2084#define AARCH64_FIND_FRINT_VARIANT(N) \
2085 (AARCH64_CHECK_BUILTIN_MODE (2, D) \
e993fea1 2086 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
42fc9a7f 2087 : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
e993fea1 2088 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
42fc9a7f 2089 : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
e993fea1 2090 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
42fc9a7f 2091 : NULL_TREE)))
10766209 2092 switch (fn)
42fc9a7f 2093 {
42fc9a7f
JG
2094#undef AARCH64_CHECK_BUILTIN_MODE
2095#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
7cee9637 2096 (out_mode == V##C##N##Fmode && in_mode == V##C##N##Fmode)
10766209
RS
2097 CASE_CFN_FLOOR:
2098 return AARCH64_FIND_FRINT_VARIANT (floor);
2099 CASE_CFN_CEIL:
2100 return AARCH64_FIND_FRINT_VARIANT (ceil);
2101 CASE_CFN_TRUNC:
2102 return AARCH64_FIND_FRINT_VARIANT (btrunc);
2103 CASE_CFN_ROUND:
2104 return AARCH64_FIND_FRINT_VARIANT (round);
2105 CASE_CFN_NEARBYINT:
2106 return AARCH64_FIND_FRINT_VARIANT (nearbyint);
2107 CASE_CFN_SQRT:
2108 return AARCH64_FIND_FRINT_VARIANT (sqrt);
42fc9a7f 2109#undef AARCH64_CHECK_BUILTIN_MODE
b5574232 2110#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
7cee9637 2111 (out_mode == V##C##SImode && in_mode == V##C##N##Imode)
10766209
RS
2112 CASE_CFN_CLZ:
2113 {
2114 if (AARCH64_CHECK_BUILTIN_MODE (4, S))
2115 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
2116 return NULL_TREE;
2117 }
2118 CASE_CFN_CTZ:
2119 {
2120 if (AARCH64_CHECK_BUILTIN_MODE (2, S))
2121 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
2122 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
2123 return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
2124 return NULL_TREE;
2125 }
b5574232 2126#undef AARCH64_CHECK_BUILTIN_MODE
42fc9a7f 2127#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
7cee9637 2128 (out_mode == V##C##N##Imode && in_mode == V##C##N##Fmode)
10766209
RS
2129 CASE_CFN_IFLOOR:
2130 CASE_CFN_LFLOOR:
2131 CASE_CFN_LLFLOOR:
2132 {
2133 enum aarch64_builtins builtin;
2134 if (AARCH64_CHECK_BUILTIN_MODE (2, D))
2135 builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
2136 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
2137 builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
2138 else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
2139 builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
2140 else
2141 return NULL_TREE;
2142
2143 return aarch64_builtin_decls[builtin];
2144 }
2145 CASE_CFN_ICEIL:
2146 CASE_CFN_LCEIL:
2147 CASE_CFN_LLCEIL:
2148 {
2149 enum aarch64_builtins builtin;
2150 if (AARCH64_CHECK_BUILTIN_MODE (2, D))
2151 builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
2152 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
2153 builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
2154 else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
2155 builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
2156 else
2157 return NULL_TREE;
2158
2159 return aarch64_builtin_decls[builtin];
2160 }
2161 CASE_CFN_IROUND:
2162 CASE_CFN_LROUND:
2163 CASE_CFN_LLROUND:
2164 {
2165 enum aarch64_builtins builtin;
2166 if (AARCH64_CHECK_BUILTIN_MODE (2, D))
2167 builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
2168 else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
2169 builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
2170 else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
2171 builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
2172 else
2173 return NULL_TREE;
2174
2175 return aarch64_builtin_decls[builtin];
2176 }
10766209
RS
2177 default:
2178 return NULL_TREE;
42fc9a7f
JG
2179 }
2180
2181 return NULL_TREE;
2182}
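/* For example, when the vectorizer asks for a variant of
   __builtin_floor with type_out == type_in == V2DF, the code above
   returns __builtin_aarch64_floorv2df, which expands to FRINTM.  */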
0ac198d3 2183
a6fc00da
BH
2184/* Return builtin for reciprocal square root. */
2185
2186tree
6d4d616a 2187aarch64_general_builtin_rsqrt (unsigned int fn)
a6fc00da 2188{
ee62a5a6
RS
2189 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2df)
2190 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2DF];
2191 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2sf)
2192 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2SF];
2193 if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv4sf)
2194 return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V4SF];
a6fc00da
BH
2195 return NULL_TREE;
2196}
2197
0ac198d3
JG
2198#undef VAR1
2199#define VAR1(T, N, MAP, A) \
e993fea1 2200 case AARCH64_SIMD_BUILTIN_##T##_##N##A:
0ac198d3 2201
6d4d616a
RS
2202/* Try to fold a call to the built-in function with subcode FCODE. The
2203 function is passed the N_ARGS arguments in ARGS and it returns a value
2204 of type TYPE. Return the new expression on success and NULL_TREE on
2205 failure. */
9697e620 2206tree
6d4d616a
RS
2207aarch64_general_fold_builtin (unsigned int fcode, tree type,
2208 unsigned int n_args ATTRIBUTE_UNUSED, tree *args)
9697e620 2209{
9697e620
JG
2210 switch (fcode)
2211 {
8f905d69 2212 BUILTIN_VDQF (UNOP, abs, 2)
9697e620 2213 return fold_build1 (ABS_EXPR, type, args[0]);
1709ff9b
JG
2214 VAR1 (UNOP, floatv2si, 2, v2sf)
2215 VAR1 (UNOP, floatv4si, 2, v4sf)
2216 VAR1 (UNOP, floatv2di, 2, v2df)
2217 return fold_build1 (FLOAT_EXPR, type, args[0]);
9697e620
JG
2218 default:
2219 break;
2220 }
2221
2222 return NULL_TREE;
2223}
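/* For example, __builtin_aarch64_absv4sf (x) is folded above to
   ABS_EXPR <x>, letting the generic optimizers see through the
   intrinsic.  */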
2224
6d4d616a
RS
2225/* Try to fold STMT, given that it's a call to the built-in function with
2226 subcode FCODE. Return the new statement on success and null on
2227 failure. */
2228gimple *
2229aarch64_general_gimple_fold_builtin (unsigned int fcode, gcall *stmt)
0ac198d3 2230{
355fe088 2231 gimple *new_stmt = NULL;
6d4d616a
RS
2232 unsigned nargs = gimple_call_num_args (stmt);
2233 tree *args = (nargs > 0
2234 ? gimple_call_arg_ptr (stmt, 0)
2235 : &error_mark_node);
2236
 2237 /* We use gimple's IFN_REDUC_(PLUS|MIN|MAX) for float, signed int
 2238 and unsigned int; the internal function distinguishes the variants
 2239 by the types of the arguments to the __builtin. */
2240 switch (fcode)
0ac198d3 2241 {
6d4d616a
RS
2242 BUILTIN_VALL (UNOP, reduc_plus_scal_, 10)
2243 new_stmt = gimple_build_call_internal (IFN_REDUC_PLUS,
2244 1, args[0]);
2245 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2246 break;
2247 BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10)
2248 BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10)
2249 new_stmt = gimple_build_call_internal (IFN_REDUC_MAX,
2250 1, args[0]);
2251 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2252 break;
2253 BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10)
2254 BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10)
2255 new_stmt = gimple_build_call_internal (IFN_REDUC_MIN,
2256 1, args[0]);
2257 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2258 break;
2259 BUILTIN_GPF (BINOP, fmulx, 0)
0ac198d3 2260 {
6d4d616a
RS
2261 gcc_assert (nargs == 2);
2262 bool a0_cst_p = TREE_CODE (args[0]) == REAL_CST;
2263 bool a1_cst_p = TREE_CODE (args[1]) == REAL_CST;
2264 if (a0_cst_p || a1_cst_p)
0ac198d3 2265 {
6d4d616a 2266 if (a0_cst_p && a1_cst_p)
546e500c 2267 {
6d4d616a
RS
2268 tree t0 = TREE_TYPE (args[0]);
2269 real_value a0 = (TREE_REAL_CST (args[0]));
2270 real_value a1 = (TREE_REAL_CST (args[1]));
2271 if (real_equal (&a1, &dconst0))
2272 std::swap (a0, a1);
2273 /* According to real_equal (), +0 equals -0. */
2274 if (real_equal (&a0, &dconst0) && real_isinf (&a1))
546e500c 2275 {
6d4d616a
RS
2276 real_value res = dconst2;
2277 res.sign = a0.sign ^ a1.sign;
2278 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2279 REAL_CST,
2280 build_real (t0, res));
546e500c 2281 }
6d4d616a
RS
2282 else
2283 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2284 MULT_EXPR,
2285 args[0], args[1]);
546e500c 2286 }
6d4d616a
RS
2287 else /* a0_cst_p ^ a1_cst_p. */
2288 {
2289 real_value const_part = a0_cst_p
2290 ? TREE_REAL_CST (args[0]) : TREE_REAL_CST (args[1]);
2291 if (!real_equal (&const_part, &dconst0)
2292 && !real_isinf (&const_part))
2293 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2294 MULT_EXPR, args[0],
2295 args[1]);
2296 }
2297 }
2298 if (new_stmt)
2299 {
2300 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
2301 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
0ac198d3 2302 }
6d4d616a 2303 break;
0ac198d3 2304 }
6d4d616a
RS
2305 default:
2306 break;
0ac198d3 2307 }
6d4d616a 2308 return new_stmt;
0ac198d3
JG
2309}
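/* For example, lhs = __builtin_aarch64_reduc_plus_scal_v4sf (x) is
   rewritten above to lhs = .REDUC_PLUS (x), and the fmulx builtins
   become plain MULT_EXPRs whenever the constants rule out the
   0 * Inf special case (which instead folds to +/-2.0 with the
   combined sign).  */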
2310
aa87aced
KV
2311void
2312aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
2313{
2314 const unsigned AARCH64_FE_INVALID = 1;
2315 const unsigned AARCH64_FE_DIVBYZERO = 2;
2316 const unsigned AARCH64_FE_OVERFLOW = 4;
2317 const unsigned AARCH64_FE_UNDERFLOW = 8;
2318 const unsigned AARCH64_FE_INEXACT = 16;
2319 const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
2320 | AARCH64_FE_DIVBYZERO
2321 | AARCH64_FE_OVERFLOW
2322 | AARCH64_FE_UNDERFLOW
2323 | AARCH64_FE_INEXACT);
2324 const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
2325 tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
2326 tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
2327 tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
2328 tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;
2329
 2330 /* Generate the equivalent of:
2331 unsigned int fenv_cr;
2332 fenv_cr = __builtin_aarch64_get_fpcr ();
2333
2334 unsigned int fenv_sr;
2335 fenv_sr = __builtin_aarch64_get_fpsr ();
2336
2337 Now set all exceptions to non-stop
2338 unsigned int mask_cr
2339 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
2340 unsigned int masked_cr;
2341 masked_cr = fenv_cr & mask_cr;
2342
2343 And clear all exception flags
 2344 unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
 2345 unsigned int masked_sr;
 2346 masked_sr = fenv_sr & mask_sr;
2347
2348 __builtin_aarch64_set_cr (masked_cr);
2349 __builtin_aarch64_set_sr (masked_sr); */
2350
09ba9ef7
RR
2351 fenv_cr = create_tmp_var_raw (unsigned_type_node);
2352 fenv_sr = create_tmp_var_raw (unsigned_type_node);
aa87aced
KV
2353
2354 get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
2355 set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
2356 get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
2357 set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];
2358
2359 mask_cr = build_int_cst (unsigned_type_node,
2360 ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
2361 mask_sr = build_int_cst (unsigned_type_node,
2362 ~(AARCH64_FE_ALL_EXCEPT));
2363
d81bc2af
HZ
2364 ld_fenv_cr = build4 (TARGET_EXPR, unsigned_type_node,
2365 fenv_cr, build_call_expr (get_fpcr, 0),
2366 NULL_TREE, NULL_TREE);
2367 ld_fenv_sr = build4 (TARGET_EXPR, unsigned_type_node,
2368 fenv_sr, build_call_expr (get_fpsr, 0),
2369 NULL_TREE, NULL_TREE);
aa87aced
KV
2370
2371 masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
2372 masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);
2373
2374 hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
2375 hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);
2376
2377 hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
2378 hold_fnclex_sr);
2379 masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
2380 masked_fenv_sr);
2381 ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);
2382
2383 *hold = build2 (COMPOUND_EXPR, void_type_node,
2384 build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
2385 hold_fnclex);
2386
2387 /* Store the value of masked_fenv to clear the exceptions:
2388 __builtin_aarch64_set_fpsr (masked_fenv_sr); */
2389
2390 *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);
2391
 2392 /* Generate the equivalent of:
2393 unsigned int new_fenv_var;
2394 new_fenv_var = __builtin_aarch64_get_fpsr ();
2395
2396 __builtin_aarch64_set_fpsr (fenv_sr);
2397
2398 __atomic_feraiseexcept (new_fenv_var); */
2399
09ba9ef7 2400 new_fenv_var = create_tmp_var_raw (unsigned_type_node);
d81bc2af
HZ
2401 reload_fenv = build4 (TARGET_EXPR, unsigned_type_node,
2402 new_fenv_var, build_call_expr (get_fpsr, 0),
2403 NULL_TREE, NULL_TREE);
aa87aced
KV
2404 restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
2405 atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
2406 update_call = build_call_expr (atomic_feraiseexcept, 1,
2407 fold_convert (integer_type_node, new_fenv_var));
2408 *update = build2 (COMPOUND_EXPR, void_type_node,
2409 build2 (COMPOUND_EXPR, void_type_node,
2410 reload_fenv, restore_fnenv), update_call);
2411}
2412
ef01e6bb
DZ
2413/* Resolve overloaded MEMTAG built-in functions. */
2414#define AARCH64_BUILTIN_SUBCODE(F) \
2415 (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)
2416
2417static tree
2418aarch64_resolve_overloaded_memtag (location_t loc,
2419 tree fndecl, void *pass_params)
2420{
2421 vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
 2422 unsigned param_num = params ? params->length () : 0;
2423 unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
2424 tree inittype = aarch64_memtag_builtin_data[
2425 fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
2426 unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;
2427
2428 if (param_num != arg_num)
2429 {
2430 TREE_TYPE (fndecl) = inittype;
2431 return NULL_TREE;
2432 }
2433 tree retype = NULL;
2434
2435 if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
2436 {
2437 tree t0 = TREE_TYPE ((*params)[0]);
2438 tree t1 = TREE_TYPE ((*params)[1]);
2439
2440 if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
2441 t0 = ptr_type_node;
2442 if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
2443 t1 = ptr_type_node;
2444
2445 if (TYPE_MODE (t0) != DImode)
2446 warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
2447 (int)tree_to_shwi (DECL_SIZE ((*params)[0])));
2448
2449 if (TYPE_MODE (t1) != DImode)
2450 warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
2451 (int)tree_to_shwi (DECL_SIZE ((*params)[1])));
2452
2453 retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
2454 }
2455 else
2456 {
2457 tree t0 = TREE_TYPE ((*params)[0]);
2458
2459 if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
2460 {
2461 TREE_TYPE (fndecl) = inittype;
2462 return NULL_TREE;
2463 }
2464
2465 if (TYPE_MODE (t0) != DImode)
2466 warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
2467 (int)tree_to_shwi (DECL_SIZE ((*params)[0])));
2468
2469 switch (fcode)
2470 {
2471 case AARCH64_MEMTAG_BUILTIN_IRG:
2472 retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
2473 break;
2474 case AARCH64_MEMTAG_BUILTIN_GMI:
2475 retype = build_function_type_list (uint64_type_node, t0,
2476 uint64_type_node, NULL);
2477 break;
2478 case AARCH64_MEMTAG_BUILTIN_INC_TAG:
2479 retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
2480 break;
2481 case AARCH64_MEMTAG_BUILTIN_SET_TAG:
2482 retype = build_function_type_list (void_type_node, t0, NULL);
2483 break;
2484 case AARCH64_MEMTAG_BUILTIN_GET_TAG:
2485 retype = build_function_type_list (t0, t0, NULL);
2486 break;
2487 default:
2488 return NULL_TREE;
2489 }
2490 }
2491
2492 if (!retype || retype == error_mark_node)
2493 TREE_TYPE (fndecl) = inittype;
2494 else
2495 TREE_TYPE (fndecl) = retype;
2496
2497 return NULL_TREE;
2498}
2499
2500/* Called by aarch64_resolve_overloaded_builtin in aarch64-c.c. */
2501tree
2502aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
2503 void *pass_params)
2504{
2505 unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);
2506
2507 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
2508 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
 2509 return aarch64_resolve_overloaded_memtag (loc, function, pass_params);
2510
2511 return NULL_TREE;
2512}
aa87aced 2513
42fc9a7f
JG
2514#undef AARCH64_CHECK_BUILTIN_MODE
2515#undef AARCH64_FIND_FRINT_VARIANT
0ddec79f
JG
2516#undef CF0
2517#undef CF1
2518#undef CF2
2519#undef CF3
2520#undef CF4
2521#undef CF10
2522#undef VAR1
2523#undef VAR2
2524#undef VAR3
2525#undef VAR4
2526#undef VAR5
2527#undef VAR6
2528#undef VAR7
2529#undef VAR8
2530#undef VAR9
2531#undef VAR10
2532#undef VAR11
2533
3c03d39d 2534#include "gt-aarch64-builtins.h"