/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2020 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "function.h"
#include "basic-block.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "tm_p.h"
#include "expmed.h"
#include "optabs.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "explow.h"
#include "expr.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "case-cfn-macros.h"
#include "emit-rtl.h"
#include "stringpool.h"
#include "attribs.h"

#define v8qi_UP  E_V8QImode
#define v4hi_UP  E_V4HImode
#define v4hf_UP  E_V4HFmode
#define v2si_UP  E_V2SImode
#define v2sf_UP  E_V2SFmode
#define v1df_UP  E_V1DFmode
#define di_UP    E_DImode
#define df_UP    E_DFmode
#define v16qi_UP E_V16QImode
#define v8hi_UP  E_V8HImode
#define v8hf_UP  E_V8HFmode
#define v4si_UP  E_V4SImode
#define v4sf_UP  E_V4SFmode
#define v2di_UP  E_V2DImode
#define v2df_UP  E_V2DFmode
#define ti_UP    E_TImode
#define oi_UP    E_OImode
#define ci_UP    E_CImode
#define xi_UP    E_XImode
#define si_UP    E_SImode
#define sf_UP    E_SFmode
#define hi_UP    E_HImode
#define hf_UP    E_HFmode
#define qi_UP    E_QImode
#define bf_UP    E_BFmode
#define v4bf_UP  E_V4BFmode
#define v8bf_UP  E_V8BFmode
#define UP(X) X##_UP

#define SIMD_MAX_BUILTIN_ARGS 5

enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands; this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be in range, and flipped for big-endian.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single-lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400,
  /* Lane indices selected in pairs - must be in range, and flipped for
     big-endian.  */
  qualifier_lane_pair_index = 0x800,
  /* Lane indices selected in quadtuplets - must be in range, and flipped
     for big-endian.  */
  qualifier_lane_quadtup_index = 0x1000,
};
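
/* For illustration, a hypothetical qualifier array

     { qualifier_unsigned, qualifier_none, qualifier_immediate }

   reads left to right as "return unsigned T; take a T and a constant
   immediate", i.e. it describes a builtin of the shape

     unsigned T __builtin_foo (T, const int);

   The 0th entry is always the return type; the remaining entries describe
   the arguments in order.  */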

/* Flags that describe what a function might do.  */
const unsigned int FLAG_NONE = 0U;
const unsigned int FLAG_READ_FPCR = 1U << 0;
const unsigned int FLAG_RAISE_FP_EXCEPTIONS = 1U << 1;
const unsigned int FLAG_READ_MEMORY = 1U << 2;
const unsigned int FLAG_PREFETCH_MEMORY = 1U << 3;
const unsigned int FLAG_WRITE_MEMORY = 1U << 4;

const unsigned int FLAG_FP = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS;
const unsigned int FLAG_ALL = FLAG_READ_FPCR | FLAG_RAISE_FP_EXCEPTIONS
  | FLAG_READ_MEMORY | FLAG_PREFETCH_MEMORY | FLAG_WRITE_MEMORY;
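
/* A sketch of how these compose: a floating-point arithmetic builtin would
   typically be tagged FLAG_FP (it consults the FPCR and may raise exceptions
   but touches no memory), while a builtin whose behaviour cannot be described
   more precisely would conservatively be tagged FLAG_ALL.  */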

typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code code;
  unsigned int fcode;
  enum aarch64_type_qualifiers *qualifiers;
  unsigned int flags;
} aarch64_simd_builtin_datum;

static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none };
#define TYPES_UNOPUS (aarch64_types_unopus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uss_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_none };
#define TYPES_BINOP_USS (aarch64_types_binop_uss_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_lane_index };
#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_TERNOPU_LANE (aarch64_types_ternopu_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_TERNOPUI (aarch64_types_ternopu_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_ssus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned, qualifier_none };
#define TYPES_TERNOP_SSUS (aarch64_types_ternop_ssus_qualifiers)


static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_pair_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_pair_index };
#define TYPES_QUADOP_LANE_PAIR (aarch64_types_quadop_lane_pair_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_lane_index };
#define TYPES_QUADOP_LANE (aarch64_types_quadop_lane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopu_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_lane_index };
#define TYPES_QUADOPU_LANE (aarch64_types_quadopu_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopssus_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSUS_LANE_QUADTUP \
  (aarch64_types_quadopssus_lane_quadtup_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadopsssu_lane_quadtup_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_unsigned, qualifier_lane_quadtup_index };
#define TYPES_QUADOPSSSU_LANE_QUADTUP \
  (aarch64_types_quadopsssu_lane_quadtup_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadopu_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_immediate };
#define TYPES_QUADOPUI (aarch64_types_quadopu_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_binop_imm_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_none, qualifier_immediate };
#define TYPES_GETREGP (aarch64_types_binop_imm_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETREG (aarch64_types_binop_imm_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_binop_imm_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_fcvt_from_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned, qualifier_immediate };
#define TYPES_FCVTIMM_SUS (aarch64_types_fcvt_from_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_s_imm_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_poly, qualifier_immediate};
#define TYPES_SETREGP (aarch64_types_ternop_s_imm_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_s_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate};
#define TYPES_SETREG (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_ternop_s_imm_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_ternop_s_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_p_imm_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly, qualifier_immediate};
#define TYPES_SHIFTINSERTP (aarch64_types_ternop_p_imm_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)


static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_combine_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_COMBINEP (aarch64_types_combine_p_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Its first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_poly };
#define TYPES_STORE1P (aarch64_types_store1_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_struct_load_store_lane_index };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)

#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

#define VAR1(T, N, MAP, FLAG, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T, FLAG_##FLAG},
#define VAR2(T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, A) \
  VAR1 (T, N, MAP, FLAG, B)
#define VAR3(T, N, MAP, FLAG, A, B, C) \
  VAR2 (T, N, MAP, FLAG, A, B) \
  VAR1 (T, N, MAP, FLAG, C)
#define VAR4(T, N, MAP, FLAG, A, B, C, D) \
  VAR3 (T, N, MAP, FLAG, A, B, C) \
  VAR1 (T, N, MAP, FLAG, D)
#define VAR5(T, N, MAP, FLAG, A, B, C, D, E) \
  VAR4 (T, N, MAP, FLAG, A, B, C, D) \
  VAR1 (T, N, MAP, FLAG, E)
#define VAR6(T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, FLAG, A, B, C, D, E) \
  VAR1 (T, N, MAP, FLAG, F)
#define VAR7(T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, FLAG, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, FLAG, G)
#define VAR8(T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, FLAG, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, FLAG, H)
#define VAR9(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, FLAG, I)
#define VAR10(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, FLAG, J)
#define VAR11(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, FLAG, K)
#define VAR12(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, FLAG, L)
#define VAR13(T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR12 (T, N, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR1 (T, N, MAP, FLAG, M)
#define VAR14(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR13 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M) \
  VAR1 (T, X, MAP, FLAG, N)
#define VAR15(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR14 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N) \
  VAR1 (T, X, MAP, FLAG, O)
#define VAR16(T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P) \
  VAR15 (T, X, MAP, FLAG, A, B, C, D, E, F, G, H, I, J, K, L, M, N, O) \
  VAR1 (T, X, MAP, FLAG, P)
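
/* As an illustrative (hypothetical) example of the machinery above, an entry
   such as

     VAR2 (BINOP, add, 0, NONE, v8qi, v16qi)

   would expand, via VAR1, UP and CF0, to the two initialisers

     {"addv8qi", E_V8QImode, CODE_FOR_aarch64_addv8qi, 0,
      TYPES_BINOP, FLAG_NONE},
     {"addv16qi", E_V16QImode, CODE_FOR_aarch64_addv16qi, 0,
      TYPES_BINOP, FLAG_NONE},

   The zero fcode field is filled in later by aarch64_init_simd_builtins.  */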

#include "aarch64-builtin-iterators.h"

static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};

/* There are only 8 CRC32 builtins.  Probably not worth their own .def
   file.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)

/* The next 8 FCMLA intrinsics require some special handling compared to the
   normal simd intrinsics.  */
#define AARCH64_SIMD_FCMLA_LANEQ_BUILTINS \
  FCMLA_LANEQ_BUILTIN (0, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (90, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (180, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (270, v2sf, fcmla, V2SF, false) \
  FCMLA_LANEQ_BUILTIN (0, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (90, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (180, v4hf, fcmla_laneq, V4HF, true) \
  FCMLA_LANEQ_BUILTIN (270, v4hf, fcmla_laneq, V4HF, true)

typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
} aarch64_crc_builtin_datum;

/* Hold information about how to expand the FCMLA_LANEQ builtins.  */
typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
  bool lane;
} aarch64_fcmla_laneq_builtin_datum;

#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M,

#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_BUILTIN_GET_FPCR64,
  AARCH64_BUILTIN_SET_FPCR64,
  AARCH64_BUILTIN_GET_FPSR64,
  AARCH64_BUILTIN_SET_FPSR64,

  AARCH64_BUILTIN_RSQRT_DF,
  AARCH64_BUILTIN_RSQRT_SF,
  AARCH64_BUILTIN_RSQRT_V2DF,
  AARCH64_BUILTIN_RSQRT_V2SF,
  AARCH64_BUILTIN_RSQRT_V4SF,
  AARCH64_SIMD_BUILTIN_BASE,
  AARCH64_SIMD_BUILTIN_LANE_CHECK,
#include "aarch64-simd-builtins.def"
  /* The first enum element which is based on an insn_data pattern.  */
  AARCH64_SIMD_PATTERN_START = AARCH64_SIMD_BUILTIN_LANE_CHECK + 1,
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_PATTERN_START
			     + ARRAY_SIZE (aarch64_simd_builtin_data) - 1,
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  /* ARMv8.3-A Pointer Authentication Builtins.  */
  AARCH64_PAUTH_BUILTIN_AUTIA1716,
  AARCH64_PAUTH_BUILTIN_PACIA1716,
  AARCH64_PAUTH_BUILTIN_AUTIB1716,
  AARCH64_PAUTH_BUILTIN_PACIB1716,
  AARCH64_PAUTH_BUILTIN_XPACLRI,
  /* Special cased Armv8.3-A Complex FMA by Lane quad Builtins.  */
  AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE,
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
  /* Builtin for the Armv8.3-A Javascript conversion instruction.  */
  AARCH64_JSCVT,
  /* TME builtins.  */
  AARCH64_TME_BUILTIN_TSTART,
  AARCH64_TME_BUILTIN_TCOMMIT,
  AARCH64_TME_BUILTIN_TTEST,
  AARCH64_TME_BUILTIN_TCANCEL,
  /* Armv8.5-A RNG instruction builtins.  */
  AARCH64_BUILTIN_RNG_RNDR,
  AARCH64_BUILTIN_RNG_RNDRRS,
  /* MEMTAG builtins.  */
  AARCH64_MEMTAG_BUILTIN_START,
  AARCH64_MEMTAG_BUILTIN_IRG,
  AARCH64_MEMTAG_BUILTIN_GMI,
  AARCH64_MEMTAG_BUILTIN_SUBP,
  AARCH64_MEMTAG_BUILTIN_INC_TAG,
  AARCH64_MEMTAG_BUILTIN_SET_TAG,
  AARCH64_MEMTAG_BUILTIN_GET_TAG,
  AARCH64_MEMTAG_BUILTIN_END,
  AARCH64_BUILTIN_MAX
};

#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, E_##M##mode, CODE_FOR_aarch64_##N, \
   AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};
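
/* To make the double expansion concrete: with the definition just above,
   CRC32_BUILTIN (crc32b, QI) contributes

     {"__builtin_aarch64_crc32b", E_QImode, CODE_FOR_aarch64_crc32b,
      AARCH64_BUILTIN_crc32b},

   to aarch64_crc_builtin_data, while the earlier definition made the same
   macro line contribute the enumerator AARCH64_BUILTIN_crc32b.  */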

#undef FCMLA_LANEQ_BUILTIN
#define FCMLA_LANEQ_BUILTIN(I, N, X, M, T) \
  {"__builtin_aarch64_fcmla_laneq"#I#N, E_##M##mode, \
   CODE_FOR_aarch64_##X##I##N, AARCH64_SIMD_BUILTIN_FCMLA_LANEQ##I##_##M, T},

/* This structure describes the mapping from the builtin to the instruction
   to generate in the back end, and how to invoke that instruction.  */
static aarch64_fcmla_laneq_builtin_datum aarch64_fcmla_lane_builtin_data[] = {
  AARCH64_SIMD_FCMLA_LANEQ_BUILTINS
};

#undef CRC32_BUILTIN

static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types is implementation-defined.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_hf",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  "__builtin_aarch64_simd_bf",
  NULL
};

#define ENTRY(E, M, Q, G) E,
enum aarch64_simd_type
{
#include "aarch64-simd-builtin-types.def"
  ARM_NEON_H_TYPES_LAST
};
#undef ENTRY

struct aarch64_simd_type_info
{
  enum aarch64_simd_type type;

  /* Internal type name.  */
  const char *name;

  /* Internal type name (mangled).  The mangled names conform to the
     AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
     Appendix A).  To qualify for emission with the mangled names defined in
     that document, a vector type must not only be of the correct mode but
     also be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t);
     these types are registered by aarch64_init_simd_builtin_types ().  In
     other words, vector types defined in other ways (e.g. via the
     vector_size attribute) will get default mangled names.  */
  const char *mangle;

  /* Internal type.  */
  tree itype;

  /* Element type.  */
  tree eltype;

  /* Machine mode the internal type maps to.  */
  enum machine_mode mode;

  /* Qualifiers.  */
  enum aarch64_type_qualifiers q;
};

#define ENTRY(E, M, Q, G) \
  {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, E_##M##mode, qualifier_##Q},
static struct aarch64_simd_type_info aarch64_simd_types [] = {
#include "aarch64-simd-builtin-types.def"
};
#undef ENTRY
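
/* For instance (a hypothetical entry, but representative of the .def file),

     ENTRY (Int8x8_t, V8QI, none, 10)

   expands to the initialiser

     {Int8x8_t, "__Int8x8_t", "10__Int8x8_t", NULL_TREE, NULL_TREE,
      E_V8QImode, qualifier_none},

   where "10__Int8x8_t" is the length-prefixed AAPCS64 mangled name.  */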

static tree aarch64_simd_intOI_type_node = NULL_TREE;
static tree aarch64_simd_intCI_type_node = NULL_TREE;
static tree aarch64_simd_intXI_type_node = NULL_TREE;

/* The user-visible __fp16 type, and a pointer to that type.  Used
   across the back-end.  */
tree aarch64_fp16_type_node = NULL_TREE;
tree aarch64_fp16_ptr_type_node = NULL_TREE;

/* Back-end node type for brain float (bfloat) types.  */
tree aarch64_bf16_type_node = NULL_TREE;
tree aarch64_bf16_ptr_type_node = NULL_TREE;

/* Wrapper around add_builtin_function.  NAME is the name of the built-in
   function, TYPE is the function type, and CODE is the function subcode
   (relative to AARCH64_BUILTIN_GENERAL).  */
static tree
aarch64_general_add_builtin (const char *name, tree type, unsigned int code)
{
  code = (code << AARCH64_BUILTIN_SHIFT) | AARCH64_BUILTIN_GENERAL;
  return add_builtin_function (name, type, code, BUILT_IN_MD,
			       NULL, NULL_TREE);
}
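
/* The shift-and-or above packs two pieces of information into a single
   function code: the low bits identify the builtin class (here
   AARCH64_BUILTIN_GENERAL) and the upper bits hold the class-relative
   subcode, so a later consumer can recover both with a mask and a right
   shift.  (The exact field width comes from AARCH64_BUILTIN_SHIFT,
   defined elsewhere in the back end.)  */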

static const char *
aarch64_mangle_builtin_scalar_type (const_tree type)
{
  int i = 0;

  while (aarch64_scalar_builtin_types[i] != NULL)
    {
      const char *name = aarch64_scalar_builtin_types[i];

      if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	  && DECL_NAME (TYPE_NAME (type))
	  && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
	return aarch64_scalar_builtin_types[i];
      i++;
    }
  return NULL;
}

static const char *
aarch64_mangle_builtin_vector_type (const_tree type)
{
  tree attrs = TYPE_ATTRIBUTES (type);
  if (tree attr = lookup_attribute ("Advanced SIMD type", attrs))
    {
      tree mangled_name = TREE_VALUE (TREE_VALUE (attr));
      return IDENTIFIER_POINTER (mangled_name);
    }

  return NULL;
}

const char *
aarch64_general_mangle_builtin_type (const_tree type)
{
  const char *mangle;
  /* Walk through all the AArch64 builtins types tables to filter out the
     incoming type.  */
  if ((mangle = aarch64_mangle_builtin_vector_type (type))
      || (mangle = aarch64_mangle_builtin_scalar_type (type)))
    return mangle;

  return NULL;
}

static tree
aarch64_simd_builtin_std_type (machine_mode mode,
			       enum aarch64_type_qualifiers q)
{
#define QUAL_TYPE(M) \
  ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
  switch (mode)
    {
    case E_QImode:
      return QUAL_TYPE (QI);
    case E_HImode:
      return QUAL_TYPE (HI);
    case E_SImode:
      return QUAL_TYPE (SI);
    case E_DImode:
      return QUAL_TYPE (DI);
    case E_TImode:
      return QUAL_TYPE (TI);
    case E_OImode:
      return aarch64_simd_intOI_type_node;
    case E_CImode:
      return aarch64_simd_intCI_type_node;
    case E_XImode:
      return aarch64_simd_intXI_type_node;
    case E_HFmode:
      return aarch64_fp16_type_node;
    case E_SFmode:
      return float_type_node;
    case E_DFmode:
      return double_type_node;
    case E_BFmode:
      return aarch64_bf16_type_node;
    default:
      gcc_unreachable ();
    }
#undef QUAL_TYPE
}
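
/* For example, aarch64_simd_builtin_std_type (E_SImode, qualifier_unsigned)
   yields unsigned_intSI_type_node, the 32-bit unsigned integer type, while
   qualifier_none would yield intSI_type_node.  Note that QUAL_TYPE only
   distinguishes qualifier_none from everything else, so any other qualifier
   value also selects the unsigned variant here.  */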

static tree
aarch64_lookup_simd_builtin_type (machine_mode mode,
				  enum aarch64_type_qualifiers q)
{
  int i;
  int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);

  /* Non-poly scalar modes map to standard types not in the table.  */
  if (q != qualifier_poly && !VECTOR_MODE_P (mode))
    return aarch64_simd_builtin_std_type (mode, q);

  for (i = 0; i < nelts; i++)
    if (aarch64_simd_types[i].mode == mode
	&& aarch64_simd_types[i].q == q)
      return aarch64_simd_types[i].itype;

  return NULL_TREE;
}

static tree
aarch64_simd_builtin_type (machine_mode mode,
			   bool unsigned_p, bool poly_p)
{
  if (poly_p)
    return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
  else if (unsigned_p)
    return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
  else
    return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
}

static void
aarch64_init_simd_builtin_types (void)
{
  int i;
  int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
  tree tdecl;

  /* Init all the element types built by the front-end.  */
  aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
  aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
  aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
  aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
  aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
  aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
  aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
  aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
  aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
  aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
  aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
  aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
  aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;

  /* Poly types are a world of their own.  */
  aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
    build_distinct_type_copy (unsigned_intQI_type_node);
  /* Prevent front-ends from transforming Poly8_t arrays into string
     literals.  */
  TYPE_STRING_FLAG (aarch64_simd_types[Poly8_t].eltype) = false;

  aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
    build_distinct_type_copy (unsigned_intHI_type_node);
  aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
    build_distinct_type_copy (unsigned_intDI_type_node);
  aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
    build_distinct_type_copy (unsigned_intTI_type_node);
  /* Init poly vector element types with scalar poly types.  */
  aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
  aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
  aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
  aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;

  /* Continue with standard types.  */
  aarch64_simd_types[Float16x4_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float16x8_t].eltype = aarch64_fp16_type_node;
  aarch64_simd_types[Float32x2_t].eltype = float_type_node;
  aarch64_simd_types[Float32x4_t].eltype = float_type_node;
  aarch64_simd_types[Float64x1_t].eltype = double_type_node;
  aarch64_simd_types[Float64x2_t].eltype = double_type_node;

  /* Init Bfloat vector types with underlying __bf16 type.  */
  aarch64_simd_types[Bfloat16x4_t].eltype = aarch64_bf16_type_node;
  aarch64_simd_types[Bfloat16x8_t].eltype = aarch64_bf16_type_node;

  for (i = 0; i < nelts; i++)
    {
      tree eltype = aarch64_simd_types[i].eltype;
      machine_mode mode = aarch64_simd_types[i].mode;

      if (aarch64_simd_types[i].itype == NULL)
	{
	  tree type = build_vector_type (eltype, GET_MODE_NUNITS (mode));
	  type = build_distinct_type_copy (type);
	  SET_TYPE_STRUCTURAL_EQUALITY (type);

	  tree mangled_name = get_identifier (aarch64_simd_types[i].mangle);
	  tree value = tree_cons (NULL_TREE, mangled_name, NULL_TREE);
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("Advanced SIMD type"), value,
			 TYPE_ATTRIBUTES (type));
	  aarch64_simd_types[i].itype = type;
	}

      tdecl = add_builtin_type (aarch64_simd_types[i].name,
				aarch64_simd_types[i].itype);
      TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
    }

#define AARCH64_BUILD_SIGNED_TYPE(mode) \
  make_signed_type (GET_MODE_PRECISION (mode));
  aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
  aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
  aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
#undef AARCH64_BUILD_SIGNED_TYPE

  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_oi", aarch64_simd_intOI_type_node);
  TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_ci", aarch64_simd_intCI_type_node);
  TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
  tdecl = add_builtin_type
	    ("__builtin_aarch64_simd_xi", aarch64_simd_intXI_type_node);
  TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
}

static void
aarch64_init_simd_builtin_scalar_types (void)
{
  /* Define typedefs for all the standard scalar types.  */
  (*lang_hooks.types.register_builtin_type) (intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node,
					     "__builtin_aarch64_simd_hf");
  (*lang_hooks.types.register_builtin_type) (intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (aarch64_bf16_type_node,
					     "__builtin_aarch64_simd_bf");
  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_udi");
}

static bool aarch64_simd_builtins_initialized_p = false;

/* Because the architecture does not provide lane variants of these fcmla
   instructions, we cannot use the standard simd builtin expansion code,
   but we still want the majority of the validation that would normally
   be done.  */

void
aarch64_init_fcmla_laneq_builtins (void)
{
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_fcmla_lane_builtin_data); ++i)
    {
      aarch64_fcmla_laneq_builtin_datum* d
	= &aarch64_fcmla_lane_builtin_data[i];
      tree argtype = aarch64_lookup_simd_builtin_type (d->mode, qualifier_none);
      machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
      tree quadtype
	= aarch64_lookup_simd_builtin_type (quadmode, qualifier_none);
      tree lanetype
	= aarch64_simd_builtin_std_type (SImode, qualifier_lane_pair_index);
      tree ftype = build_function_type_list (argtype, argtype, argtype,
					     quadtype, lanetype, NULL_TREE);
      tree fndecl = aarch64_general_add_builtin (d->name, ftype, d->fcode);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}

void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_PATTERN_START;

  if (aarch64_simd_builtins_initialized_p)
    return;

  aarch64_simd_builtins_initialized_p = true;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  They can
     be removed once all the intrinsics become strongly typed using the
     qualifier system.  */
  aarch64_init_simd_builtin_scalar_types ();

  tree lane_check_fpr = build_function_type_list (void_type_node,
						  size_type_node,
						  size_type_node,
						  intSI_type_node,
						  NULL);
  aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_LANE_CHECK]
    = aarch64_general_add_builtin ("__builtin_aarch64_im_lane_boundsi",
				   lane_check_fpr,
				   AARCH64_SIMD_BUILTIN_LANE_CHECK);

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS + 1] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index into the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
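      /* As a concrete sketch: for a non-void binary builtin the pattern has
	 three operands (result plus two sources), so op_num starts at 2 and
	 arg_num == op_num; the loop below visits qualifiers[2], then [1],
	 and finally [0], the return type.  For a store (void return type),
	 arg_num starts at op_num + 1, because qualifiers[0] describes the
	 void return, which has no RTL operand of its own.  */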
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[op_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[op_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[op_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      fndecl = aarch64_general_add_builtin (namebuf, ftype, fcode);
      aarch64_builtin_decls[fcode] = fndecl;
    }

  /* Initialize the remaining fcmla_laneq intrinsics.  */
  aarch64_init_fcmla_laneq_builtins ();
}

static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_simd_builtin_std_type (d->mode,
						    qualifier_unsigned);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype,
					     NULL_TREE);
      tree fndecl = aarch64_general_add_builtin (d->name, ftype, d->fcode);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}

/* Add builtins for reciprocal square root.  */

void
aarch64_init_builtin_rsqrt (void)
{
  tree fndecl = NULL;
  tree ftype = NULL;

  tree V2SF_type_node = build_vector_type (float_type_node, 2);
  tree V2DF_type_node = build_vector_type (double_type_node, 2);
  tree V4SF_type_node = build_vector_type (float_type_node, 4);

  struct builtin_decls_data
  {
    tree type_node;
    const char *builtin_name;
    int function_code;
  };

  builtin_decls_data bdda[] =
  {
    { double_type_node, "__builtin_aarch64_rsqrt_df", AARCH64_BUILTIN_RSQRT_DF },
    { float_type_node, "__builtin_aarch64_rsqrt_sf", AARCH64_BUILTIN_RSQRT_SF },
    { V2DF_type_node, "__builtin_aarch64_rsqrt_v2df", AARCH64_BUILTIN_RSQRT_V2DF },
    { V2SF_type_node, "__builtin_aarch64_rsqrt_v2sf", AARCH64_BUILTIN_RSQRT_V2SF },
    { V4SF_type_node, "__builtin_aarch64_rsqrt_v4sf", AARCH64_BUILTIN_RSQRT_V4SF }
  };

  builtin_decls_data *bdd = bdda;
  builtin_decls_data *bdd_end
    = bdd + (sizeof (bdda) / sizeof (builtin_decls_data));

  for (; bdd < bdd_end; bdd++)
    {
      ftype = build_function_type_list (bdd->type_node, bdd->type_node,
					NULL_TREE);
      fndecl = aarch64_general_add_builtin (bdd->builtin_name,
					    ftype, bdd->function_code);
      aarch64_builtin_decls[bdd->function_code] = fndecl;
    }
}

/* Initialize the backend types that support the user-visible __fp16
   type; also initialize a pointer to that type, to be used when
   forming HFAs.  */

static void
aarch64_init_fp16_types (void)
{
  aarch64_fp16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_fp16_type_node) = 16;
  layout_type (aarch64_fp16_type_node);

  (*lang_hooks.types.register_builtin_type) (aarch64_fp16_type_node, "__fp16");
  aarch64_fp16_ptr_type_node = build_pointer_type (aarch64_fp16_type_node);
}

/* Initialize the backend REAL_TYPE type supporting bfloat types.  */
static void
aarch64_init_bf16_types (void)
{
  aarch64_bf16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_bf16_type_node) = 16;
  SET_TYPE_MODE (aarch64_bf16_type_node, BFmode);
  layout_type (aarch64_bf16_type_node);

  lang_hooks.types.register_builtin_type (aarch64_bf16_type_node, "__bf16");
  aarch64_bf16_ptr_type_node = build_pointer_type (aarch64_bf16_type_node);
}

/* Pointer authentication builtins that will become NOPs on legacy platforms.
   Currently, these builtins are for internal use only (libgcc EH unwinder).  */

void
aarch64_init_pauth_hint_builtins (void)
{
  /* Pointer Authentication builtins.  */
  tree ftype_pointer_auth
    = build_function_type_list (ptr_type_node, ptr_type_node,
				unsigned_intDI_type_node, NULL_TREE);
  tree ftype_pointer_strip
    = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);

  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIA1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacia1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIA1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_AUTIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_autib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_AUTIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_PACIB1716]
    = aarch64_general_add_builtin ("__builtin_aarch64_pacib1716",
				   ftype_pointer_auth,
				   AARCH64_PAUTH_BUILTIN_PACIB1716);
  aarch64_builtin_decls[AARCH64_PAUTH_BUILTIN_XPACLRI]
    = aarch64_general_add_builtin ("__builtin_aarch64_xpaclri",
				   ftype_pointer_strip,
				   AARCH64_PAUTH_BUILTIN_XPACLRI);
}

/* Initialize the transactional memory extension (TME) builtins.  */
static void
aarch64_init_tme_builtins (void)
{
  tree ftype_uint64_void
    = build_function_type_list (uint64_type_node, NULL);
  tree ftype_void_void
    = build_function_type_list (void_type_node, NULL);
  tree ftype_void_uint64
    = build_function_type_list (void_type_node, uint64_type_node, NULL);

  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TSTART]
    = aarch64_general_add_builtin ("__builtin_aarch64_tstart",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TSTART);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TTEST]
    = aarch64_general_add_builtin ("__builtin_aarch64_ttest",
				   ftype_uint64_void,
				   AARCH64_TME_BUILTIN_TTEST);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCOMMIT]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcommit",
				   ftype_void_void,
				   AARCH64_TME_BUILTIN_TCOMMIT);
  aarch64_builtin_decls[AARCH64_TME_BUILTIN_TCANCEL]
    = aarch64_general_add_builtin ("__builtin_aarch64_tcancel",
				   ftype_void_uint64,
				   AARCH64_TME_BUILTIN_TCANCEL);
}

/* Add builtins for Random Number instructions.  */

static void
aarch64_init_rng_builtins (void)
{
  tree unsigned_ptr_type = build_pointer_type (unsigned_intDI_type_node);
  tree ftype
    = build_function_type_list (integer_type_node, unsigned_ptr_type, NULL);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDR]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndr", ftype,
				   AARCH64_BUILTIN_RNG_RNDR);
  aarch64_builtin_decls[AARCH64_BUILTIN_RNG_RNDRRS]
    = aarch64_general_add_builtin ("__builtin_aarch64_rndrrs", ftype,
				   AARCH64_BUILTIN_RNG_RNDRRS);
}
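
/* Sketch of the intended use (the status-code convention follows the
   Armv8.5-A RNDR/RNDRRS definition; treat this as illustrative): the
   random value is written through the pointer argument and the integer
   return value reports whether it is valid, so callers are expected to
   write something like

     unsigned long long val;
     int ok = __builtin_aarch64_rndr (&val) == 0;

   and only consume VAL when OK is nonzero.  */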

/* Initialize the memory tagging extension (MTE) builtins.  */
struct
{
  tree ftype;
  enum insn_code icode;
} aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_END -
			      AARCH64_MEMTAG_BUILTIN_START - 1];

static void
aarch64_init_memtag_builtins (void)
{
  tree fntype = NULL;

#define AARCH64_INIT_MEMTAG_BUILTINS_DECL(F, N, I, T) \
  aarch64_builtin_decls[AARCH64_MEMTAG_BUILTIN_##F] \
    = aarch64_general_add_builtin ("__builtin_aarch64_memtag_"#N, \
				   T, AARCH64_MEMTAG_BUILTIN_##F); \
  aarch64_memtag_builtin_data[AARCH64_MEMTAG_BUILTIN_##F - \
			      AARCH64_MEMTAG_BUILTIN_START - 1] = \
    {T, CODE_FOR_##I};

  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (IRG, irg, irg, fntype);

  fntype = build_function_type_list (uint64_type_node, ptr_type_node,
				     uint64_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GMI, gmi, gmi, fntype);

  fntype = build_function_type_list (ptrdiff_type_node, ptr_type_node,
				     ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SUBP, subp, subp, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node,
				     unsigned_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (INC_TAG, inc_tag, addg, fntype);

  fntype = build_function_type_list (void_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (SET_TAG, set_tag, stg, fntype);

  fntype = build_function_type_list (ptr_type_node, ptr_type_node, NULL);
  AARCH64_INIT_MEMTAG_BUILTINS_DECL (GET_TAG, get_tag, ldg, fntype);

#undef AARCH64_INIT_MEMTAG_BUILTINS_DECL
}
0d7e5fa6 1268/* Initialize fpsr fpcr getters and setters. */
c5dc215d 1269
0d7e5fa6
AC
1270static void
1271aarch64_init_fpsr_fpcr_builtins (void)
43e9d192 1272{
0d7e5fa6 1273 tree ftype_set
aa87aced 1274 = build_function_type_list (void_type_node, unsigned_type_node, NULL);
0d7e5fa6 1275 tree ftype_get
aa87aced
KV
1276 = build_function_type_list (unsigned_type_node, NULL);
1277
1278 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
6d4d616a 1279 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr",
0d7e5fa6 1280 ftype_get,
6d4d616a 1281 AARCH64_BUILTIN_GET_FPCR);
aa87aced 1282 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
6d4d616a 1283 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr",
0d7e5fa6 1284 ftype_set,
6d4d616a 1285 AARCH64_BUILTIN_SET_FPCR);
aa87aced 1286 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
6d4d616a 1287 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr",
0d7e5fa6 1288 ftype_get,
6d4d616a 1289 AARCH64_BUILTIN_GET_FPSR);
aa87aced 1290 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
6d4d616a 1291 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr",
0d7e5fa6 1292 ftype_set,
6d4d616a 1293 AARCH64_BUILTIN_SET_FPSR);
aa87aced 1294
0d7e5fa6
AC
1295 ftype_set
1296 = build_function_type_list (void_type_node, long_long_unsigned_type_node,
1297 NULL);
1298 ftype_get
1299 = build_function_type_list (long_long_unsigned_type_node, NULL);
1300
1301 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR64]
1302 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpcr64",
1303 ftype_get,
1304 AARCH64_BUILTIN_GET_FPCR64);
1305 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR64]
1306 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpcr64",
1307 ftype_set,
1308 AARCH64_BUILTIN_SET_FPCR64);
1309 aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR64]
1310 = aarch64_general_add_builtin ("__builtin_aarch64_get_fpsr64",
1311 ftype_get,
1312 AARCH64_BUILTIN_GET_FPSR64);
1313 aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR64]
1314 = aarch64_general_add_builtin ("__builtin_aarch64_set_fpsr64",
1315 ftype_set,
1316 AARCH64_BUILTIN_SET_FPSR64);
1317}
1318
1319/* Initialize all builtins in the AARCH64_BUILTIN_GENERAL group. */
1320
1321void
1322aarch64_general_init_builtins (void)
1323{
1324 aarch64_init_fpsr_fpcr_builtins ();
1325
1b62ed4f 1326 aarch64_init_fp16_types ();
c2ec330c 1327
abbe1ed2
SMW
1328 aarch64_init_bf16_types ();
1329
342be7f7 1330 if (TARGET_SIMD)
280d970b 1331 aarch64_init_simd_builtins ();
e95a988a
KT
1332
1333 aarch64_init_crc32_builtins ();
a6fc00da 1334 aarch64_init_builtin_rsqrt ();
c5dc215d 1335 aarch64_init_rng_builtins ();
312492bd 1336
e1d5d19e
KT
1337 tree ftype_jcvt
1338 = build_function_type_list (intSI_type_node, double_type_node, NULL);
1339 aarch64_builtin_decls[AARCH64_JSCVT]
6d4d616a
RS
1340 = aarch64_general_add_builtin ("__builtin_aarch64_jcvtzs", ftype_jcvt,
1341 AARCH64_JSCVT);
e1d5d19e 1342
a876231c
JW
1343 /* Initialize pointer authentication builtins which are backed by instructions
1344 in NOP encoding space.
1345
1346 NOTE: these builtins are supposed to be used by libgcc unwinder only, as
1347 there is no support on return address signing under ILP32, we don't
1348 register them. */
1349 if (!TARGET_ILP32)
1350 aarch64_init_pauth_hint_builtins ();
89626179
SD
1351
1352 if (TARGET_TME)
1353 aarch64_init_tme_builtins ();
ef01e6bb
DZ
1354
1355 if (TARGET_MEMTAG)
1356 aarch64_init_memtag_builtins ();
43e9d192
IB
1357}

/* Implement TARGET_BUILTIN_DECL for the AARCH64_BUILTIN_GENERAL group.  */
tree
aarch64_general_builtin_decl (unsigned code, bool)
{
  if (code >= AARCH64_BUILTIN_MAX)
    return error_mark_node;

  return aarch64_builtin_decls[code];
}

typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_LANE_INDEX,
  SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX,
  SIMD_ARG_LANE_PAIR_INDEX,
  SIMD_ARG_LANE_QUADTUP_INDEX,
  SIMD_ARG_STOP
} builtin_simd_arg;

/* Expand a call EXP to a builtin implemented by instruction pattern ICODE.
   ARGS classifies each operand (terminated by SIMD_ARG_STOP).  HAVE_RETVAL
   is nonzero if the builtin returns a value, in which case TARGET is a
   suggested destination.  BUILTIN_MODE is the builtin's vector mode.  */
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
                          tree exp, builtin_simd_arg *args,
                          machine_mode builtin_mode)
{
  rtx pat;
  rtx op[SIMD_MAX_BUILTIN_ARGS + 1]; /* First element for result operand.  */
  int opc = 0;

  if (have_retval)
    {
      machine_mode tmode = insn_data[icode].operand[0].mode;
      if (!target
          || GET_MODE (target) != tmode
          || !(*insn_data[icode].operand[0].predicate) (target, tmode))
        target = gen_reg_rtx (tmode);
      op[opc++] = target;
    }

  for (;;)
    {
      builtin_simd_arg thisarg = args[opc - have_retval];

      if (thisarg == SIMD_ARG_STOP)
        break;
      else
        {
          tree arg = CALL_EXPR_ARG (exp, opc - have_retval);
          machine_mode mode = insn_data[icode].operand[opc].mode;
          op[opc] = expand_normal (arg);

          switch (thisarg)
            {
            case SIMD_ARG_COPY_TO_REG:
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                op[opc] = convert_memory_address (Pmode, op[opc]);
              /*gcc_assert (GET_MODE (op[opc]) == mode); */
              if (!(*insn_data[icode].operand[opc].predicate)
                  (op[opc], mode))
                op[opc] = copy_to_mode_reg (mode, op[opc]);
              break;

            case SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX:
              gcc_assert (opc > 1);
              if (CONST_INT_P (op[opc]))
                {
                  unsigned int nunits
                    = GET_MODE_NUNITS (builtin_mode).to_constant ();
                  aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
                  /* Keep to GCC-vector-extension lane indices in the RTL.  */
                  op[opc] = aarch64_endian_lane_rtx (builtin_mode,
                                                     INTVAL (op[opc]));
                }
              goto constant_arg;

            case SIMD_ARG_LANE_INDEX:
              /* Must be a previous operand into which this is an index.  */
              gcc_assert (opc > 0);
              if (CONST_INT_P (op[opc]))
                {
                  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
                  unsigned int nunits
                    = GET_MODE_NUNITS (vmode).to_constant ();
                  aarch64_simd_lane_bounds (op[opc], 0, nunits, exp);
                  /* Keep to GCC-vector-extension lane indices in the RTL.  */
                  op[opc] = aarch64_endian_lane_rtx (vmode, INTVAL (op[opc]));
                }
              /* If the lane index isn't a constant then error out.  */
              goto constant_arg;

            case SIMD_ARG_LANE_PAIR_INDEX:
              /* Must be a previous operand into which this is an index; the
                 index is restricted to nunits / 2.  */
              gcc_assert (opc > 0);
              if (CONST_INT_P (op[opc]))
                {
                  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
                  unsigned int nunits
                    = GET_MODE_NUNITS (vmode).to_constant ();
                  aarch64_simd_lane_bounds (op[opc], 0, nunits / 2, exp);
                  /* Keep to GCC-vector-extension lane indices in the RTL.  */
                  int lane = INTVAL (op[opc]);
                  op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 2, lane),
                                          SImode);
                }
              /* If the lane index isn't a constant then error out.  */
              goto constant_arg;

            case SIMD_ARG_LANE_QUADTUP_INDEX:
              /* Must be a previous operand into which this is an index; the
                 index is restricted to nunits / 4.  */
              gcc_assert (opc > 0);
              if (CONST_INT_P (op[opc]))
                {
                  machine_mode vmode = insn_data[icode].operand[opc - 1].mode;
                  unsigned int nunits
                    = GET_MODE_NUNITS (vmode).to_constant ();
                  aarch64_simd_lane_bounds (op[opc], 0, nunits / 4, exp);
                  /* Keep to GCC-vector-extension lane indices in the RTL.  */
                  int lane = INTVAL (op[opc]);
                  op[opc] = gen_int_mode (ENDIAN_LANE_N (nunits / 4, lane),
                                          SImode);
                }
              /* If the lane index isn't a constant then error out.  */
              goto constant_arg;

            case SIMD_ARG_CONSTANT:
constant_arg:
              if (!(*insn_data[icode].operand[opc].predicate)
                  (op[opc], mode))
                {
                  error ("%Kargument %d must be a constant immediate",
                         exp, opc + 1 - have_retval);
                  return const0_rtx;
                }
              break;

            case SIMD_ARG_STOP:
              gcc_unreachable ();
            }

          opc++;
        }
    }

  switch (opc)
    {
    case 1:
      pat = GEN_FCN (icode) (op[0]);
      break;

    case 2:
      pat = GEN_FCN (icode) (op[0], op[1]);
      break;

    case 3:
      pat = GEN_FCN (icode) (op[0], op[1], op[2]);
      break;

    case 4:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
      break;

    case 5:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
      break;

    case 6:
      pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4], op[5]);
      break;

    default:
      gcc_unreachable ();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
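
/* Worked example (illustrative, not part of the original file): the lane
   remapping above keeps GCC-vector-extension lane numbering in the RTL.
   ENDIAN_LANE_N and aarch64_endian_lane_rtx reverse the index on
   big-endian targets, so for a V4SI operand:

     lane 1 (GCC numbering) -> ENDIAN_LANE_N (4, 1)
                            == (BYTES_BIG_ENDIAN ? 4 - 1 - 1 : 1)

   i.e. architectural lane 2 on big-endian and lane 1 on little-endian.
   The exact formula is an assumption based on the uses above.  */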

/* Expand an AArch64 AdvSIMD builtin (intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  if (fcode == AARCH64_SIMD_BUILTIN_LANE_CHECK)
    {
      rtx totalsize = expand_normal (CALL_EXPR_ARG (exp, 0));
      rtx elementsize = expand_normal (CALL_EXPR_ARG (exp, 1));
      if (CONST_INT_P (totalsize) && CONST_INT_P (elementsize)
          && UINTVAL (elementsize) != 0
          && UINTVAL (totalsize) != 0)
        {
          rtx lane_idx = expand_normal (CALL_EXPR_ARG (exp, 2));
          if (CONST_INT_P (lane_idx))
            aarch64_simd_lane_bounds (lane_idx, 0,
                                      UINTVAL (totalsize)
                                       / UINTVAL (elementsize),
                                      exp);
          else
            error ("%Klane index must be a constant immediate", exp);
        }
      else
        error ("%Ktotal size and element size must be a non-zero "
               "constant immediate", exp);
      /* Don't generate any RTL.  */
      return const0_rtx;
    }
  aarch64_simd_builtin_datum *d
    = &aarch64_simd_builtin_data[fcode - AARCH64_SIMD_PATTERN_START];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS + 1];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
         qualifiers - element 0 always describes the function return type.
         operands - element 0 is either the operand for return value (if
           the function has a non-void return type) or the operand for the
           first argument.
         expr_args - element 0 always holds the first argument.
         args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_lane_index)
        args[k] = SIMD_ARG_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_pair_index)
        args[k] = SIMD_ARG_LANE_PAIR_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_lane_quadtup_index)
        args[k] = SIMD_ARG_LANE_QUADTUP_INDEX;
      else if (d->qualifiers[qualifiers_k]
               & qualifier_struct_load_store_lane_index)
        args[k] = SIMD_ARG_STRUCT_LOAD_STORE_LANE_INDEX;
      else if (d->qualifiers[qualifiers_k] & qualifier_immediate)
        args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
        {
          rtx arg
            = expand_normal (CALL_EXPR_ARG (exp,
                                            (expr_args_k)));
          /* Handle constants only if the predicate allows it.  */
          bool op_const_int_p
            = (CONST_INT_P (arg)
               && (*insn_data[icode].operand[operands_k].predicate)
                  (arg, insn_data[icode].operand[operands_k].mode));
          args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
        }
      else
        args[k] = SIMD_ARG_COPY_TO_REG;
    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
          (target, icode, !is_void, exp, &args[1], d->mode);
}
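
/* Usage sketch (illustrative, not from this file): arm_neon.h funnels its
   __AARCH64_LANE_CHECK macro through the LANE_CHECK builtin handled above;
   the builtin name below is an assumption based on that header:

     __builtin_aarch64_im_lane_boundsi (sizeof (int32x4_t),
                                        sizeof (int32_t), 3);  // OK: lanes 0-3

   With a lane index of 4, aarch64_simd_lane_bounds would reject the call
   at compile time, and no RTL is generated either way.  */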

/* Expand a CRC32 builtin FCODE with call EXP, putting the result in
   TARGET if that is convenient.  */
rtx
aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
{
  rtx pat;
  aarch64_crc_builtin_datum *d
    = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
  enum insn_code icode = d->icode;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
              && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));

  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
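
/* Usage sketch (illustrative, not from this file): the CRC32 builtins
   registered by aarch64_init_crc32_builtins back the arm_acle.h
   intrinsics; the names and operand types below are assumptions based on
   that header ('byte' and 'word' are hypothetical inputs):

     uint32_t acc = 0xffffffffu;
     acc = __builtin_aarch64_crc32b (acc, byte);    // CRC32B
     acc = __builtin_aarch64_crc32cw (acc, word);   // CRC32CW  */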

/* Function to expand reciprocal square root builtins.  */

static rtx
aarch64_expand_builtin_rsqrt (int fcode, tree exp, rtx target)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);

  rtx (*gen) (rtx, rtx);

  switch (fcode)
    {
    case AARCH64_BUILTIN_RSQRT_DF:
      gen = gen_rsqrtdf2;
      break;
    case AARCH64_BUILTIN_RSQRT_SF:
      gen = gen_rsqrtsf2;
      break;
    case AARCH64_BUILTIN_RSQRT_V2DF:
      gen = gen_rsqrtv2df2;
      break;
    case AARCH64_BUILTIN_RSQRT_V2SF:
      gen = gen_rsqrtv2sf2;
      break;
    case AARCH64_BUILTIN_RSQRT_V4SF:
      gen = gen_rsqrtv4sf2;
      break;
    default: gcc_unreachable ();
    }

  if (!target)
    target = gen_reg_rtx (GET_MODE (op0));

  emit_insn (gen (target, op0));

  return target;
}
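
/* Background note (an assumption based on the rsqrt<mode>2 expanders in
   aarch64.md, not on this file): the gen_rsqrt*2 patterns used above emit
   an FRSQRTE initial estimate refined by FRSQRTS Newton-Raphson steps,
   conceptually:

     x = frsqrte (a);             // rough estimate of 1/sqrt(a)
     x = x * frsqrts (a, x * x);  // one refinement step, repeated according
                                  // to the mode's precision  */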

/* Expand a FCMLA lane expression EXP with code FCODE and
   result going to TARGET if that is convenient.  */

rtx
aarch64_expand_fcmla_builtin (tree exp, rtx target, int fcode)
{
  int bcode = fcode - AARCH64_SIMD_FCMLA_LANEQ_BUILTIN_BASE - 1;
  aarch64_fcmla_laneq_builtin_datum *d
    = &aarch64_fcmla_lane_builtin_data[bcode];
  machine_mode quadmode = GET_MODE_2XWIDER_MODE (d->mode).require ();
  rtx op0 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 0)));
  rtx op1 = force_reg (d->mode, expand_normal (CALL_EXPR_ARG (exp, 1)));
  rtx op2 = force_reg (quadmode, expand_normal (CALL_EXPR_ARG (exp, 2)));
  tree tmp = CALL_EXPR_ARG (exp, 3);
  rtx lane_idx = expand_expr (tmp, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

  /* Validate that the lane index is a constant.  */
  if (!CONST_INT_P (lane_idx))
    {
      error ("%Kargument %d must be a constant immediate", exp, 4);
      return const0_rtx;
    }

  /* Validate that the index is within the expected range.  */
  int nunits = GET_MODE_NUNITS (quadmode).to_constant ();
  aarch64_simd_lane_bounds (lane_idx, 0, nunits / 2, exp);

  /* Generate the correct register and mode.  */
  int lane = INTVAL (lane_idx);

  if (lane < nunits / 4)
    op2 = simplify_gen_subreg (d->mode, op2, quadmode,
                               subreg_lowpart_offset (d->mode, quadmode));
  else
    {
      /* Select the upper 64 bits, either a V2SF or V4HF.  This is messy:
         the operation, though conceptually simple, has no single
         convenient RTL pattern, and the generic gen_highpart_mode
         generates code that isn't optimal.  */
      rtx temp1 = gen_reg_rtx (d->mode);
      rtx temp2 = gen_reg_rtx (DImode);
      temp1 = simplify_gen_subreg (d->mode, op2, quadmode,
                                   subreg_lowpart_offset (d->mode, quadmode));
      temp1 = simplify_gen_subreg (V2DImode, temp1, d->mode, 0);
      if (BYTES_BIG_ENDIAN)
        emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const0_rtx));
      else
        emit_insn (gen_aarch64_get_lanev2di (temp2, temp1, const1_rtx));
      op2 = simplify_gen_subreg (d->mode, temp2, GET_MODE (temp2), 0);

      /* And recalculate the index.  */
      lane -= nunits / 4;
    }

  /* Keep to GCC-vector-extension lane indices in the RTL, only nunits / 4
     (max nunits in range check) are valid.  Which means only 0-1, so we
     only need to know the order in a V2mode.  */
  lane_idx = aarch64_endian_lane_rtx (V2DImode, lane);

  if (!target)
    target = gen_reg_rtx (d->mode);
  else
    target = force_reg (d->mode, target);

  rtx pat = NULL_RTX;

  if (d->lane)
    pat = GEN_FCN (d->icode) (target, op0, op1, op2, lane_idx);
  else
    pat = GEN_FCN (d->icode) (target, op0, op1, op2);

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
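
/* Worked example (illustrative, derived from the logic above): for the
   V2SF FCMLA variants, quadmode is V4SF, so nunits is 4 and the complex
   pair index must be 0 or 1.  Pair 0 sits in the low 64 bits of the quad
   operand and is extracted with a lowpart subreg; pair 1 sits in the high
   64 bits, which are pulled out through the V2DI lane extract before the
   index is rebased to 0.  */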

/* Function to expand an expression EXP which calls one of the Transactional
   Memory Extension (TME) builtins FCODE with the result going to TARGET.  */
static rtx
aarch64_expand_builtin_tme (int fcode, tree exp, rtx target)
{
  switch (fcode)
    {
    case AARCH64_TME_BUILTIN_TSTART:
      target = gen_reg_rtx (DImode);
      emit_insn (GEN_FCN (CODE_FOR_tstart) (target));
      break;

    case AARCH64_TME_BUILTIN_TTEST:
      target = gen_reg_rtx (DImode);
      emit_insn (GEN_FCN (CODE_FOR_ttest) (target));
      break;

    case AARCH64_TME_BUILTIN_TCOMMIT:
      emit_insn (GEN_FCN (CODE_FOR_tcommit) ());
      break;

    case AARCH64_TME_BUILTIN_TCANCEL:
      {
        tree arg0 = CALL_EXPR_ARG (exp, 0);
        rtx op0 = expand_normal (arg0);
        /* TCANCEL takes a 16-bit immediate, so the largest accepted
           value is 65535, not 65536.  */
        if (CONST_INT_P (op0) && UINTVAL (op0) <= 65535)
          emit_insn (GEN_FCN (CODE_FOR_tcancel) (op0));
        else
          {
            error ("%Kargument must be a 16-bit constant immediate", exp);
            return const0_rtx;
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
  return target;
}
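
/* Usage sketch (illustrative, not from this file): arm_acle.h wraps these
   as __tstart/__tcommit/__tcancel/__ttest; the builtin names below are
   assumptions based on aarch64_init_tme_builtins, and handle_failure is a
   hypothetical fallback:

     uint64_t status = __builtin_aarch64_tstart ();
     if (status == 0)                // transaction is now running
       {
         ...
         __builtin_aarch64_tcommit ();
       }
     else
       handle_failure (status);  */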

/* Expand a random number builtin EXP with code FCODE, putting the result
   in TARGET.  If IGNORE is true the return value is ignored.  */

rtx
aarch64_expand_rng_builtin (tree exp, rtx target, int fcode, int ignore)
{
  rtx pat;
  enum insn_code icode;
  if (fcode == AARCH64_BUILTIN_RNG_RNDR)
    icode = CODE_FOR_aarch64_rndr;
  else if (fcode == AARCH64_BUILTIN_RNG_RNDRRS)
    icode = CODE_FOR_aarch64_rndrrs;
  else
    gcc_unreachable ();

  rtx rand = gen_reg_rtx (DImode);
  pat = GEN_FCN (icode) (rand);
  if (!pat)
    return NULL_RTX;

  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx res_addr = expand_normal (arg0);
  res_addr = convert_memory_address (Pmode, res_addr);
  rtx res_mem = gen_rtx_MEM (DImode, res_addr);
  emit_insn (pat);
  emit_move_insn (res_mem, rand);
  /* If the status result is unused don't generate the CSET code.  */
  if (ignore)
    return target;

  rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
  rtx cmp_rtx = gen_rtx_fmt_ee (NE, SImode, cc_reg, const0_rtx);
  emit_insn (gen_aarch64_cstoresi (target, cmp_rtx, cc_reg));
  return target;
}
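
/* Usage sketch (illustrative, not from this file): arm_acle.h exposes
   these as __rndr/__rndrrs.  The builtin name is an assumption based on
   aarch64_init_rng_builtins; the zero-on-success convention follows ACLE
   and matches the CSET sequence above, which derives the status from the
   condition flags set by RNDR:

     uint64_t val;
     if (__builtin_aarch64_rndr (&val) == 0)
       consume (val);    // 'consume' is a hypothetical user  */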

/* Expand an expression EXP that calls a MEMTAG built-in FCODE
   with result going to TARGET.  */
static rtx
aarch64_expand_builtin_memtag (int fcode, tree exp, rtx target)
{
  if (TARGET_ILP32)
    {
      error ("Memory Tagging Extension does not support %<-mabi=ilp32%>");
      return const0_rtx;
    }

  rtx pat = NULL;
  enum insn_code icode = aarch64_memtag_builtin_data[fcode -
                           AARCH64_MEMTAG_BUILTIN_START - 1].icode;

  rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
  machine_mode mode0 = GET_MODE (op0);
  op0 = force_reg (mode0 == VOIDmode ? DImode : mode0, op0);
  op0 = convert_to_mode (DImode, op0, true);

  switch (fcode)
    {
    case AARCH64_MEMTAG_BUILTIN_IRG:
    case AARCH64_MEMTAG_BUILTIN_GMI:
    case AARCH64_MEMTAG_BUILTIN_SUBP:
    case AARCH64_MEMTAG_BUILTIN_INC_TAG:
      {
        if (! target
            || GET_MODE (target) != DImode
            || ! (*insn_data[icode].operand[0].predicate) (target, DImode))
          target = gen_reg_rtx (DImode);

        if (fcode == AARCH64_MEMTAG_BUILTIN_INC_TAG)
          {
            rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));

            if ((*insn_data[icode].operand[3].predicate) (op1, QImode))
              {
                pat = GEN_FCN (icode) (target, op0, const0_rtx, op1);
                break;
              }
            error ("%Kargument %d must be a constant immediate "
                   "in range [0,15]", exp, 2);
            return const0_rtx;
          }
        else
          {
            rtx op1 = expand_normal (CALL_EXPR_ARG (exp, 1));
            machine_mode mode1 = GET_MODE (op1);
            op1 = force_reg (mode1 == VOIDmode ? DImode : mode1, op1);
            op1 = convert_to_mode (DImode, op1, true);
            pat = GEN_FCN (icode) (target, op0, op1);
          }
        break;
      }
    case AARCH64_MEMTAG_BUILTIN_GET_TAG:
      target = op0;
      pat = GEN_FCN (icode) (target, op0, const0_rtx);
      break;
    case AARCH64_MEMTAG_BUILTIN_SET_TAG:
      pat = GEN_FCN (icode) (op0, op0, const0_rtx);
      break;
    default:
      gcc_unreachable ();
    }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);
  return target;
}
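
/* Usage sketch (illustrative, not from this file): arm_acle.h wraps these
   as the __arm_mte_* operations; the builtin names are assumptions based
   on aarch64_init_memtag_builtins, and 'buf' is a hypothetical suitably
   tagged allocation:

     void *p = __builtin_aarch64_memtag_irg (buf, 0);     // IRG: random tag
     void *q = __builtin_aarch64_memtag_inc_tag (p, 1);   // ADDG: offset tag
     __builtin_aarch64_memtag_set_tag (q);                // STG: store tag  */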

/* Expand an expression EXP as an FPSR or FPCR setter (depending on
   UNSPEC) using MODE.  */
static void
aarch64_expand_fpsr_fpcr_setter (int unspec, machine_mode mode, tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);
  rtx op = force_reg (mode, expand_normal (arg));
  emit_insn (gen_aarch64_set (unspec, mode, op));
}
1939
6d4d616a 1940/* Expand an expression EXP that calls built-in function FCODE,
c5dc215d
KT
1941 with result going to TARGET if that's convenient. IGNORE is true
1942 if the result of the builtin is ignored. */
342be7f7 1943rtx
c5dc215d
KT
1944aarch64_general_expand_builtin (unsigned int fcode, tree exp, rtx target,
1945 int ignore)
342be7f7 1946{
aa87aced 1947 int icode;
0d7e5fa6 1948 rtx op0;
aa87aced
KV
1949 tree arg0;
1950
1951 switch (fcode)
1952 {
1953 case AARCH64_BUILTIN_GET_FPCR:
0d7e5fa6
AC
1954 emit_insn (gen_aarch64_get (UNSPECV_GET_FPCR, SImode, target));
1955 return target;
aa87aced 1956 case AARCH64_BUILTIN_SET_FPCR:
0d7e5fa6
AC
1957 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, SImode, exp);
1958 return target;
aa87aced 1959 case AARCH64_BUILTIN_GET_FPSR:
0d7e5fa6
AC
1960 emit_insn (gen_aarch64_get (UNSPECV_GET_FPSR, SImode, target));
1961 return target;
aa87aced 1962 case AARCH64_BUILTIN_SET_FPSR:
0d7e5fa6
AC
1963 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, SImode, exp);
1964 return target;
1965 case AARCH64_BUILTIN_GET_FPCR64:
1966 emit_insn (gen_aarch64_get (UNSPECV_GET_FPCR, DImode, target));
1967 return target;
1968 case AARCH64_BUILTIN_SET_FPCR64:
1969 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPCR, DImode, exp);
1970 return target;
1971 case AARCH64_BUILTIN_GET_FPSR64:
1972 emit_insn (gen_aarch64_get (UNSPECV_GET_FPSR, DImode, target));
1973 return target;
1974 case AARCH64_BUILTIN_SET_FPSR64:
1975 aarch64_expand_fpsr_fpcr_setter (UNSPECV_SET_FPSR, DImode, exp);
aa87aced 1976 return target;
312492bd
JW
1977 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
1978 case AARCH64_PAUTH_BUILTIN_PACIA1716:
8fc16d72
ST
1979 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
1980 case AARCH64_PAUTH_BUILTIN_PACIB1716:
312492bd
JW
1981 case AARCH64_PAUTH_BUILTIN_XPACLRI:
1982 arg0 = CALL_EXPR_ARG (exp, 0);
1983 op0 = force_reg (Pmode, expand_normal (arg0));
1984
1985 if (!target)
1986 target = gen_reg_rtx (Pmode);
1987 else
1988 target = force_reg (Pmode, target);
1989
1990 emit_move_insn (target, op0);
1991
1992 if (fcode == AARCH64_PAUTH_BUILTIN_XPACLRI)
1993 {
1994 rtx lr = gen_rtx_REG (Pmode, R30_REGNUM);
1995 icode = CODE_FOR_xpaclri;
1996 emit_move_insn (lr, op0);
1997 emit_insn (GEN_FCN (icode) ());
1998 emit_move_insn (target, lr);
1999 }
2000 else
2001 {
2002 tree arg1 = CALL_EXPR_ARG (exp, 1);
2003 rtx op1 = force_reg (Pmode, expand_normal (arg1));
8fc16d72
ST
2004 switch (fcode)
2005 {
2006 case AARCH64_PAUTH_BUILTIN_AUTIA1716:
2007 icode = CODE_FOR_autia1716;
2008 break;
2009 case AARCH64_PAUTH_BUILTIN_AUTIB1716:
2010 icode = CODE_FOR_autib1716;
2011 break;
2012 case AARCH64_PAUTH_BUILTIN_PACIA1716:
2013 icode = CODE_FOR_pacia1716;
2014 break;
2015 case AARCH64_PAUTH_BUILTIN_PACIB1716:
2016 icode = CODE_FOR_pacib1716;
2017 break;
2018 default:
2019 icode = 0;
2020 gcc_unreachable ();
2021 }
312492bd
JW
2022
2023 rtx x16_reg = gen_rtx_REG (Pmode, R16_REGNUM);
2024 rtx x17_reg = gen_rtx_REG (Pmode, R17_REGNUM);
2025 emit_move_insn (x17_reg, op0);
2026 emit_move_insn (x16_reg, op1);
2027 emit_insn (GEN_FCN (icode) ());
2028 emit_move_insn (target, x17_reg);
2029 }
2030
2031 return target;
9d63f43b 2032
e1d5d19e
KT
2033 case AARCH64_JSCVT:
2034 arg0 = CALL_EXPR_ARG (exp, 0);
2035 op0 = force_reg (DFmode, expand_normal (arg0));
2036 if (!target)
2037 target = gen_reg_rtx (SImode);
2038 else
2039 target = force_reg (SImode, target);
2040 emit_insn (GEN_FCN (CODE_FOR_aarch64_fjcvtzs) (target, op0));
2041 return target;
2042
9d63f43b
TC
2043 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V2SF:
2044 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V2SF:
2045 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V2SF:
2046 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V2SF:
2047 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ0_V4HF:
2048 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ90_V4HF:
2049 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ180_V4HF:
2050 case AARCH64_SIMD_BUILTIN_FCMLA_LANEQ270_V4HF:
2051 return aarch64_expand_fcmla_builtin (exp, target, fcode);
c5dc215d
KT
2052 case AARCH64_BUILTIN_RNG_RNDR:
2053 case AARCH64_BUILTIN_RNG_RNDRRS:
2054 return aarch64_expand_rng_builtin (exp, target, fcode, ignore);
aa87aced 2055 }
342be7f7 2056
5d357f26 2057 if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
342be7f7 2058 return aarch64_simd_expand_builtin (fcode, exp, target);
5d357f26
KT
2059 else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
2060 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7 2061
a6fc00da
BH
2062 if (fcode == AARCH64_BUILTIN_RSQRT_DF
2063 || fcode == AARCH64_BUILTIN_RSQRT_SF
2064 || fcode == AARCH64_BUILTIN_RSQRT_V2DF
2065 || fcode == AARCH64_BUILTIN_RSQRT_V2SF
2066 || fcode == AARCH64_BUILTIN_RSQRT_V4SF)
2067 return aarch64_expand_builtin_rsqrt (fcode, exp, target);
2068
89626179
SD
2069 if (fcode == AARCH64_TME_BUILTIN_TSTART
2070 || fcode == AARCH64_TME_BUILTIN_TCOMMIT
2071 || fcode == AARCH64_TME_BUILTIN_TTEST
2072 || fcode == AARCH64_TME_BUILTIN_TCANCEL)
2073 return aarch64_expand_builtin_tme (fcode, exp, target);
2074
ef01e6bb
DZ
2075 if (fcode >= AARCH64_MEMTAG_BUILTIN_START
2076 && fcode <= AARCH64_MEMTAG_BUILTIN_END)
2077 return aarch64_expand_builtin_memtag (fcode, exp, target);
2078
d5a29419 2079 gcc_unreachable ();
342be7f7 2080}
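
/* Usage sketch (illustrative, not from this file): __builtin_aarch64_jcvtzs,
   registered in aarch64_general_init_builtins above, converts a double to a
   32-bit integer with JavaScript semantics via FJCVTZS, i.e. truncation
   with modulo-2^32 wrapping rather than saturation:

     int i = __builtin_aarch64_jcvtzs (4294967297.0);  // 2^32 + 1 -> i == 1  */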

/* Return the declaration of a vectorized version of built-in function FN,
   taking TYPE_IN and producing TYPE_OUT, or NULL_TREE if no suitable
   variant exists.  */
tree
aarch64_builtin_vectorized_function (unsigned int fn, tree type_out,
                                     tree type_in)
{
  machine_mode in_mode, out_mode;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (type_out);
  in_mode = TYPE_MODE (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
    ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
    : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
        ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
        : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
           ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
           : NULL_TREE)))
  switch (fn)
    {
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##N##Fmode && in_mode == V##C##N##Fmode)
    CASE_CFN_FLOOR:
      return AARCH64_FIND_FRINT_VARIANT (floor);
    CASE_CFN_CEIL:
      return AARCH64_FIND_FRINT_VARIANT (ceil);
    CASE_CFN_TRUNC:
      return AARCH64_FIND_FRINT_VARIANT (btrunc);
    CASE_CFN_ROUND:
      return AARCH64_FIND_FRINT_VARIANT (round);
    CASE_CFN_NEARBYINT:
      return AARCH64_FIND_FRINT_VARIANT (nearbyint);
    CASE_CFN_SQRT:
      return AARCH64_FIND_FRINT_VARIANT (sqrt);
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##SImode && in_mode == V##C##N##Imode)
    CASE_CFN_CLZ:
      {
        if (AARCH64_CHECK_BUILTIN_MODE (4, S))
          return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
        return NULL_TREE;
      }
    CASE_CFN_CTZ:
      {
        if (AARCH64_CHECK_BUILTIN_MODE (2, S))
          return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv2si];
        else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
          return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_ctzv4si];
        return NULL_TREE;
      }
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == V##C##N##Imode && in_mode == V##C##N##Fmode)
    CASE_CFN_IFLOOR:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLFLOOR:
      {
        enum aarch64_builtins builtin;
        if (AARCH64_CHECK_BUILTIN_MODE (2, D))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
        else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
        else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
        else
          return NULL_TREE;

        return aarch64_builtin_decls[builtin];
      }
    CASE_CFN_ICEIL:
    CASE_CFN_LCEIL:
    CASE_CFN_LLCEIL:
      {
        enum aarch64_builtins builtin;
        if (AARCH64_CHECK_BUILTIN_MODE (2, D))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
        else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
        else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
        else
          return NULL_TREE;

        return aarch64_builtin_decls[builtin];
      }
    CASE_CFN_IROUND:
    CASE_CFN_LROUND:
    CASE_CFN_LLROUND:
      {
        enum aarch64_builtins builtin;
        if (AARCH64_CHECK_BUILTIN_MODE (2, D))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
        else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
        else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
          builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
        else
          return NULL_TREE;

        return aarch64_builtin_decls[builtin];
      }
    default:
      return NULL_TREE;
    }

  return NULL_TREE;
}
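
/* Worked example (illustrative, not from this file): given the mapping
   above, a loop such as

     for (int i = 0; i < n; i++)
       out[i] = __builtin_floor (in[i]);  // 'out'/'in' hypothetical doubles

   can be vectorized with the V2DF frint builtin that
   AARCH64_FIND_FRINT_VARIANT selects, assumed here to be named
   __builtin_aarch64_floorv2df following the UNOP naming scheme.  */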

/* Return builtin for reciprocal square root.  */

tree
aarch64_general_builtin_rsqrt (unsigned int fn)
{
  if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2df)
    return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2DF];
  if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv2sf)
    return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V2SF];
  if (fn == AARCH64_SIMD_BUILTIN_UNOP_sqrtv4sf)
    return aarch64_builtin_decls[AARCH64_BUILTIN_RSQRT_V4SF];
  return NULL_TREE;
}

#undef VAR1
#define VAR1(T, N, MAP, FLAG, A) \
  case AARCH64_SIMD_BUILTIN_##T##_##N##A:

/* Try to fold a call to the built-in function with subcode FCODE.  The
   function is passed the N_ARGS arguments in ARGS and it returns a value
   of type TYPE.  Return the new expression on success and NULL_TREE on
   failure.  */
tree
aarch64_general_fold_builtin (unsigned int fcode, tree type,
                              unsigned int n_args ATTRIBUTE_UNUSED, tree *args)
{
  switch (fcode)
    {
      BUILTIN_VDQF (UNOP, abs, 2, ALL)
        return fold_build1 (ABS_EXPR, type, args[0]);
      VAR1 (UNOP, floatv2si, 2, ALL, v2sf)
      VAR1 (UNOP, floatv4si, 2, ALL, v4sf)
      VAR1 (UNOP, floatv2di, 2, ALL, v2df)
        return fold_build1 (FLOAT_EXPR, type, args[0]);
    default:
      break;
    }

  return NULL_TREE;
}
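
/* Worked example (illustrative, not from this file): the ABS_EXPR folding
   above means a call such as

     float32x4_t r = __builtin_aarch64_absv4sf (x);  // vabsq_f32 in arm_neon.h

   is rewritten as a plain ABS_EXPR on x, so the generic folders and
   optimizers can see through the intrinsic.  */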

/* Try to fold STMT, given that it's a call to the built-in function with
   subcode FCODE.  Return the new statement on success and null on
   failure.  */
gimple *
aarch64_general_gimple_fold_builtin (unsigned int fcode, gcall *stmt)
{
  gimple *new_stmt = NULL;
  unsigned nargs = gimple_call_num_args (stmt);
  tree *args = (nargs > 0
                ? gimple_call_arg_ptr (stmt, 0)
                : &error_mark_node);

  /* We use gimple's IFN_REDUC_(PLUS|MIN|MAX)s for float, signed int
     and unsigned int; it will distinguish according to the types of
     the arguments to the __builtin.  */
  switch (fcode)
    {
      BUILTIN_VALL (UNOP, reduc_plus_scal_, 10, ALL)
        new_stmt = gimple_build_call_internal (IFN_REDUC_PLUS,
                                               1, args[0]);
        gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
        break;
      BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10, ALL)
      BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10, ALL)
        new_stmt = gimple_build_call_internal (IFN_REDUC_MAX,
                                               1, args[0]);
        gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
        break;
      BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10, ALL)
      BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10, ALL)
        new_stmt = gimple_build_call_internal (IFN_REDUC_MIN,
                                               1, args[0]);
        gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
        break;
      BUILTIN_GPF (BINOP, fmulx, 0, ALL)
        {
          gcc_assert (nargs == 2);
          bool a0_cst_p = TREE_CODE (args[0]) == REAL_CST;
          bool a1_cst_p = TREE_CODE (args[1]) == REAL_CST;
          if (a0_cst_p || a1_cst_p)
            {
              if (a0_cst_p && a1_cst_p)
                {
                  tree t0 = TREE_TYPE (args[0]);
                  real_value a0 = (TREE_REAL_CST (args[0]));
                  real_value a1 = (TREE_REAL_CST (args[1]));
                  if (real_equal (&a1, &dconst0))
                    std::swap (a0, a1);
                  /* According to real_equal (), +0 equals -0.  */
                  if (real_equal (&a0, &dconst0) && real_isinf (&a1))
                    {
                      real_value res = dconst2;
                      res.sign = a0.sign ^ a1.sign;
                      new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                                      REAL_CST,
                                                      build_real (t0, res));
                    }
                  else
                    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                                    MULT_EXPR,
                                                    args[0], args[1]);
                }
              else /* a0_cst_p ^ a1_cst_p.  */
                {
                  real_value const_part = a0_cst_p
                    ? TREE_REAL_CST (args[0]) : TREE_REAL_CST (args[1]);
                  if (!real_equal (&const_part, &dconst0)
                      && !real_isinf (&const_part))
                    new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                                    MULT_EXPR, args[0],
                                                    args[1]);
                }
            }
          if (new_stmt)
            {
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gimple_set_vdef (new_stmt, gimple_vdef (stmt));
            }
          break;
        }
    default:
      break;
    }
  return new_stmt;
}
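
/* Worked example (illustrative, derived from the folding above): FMULX
   defines fmulx (+/-0, +/-inf) as +/-2.0, with the sign being the XOR of
   the operand signs, so, assuming the scalar builtin is named
   __builtin_aarch64_fmulxdf following the GPF naming scheme:

     __builtin_aarch64_fmulxdf (0.0, -__builtin_inf ())  // folds to -2.0

   while any other finite, nonzero constant operand lets the call fold to
   an ordinary multiplication.  */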

/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV.  */
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
                                                        | AARCH64_FE_DIVBYZERO
                                                        | AARCH64_FE_OVERFLOW
                                                        | AARCH64_FE_UNDERFLOW
                                                        | AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv;
  tree get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalent of:
       unsigned int fenv_cr;
       fenv_cr = __builtin_aarch64_get_fpcr ();

       unsigned int fenv_sr;
       fenv_sr = __builtin_aarch64_get_fpsr ();

     Now set all exceptions to non-stop:
       unsigned int mask_cr
         = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
       unsigned int masked_cr;
       masked_cr = fenv_cr & mask_cr;

     And clear all exception flags:
       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
       unsigned int masked_sr;
       masked_sr = fenv_sr & mask_sr;

       __builtin_aarch64_set_fpcr (masked_cr);
       __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var_raw (unsigned_type_node);
  fenv_sr = create_tmp_var_raw (unsigned_type_node);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
                           ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
                           ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build4 (TARGET_EXPR, unsigned_type_node,
                       fenv_cr, build_call_expr (get_fpcr, 0),
                       NULL_TREE, NULL_TREE);
  ld_fenv_sr = build4 (TARGET_EXPR, unsigned_type_node,
                       fenv_sr, build_call_expr (get_fpsr, 0),
                       NULL_TREE, NULL_TREE);

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
                        hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
                        masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
                  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
                  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
       __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of:
       unsigned int new_fenv_var;
       new_fenv_var = __builtin_aarch64_get_fpsr ();

       __builtin_aarch64_set_fpsr (fenv_sr);

       __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var_raw (unsigned_type_node);
  reload_fenv = build4 (TARGET_EXPR, unsigned_type_node,
                        new_fenv_var, build_call_expr (get_fpsr, 0),
                        NULL_TREE, NULL_TREE);
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
                                 fold_convert (integer_type_node,
                                               new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
                    build2 (COMPOUND_EXPR, void_type_node,
                            reload_fenv, restore_fnenv), update_call);
}

/* Resolve overloaded MEMTAG built-in functions.  */
#define AARCH64_BUILTIN_SUBCODE(F) \
  (DECL_MD_FUNCTION_CODE (F) >> AARCH64_BUILTIN_SHIFT)

static tree
aarch64_resolve_overloaded_memtag (location_t loc,
                                   tree fndecl, void *pass_params)
{
  vec<tree, va_gc> *params = static_cast<vec<tree, va_gc> *> (pass_params);
  unsigned param_num = params ? params->length () : 0;
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (fndecl);
  tree inittype = aarch64_memtag_builtin_data[
                    fcode - AARCH64_MEMTAG_BUILTIN_START - 1].ftype;
  unsigned arg_num = list_length (TYPE_ARG_TYPES (inittype)) - 1;

  if (param_num != arg_num)
    {
      TREE_TYPE (fndecl) = inittype;
      return NULL_TREE;
    }
  tree retype = NULL;

  if (fcode == AARCH64_MEMTAG_BUILTIN_SUBP)
    {
      tree t0 = TREE_TYPE ((*params)[0]);
      tree t1 = TREE_TYPE ((*params)[1]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
        t0 = ptr_type_node;
      if (t1 == error_mark_node || TREE_CODE (t1) != POINTER_TYPE)
        t1 = ptr_type_node;

      if (TYPE_MODE (t0) != DImode)
        warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
                    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));

      if (TYPE_MODE (t1) != DImode)
        warning_at (loc, 1, "expected 64-bit address but argument 2 is %d-bit",
                    (int) tree_to_shwi (DECL_SIZE ((*params)[1])));

      retype = build_function_type_list (ptrdiff_type_node, t0, t1, NULL);
    }
  else
    {
      tree t0 = TREE_TYPE ((*params)[0]);

      if (t0 == error_mark_node || TREE_CODE (t0) != POINTER_TYPE)
        {
          TREE_TYPE (fndecl) = inittype;
          return NULL_TREE;
        }

      if (TYPE_MODE (t0) != DImode)
        warning_at (loc, 1, "expected 64-bit address but argument 1 is %d-bit",
                    (int) tree_to_shwi (DECL_SIZE ((*params)[0])));

      switch (fcode)
        {
        case AARCH64_MEMTAG_BUILTIN_IRG:
          retype = build_function_type_list (t0, t0, uint64_type_node, NULL);
          break;
        case AARCH64_MEMTAG_BUILTIN_GMI:
          retype = build_function_type_list (uint64_type_node, t0,
                                             uint64_type_node, NULL);
          break;
        case AARCH64_MEMTAG_BUILTIN_INC_TAG:
          retype = build_function_type_list (t0, t0, unsigned_type_node, NULL);
          break;
        case AARCH64_MEMTAG_BUILTIN_SET_TAG:
          retype = build_function_type_list (void_type_node, t0, NULL);
          break;
        case AARCH64_MEMTAG_BUILTIN_GET_TAG:
          retype = build_function_type_list (t0, t0, NULL);
          break;
        default:
          return NULL_TREE;
        }
    }

  if (!retype || retype == error_mark_node)
    TREE_TYPE (fndecl) = inittype;
  else
    TREE_TYPE (fndecl) = retype;

  return NULL_TREE;
}

/* Called at aarch64_resolve_overloaded_builtin in aarch64-c.c.  */
tree
aarch64_resolve_overloaded_builtin_general (location_t loc, tree function,
                                            void *pass_params)
{
  unsigned int fcode = AARCH64_BUILTIN_SUBCODE (function);

  if (fcode >= AARCH64_MEMTAG_BUILTIN_START
      && fcode <= AARCH64_MEMTAG_BUILTIN_END)
    return aarch64_resolve_overloaded_memtag (loc, function, pass_params);

  return NULL_TREE;
}

#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11

#include "gt-aarch64-builtins.h"