/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2014 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "calls.h"
#include "expr.h"
#include "tm_p.h"
#include "recog.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "optabs.h"
#include "pointer-set.h"
#include "hash-table.h"
#include "vec.h"
#include "ggc.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"

enum aarch64_simd_builtin_type_mode
{
  T_V8QI,
  T_V4HI,
  T_V2SI,
  T_V2SF,
  T_DI,
  T_DF,
  T_V16QI,
  T_V8HI,
  T_V4SI,
  T_V4SF,
  T_V2DI,
  T_V2DF,
  T_TI,
  T_EI,
  T_OI,
  T_XI,
  T_SI,
  T_SF,
  T_HI,
  T_QI,
  T_MAX
};

#define v8qi_UP  T_V8QI
#define v4hi_UP  T_V4HI
#define v2si_UP  T_V2SI
#define v2sf_UP  T_V2SF
#define di_UP    T_DI
#define df_UP    T_DF
#define v16qi_UP T_V16QI
#define v8hi_UP  T_V8HI
#define v4si_UP  T_V4SI
#define v4sf_UP  T_V4SF
#define v2di_UP  T_V2DI
#define v2df_UP  T_V2DF
#define ti_UP    T_TI
#define ei_UP    T_EI
#define oi_UP    T_OI
#define xi_UP    T_XI
#define si_UP    T_SI
#define sf_UP    T_SF
#define hi_UP    T_HI
#define qi_UP    T_QI

#define UP(X) X##_UP

#define SIMD_MAX_BUILTIN_ARGS 5

enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* const T *foo.  */
  qualifier_const_pointer = 0x6, /* qualifier_const | qualifier_pointer  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100
};
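
/* The compound qualifiers above are simply ORs of the single-bit flags:
   for example, qualifier_const_pointer_map_mode (0x86) is
   qualifier_const (0x2) | qualifier_pointer (0x4) | qualifier_map_mode (0x80),
   i.e. a "const <element type> *" argument whose type is derived from the
   T_<mode> recorded in the builtin datum rather than from the RTL operand.  */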

typedef struct
{
  const char *name;
  enum aarch64_simd_builtin_type_mode mode;
  const enum insn_code code;
  unsigned int fcode;
  enum aarch64_type_qualifiers *qualifiers;
} aarch64_simd_builtin_datum;

static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
#define TYPES_CREATE (aarch64_types_unop_qualifiers)
#define TYPES_REINTERP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_quadop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_none };
#define TYPES_QUADOP (aarch64_types_quadop_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETLANE (aarch64_types_getlane_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_getlane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETLANE (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_setlane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
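
/* In each qualifiers array, element 0 describes the return type and the
   remaining elements describe the arguments in order.  TYPES_BINOP above
   therefore means "T foo (T, T-or-immediate)", while TYPES_STORE1 means
   "void foo (<element type> *, T)", the pointer's element type being taken
   from the mode recorded in the builtin datum.  */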

#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

#define VAR1(T, N, MAP, A) \
  {#N, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
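
/* As an illustration, a hypothetical .def entry
     VAR1 (BINOP, foo, 0, v8qi)
   would expand, via the table definition of VAR1 above, to
     {"foo", T_V8QI, CODE_FOR_aarch64_foov8qi, 0, aarch64_types_binop_qualifiers},
   MAP == 0 selecting CF0, which prefixes the insn name with "aarch64_",
   while MAP == 10 selects CF10, which uses the insn name unchanged.  */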

/* BUILTIN_<ITERATOR> macros should expand to cover the same range of
   modes as is given for each define_mode_iterator in
   config/aarch64/iterators.md.  */

#define BUILTIN_DX(T, N, MAP) \
  VAR2 (T, N, MAP, di, df)
#define BUILTIN_GPF(T, N, MAP) \
  VAR2 (T, N, MAP, sf, df)
#define BUILTIN_SDQ_I(T, N, MAP) \
  VAR4 (T, N, MAP, qi, hi, si, di)
#define BUILTIN_SD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, hi, si)
#define BUILTIN_V2F(T, N, MAP) \
  VAR2 (T, N, MAP, v2sf, v2df)
#define BUILTIN_VALL(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
         v4si, v2di, v2sf, v4sf, v2df)
#define BUILTIN_VALLDI(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
         v4si, v2di, v2sf, v4sf, v2df, di)
#define BUILTIN_VALLDIF(T, N, MAP) \
  VAR12 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
         v4si, v2di, v2sf, v4sf, v2df, di, df)
#define BUILTIN_VB(T, N, MAP) \
  VAR2 (T, N, MAP, v8qi, v16qi)
#define BUILTIN_VD(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v4hi, v2si, v2sf)
#define BUILTIN_VDC(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VDIC(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VDN(T, N, MAP) \
  VAR3 (T, N, MAP, v4hi, v2si, di)
#define BUILTIN_VDQ(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDQF(T, N, MAP) \
  VAR3 (T, N, MAP, v2sf, v4sf, v2df)
#define BUILTIN_VDQH(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v8hi)
#define BUILTIN_VDQHS(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQIF(T, N, MAP) \
  VAR9 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
#define BUILTIN_VDQM(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQV(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v4si)
#define BUILTIN_VDQQH(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v16qi, v4hi, v8hi)
#define BUILTIN_VDQ_BHSI(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQ_I(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDW(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_BHSI(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v2si)
#define BUILTIN_VD_RE(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VQ(T, N, MAP) \
  VAR6 (T, N, MAP, v16qi, v8hi, v4si, v2di, v4sf, v2df)
#define BUILTIN_VQN(T, N, MAP) \
  VAR3 (T, N, MAP, v8hi, v4si, v2di)
#define BUILTIN_VQW(T, N, MAP) \
  VAR3 (T, N, MAP, v16qi, v8hi, v4si)
#define BUILTIN_VQ_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v8hi, v4si)
#define BUILTIN_VQ_S(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VSDQ_HSI(T, N, MAP) \
  VAR6 (T, N, MAP, v4hi, v8hi, v2si, v4si, hi, si)
#define BUILTIN_VSDQ_I(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
#define BUILTIN_VSDQ_I_BHSI(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
#define BUILTIN_VSDQ_I_DI(T, N, MAP) \
  VAR8 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
#define BUILTIN_VSD_HSI(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v2si, hi, si)
#define BUILTIN_VSQN_HSDI(T, N, MAP) \
  VAR6 (T, N, MAP, v8hi, v4si, v2di, hi, si, di)
#define BUILTIN_VSTRUCT(T, N, MAP) \
  VAR3 (T, N, MAP, oi, ci, xi)

static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};

#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,
  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			     + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_BUILTIN_MAX
};
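
/* Because VAR1 is redefined before aarch64-simd-builtins.def is included a
   second time, the same hypothetical entry VAR1 (BINOP, foo, 0, v8qi) now
   expands to the enumerator AARCH64_SIMD_BUILTIN_BINOP_foov8qi, so the enum
   stays in the same order as aarch64_simd_builtin_data, with function codes
   starting at AARCH64_SIMD_BUILTIN_BASE + 1.  */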

static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Return a tree for a signed or unsigned argument of either
   the mode specified by MODE, or the inner mode of MODE.  */
tree
aarch64_build_scalar_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
#undef INT_TYPES
#define INT_TYPES \
  AARCH64_TYPE_BUILDER (QI) \
  AARCH64_TYPE_BUILDER (HI) \
  AARCH64_TYPE_BUILDER (SI) \
  AARCH64_TYPE_BUILDER (DI) \
  AARCH64_TYPE_BUILDER (EI) \
  AARCH64_TYPE_BUILDER (OI) \
  AARCH64_TYPE_BUILDER (CI) \
  AARCH64_TYPE_BUILDER (XI) \
  AARCH64_TYPE_BUILDER (TI) \

/* Statically declare all the possible types we might need.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_p = NULL; \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL;

  INT_TYPES

  static tree float_aarch64_type_node = NULL;
  static tree double_aarch64_type_node = NULL;

  gcc_assert (!VECTOR_MODE_P (mode));

/* If we've already initialised this type, don't initialise it again,
   otherwise ask for a new type of the correct size.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return (X##_aarch64_type_node_u \
	      ? X##_aarch64_type_node_u \
	      : X##_aarch64_type_node_u \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else if (poly_p) \
      return (X##_aarch64_type_node_p \
	      ? X##_aarch64_type_node_p \
	      : X##_aarch64_type_node_p \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else \
      return (X##_aarch64_type_node_s \
	      ? X##_aarch64_type_node_s \
	      : X##_aarch64_type_node_s \
		  = make_signed_type (GET_MODE_PRECISION (mode))); \
    break;

  switch (mode)
    {
      INT_TYPES
    case SFmode:
      if (!float_aarch64_type_node)
	{
	  float_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (float_aarch64_type_node) = FLOAT_TYPE_SIZE;
	  layout_type (float_aarch64_type_node);
	}
      return float_aarch64_type_node;
      break;
    case DFmode:
      if (!double_aarch64_type_node)
	{
	  double_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (double_aarch64_type_node) = DOUBLE_TYPE_SIZE;
	  layout_type (double_aarch64_type_node);
	}
      return double_aarch64_type_node;
      break;
    default:
      gcc_unreachable ();
    }
}

tree
aarch64_build_vector_type (enum machine_mode mode,
			   bool unsigned_p,
			   bool poly_p)
{
  tree eltype;

#define VECTOR_TYPES \
  AARCH64_TYPE_BUILDER (V16QI) \
  AARCH64_TYPE_BUILDER (V8HI) \
  AARCH64_TYPE_BUILDER (V4SI) \
  AARCH64_TYPE_BUILDER (V2DI) \
  AARCH64_TYPE_BUILDER (V8QI) \
  AARCH64_TYPE_BUILDER (V4HI) \
  AARCH64_TYPE_BUILDER (V2SI) \
  \
  AARCH64_TYPE_BUILDER (V4SF) \
  AARCH64_TYPE_BUILDER (V2DF) \
  AARCH64_TYPE_BUILDER (V2SF) \
/* Declare our "cache" of values.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL; \
  static tree X##_aarch64_type_node_p = NULL;

  VECTOR_TYPES

  gcc_assert (VECTOR_MODE_P (mode));

#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return X##_aarch64_type_node_u \
	     ? X##_aarch64_type_node_u \
	     : X##_aarch64_type_node_u \
		 = build_vector_type_for_mode (aarch64_build_scalar_type \
						 (GET_MODE_INNER (mode), \
						  unsigned_p, poly_p), mode); \
    else if (poly_p) \
      return X##_aarch64_type_node_p \
	     ? X##_aarch64_type_node_p \
	     : X##_aarch64_type_node_p \
		 = build_vector_type_for_mode (aarch64_build_scalar_type \
						 (GET_MODE_INNER (mode), \
						  unsigned_p, poly_p), mode); \
    else \
      return X##_aarch64_type_node_s \
	     ? X##_aarch64_type_node_s \
	     : X##_aarch64_type_node_s \
		 = build_vector_type_for_mode (aarch64_build_scalar_type \
						 (GET_MODE_INNER (mode), \
						  unsigned_p, poly_p), mode); \
    break;

  switch (mode)
    {
    default:
      eltype = aarch64_build_scalar_type (GET_MODE_INNER (mode),
					  unsigned_p, poly_p);
      return build_vector_type_for_mode (eltype, mode);
      break;
      VECTOR_TYPES
    }
}

tree
aarch64_build_type (enum machine_mode mode, bool unsigned_p, bool poly_p)
{
  if (VECTOR_MODE_P (mode))
    return aarch64_build_vector_type (mode, unsigned_p, poly_p);
  else
    return aarch64_build_scalar_type (mode, unsigned_p, poly_p);
}

tree
aarch64_build_signed_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, false);
}

tree
aarch64_build_unsigned_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, true, false);
}

tree
aarch64_build_poly_type (enum machine_mode mode)
{
  return aarch64_build_type (mode, false, true);
}
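
/* These helpers are keyed only on the machine mode and the two flags; for
   example, aarch64_build_type (V4SImode, true, false) returns the unsigned
   V4SI vector type (built around the unsigned SImode scalar node), and
   aarch64_build_type (DImode, false, true) returns the poly64 scalar type.
   Each node is built once and afterwards served from the static cache.  */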

static void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  /* Signed scalar type nodes.  */
  tree aarch64_simd_intQI_type_node = aarch64_build_signed_type (QImode);
  tree aarch64_simd_intHI_type_node = aarch64_build_signed_type (HImode);
  tree aarch64_simd_intSI_type_node = aarch64_build_signed_type (SImode);
  tree aarch64_simd_intDI_type_node = aarch64_build_signed_type (DImode);
  tree aarch64_simd_intTI_type_node = aarch64_build_signed_type (TImode);
  tree aarch64_simd_intEI_type_node = aarch64_build_signed_type (EImode);
  tree aarch64_simd_intOI_type_node = aarch64_build_signed_type (OImode);
  tree aarch64_simd_intCI_type_node = aarch64_build_signed_type (CImode);
  tree aarch64_simd_intXI_type_node = aarch64_build_signed_type (XImode);

  /* Unsigned scalar type nodes.  */
  tree aarch64_simd_intUQI_type_node = aarch64_build_unsigned_type (QImode);
  tree aarch64_simd_intUHI_type_node = aarch64_build_unsigned_type (HImode);
  tree aarch64_simd_intUSI_type_node = aarch64_build_unsigned_type (SImode);
  tree aarch64_simd_intUDI_type_node = aarch64_build_unsigned_type (DImode);

  /* Poly scalar type nodes.  */
  tree aarch64_simd_polyQI_type_node = aarch64_build_poly_type (QImode);
  tree aarch64_simd_polyHI_type_node = aarch64_build_poly_type (HImode);
  tree aarch64_simd_polyDI_type_node = aarch64_build_poly_type (DImode);
  tree aarch64_simd_polyTI_type_node = aarch64_build_poly_type (TImode);

  /* Float type nodes.  */
  tree aarch64_simd_float_type_node = aarch64_build_signed_type (SFmode);
  tree aarch64_simd_double_type_node = aarch64_build_signed_type (DFmode);

  /* Define typedefs which exactly correspond to the modes we are basing vector
     types on.  If you change these names you'll need to change
     the table used by aarch64_mangle_type too.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intEI_type_node,
					     "__builtin_aarch64_simd_ei");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intOI_type_node,
					     "__builtin_aarch64_simd_oi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intCI_type_node,
					     "__builtin_aarch64_simd_ci");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intXI_type_node,
					     "__builtin_aarch64_simd_xi");

  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUDI_type_node,
					     "__builtin_aarch64_simd_udi");

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      const char *const modenames[] =
	{
	  "v8qi", "v4hi", "v2si", "v2sf", "di", "df",
	  "v16qi", "v8hi", "v4si", "v4sf", "v2di", "v2df",
	  "ti", "ei", "oi", "xi", "si", "sf", "hi", "qi"
	};
      const enum machine_mode modes[] =
	{
	  V8QImode, V4HImode, V2SImode, V2SFmode, DImode, DFmode,
	  V16QImode, V8HImode, V4SImode, V4SFmode, V2DImode,
	  V2DFmode, TImode, EImode, OImode, XImode, SImode,
	  SFmode, HImode, QImode
	};
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      gcc_assert (ARRAY_SIZE (modenames) == T_MAX);

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
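      /* For example, a two-operand store pattern using TYPES_STORE1 (void
	 return) starts with op_num == 1 but arg_num == 2, because
	 qualifiers[0] describes the void return type, which has no RTL
	 operand; for a non-void binop both indices start out equal.  */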
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  enum machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[arg_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = modes[d->mode];

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_build_type (op_mode,
				       qualifiers & qualifier_unsigned,
				       qualifiers & qualifier_poly);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s_%s",
		  d->name, modenames[d->mode], type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s",
		  d->name, modenames[d->mode]);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
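
/* The loop above names each function "__builtin_aarch64_<name><mode>"; a
   hypothetical datum {"foo", T_V8QI, ...} becomes __builtin_aarch64_foov8qi.
   If any argument or the return type is unsigned or polynomial, a signature
   of 's'/'u'/'p' characters (one per position, return type first) is
   appended, e.g. __builtin_aarch64_foov8qi_uuu.  */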

void
aarch64_init_builtins (void)
{
  if (TARGET_SIMD)
    aarch64_init_simd_builtins ();
}

tree
aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= AARCH64_BUILTIN_MAX)
    return error_mark_node;

  return aarch64_builtin_decls[code];
}

typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_STOP
} builtin_simd_arg;

static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, ...)
{
  va_list ap;
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  va_start (ap, exp);

  for (;;)
    {
      builtin_simd_arg thisarg = (builtin_simd_arg) va_arg (ap, int);

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
			  "expected %<const int%>", argc + 1);
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  va_end (ap);

  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return 0;

  emit_insn (pat);

  return target;
}

/* Expand an AArch64 AdvSIMD builtin (intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	 the function has a non-void return type) or the operand for the
	 first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
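      /* Concretely: for a non-void builtin, qualifiers index k corresponds
	 to insn operand k and to call argument k - 1; for a void builtin
	 every insn operand is an argument, so the operand index is also
	 k - 1 (that is, k - is_void).  */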
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp,
	   args[1],
	   args[2],
	   args[3],
	   args[4],
	   SIMD_ARG_STOP);
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient.  */
rtx
aarch64_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  int fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode >= AARCH64_SIMD_BUILTIN_BASE)
    return aarch64_simd_expand_builtin (fcode, exp, target);

  return NULL_RTX;
}

tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  enum machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
   : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
      ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
      : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	 : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	case BUILT_IN_CLZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	    return NULL_TREE;
	  }
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }

	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}
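
/* This hook lets the vectorizer replace a scalar libm call with one of the
   SIMD builtins when the element modes and lane counts match; for example,
   BUILT_IN_FLOOR with two DFmode lanes in and out resolves, through
   AARCH64_FIND_FRINT_VARIANT, to
   aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_floorv2df].  */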

#undef VAR1
#define VAR1(T, N, MAP, A) \
  case AARCH64_SIMD_BUILTIN_##T##_##N##A:
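
/* With this final redefinition the BUILTIN_* iterator macros can be used as
   case labels: BUILTIN_VALLDI (UNOP, abs, 2) below expands to the eleven
   labels case AARCH64_SIMD_BUILTIN_UNOP_absv8qi: ... case
   AARCH64_SIMD_BUILTIN_UNOP_absdi:, one per mode in the VALLDI set.  */
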
tree
aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
		      bool ignore ATTRIBUTE_UNUSED)
{
  int fcode = DECL_FUNCTION_CODE (fndecl);
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  switch (fcode)
    {
      BUILTIN_VALLDI (UNOP, abs, 2)
	return fold_build1 (ABS_EXPR, type, args[0]);
      break;
      BUILTIN_VALLDI (BINOP, cmge, 0)
	return fold_build2 (GE_EXPR, type, args[0], args[1]);
      break;
      BUILTIN_VALLDI (BINOP, cmgt, 0)
	return fold_build2 (GT_EXPR, type, args[0], args[1]);
      break;
      BUILTIN_VALLDI (BINOP, cmeq, 0)
	return fold_build2 (EQ_EXPR, type, args[0], args[1]);
      break;
      BUILTIN_VSDQ_I_DI (BINOP, cmtst, 0)
	{
	  tree and_node = fold_build2 (BIT_AND_EXPR, type, args[0], args[1]);
	  tree vec_zero_node = build_zero_cst (type);
	  return fold_build2 (NE_EXPR, type, and_node, vec_zero_node);
	  break;
	}
      VAR1 (UNOP, floatv2si, 2, v2sf)
      VAR1 (UNOP, floatv4si, 2, v4sf)
      VAR1 (UNOP, floatv2di, 2, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
    default:
      break;
    }

  return NULL_TREE;
}

bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;
  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  switch (fcode)
	    {
	      BUILTIN_VALL (UNOP, reduc_splus_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_PLUS_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smax_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MAX_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MIN_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;

	    default:
	      break;
	    }
	}
    }

  if (new_stmt)
    {
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}

#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef BUILTIN_DX
#undef BUILTIN_SDQ_I
#undef BUILTIN_SD_HSI
#undef BUILTIN_V2F
#undef BUILTIN_VALL
#undef BUILTIN_VB
#undef BUILTIN_VD
#undef BUILTIN_VDC
#undef BUILTIN_VDIC
#undef BUILTIN_VDN
#undef BUILTIN_VDQ
#undef BUILTIN_VDQF
#undef BUILTIN_VDQH
#undef BUILTIN_VDQHS
#undef BUILTIN_VDQIF
#undef BUILTIN_VDQM
#undef BUILTIN_VDQV
#undef BUILTIN_VDQ_BHSI
#undef BUILTIN_VDQ_I
#undef BUILTIN_VDW
#undef BUILTIN_VD_BHSI
#undef BUILTIN_VD_HSI
#undef BUILTIN_VD_RE
#undef BUILTIN_VQ
#undef BUILTIN_VQN
#undef BUILTIN_VQW
#undef BUILTIN_VQ_HSI
#undef BUILTIN_VQ_S
#undef BUILTIN_VSDQ_HSI
#undef BUILTIN_VSDQ_I
#undef BUILTIN_VSDQ_I_BHSI
#undef BUILTIN_VSDQ_I_DI
#undef BUILTIN_VSD_HSI
#undef BUILTIN_VSQN_HSDI
#undef BUILTIN_VSTRUCT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11