]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/aarch64/aarch64-builtins.c
gimple.h: Remove all includes.
[thirdparty/gcc.git] / gcc / config / aarch64 / aarch64-builtins.c
CommitLineData
43e9d192 1/* Builtins' description for AArch64 SIMD architecture.
d1e082c2 2 Copyright (C) 2011-2013 Free Software Foundation, Inc.
43e9d192
IB
3 Contributed by ARM Ltd.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tree.h"
d8a2d370
DN
27#include "stor-layout.h"
28#include "stringpool.h"
29#include "calls.h"
43e9d192
IB
30#include "expr.h"
31#include "tm_p.h"
32#include "recog.h"
33#include "langhooks.h"
34#include "diagnostic-core.h"
35#include "optabs.h"
2fb9a547
AM
36#include "pointer-set.h"
37#include "hash-table.h"
38#include "vec.h"
39#include "ggc.h"
40#include "basic-block.h"
41#include "tree-ssa-alias.h"
42#include "internal-fn.h"
43#include "gimple-fold.h"
44#include "tree-eh.h"
45#include "gimple-expr.h"
46#include "is-a.h"
0ac198d3 47#include "gimple.h"
5be5c238 48#include "gimple-iterator.h"
43e9d192 49
/* The machine modes a SIMD builtin can be registered for.  These values
   index the modenames[] and modes[] tables in aarch64_init_simd_builtins,
   so the three must be kept in sync (checked there with
   gcc_assert (ARRAY_SIZE (modenames) == T_MAX)).  */
enum aarch64_simd_builtin_type_mode
{
  T_V8QI,
  T_V4HI,
  T_V2SI,
  T_V2SF,
  T_DI,
  T_DF,
  T_V16QI,
  T_V8HI,
  T_V4SI,
  T_V4SF,
  T_V2DI,
  T_V2DF,
  T_TI,
  T_EI,
  T_OI,
  T_XI,
  T_SI,
  T_SF,
  T_HI,
  T_QI,
  T_MAX
};
74
/* Map a lower-case mode name, as spelled in aarch64-simd-builtins.def,
   to its T_* enumerator.  UP (X) does the token pasting, so that
   UP (v8qi) expands to v8qi_UP and hence to T_V8QI.  */
#define v8qi_UP  T_V8QI
#define v4hi_UP  T_V4HI
#define v2si_UP  T_V2SI
#define v2sf_UP  T_V2SF
#define di_UP    T_DI
#define df_UP    T_DF
#define v16qi_UP T_V16QI
#define v8hi_UP  T_V8HI
#define v4si_UP  T_V4SI
#define v4sf_UP  T_V4SF
#define v2di_UP  T_V2DI
#define v2df_UP  T_V2DF
#define ti_UP    T_TI
#define ei_UP    T_EI
#define oi_UP    T_OI
#define xi_UP    T_XI
#define si_UP    T_SI
#define sf_UP    T_SF
#define hi_UP    T_HI
#define qi_UP    T_QI

#define UP(X) X##_UP
97
/* Upper bound on the number of entries in a qualifiers array (return
   type plus arguments).  */
#define SIMD_MAX_BUILTIN_ARGS 5

/* Bit-flags describing the type of each operand of a builtin.  ORed
   combinations that are used as a unit get their own enumerator so
   the qualifier tables can name them directly.  Element 0 of any
   qualifiers array always describes the return type.  */
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0 */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1 */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2 */
  /* const T *foo.  */
  qualifier_const_pointer = 0x6, /* qualifier_const | qualifier_pointer */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3 */
  qualifier_maybe_immediate = 0x10, /* 1 << 4 */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5 */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6 */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7 */
  /* qualifier_pointer | qualifier_map_mode */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const_pointer | qualifier_map_mode */
  qualifier_const_pointer_map_mode = 0x86
};
43e9d192
IB
129
/* Description of a single AdvSIMD builtin; one record per entry expanded
   from aarch64-simd-builtins.def (see the VAR1 initialiser below).  */
typedef struct
{
  /* Name stem; the registered builtin is "__builtin_aarch64_<name><mode>".  */
  const char *name;
  /* User-facing mode, used to index modenames[]/modes[] when
     qualifier_map_mode is set.  */
  enum aarch64_simd_builtin_type_mode mode;
  /* The insn pattern this builtin expands to.  */
  const enum insn_code code;
  /* Function code; assigned sequentially in aarch64_init_simd_builtins.  */
  unsigned int fcode;
  /* Qualifiers array: [0] is the return type, [1..] the arguments.  */
  enum aarch64_type_qualifiers *qualifiers;
} aarch64_simd_builtin_datum;
138
/* Qualifier tables, one per builtin "shape".  Each array is indexed
   [0] = return type, [1..] = arguments.  The TYPES_* macros provide the
   names used by aarch64-simd-builtins.def; several shapes share a table
   when their qualifiers coincide.  */
static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
#define TYPES_CREATE (aarch64_types_unop_qualifiers)
#define TYPES_REINTERP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_quadop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_none };
#define TYPES_QUADOP (aarch64_types_quadop_qualifiers)

/* Shapes whose last argument must be an immediate (lane index or shift
   amount, per the TYPES_* aliases below).  */
static enum aarch64_type_qualifiers
aarch64_types_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETLANE (aarch64_types_getlane_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_getlane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETLANE (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_setlane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

/* Loads take a const pointer to the element type of the vector, built
   via qualifier_map_mode from the T_* mode stored in the datum.  */
static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
192
/* CF<MAP> selects the CODE_FOR_* insn enumerator for builtin N in mode X.
   The MAP digit(s) come from the third field of each entry in
   aarch64-simd-builtins.def: 0 prefixes "aarch64_", 1-4 append the digit,
   10 pastes name and mode verbatim.  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

/* VAR<n> expands to <n> aarch64_simd_builtin_datum initialisers, one per
   mode argument.  The fcode field is filled in later (see
   aarch64_init_simd_builtins), hence the 0 here.  */
#define VAR1(T, N, MAP, A) \
  {#N, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
342be7f7
JG
235
/* BUILTIN_<ITERATOR> macros should expand to cover the same range of
   modes as is given for each define_mode_iterator in
   config/aarch64/iterators.md.  */

#define BUILTIN_DX(T, N, MAP) \
  VAR2 (T, N, MAP, di, df)
#define BUILTIN_GPF(T, N, MAP) \
  VAR2 (T, N, MAP, sf, df)
#define BUILTIN_SDQ_I(T, N, MAP) \
  VAR4 (T, N, MAP, qi, hi, si, di)
#define BUILTIN_SD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, hi, si)
#define BUILTIN_V2F(T, N, MAP) \
  VAR2 (T, N, MAP, v2sf, v2df)
#define BUILTIN_VALL(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df)
#define BUILTIN_VALLDI(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di)
#define BUILTIN_VB(T, N, MAP) \
  VAR2 (T, N, MAP, v8qi, v16qi)
#define BUILTIN_VD(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v4hi, v2si, v2sf)
#define BUILTIN_VDC(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VDIC(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VDN(T, N, MAP) \
  VAR3 (T, N, MAP, v4hi, v2si, di)
#define BUILTIN_VDQ(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDQF(T, N, MAP) \
  VAR3 (T, N, MAP, v2sf, v4sf, v2df)
#define BUILTIN_VDQH(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v8hi)
#define BUILTIN_VDQHS(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQIF(T, N, MAP) \
  VAR9 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
#define BUILTIN_VDQM(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQV(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v4si)
#define BUILTIN_VDQ_BHSI(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQ_I(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDW(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_BHSI(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v2si)
#define BUILTIN_VD_RE(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VQ(T, N, MAP) \
  VAR6 (T, N, MAP, v16qi, v8hi, v4si, v2di, v4sf, v2df)
#define BUILTIN_VQN(T, N, MAP) \
  VAR3 (T, N, MAP, v8hi, v4si, v2di)
#define BUILTIN_VQW(T, N, MAP) \
  VAR3 (T, N, MAP, v16qi, v8hi, v4si)
#define BUILTIN_VQ_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v8hi, v4si)
#define BUILTIN_VQ_S(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VSDQ_HSI(T, N, MAP) \
  VAR6 (T, N, MAP, v4hi, v8hi, v2si, v4si, hi, si)
#define BUILTIN_VSDQ_I(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
#define BUILTIN_VSDQ_I_BHSI(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
#define BUILTIN_VSDQ_I_DI(T, N, MAP) \
  VAR8 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
#define BUILTIN_VSD_HSI(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v2si, hi, si)
#define BUILTIN_VSQN_HSDI(T, N, MAP) \
  VAR6 (T, N, MAP, v8hi, v4si, v2di, hi, si, di)
/* NOTE(review): this expands UP (ci) -> ci_UP, which has no #define in
   this file chunk, and T_CI is absent from aarch64_simd_builtin_type_mode
   -- confirm ci_UP/T_CI are provided elsewhere before relying on
   BUILTIN_VSTRUCT here.  */
#define BUILTIN_VSTRUCT(T, N, MAP) \
  VAR3 (T, N, MAP, oi, ci, xi)
43e9d192
IB
316
/* One descriptor per builtin variant; the .def file expands through the
   BUILTIN_*/VAR*/CF* macros above into initialisers of
   aarch64_simd_builtin_datum.  */
static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};
320
/* Redefine VAR1 so that a second inclusion of aarch64-simd-builtins.def
   expands to one enumerator per builtin variant, in the same order as
   the aarch64_simd_builtin_data entries above.  */
#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,
  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			      + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_BUILTIN_MAX
};

/* FUNCTION_DECL for every registered builtin, indexed by function code;
   GTY(()) roots it for the garbage collector.  */
static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

/* NOTE(review): these two constants appear unused in this part of the
   file -- confirm against the rest of the file before removing.  */
#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6
339
b5828b4b
JG
/* Return a tree for a signed or unsigned argument of either
   the mode specified by MODE, or the inner mode of MODE.
   Type nodes are built lazily and cached in function-static variables,
   one signed/unsigned pair per integer mode (declared via the
   AARCH64_TYPE_BUILDER X-macro) plus one each for SFmode/DFmode.
   MODE must be scalar (asserted below).  */
tree
aarch64_build_scalar_type (enum machine_mode mode, bool unsigned_p)
{
/* The list of integer modes we can build types for; each expansion of
   AARCH64_TYPE_BUILDER below is applied to every entry.  */
#undef INT_TYPES
#define INT_TYPES \
  AARCH64_TYPE_BUILDER (QI) \
  AARCH64_TYPE_BUILDER (HI) \
  AARCH64_TYPE_BUILDER (SI) \
  AARCH64_TYPE_BUILDER (DI) \
  AARCH64_TYPE_BUILDER (EI) \
  AARCH64_TYPE_BUILDER (OI) \
  AARCH64_TYPE_BUILDER (CI) \
  AARCH64_TYPE_BUILDER (XI) \
  AARCH64_TYPE_BUILDER (TI) \

/* Statically declare all the possible types we might need.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL;

  INT_TYPES

  static tree float_aarch64_type_node = NULL;
  static tree double_aarch64_type_node = NULL;

  gcc_assert (!VECTOR_MODE_P (mode));

/* If we've already initialised this type, don't initialise it again,
   otherwise ask for a new type of the correct size.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return (X##_aarch64_type_node_u \
	      ? X##_aarch64_type_node_u \
	      : X##_aarch64_type_node_u \
		  = make_unsigned_type (GET_MODE_PRECISION (mode))); \
    else \
      return (X##_aarch64_type_node_s \
	      ? X##_aarch64_type_node_s \
	      : X##_aarch64_type_node_s \
		  = make_signed_type (GET_MODE_PRECISION (mode))); \
    break;

  switch (mode)
    {
      /* Expands to one case label (with lazy-init body) per integer mode.  */
      INT_TYPES
    case SFmode:
      if (!float_aarch64_type_node)
	{
	  float_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (float_aarch64_type_node) = FLOAT_TYPE_SIZE;
	  layout_type (float_aarch64_type_node);
	}
      return float_aarch64_type_node;
      break;
    case DFmode:
      if (!double_aarch64_type_node)
	{
	  double_aarch64_type_node = make_node (REAL_TYPE);
	  TYPE_PRECISION (double_aarch64_type_node) = DOUBLE_TYPE_SIZE;
	  layout_type (double_aarch64_type_node);
	}
      return double_aarch64_type_node;
      break;
    default:
      gcc_unreachable ();
    }
}
412
/* Return a vector type tree for MODE whose element type is signed or
   unsigned per UNSIGNED_P.  The listed modes are cached in
   function-static variables (same X-macro scheme as
   aarch64_build_scalar_type); any other vector mode falls through to
   the default case and builds an uncached type.  MODE must be a vector
   mode (asserted below).  */
tree
aarch64_build_vector_type (enum machine_mode mode, bool unsigned_p)
{
  tree eltype;

#define VECTOR_TYPES \
  AARCH64_TYPE_BUILDER (V16QI) \
  AARCH64_TYPE_BUILDER (V8HI) \
  AARCH64_TYPE_BUILDER (V4SI) \
  AARCH64_TYPE_BUILDER (V2DI) \
  AARCH64_TYPE_BUILDER (V8QI) \
  AARCH64_TYPE_BUILDER (V4HI) \
  AARCH64_TYPE_BUILDER (V2SI) \
  \
  AARCH64_TYPE_BUILDER (V4SF) \
  AARCH64_TYPE_BUILDER (V2DF) \
  AARCH64_TYPE_BUILDER (V2SF) \
/* Declare our "cache" of values.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  static tree X##_aarch64_type_node_s = NULL; \
  static tree X##_aarch64_type_node_u = NULL;

  VECTOR_TYPES

  gcc_assert (VECTOR_MODE_P (mode));

/* Case label per cached mode: return the cached node, lazily building
   it from the corresponding scalar element type on first use.  */
#undef AARCH64_TYPE_BUILDER
#define AARCH64_TYPE_BUILDER(X) \
  case X##mode: \
    if (unsigned_p) \
      return X##_aarch64_type_node_u \
	     ? X##_aarch64_type_node_u \
	     : X##_aarch64_type_node_u \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p), mode); \
    else \
      return X##_aarch64_type_node_s \
	     ? X##_aarch64_type_node_s \
	     : X##_aarch64_type_node_s \
		= build_vector_type_for_mode (aarch64_build_scalar_type \
						(GET_MODE_INNER (mode), \
						 unsigned_p), mode); \
    break;

  switch (mode)
    {
    default:
      eltype = aarch64_build_scalar_type (GET_MODE_INNER (mode), unsigned_p);
      return build_vector_type_for_mode (eltype, mode);
      break;
      VECTOR_TYPES
    }
}
468
469tree
470aarch64_build_type (enum machine_mode mode, bool unsigned_p)
471{
472 if (VECTOR_MODE_P (mode))
473 return aarch64_build_vector_type (mode, unsigned_p);
474 else
475 return aarch64_build_scalar_type (mode, unsigned_p);
476}
477
/* Register every AdvSIMD builtin: create the poly/scalar/float type
   nodes, register the __builtin_aarch64_simd_* typedefs with the
   front end, then walk aarch64_simd_builtin_data building one
   FUNCTION_DECL per entry, deriving each signature from the insn_data
   of the matching RTL pattern and the entry's qualifiers array.  */
static void
aarch64_init_simd_builtins (void)
{
  /* Function codes start just past AARCH64_SIMD_BUILTIN_BASE, matching
     the enum laid out by the second .def inclusion above.  */
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  /* In order that 'poly' types mangle correctly they must not share
     a base tree with the other scalar types, thus we must generate them
     as a special case.  */
  tree aarch64_simd_polyQI_type_node =
    make_signed_type (GET_MODE_PRECISION (QImode));
  tree aarch64_simd_polyHI_type_node =
    make_signed_type (GET_MODE_PRECISION (HImode));

  /* Scalar type nodes.  */
  tree aarch64_simd_intQI_type_node = aarch64_build_type (QImode, false);
  tree aarch64_simd_intHI_type_node = aarch64_build_type (HImode, false);
  tree aarch64_simd_intSI_type_node = aarch64_build_type (SImode, false);
  tree aarch64_simd_intDI_type_node = aarch64_build_type (DImode, false);
  tree aarch64_simd_intTI_type_node = aarch64_build_type (TImode, false);
  tree aarch64_simd_intEI_type_node = aarch64_build_type (EImode, false);
  tree aarch64_simd_intOI_type_node = aarch64_build_type (OImode, false);
  tree aarch64_simd_intCI_type_node = aarch64_build_type (CImode, false);
  tree aarch64_simd_intXI_type_node = aarch64_build_type (XImode, false);
  tree aarch64_simd_intUQI_type_node = aarch64_build_type (QImode, true);
  tree aarch64_simd_intUHI_type_node = aarch64_build_type (HImode, true);
  tree aarch64_simd_intUSI_type_node = aarch64_build_type (SImode, true);
  tree aarch64_simd_intUDI_type_node = aarch64_build_type (DImode, true);

  /* Float type nodes.  */
  tree aarch64_simd_float_type_node = aarch64_build_type (SFmode, false);
  tree aarch64_simd_double_type_node = aarch64_build_type (DFmode, false);

  /* Define typedefs which exactly correspond to the modes we are basing vector
     types on.  If you change these names you'll need to change
     the table used by aarch64_mangle_type too.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intEI_type_node,
					     "__builtin_aarch64_simd_ei");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intOI_type_node,
					     "__builtin_aarch64_simd_oi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intCI_type_node,
					     "__builtin_aarch64_simd_ci");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intXI_type_node,
					     "__builtin_aarch64_simd_xi");

  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intUDI_type_node,
					     "__builtin_aarch64_simd_udi");

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      /* Indexed by aarch64_simd_builtin_type_mode; must stay in sync
	 with that enum (asserted below against T_MAX).  */
      const char *const modenames[] =
	{
	  "v8qi", "v4hi", "v2si", "v2sf", "di", "df",
	  "v16qi", "v8hi", "v4si", "v4sf", "v2di", "v2df",
	  "ti", "ei", "oi", "xi", "si", "sf", "hi", "qi"
	};
      const enum machine_mode modes[] =
	{
	  V8QImode, V4HImode, V2SImode, V2SFmode, DImode, DFmode,
	  V16QImode, V8HImode, V4SImode, V4SFmode, V2DImode,
	  V2DFmode, TImode, EImode, OImode, XImode, SImode,
	  SFmode, HImode, QImode
	};
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      gcc_assert (ARRAY_SIZE (modenames) == T_MAX);

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  enum machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	    op_mode = modes[d->mode];

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_build_type (op_mode,
				       qualifiers & qualifier_unsigned);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	    eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      /* Unsigned arguments force a "_sss"/"_uss"-style suffix so that
	 signed and unsigned variants get distinct builtin names.  */
      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s_%s",
		  d->name, modenames[d->mode], type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s",
		  d->name, modenames[d->mode]);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
658
342be7f7
JG
659void
660aarch64_init_builtins (void)
43e9d192 661{
342be7f7
JG
662 if (TARGET_SIMD)
663 aarch64_init_simd_builtins ();
43e9d192
IB
664}
665
119103ca
JG
666tree
667aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
668{
669 if (code >= AARCH64_BUILTIN_MAX)
670 return error_mark_node;
671
672 return aarch64_builtin_decls[code];
673}
674
43e9d192
IB
/* How aarch64_simd_expand_args should treat each argument:
   copy it into a register, require it to be a constant, or stop
   scanning (list terminator for the varargs interface).  */
typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_STOP
} builtin_simd_arg;
681
43e9d192
IB
/* Expand call EXP through insn pattern ICODE.  The varargs are
   builtin_simd_arg values, one per call argument, terminated by
   SIMD_ARG_STOP.  HAVE_RETVAL is 1 when the pattern produces a value
   (operand 0 of the pattern is then the destination) and 0 for void
   builtins.  Returns TARGET (possibly a fresh pseudo), or 0 when the
   generator produced no pattern.  */
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, ...)
{
  va_list ap;
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  /* Make sure the return value lands somewhere the pattern's operand-0
     predicate accepts.  */
  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  va_start (ap, exp);

  for (;;)
    {
      builtin_simd_arg thisarg = (builtin_simd_arg) va_arg (ap, int);

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  /* Pattern operands are offset by one when there is a return
	     value, hence argc + have_retval throughout.  */
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_CONSTANT:
	      /* NOTE(review): on error we diagnose but still fall
		 through to GEN_FCN with the bad operand -- confirm the
		 generator tolerates this.  */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
			  "expected %<const int%>", argc + 1);
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  va_end (ap);

  /* Dispatch on the argument count; GEN_FCN takes the destination first
     when the pattern has a return value.  */
  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return 0;

  emit_insn (pat);

  return target;
}
802
/* Expand an AArch64 AdvSIMD builtin(intrinsic).  FCODE is the function
   code assigned in aarch64_init_simd_builtins; EXP is the CALL_EXPR and
   TARGET a suggested destination.  Classifies each argument as
   constant or register and delegates to aarch64_simd_expand_args.  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
		&aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  /* Void builtins have no return operand in the pattern, so pad the
     count to keep k aligned with the qualifiers array.  */
  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	 the function has a non-void return type) or the operand for the
	 first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it.  */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp,
	   args[1],
	   args[2],
	   args[3],
	   args[4],
	   SIMD_ARG_STOP);
}
342be7f7
JG
862
863/* Expand an expression EXP that calls a built-in function,
864 with result going to TARGET if that's convenient. */
865rtx
866aarch64_expand_builtin (tree exp,
867 rtx target,
868 rtx subtarget ATTRIBUTE_UNUSED,
869 enum machine_mode mode ATTRIBUTE_UNUSED,
870 int ignore ATTRIBUTE_UNUSED)
871{
872 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
873 int fcode = DECL_FUNCTION_CODE (fndecl);
874
875 if (fcode >= AARCH64_SIMD_BUILTIN_BASE)
876 return aarch64_simd_expand_builtin (fcode, exp, target);
877
878 return NULL_RTX;
879}
42fc9a7f
JG
880
/* Implement TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION.  If there is
   an AdvSIMD builtin that realizes the vectorized form of the standard
   builtin FNDECL, taking input vectors of type TYPE_IN and producing
   vectors of type TYPE_OUT, return its decl; otherwise NULL_TREE.  */
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  enum machine_mode in_mode, out_mode;
  int in_n, out_n;

  /* Only vector-to-vector mappings are handled here.  */
  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  /* Element mode and lane count of the result and of the input.  */
  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

  /* AARCH64_CHECK_BUILTIN_MODE is redefined before each group of cases
     below to test the element-mode/lane-count combination relevant to
     that group; this initial definition only keeps
     AARCH64_FIND_FRINT_VARIANT well-formed.  The latter selects the
     V2DF, V4SF or V2SF variant of builtin N according to whichever
     check is currently active.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
    ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2df] \
    : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
	? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v4sf] \
	: (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2sf] \
	   : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
	  /* Float -> float operations: input and output must share the
	     same float element mode and lane count.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  /* The FRINT variant selector also covers sqrt, which uses the
	     same float -> float mode pattern.  */
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
	  /* Int -> SImode-element int operations (CLZ).  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	case BUILT_IN_CLZ:
	  {
	    /* Only the V4SI variant is provided.  */
	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_clzv4si];
	    return NULL_TREE;
	  }
	  /* Float -> int conversions (lfloor/lceil/lround families):
	     same lane count, int output element, float input element.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    tree new_tree = NULL_TREE;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lfloorv2dfv2di];
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lfloorv4sfv4si];
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lfloorv2sfv2si];
	    return new_tree;
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_ICEILF:
	  {
	    tree new_tree = NULL_TREE;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lceilv2dfv2di];
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lceilv4sfv4si];
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lceilv2sfv2si];
	    return new_tree;
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    tree new_tree = NULL_TREE;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lroundv2dfv2di];
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lroundv4sfv4si];
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      new_tree =
		aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_lroundv2sfv2si];
	    return new_tree;
	  }

	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}

#undef VAR1
#define VAR1(T, N, MAP, A) \
  case AARCH64_SIMD_BUILTIN_##N##A:

/* Fold a call to the AdvSIMD builtin FNDECL with arguments ARGS at
   GENERIC level.  Return the folded tree, or NULL_TREE if no fold
   applies.  The BUILTIN_* iterator macros expand (via VAR1 above) to
   the case labels of every mode variant of the named builtin.  */
tree
aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
		      bool ignore ATTRIBUTE_UNUSED)
{
  int fcode = DECL_FUNCTION_CODE (fndecl);
  /* Result type of the builtin call.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  switch (fcode)
    {
      /* Vector absolute value folds directly to ABS_EXPR.  */
      BUILTIN_VALLDI (UNOP, abs, 2)
	return fold_build1 (ABS_EXPR, type, args[0]);
	break;
      /* Lane-wise comparisons fold to the corresponding tree codes.  */
      BUILTIN_VALLDI (BINOP, cmge, 0)
	return fold_build2 (GE_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VALLDI (BINOP, cmgt, 0)
	return fold_build2 (GT_EXPR, type, args[0], args[1]);
	break;
      BUILTIN_VALLDI (BINOP, cmeq, 0)
	return fold_build2 (EQ_EXPR, type, args[0], args[1]);
	break;
      /* CMTST (test bits set) is (a & b) != 0, lane-wise.  */
      BUILTIN_VSDQ_I_DI (BINOP, cmtst, 0)
	{
	  tree and_node = fold_build2 (BIT_AND_EXPR, type, args[0], args[1]);
	  tree vec_zero_node = build_zero_cst (type);
	  return fold_build2 (NE_EXPR, type, and_node, vec_zero_node);
	  break;
	}
      /* Signed int -> float conversions fold to FLOAT_EXPR.  */
      VAR1 (UNOP, floatv2si, 2, v2sf)
      VAR1 (UNOP, floatv4si, 2, v4sf)
      VAR1 (UNOP, floatv2di, 2, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
      default:
	break;
    }

  return NULL_TREE;
}

/* Try to fold the call in the GIMPLE statement at GSI into an
   equivalent assignment using an internal tree code (the across-lanes
   reduction builtins map to REDUC_*_EXPR).  Return true iff the
   statement was replaced.  */
bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;
  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  /* &error_mark_node serves as a safe non-NULL placeholder for
	     calls that happen to have no arguments.  */
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  switch (fcode)
	    {
	      /* Across-lanes sum reduction.  */
	      BUILTIN_VALL (UNOP, reduc_splus_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_PLUS_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      /* Across-lanes max/min reductions.  */
	      BUILTIN_VDQIF (UNOP, reduc_smax_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MAX_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MIN_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;

	    default:
	      break;
	    }
	}
    }

  if (new_stmt)
    {
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}

/* Undefine the mode-check, iterator and variant macros used above so
   that they do not leak into files which include this one.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT
#undef BUILTIN_DX
#undef BUILTIN_SDQ_I
#undef BUILTIN_SD_HSI
#undef BUILTIN_V2F
#undef BUILTIN_VALL
#undef BUILTIN_VB
#undef BUILTIN_VD
#undef BUILTIN_VDC
#undef BUILTIN_VDIC
#undef BUILTIN_VDN
#undef BUILTIN_VDQ
#undef BUILTIN_VDQF
#undef BUILTIN_VDQH
#undef BUILTIN_VDQHS
#undef BUILTIN_VDQIF
#undef BUILTIN_VDQM
#undef BUILTIN_VDQV
#undef BUILTIN_VDQ_BHSI
#undef BUILTIN_VDQ_I
#undef BUILTIN_VDW
#undef BUILTIN_VD_BHSI
#undef BUILTIN_VD_HSI
#undef BUILTIN_VD_RE
#undef BUILTIN_VQ
#undef BUILTIN_VQN
#undef BUILTIN_VQW
#undef BUILTIN_VQ_HSI
#undef BUILTIN_VQ_S
#undef BUILTIN_VSDQ_HSI
#undef BUILTIN_VSDQ_I
#undef BUILTIN_VSDQ_I_BHSI
#undef BUILTIN_VSDQ_I_DI
#undef BUILTIN_VSD_HSI
#undef BUILTIN_VSQN_HSDI
#undef BUILTIN_VSTRUCT
#undef CF0
#undef CF1
#undef CF2
#undef CF3
#undef CF4
#undef CF10
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11
