/* Builtins' description for AArch64 SIMD architecture.
   Copyright (C) 2011-2013 Free Software Foundation, Inc.
   Contributed by ARM Ltd.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3, or (at your option)
   any later version.

   GCC is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "expr.h"
#include "tm_p.h"
#include "recog.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "optabs.h"

enum aarch64_simd_builtin_type_mode
{
  T_V8QI,
  T_V4HI,
  T_V2SI,
  T_V2SF,
  T_DI,
  T_DF,
  T_V16QI,
  T_V8HI,
  T_V4SI,
  T_V4SF,
  T_V2DI,
  T_V2DF,
  T_TI,
  T_EI,
  T_OI,
  T_XI,
  T_SI,
  T_HI,
  T_QI,
  T_MAX
};

#define v8qi_UP T_V8QI
#define v4hi_UP T_V4HI
#define v2si_UP T_V2SI
#define v2sf_UP T_V2SF
#define di_UP T_DI
#define df_UP T_DF
#define v16qi_UP T_V16QI
#define v8hi_UP T_V8HI
#define v4si_UP T_V4SI
#define v4sf_UP T_V4SF
#define v2di_UP T_V2DI
#define v2df_UP T_V2DF
#define ti_UP T_TI
#define ei_UP T_EI
#define oi_UP T_OI
#define xi_UP T_XI
#define si_UP T_SI
#define hi_UP T_HI
#define qi_UP T_QI

#define UP(X) X##_UP

typedef enum
{
  AARCH64_SIMD_BINOP,
  AARCH64_SIMD_TERNOP,
  AARCH64_SIMD_QUADOP,
  AARCH64_SIMD_UNOP,
  AARCH64_SIMD_GETLANE,
  AARCH64_SIMD_SETLANE,
  AARCH64_SIMD_CREATE,
  AARCH64_SIMD_DUP,
  AARCH64_SIMD_DUPLANE,
  AARCH64_SIMD_COMBINE,
  AARCH64_SIMD_SPLIT,
  AARCH64_SIMD_LANEMUL,
  AARCH64_SIMD_LANEMULL,
  AARCH64_SIMD_LANEMULH,
  AARCH64_SIMD_LANEMAC,
  AARCH64_SIMD_SCALARMUL,
  AARCH64_SIMD_SCALARMULL,
  AARCH64_SIMD_SCALARMULH,
  AARCH64_SIMD_SCALARMAC,
  AARCH64_SIMD_CONVERT,
  AARCH64_SIMD_FIXCONV,
  AARCH64_SIMD_SELECT,
  AARCH64_SIMD_RESULTPAIR,
  AARCH64_SIMD_REINTERP,
  AARCH64_SIMD_VTBL,
  AARCH64_SIMD_VTBX,
  AARCH64_SIMD_LOAD1,
  AARCH64_SIMD_LOAD1LANE,
  AARCH64_SIMD_STORE1,
  AARCH64_SIMD_STORE1LANE,
  AARCH64_SIMD_LOADSTRUCT,
  AARCH64_SIMD_LOADSTRUCTLANE,
  AARCH64_SIMD_STORESTRUCT,
  AARCH64_SIMD_STORESTRUCTLANE,
  AARCH64_SIMD_LOGICBINOP,
  AARCH64_SIMD_SHIFTINSERT,
  AARCH64_SIMD_SHIFTIMM,
  AARCH64_SIMD_SHIFTACC
} aarch64_simd_itype;

typedef struct
{
  const char *name;
  const aarch64_simd_itype itype;
  enum aarch64_simd_builtin_type_mode mode;
  const enum insn_code code;
  unsigned int fcode;
} aarch64_simd_builtin_datum;

#define CF(N, X) CODE_FOR_aarch64_##N##X

#define VAR1(T, N, A) \
  {#N, AARCH64_SIMD_##T, UP (A), CF (N, A), 0},
#define VAR2(T, N, A, B) \
  VAR1 (T, N, A) \
  VAR1 (T, N, B)
#define VAR3(T, N, A, B, C) \
  VAR2 (T, N, A, B) \
  VAR1 (T, N, C)
#define VAR4(T, N, A, B, C, D) \
  VAR3 (T, N, A, B, C) \
  VAR1 (T, N, D)
#define VAR5(T, N, A, B, C, D, E) \
  VAR4 (T, N, A, B, C, D) \
  VAR1 (T, N, E)
#define VAR6(T, N, A, B, C, D, E, F) \
  VAR5 (T, N, A, B, C, D, E) \
  VAR1 (T, N, F)
#define VAR7(T, N, A, B, C, D, E, F, G) \
  VAR6 (T, N, A, B, C, D, E, F) \
  VAR1 (T, N, G)
#define VAR8(T, N, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, A, B, C, D, E, F, G) \
  VAR1 (T, N, H)
#define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, I)
#define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, J)
#define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, K)
#define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, L)

/* BUILTIN_<ITERATOR> macros should expand to cover the same range of
   modes as is given for each define_mode_iterator in
   config/aarch64/iterators.md.  */

#define BUILTIN_DX(T, N) \
  VAR2 (T, N, di, df)
#define BUILTIN_SDQ_I(T, N) \
  VAR4 (T, N, qi, hi, si, di)
#define BUILTIN_SD_HSI(T, N) \
  VAR2 (T, N, hi, si)
#define BUILTIN_V2F(T, N) \
  VAR2 (T, N, v2sf, v2df)
#define BUILTIN_VALL(T, N) \
  VAR10 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, v2sf, v4sf, v2df)
#define BUILTIN_VB(T, N) \
  VAR2 (T, N, v8qi, v16qi)
#define BUILTIN_VD(T, N) \
  VAR4 (T, N, v8qi, v4hi, v2si, v2sf)
#define BUILTIN_VDC(T, N) \
  VAR6 (T, N, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VDIC(T, N) \
  VAR3 (T, N, v8qi, v4hi, v2si)
#define BUILTIN_VDN(T, N) \
  VAR3 (T, N, v4hi, v2si, di)
#define BUILTIN_VDQ(T, N) \
  VAR7 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDQF(T, N) \
  VAR3 (T, N, v2sf, v4sf, v2df)
#define BUILTIN_VDQHS(T, N) \
  VAR4 (T, N, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQIF(T, N) \
  VAR9 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
#define BUILTIN_VDQM(T, N) \
  VAR6 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQV(T, N) \
  VAR5 (T, N, v8qi, v16qi, v4hi, v8hi, v4si)
#define BUILTIN_VDQ_BHSI(T, N) \
  VAR6 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQ_I(T, N) \
  VAR7 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDW(T, N) \
  VAR3 (T, N, v8qi, v4hi, v2si)
#define BUILTIN_VD_BHSI(T, N) \
  VAR3 (T, N, v8qi, v4hi, v2si)
#define BUILTIN_VD_HSI(T, N) \
  VAR2 (T, N, v4hi, v2si)
#define BUILTIN_VD_RE(T, N) \
  VAR6 (T, N, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VQ(T, N) \
  VAR6 (T, N, v16qi, v8hi, v4si, v2di, v4sf, v2df)
#define BUILTIN_VQN(T, N) \
  VAR3 (T, N, v8hi, v4si, v2di)
#define BUILTIN_VQW(T, N) \
  VAR3 (T, N, v16qi, v8hi, v4si)
#define BUILTIN_VQ_HSI(T, N) \
  VAR2 (T, N, v8hi, v4si)
#define BUILTIN_VQ_S(T, N) \
  VAR6 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VSDQ_HSI(T, N) \
  VAR6 (T, N, v4hi, v8hi, v2si, v4si, hi, si)
#define BUILTIN_VSDQ_I(T, N) \
  VAR11 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
#define BUILTIN_VSDQ_I_BHSI(T, N) \
  VAR10 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
#define BUILTIN_VSDQ_I_DI(T, N) \
  VAR8 (T, N, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
#define BUILTIN_VSD_HSI(T, N) \
  VAR4 (T, N, v4hi, v2si, hi, si)
#define BUILTIN_VSQN_HSDI(T, N) \
  VAR6 (T, N, v8hi, v4si, v2di, hi, si, di)
#define BUILTIN_VSTRUCT(T, N) \
  VAR3 (T, N, oi, ci, xi)

static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};
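
/* For illustration only: an entry in aarch64-simd-builtins.def such as
   BUILTIN_VDQF (UNOP, frintm) (a hypothetical example; see the .def file
   for the real list) would expand through VAR3/VAR1 above into three
   table entries of the form

     {"frintm", AARCH64_SIMD_UNOP, T_V2SF, CODE_FOR_aarch64_frintmv2sf, 0},
     {"frintm", AARCH64_SIMD_UNOP, T_V4SF, CODE_FOR_aarch64_frintmv4sf, 0},
     {"frintm", AARCH64_SIMD_UNOP, T_V2DF, CODE_FOR_aarch64_frintmv2df, 0},

   i.e. one aarch64_simd_builtin_datum per mode covered by the iterator.  */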

#undef VAR1
#define VAR1(T, N, A) \
  AARCH64_SIMD_BUILTIN_##N##A,

enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,
  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			      + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_BUILTIN_MAX
};
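
/* With VAR1 redefined as above, the same hypothetical .def entry from the
   sketch before this enum would contribute the enumerators
   AARCH64_SIMD_BUILTIN_frintmv2sf, ..._frintmv4sf and ..._frintmv2df, so
   each table entry gets a matching function code.  */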

#undef BUILTIN_DX
#undef BUILTIN_SDQ_I
#undef BUILTIN_SD_HSI
#undef BUILTIN_V2F
#undef BUILTIN_VALL
#undef BUILTIN_VB
#undef BUILTIN_VD
#undef BUILTIN_VDC
#undef BUILTIN_VDIC
#undef BUILTIN_VDN
#undef BUILTIN_VDQ
#undef BUILTIN_VDQF
#undef BUILTIN_VDQHS
#undef BUILTIN_VDQIF
#undef BUILTIN_VDQM
#undef BUILTIN_VDQV
#undef BUILTIN_VDQ_BHSI
#undef BUILTIN_VDQ_I
#undef BUILTIN_VDW
#undef BUILTIN_VD_BHSI
#undef BUILTIN_VD_HSI
#undef BUILTIN_VD_RE
#undef BUILTIN_VQ
#undef BUILTIN_VQN
#undef BUILTIN_VQW
#undef BUILTIN_VQ_HSI
#undef BUILTIN_VQ_S
#undef BUILTIN_VSDQ_HSI
#undef BUILTIN_VSDQ_I
#undef BUILTIN_VSDQ_I_BHSI
#undef BUILTIN_VSDQ_I_DI
#undef BUILTIN_VSD_HSI
#undef BUILTIN_VSQN_HSDI
#undef BUILTIN_VSTRUCT
#undef CF
#undef VAR1
#undef VAR2
#undef VAR3
#undef VAR4
#undef VAR5
#undef VAR6
#undef VAR7
#undef VAR8
#undef VAR9
#undef VAR10
#undef VAR11

static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];

#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

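/* Set up all the AdvSIMD builtins: register the element, vector and
   opaque struct types, then walk aarch64_simd_builtin_data, build a
   function type for each entry and register it with the front end.  */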
void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  /* Scalar type nodes.  */
  tree aarch64_simd_intQI_type_node;
  tree aarch64_simd_intHI_type_node;
  tree aarch64_simd_polyQI_type_node;
  tree aarch64_simd_polyHI_type_node;
  tree aarch64_simd_intSI_type_node;
  tree aarch64_simd_intDI_type_node;
  tree aarch64_simd_float_type_node;
  tree aarch64_simd_double_type_node;

  /* Pointer to scalar type nodes.  */
  tree intQI_pointer_node;
  tree intHI_pointer_node;
  tree intSI_pointer_node;
  tree intDI_pointer_node;
  tree float_pointer_node;
  tree double_pointer_node;

  /* Const scalar type nodes.  */
  tree const_intQI_node;
  tree const_intHI_node;
  tree const_intSI_node;
  tree const_intDI_node;
  tree const_float_node;
  tree const_double_node;

  /* Pointer to const scalar type nodes.  */
  tree const_intQI_pointer_node;
  tree const_intHI_pointer_node;
  tree const_intSI_pointer_node;
  tree const_intDI_pointer_node;
  tree const_float_pointer_node;
  tree const_double_pointer_node;

  /* Vector type nodes.  */
  tree V8QI_type_node;
  tree V4HI_type_node;
  tree V2SI_type_node;
  tree V2SF_type_node;
  tree V16QI_type_node;
  tree V8HI_type_node;
  tree V4SI_type_node;
  tree V4SF_type_node;
  tree V2DI_type_node;
  tree V2DF_type_node;

  /* Scalar unsigned type nodes.  */
  tree intUQI_type_node;
  tree intUHI_type_node;
  tree intUSI_type_node;
  tree intUDI_type_node;

  /* Opaque integer types for structures of vectors.  */
  tree intEI_type_node;
  tree intOI_type_node;
  tree intCI_type_node;
  tree intXI_type_node;

  /* Pointer to vector type nodes.  */
  tree V8QI_pointer_node;
  tree V4HI_pointer_node;
  tree V2SI_pointer_node;
  tree V2SF_pointer_node;
  tree V16QI_pointer_node;
  tree V8HI_pointer_node;
  tree V4SI_pointer_node;
  tree V4SF_pointer_node;
  tree V2DI_pointer_node;
  tree V2DF_pointer_node;

  /* Operations which return results as pairs.  */
  tree void_ftype_pv8qi_v8qi_v8qi;
  tree void_ftype_pv4hi_v4hi_v4hi;
  tree void_ftype_pv2si_v2si_v2si;
  tree void_ftype_pv2sf_v2sf_v2sf;
  tree void_ftype_pdi_di_di;
  tree void_ftype_pv16qi_v16qi_v16qi;
  tree void_ftype_pv8hi_v8hi_v8hi;
  tree void_ftype_pv4si_v4si_v4si;
  tree void_ftype_pv4sf_v4sf_v4sf;
  tree void_ftype_pv2di_v2di_v2di;
  tree void_ftype_pv2df_v2df_v2df;

  tree reinterp_ftype_dreg[NUM_DREG_TYPES][NUM_DREG_TYPES];
  tree reinterp_ftype_qreg[NUM_QREG_TYPES][NUM_QREG_TYPES];
  tree dreg_types[NUM_DREG_TYPES], qreg_types[NUM_QREG_TYPES];

  /* Create distinguished type nodes for AARCH64_SIMD vector element types,
     and pointers to values of such types, so we can detect them later.  */
  aarch64_simd_intQI_type_node =
    make_signed_type (GET_MODE_PRECISION (QImode));
  aarch64_simd_intHI_type_node =
    make_signed_type (GET_MODE_PRECISION (HImode));
  aarch64_simd_polyQI_type_node =
    make_signed_type (GET_MODE_PRECISION (QImode));
  aarch64_simd_polyHI_type_node =
    make_signed_type (GET_MODE_PRECISION (HImode));
  aarch64_simd_intSI_type_node =
    make_signed_type (GET_MODE_PRECISION (SImode));
  aarch64_simd_intDI_type_node =
    make_signed_type (GET_MODE_PRECISION (DImode));
  aarch64_simd_float_type_node = make_node (REAL_TYPE);
  aarch64_simd_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (aarch64_simd_float_type_node) = FLOAT_TYPE_SIZE;
  TYPE_PRECISION (aarch64_simd_double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (aarch64_simd_float_type_node);
  layout_type (aarch64_simd_double_type_node);

  /* Define typedefs which exactly correspond to the modes we are basing vector
     types on.  If you change these names you'll need to change
     the table used by aarch64_mangle_type too.  */
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (aarch64_simd_polyHI_type_node,
					     "__builtin_aarch64_simd_poly16");

  intQI_pointer_node = build_pointer_type (aarch64_simd_intQI_type_node);
  intHI_pointer_node = build_pointer_type (aarch64_simd_intHI_type_node);
  intSI_pointer_node = build_pointer_type (aarch64_simd_intSI_type_node);
  intDI_pointer_node = build_pointer_type (aarch64_simd_intDI_type_node);
  float_pointer_node = build_pointer_type (aarch64_simd_float_type_node);
  double_pointer_node = build_pointer_type (aarch64_simd_double_type_node);

  /* Next create constant-qualified versions of the above types.  */
  const_intQI_node = build_qualified_type (aarch64_simd_intQI_type_node,
					   TYPE_QUAL_CONST);
  const_intHI_node = build_qualified_type (aarch64_simd_intHI_type_node,
					   TYPE_QUAL_CONST);
  const_intSI_node = build_qualified_type (aarch64_simd_intSI_type_node,
					   TYPE_QUAL_CONST);
  const_intDI_node = build_qualified_type (aarch64_simd_intDI_type_node,
					   TYPE_QUAL_CONST);
  const_float_node = build_qualified_type (aarch64_simd_float_type_node,
					   TYPE_QUAL_CONST);
  const_double_node = build_qualified_type (aarch64_simd_double_type_node,
					    TYPE_QUAL_CONST);

  const_intQI_pointer_node = build_pointer_type (const_intQI_node);
  const_intHI_pointer_node = build_pointer_type (const_intHI_node);
  const_intSI_pointer_node = build_pointer_type (const_intSI_node);
  const_intDI_pointer_node = build_pointer_type (const_intDI_node);
  const_float_pointer_node = build_pointer_type (const_float_node);
  const_double_pointer_node = build_pointer_type (const_double_node);

  /* Now create vector types based on our AARCH64 SIMD element types.  */
  /* 64-bit vectors.  */
  V8QI_type_node =
    build_vector_type_for_mode (aarch64_simd_intQI_type_node, V8QImode);
  V4HI_type_node =
    build_vector_type_for_mode (aarch64_simd_intHI_type_node, V4HImode);
  V2SI_type_node =
    build_vector_type_for_mode (aarch64_simd_intSI_type_node, V2SImode);
  V2SF_type_node =
    build_vector_type_for_mode (aarch64_simd_float_type_node, V2SFmode);
  /* 128-bit vectors.  */
  V16QI_type_node =
    build_vector_type_for_mode (aarch64_simd_intQI_type_node, V16QImode);
  V8HI_type_node =
    build_vector_type_for_mode (aarch64_simd_intHI_type_node, V8HImode);
  V4SI_type_node =
    build_vector_type_for_mode (aarch64_simd_intSI_type_node, V4SImode);
  V4SF_type_node =
    build_vector_type_for_mode (aarch64_simd_float_type_node, V4SFmode);
  V2DI_type_node =
    build_vector_type_for_mode (aarch64_simd_intDI_type_node, V2DImode);
  V2DF_type_node =
    build_vector_type_for_mode (aarch64_simd_double_type_node, V2DFmode);

  /* Unsigned integer types for various mode sizes.  */
  intUQI_type_node = make_unsigned_type (GET_MODE_PRECISION (QImode));
  intUHI_type_node = make_unsigned_type (GET_MODE_PRECISION (HImode));
  intUSI_type_node = make_unsigned_type (GET_MODE_PRECISION (SImode));
  intUDI_type_node = make_unsigned_type (GET_MODE_PRECISION (DImode));

  (*lang_hooks.types.register_builtin_type) (intUQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (intUHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (intUSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (intUDI_type_node,
					     "__builtin_aarch64_simd_udi");

  /* Opaque integer types for structures of vectors.  */
  intEI_type_node = make_signed_type (GET_MODE_PRECISION (EImode));
  intOI_type_node = make_signed_type (GET_MODE_PRECISION (OImode));
  intCI_type_node = make_signed_type (GET_MODE_PRECISION (CImode));
  intXI_type_node = make_signed_type (GET_MODE_PRECISION (XImode));

  (*lang_hooks.types.register_builtin_type) (intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  (*lang_hooks.types.register_builtin_type) (intEI_type_node,
					     "__builtin_aarch64_simd_ei");
  (*lang_hooks.types.register_builtin_type) (intOI_type_node,
					     "__builtin_aarch64_simd_oi");
  (*lang_hooks.types.register_builtin_type) (intCI_type_node,
					     "__builtin_aarch64_simd_ci");
  (*lang_hooks.types.register_builtin_type) (intXI_type_node,
					     "__builtin_aarch64_simd_xi");

  /* Pointers to vector types.  */
  V8QI_pointer_node = build_pointer_type (V8QI_type_node);
  V4HI_pointer_node = build_pointer_type (V4HI_type_node);
  V2SI_pointer_node = build_pointer_type (V2SI_type_node);
  V2SF_pointer_node = build_pointer_type (V2SF_type_node);
  V16QI_pointer_node = build_pointer_type (V16QI_type_node);
  V8HI_pointer_node = build_pointer_type (V8HI_type_node);
  V4SI_pointer_node = build_pointer_type (V4SI_type_node);
  V4SF_pointer_node = build_pointer_type (V4SF_type_node);
  V2DI_pointer_node = build_pointer_type (V2DI_type_node);
  V2DF_pointer_node = build_pointer_type (V2DF_type_node);

  /* Operations which return results as pairs.  */
  void_ftype_pv8qi_v8qi_v8qi =
    build_function_type_list (void_type_node, V8QI_pointer_node,
			      V8QI_type_node, V8QI_type_node, NULL);
  void_ftype_pv4hi_v4hi_v4hi =
    build_function_type_list (void_type_node, V4HI_pointer_node,
			      V4HI_type_node, V4HI_type_node, NULL);
  void_ftype_pv2si_v2si_v2si =
    build_function_type_list (void_type_node, V2SI_pointer_node,
			      V2SI_type_node, V2SI_type_node, NULL);
  void_ftype_pv2sf_v2sf_v2sf =
    build_function_type_list (void_type_node, V2SF_pointer_node,
			      V2SF_type_node, V2SF_type_node, NULL);
  void_ftype_pdi_di_di =
    build_function_type_list (void_type_node, intDI_pointer_node,
			      aarch64_simd_intDI_type_node,
			      aarch64_simd_intDI_type_node, NULL);
  void_ftype_pv16qi_v16qi_v16qi =
    build_function_type_list (void_type_node, V16QI_pointer_node,
			      V16QI_type_node, V16QI_type_node, NULL);
  void_ftype_pv8hi_v8hi_v8hi =
    build_function_type_list (void_type_node, V8HI_pointer_node,
			      V8HI_type_node, V8HI_type_node, NULL);
  void_ftype_pv4si_v4si_v4si =
    build_function_type_list (void_type_node, V4SI_pointer_node,
			      V4SI_type_node, V4SI_type_node, NULL);
  void_ftype_pv4sf_v4sf_v4sf =
    build_function_type_list (void_type_node, V4SF_pointer_node,
			      V4SF_type_node, V4SF_type_node, NULL);
  void_ftype_pv2di_v2di_v2di =
    build_function_type_list (void_type_node, V2DI_pointer_node,
			      V2DI_type_node, V2DI_type_node, NULL);
  void_ftype_pv2df_v2df_v2df =
    build_function_type_list (void_type_node, V2DF_pointer_node,
			      V2DF_type_node, V2DF_type_node, NULL);

  dreg_types[0] = V8QI_type_node;
  dreg_types[1] = V4HI_type_node;
  dreg_types[2] = V2SI_type_node;
  dreg_types[3] = V2SF_type_node;
  dreg_types[4] = aarch64_simd_intDI_type_node;
  dreg_types[5] = aarch64_simd_double_type_node;

  qreg_types[0] = V16QI_type_node;
  qreg_types[1] = V8HI_type_node;
  qreg_types[2] = V4SI_type_node;
  qreg_types[3] = V4SF_type_node;
  qreg_types[4] = V2DI_type_node;
  qreg_types[5] = V2DF_type_node;

  /* If NUM_DREG_TYPES != NUM_QREG_TYPES, we will need separate nested loops
     for qreg and dreg reinterp inits.  */
  for (i = 0; i < NUM_DREG_TYPES; i++)
    {
      int j;
      for (j = 0; j < NUM_DREG_TYPES; j++)
	{
	  reinterp_ftype_dreg[i][j]
	    = build_function_type_list (dreg_types[i], dreg_types[j], NULL);
	  reinterp_ftype_qreg[i][j]
	    = build_function_type_list (qreg_types[i], qreg_types[j], NULL);
	}
    }

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      const char *const modenames[] =
	{
	  "v8qi", "v4hi", "v2si", "v2sf", "di", "df",
	  "v16qi", "v8hi", "v4si", "v4sf", "v2di", "v2df",
	  "ti", "ei", "oi", "xi", "si", "hi", "qi"
	};
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;
      int is_load = 0;
      int is_store = 0;

      gcc_assert (ARRAY_SIZE (modenames) == T_MAX);

      d->fcode = fcode;

      switch (d->itype)
	{
	case AARCH64_SIMD_LOAD1:
	case AARCH64_SIMD_LOAD1LANE:
	case AARCH64_SIMD_LOADSTRUCT:
	case AARCH64_SIMD_LOADSTRUCTLANE:
	  is_load = 1;
	  /* Fall through.  */
	case AARCH64_SIMD_STORE1:
	case AARCH64_SIMD_STORE1LANE:
	case AARCH64_SIMD_STORESTRUCT:
	case AARCH64_SIMD_STORESTRUCTLANE:
	  if (!is_load)
	    is_store = 1;
	  /* Fall through.  */
	case AARCH64_SIMD_UNOP:
	case AARCH64_SIMD_BINOP:
	case AARCH64_SIMD_TERNOP:
	case AARCH64_SIMD_QUADOP:
	case AARCH64_SIMD_COMBINE:
	case AARCH64_SIMD_CONVERT:
	case AARCH64_SIMD_CREATE:
	case AARCH64_SIMD_DUP:
	case AARCH64_SIMD_DUPLANE:
	case AARCH64_SIMD_FIXCONV:
	case AARCH64_SIMD_GETLANE:
	case AARCH64_SIMD_LANEMAC:
	case AARCH64_SIMD_LANEMUL:
	case AARCH64_SIMD_LANEMULH:
	case AARCH64_SIMD_LANEMULL:
	case AARCH64_SIMD_LOGICBINOP:
	case AARCH64_SIMD_SCALARMAC:
	case AARCH64_SIMD_SCALARMUL:
	case AARCH64_SIMD_SCALARMULH:
	case AARCH64_SIMD_SCALARMULL:
	case AARCH64_SIMD_SELECT:
	case AARCH64_SIMD_SETLANE:
	case AARCH64_SIMD_SHIFTACC:
	case AARCH64_SIMD_SHIFTIMM:
	case AARCH64_SIMD_SHIFTINSERT:
	case AARCH64_SIMD_SPLIT:
	case AARCH64_SIMD_VTBL:
	case AARCH64_SIMD_VTBX:
	  {
	    int k;
	    tree return_type = void_type_node, args = void_list_node;
	    tree eltype;
	    /* Build a function type directly from the insn_data for this
	       builtin.  The build_function_type () function takes care of
	       removing duplicates for us.  */
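
	    /* For example (a hypothetical binop): if the instruction
	       pattern has operands (0) V8QI dest, (1) V8QI src1 and
	       (2) V8QI src2, the loop below walks k = 2, 1, 0 and
	       produces the type V8QI (V8QI, V8QI), with operand 0
	       becoming the return type and the rest the argument list.  */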

	    for (k = insn_data[d->code].n_operands - 1; k >= 0; k--)
	      {
		/* Skip an internal operand for vget_{low, high}.  */
		if (k == 2 && d->itype == AARCH64_SIMD_SPLIT)
		  continue;

		if (is_load && k == 1)
		  {
		    /* AdvSIMD load patterns always have the memory operand
		       (a DImode pointer) in the operand 1 position.  We
		       want a const pointer to the element type in that
		       position.  */
		    gcc_assert (insn_data[d->code].operand[k].mode == DImode);

		    switch (d->mode)
		      {
		      case T_V8QI:
		      case T_V16QI:
			eltype = const_intQI_pointer_node;
			break;

		      case T_V4HI:
		      case T_V8HI:
			eltype = const_intHI_pointer_node;
			break;

		      case T_V2SI:
		      case T_V4SI:
			eltype = const_intSI_pointer_node;
			break;

		      case T_V2SF:
		      case T_V4SF:
			eltype = const_float_pointer_node;
			break;

		      case T_DI:
		      case T_V2DI:
			eltype = const_intDI_pointer_node;
			break;

		      case T_DF:
		      case T_V2DF:
			eltype = const_double_pointer_node;
			break;

		      default:
			gcc_unreachable ();
		      }
		  }
		else if (is_store && k == 0)
		  {
		    /* Similarly, AdvSIMD store patterns use operand 0 as
		       the memory location to store to (a DImode pointer).
		       Use a pointer to the element type of the store in
		       that position.  */
		    gcc_assert (insn_data[d->code].operand[k].mode == DImode);

		    switch (d->mode)
		      {
		      case T_V8QI:
		      case T_V16QI:
			eltype = intQI_pointer_node;
			break;

		      case T_V4HI:
		      case T_V8HI:
			eltype = intHI_pointer_node;
			break;

		      case T_V2SI:
		      case T_V4SI:
			eltype = intSI_pointer_node;
			break;

		      case T_V2SF:
		      case T_V4SF:
			eltype = float_pointer_node;
			break;

		      case T_DI:
		      case T_V2DI:
			eltype = intDI_pointer_node;
			break;

		      case T_DF:
		      case T_V2DF:
			eltype = double_pointer_node;
			break;

		      default:
			gcc_unreachable ();
		      }
		  }
		else
		  {
		    switch (insn_data[d->code].operand[k].mode)
		      {
		      case VOIDmode:
			eltype = void_type_node;
			break;
		      /* Scalars.  */
		      case QImode:
			eltype = aarch64_simd_intQI_type_node;
			break;
		      case HImode:
			eltype = aarch64_simd_intHI_type_node;
			break;
		      case SImode:
			eltype = aarch64_simd_intSI_type_node;
			break;
		      case SFmode:
			eltype = aarch64_simd_float_type_node;
			break;
		      case DFmode:
			eltype = aarch64_simd_double_type_node;
			break;
		      case DImode:
			eltype = aarch64_simd_intDI_type_node;
			break;
		      case TImode:
			eltype = intTI_type_node;
			break;
		      case EImode:
			eltype = intEI_type_node;
			break;
		      case OImode:
			eltype = intOI_type_node;
			break;
		      case CImode:
			eltype = intCI_type_node;
			break;
		      case XImode:
			eltype = intXI_type_node;
			break;
		      /* 64-bit vectors.  */
		      case V8QImode:
			eltype = V8QI_type_node;
			break;
		      case V4HImode:
			eltype = V4HI_type_node;
			break;
		      case V2SImode:
			eltype = V2SI_type_node;
			break;
		      case V2SFmode:
			eltype = V2SF_type_node;
			break;
		      /* 128-bit vectors.  */
		      case V16QImode:
			eltype = V16QI_type_node;
			break;
		      case V8HImode:
			eltype = V8HI_type_node;
			break;
		      case V4SImode:
			eltype = V4SI_type_node;
			break;
		      case V4SFmode:
			eltype = V4SF_type_node;
			break;
		      case V2DImode:
			eltype = V2DI_type_node;
			break;
		      case V2DFmode:
			eltype = V2DF_type_node;
			break;
		      default:
			gcc_unreachable ();
		      }
		  }

		if (k == 0 && !is_store)
		  return_type = eltype;
		else
		  args = tree_cons (NULL_TREE, eltype, args);
	      }
	    ftype = build_function_type (return_type, args);
	  }
	  break;

	case AARCH64_SIMD_RESULTPAIR:
	  {
	    switch (insn_data[d->code].operand[1].mode)
	      {
	      case V8QImode:
		ftype = void_ftype_pv8qi_v8qi_v8qi;
		break;
	      case V4HImode:
		ftype = void_ftype_pv4hi_v4hi_v4hi;
		break;
	      case V2SImode:
		ftype = void_ftype_pv2si_v2si_v2si;
		break;
	      case V2SFmode:
		ftype = void_ftype_pv2sf_v2sf_v2sf;
		break;
	      case DImode:
		ftype = void_ftype_pdi_di_di;
		break;
	      case V16QImode:
		ftype = void_ftype_pv16qi_v16qi_v16qi;
		break;
	      case V8HImode:
		ftype = void_ftype_pv8hi_v8hi_v8hi;
		break;
	      case V4SImode:
		ftype = void_ftype_pv4si_v4si_v4si;
		break;
	      case V4SFmode:
		ftype = void_ftype_pv4sf_v4sf_v4sf;
		break;
	      case V2DImode:
		ftype = void_ftype_pv2di_v2di_v2di;
		break;
	      case V2DFmode:
		ftype = void_ftype_pv2df_v2df_v2df;
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	  break;

	case AARCH64_SIMD_REINTERP:
	  {
	    /* We iterate over 6 doubleword types, then 6 quadword
	       types.  */
	    int rhs_d = d->mode % NUM_DREG_TYPES;
	    int rhs_q = (d->mode - NUM_DREG_TYPES) % NUM_QREG_TYPES;
	    switch (insn_data[d->code].operand[0].mode)
	      {
	      case V8QImode:
		ftype = reinterp_ftype_dreg[0][rhs_d];
		break;
	      case V4HImode:
		ftype = reinterp_ftype_dreg[1][rhs_d];
		break;
	      case V2SImode:
		ftype = reinterp_ftype_dreg[2][rhs_d];
		break;
	      case V2SFmode:
		ftype = reinterp_ftype_dreg[3][rhs_d];
		break;
	      case DImode:
		ftype = reinterp_ftype_dreg[4][rhs_d];
		break;
	      case DFmode:
		ftype = reinterp_ftype_dreg[5][rhs_d];
		break;
	      case V16QImode:
		ftype = reinterp_ftype_qreg[0][rhs_q];
		break;
	      case V8HImode:
		ftype = reinterp_ftype_qreg[1][rhs_q];
		break;
	      case V4SImode:
		ftype = reinterp_ftype_qreg[2][rhs_q];
		break;
	      case V4SFmode:
		ftype = reinterp_ftype_qreg[3][rhs_q];
		break;
	      case V2DImode:
		ftype = reinterp_ftype_qreg[4][rhs_q];
		break;
	      case V2DFmode:
		ftype = reinterp_ftype_qreg[5][rhs_q];
		break;
	      default:
		gcc_unreachable ();
	      }
	  }
	  break;

	default:
	  gcc_unreachable ();
	}
      gcc_assert (ftype != NULL);

      snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s%s",
		d->name, modenames[d->mode]);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
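
/* Usage sketch (illustrative only; the exact set of builtins depends on
   aarch64-simd-builtins.def): assuming an frintm UNOP entry for V2SF is
   registered above, it becomes callable from C roughly as

     typedef float vf2 __attribute__ ((vector_size (8)));

     vf2
     round_down (vf2 x)
     {
       return __builtin_aarch64_frintmv2sf (x);
     }

   The arm_neon.h intrinsics are thin wrappers around builtins named in
   this "__builtin_aarch64_<name><mode>" style.  */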
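/* Register all the target builtins; currently these are just the AdvSIMD
   builtins, and only when SIMD is enabled.  */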
void
aarch64_init_builtins (void)
{
  if (TARGET_SIMD)
    aarch64_init_simd_builtins ();
}
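/* Return the function decl registered for builtin code CODE, or
   error_mark_node if CODE is not a valid builtin code.  */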
tree
aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= AARCH64_BUILTIN_MAX)
    return error_mark_node;

  return aarch64_builtin_decls[code];
}

typedef enum
{
  SIMD_ARG_COPY_TO_REG,
  SIMD_ARG_CONSTANT,
  SIMD_ARG_STOP
} builtin_simd_arg;

#define SIMD_MAX_BUILTIN_ARGS 5

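/* Expand a call EXP to an AdvSIMD builtin.  The variadic tail classifies
   each call argument as SIMD_ARG_COPY_TO_REG or SIMD_ARG_CONSTANT and is
   terminated by SIMD_ARG_STOP.  Emit insn ICODE on the expanded operands
   and return TARGET (the result register when HAVE_RETVAL is set), or 0
   if no insn pattern could be generated.  */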
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, ...)
{
  va_list ap;
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  enum machine_mode tmode = insn_data[icode].operand[0].mode;
  enum machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  va_start (ap, exp);

  for (;;)
    {
      builtin_simd_arg thisarg = (builtin_simd_arg) va_arg (ap, int);

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      /* gcc_assert (GET_MODE (op[argc]) == mode[argc]);  */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_CONSTANT:
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
			  "expected %<const int%>", argc + 1);
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  va_end (ap);

  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return 0;

  emit_insn (pat);

  return target;
}

/* Expand an AArch64 AdvSIMD builtin (intrinsic).  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  aarch64_simd_itype itype = d->itype;
  enum insn_code icode = d->code;

  switch (itype)
    {
    case AARCH64_SIMD_UNOP:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_STOP);

    case AARCH64_SIMD_BINOP:
      {
	rtx arg2 = expand_normal (CALL_EXPR_ARG (exp, 1));
	/* Handle constants only if the predicate allows it.  */
	bool op1_const_int_p =
	  (CONST_INT_P (arg2)
	   && (*insn_data[icode].operand[2].predicate)
		(arg2, insn_data[icode].operand[2].mode));
	return aarch64_simd_expand_args
	  (target, icode, 1, exp,
	   SIMD_ARG_COPY_TO_REG,
	   op1_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG,
	   SIMD_ARG_STOP);
      }

    case AARCH64_SIMD_TERNOP:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_STOP);

    case AARCH64_SIMD_QUADOP:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_STOP);

    case AARCH64_SIMD_LOAD1:
    case AARCH64_SIMD_LOADSTRUCT:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG, SIMD_ARG_STOP);

    case AARCH64_SIMD_STORESTRUCT:
      return aarch64_simd_expand_args (target, icode, 0, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG, SIMD_ARG_STOP);

    case AARCH64_SIMD_REINTERP:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG, SIMD_ARG_STOP);

    case AARCH64_SIMD_CREATE:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG, SIMD_ARG_STOP);

    case AARCH64_SIMD_COMBINE:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG, SIMD_ARG_STOP);

    case AARCH64_SIMD_GETLANE:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_CONSTANT,
				       SIMD_ARG_STOP);

    case AARCH64_SIMD_SETLANE:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_CONSTANT,
				       SIMD_ARG_STOP);

    case AARCH64_SIMD_SHIFTIMM:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_CONSTANT,
				       SIMD_ARG_STOP);

    case AARCH64_SIMD_SHIFTACC:
    case AARCH64_SIMD_SHIFTINSERT:
      return aarch64_simd_expand_args (target, icode, 1, exp,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_COPY_TO_REG,
				       SIMD_ARG_CONSTANT,
				       SIMD_ARG_STOP);

    default:
      gcc_unreachable ();
    }
}

/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient.  */
rtx
aarch64_expand_builtin (tree exp,
			rtx target,
			rtx subtarget ATTRIBUTE_UNUSED,
			enum machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  int fcode = DECL_FUNCTION_CODE (fndecl);

  if (fcode >= AARCH64_SIMD_BUILTIN_BASE)
    return aarch64_simd_expand_builtin (fcode, exp, target);

  return NULL_RTX;
}

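/* Return an AdvSIMD builtin decl that implements the scalar math function
   FNDECL element-wise on vectors of type TYPE_IN producing TYPE_OUT, or
   NULL_TREE if there is none.  For example, BUILT_IN_FLOOR vectorized
   over two doubles maps to the frintm V2DF builtin registered above.  */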
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  enum machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
     ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2df] \
     : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
	  ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v4sf] \
	  : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	       ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2sf] \
	       : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (frintm);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (frintp);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (frintz);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (frinta);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (frinti);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	  return AARCH64_FIND_FRINT_VARIANT (fcvtms);
	case BUILT_IN_LCEIL:
	  return AARCH64_FIND_FRINT_VARIANT (fcvtps);
	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}
#undef AARCH64_CHECK_BUILTIN_MODE
#undef AARCH64_FIND_FRINT_VARIANT