1 /* Builtins' description for AArch64 SIMD architecture.
2 Copyright (C) 2011-2013 Free Software Foundation, Inc.
3 Contributed by ARM Ltd.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "stor-layout.h"
28 #include "stringpool.h"
33 #include "langhooks.h"
34 #include "diagnostic-core.h"
37 #include "gimple-iterator.h"
39 enum aarch64_simd_builtin_type_mode
/* Map a mode suffix (as used in builtin names) to its T_* entry in the
   aarch64_simd_builtin_type_mode enumeration.  These are consumed via
   the UP() paste macro when building the builtin data table.  */
#define v8qi_UP  T_V8QI
#define v4hi_UP  T_V4HI
#define v2si_UP  T_V2SI
#define v2sf_UP  T_V2SF
#define v16qi_UP T_V16QI
#define v8hi_UP  T_V8HI
#define v4si_UP  T_V4SI
#define v4sf_UP  T_V4SF
#define v2di_UP  T_V2DI
#define v2df_UP  T_V2DF
100 AARCH64_SIMD_LANEMUL
,
101 AARCH64_SIMD_LANEMULL
,
102 AARCH64_SIMD_LANEMULH
,
103 AARCH64_SIMD_LANEMAC
,
104 AARCH64_SIMD_SCALARMUL
,
105 AARCH64_SIMD_SCALARMULL
,
106 AARCH64_SIMD_SCALARMULH
,
107 AARCH64_SIMD_SCALARMAC
,
108 AARCH64_SIMD_CONVERT
,
109 AARCH64_SIMD_FIXCONV
,
111 AARCH64_SIMD_RESULTPAIR
,
112 AARCH64_SIMD_REINTERP
,
116 AARCH64_SIMD_LOAD1LANE
,
118 AARCH64_SIMD_STORE1LANE
,
119 AARCH64_SIMD_LOADSTRUCT
,
120 AARCH64_SIMD_LOADSTRUCTLANE
,
121 AARCH64_SIMD_STORESTRUCT
,
122 AARCH64_SIMD_STORESTRUCTLANE
,
123 AARCH64_SIMD_LOGICBINOP
,
124 AARCH64_SIMD_SHIFTINSERT
,
125 AARCH64_SIMD_SHIFTIMM
,
126 AARCH64_SIMD_SHIFTACC
127 } aarch64_simd_itype
;
132 const aarch64_simd_itype itype
;
133 enum aarch64_simd_builtin_type_mode mode
;
134 const enum insn_code code
;
136 } aarch64_simd_builtin_datum
;
/* CF<MAP> (N, X) builds the insn_code enumerator for builtin N with mode
   suffix X.  MAP selects which naming scheme the corresponding pattern in
   the machine description uses: CF0 prefixes "aarch64_", CF1-CF4 append a
   trailing operand-count digit, CF10 uses the bare name.  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X
/* VAR<n> (T, N, MAP, <n mode suffixes>) expands to <n> initializer rows
   for the builtin data table, one per mode, by recursively peeling the
   last mode off the argument list.  T is the itype tag (pasted onto
   AARCH64_SIMD_), N the builtin base name, MAP the CF<MAP> insn_code
   naming scheme.  */
#define VAR1(T, N, MAP, A) \
  {#N, AARCH64_SIMD_##T, UP (A), CF##MAP (N, A), 0},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
/* BUILTIN_<ITERATOR> macros should expand to cover the same range of
   modes as is given for each define_mode_iterator in
   config/aarch64/iterators.md.  Each one forwards to the VAR<n> macro
   matching the iterator's mode count.  */
#define BUILTIN_DX(T, N, MAP) \
  VAR2 (T, N, MAP, di, df)
#define BUILTIN_GPF(T, N, MAP) \
  VAR2 (T, N, MAP, sf, df)
#define BUILTIN_SDQ_I(T, N, MAP) \
  VAR4 (T, N, MAP, qi, hi, si, di)
#define BUILTIN_SD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, hi, si)
#define BUILTIN_V2F(T, N, MAP) \
  VAR2 (T, N, MAP, v2sf, v2df)
#define BUILTIN_VALL(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df)
#define BUILTIN_VALLDI(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, \
	 v4si, v2di, v2sf, v4sf, v2df, di)
#define BUILTIN_VB(T, N, MAP) \
  VAR2 (T, N, MAP, v8qi, v16qi)
#define BUILTIN_VD(T, N, MAP) \
  VAR4 (T, N, MAP, v8qi, v4hi, v2si, v2sf)
#define BUILTIN_VDC(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VDIC(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VDN(T, N, MAP) \
  VAR3 (T, N, MAP, v4hi, v2si, di)
#define BUILTIN_VDQ(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDQF(T, N, MAP) \
  VAR3 (T, N, MAP, v2sf, v4sf, v2df)
#define BUILTIN_VDQH(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v8hi)
#define BUILTIN_VDQHS(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQIF(T, N, MAP) \
  VAR9 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2sf, v4sf, v2df)
#define BUILTIN_VDQM(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQV(T, N, MAP) \
  VAR5 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v4si)
#define BUILTIN_VDQ_BHSI(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VDQ_I(T, N, MAP) \
  VAR7 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di)
#define BUILTIN_VDW(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_BHSI(T, N, MAP) \
  VAR3 (T, N, MAP, v8qi, v4hi, v2si)
#define BUILTIN_VD_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v4hi, v2si)
#define BUILTIN_VD_RE(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v4hi, v2si, v2sf, di, df)
#define BUILTIN_VQ(T, N, MAP) \
  VAR6 (T, N, MAP, v16qi, v8hi, v4si, v2di, v4sf, v2df)
#define BUILTIN_VQN(T, N, MAP) \
  VAR3 (T, N, MAP, v8hi, v4si, v2di)
#define BUILTIN_VQW(T, N, MAP) \
  VAR3 (T, N, MAP, v16qi, v8hi, v4si)
#define BUILTIN_VQ_HSI(T, N, MAP) \
  VAR2 (T, N, MAP, v8hi, v4si)
#define BUILTIN_VQ_S(T, N, MAP) \
  VAR6 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si)
#define BUILTIN_VSDQ_HSI(T, N, MAP) \
  VAR6 (T, N, MAP, v4hi, v8hi, v2si, v4si, hi, si)
#define BUILTIN_VSDQ_I(T, N, MAP) \
  VAR11 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si, di)
#define BUILTIN_VSDQ_I_BHSI(T, N, MAP) \
  VAR10 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, qi, hi, si)
#define BUILTIN_VSDQ_I_DI(T, N, MAP) \
  VAR8 (T, N, MAP, v8qi, v16qi, v4hi, v8hi, v2si, v4si, v2di, di)
#define BUILTIN_VSD_HSI(T, N, MAP) \
  VAR4 (T, N, MAP, v4hi, v2si, hi, si)
#define BUILTIN_VSQN_HSDI(T, N, MAP) \
  VAR6 (T, N, MAP, v8hi, v4si, v2di, hi, si, di)
#define BUILTIN_VSTRUCT(T, N, MAP) \
  VAR3 (T, N, MAP, oi, ci, xi)
262 static aarch64_simd_builtin_datum aarch64_simd_builtin_data
[] = {
263 #include "aarch64-simd-builtins.def"
267 #define VAR1(T, N, MAP, A) \
268 AARCH64_SIMD_BUILTIN_##N##A,
270 enum aarch64_builtins
273 AARCH64_SIMD_BUILTIN_BASE
,
274 #include "aarch64-simd-builtins.def"
275 AARCH64_SIMD_BUILTIN_MAX
= AARCH64_SIMD_BUILTIN_BASE
276 + ARRAY_SIZE (aarch64_simd_builtin_data
),
280 static GTY(()) tree aarch64_builtin_decls
[AARCH64_BUILTIN_MAX
];
282 #define NUM_DREG_TYPES 6
283 #define NUM_QREG_TYPES 6
286 aarch64_init_simd_builtins (void)
288 unsigned int i
, fcode
= AARCH64_SIMD_BUILTIN_BASE
+ 1;
290 /* Scalar type nodes. */
291 tree aarch64_simd_intQI_type_node
;
292 tree aarch64_simd_intHI_type_node
;
293 tree aarch64_simd_polyQI_type_node
;
294 tree aarch64_simd_polyHI_type_node
;
295 tree aarch64_simd_intSI_type_node
;
296 tree aarch64_simd_intDI_type_node
;
297 tree aarch64_simd_float_type_node
;
298 tree aarch64_simd_double_type_node
;
300 /* Pointer to scalar type nodes. */
301 tree intQI_pointer_node
;
302 tree intHI_pointer_node
;
303 tree intSI_pointer_node
;
304 tree intDI_pointer_node
;
305 tree float_pointer_node
;
306 tree double_pointer_node
;
308 /* Const scalar type nodes. */
309 tree const_intQI_node
;
310 tree const_intHI_node
;
311 tree const_intSI_node
;
312 tree const_intDI_node
;
313 tree const_float_node
;
314 tree const_double_node
;
316 /* Pointer to const scalar type nodes. */
317 tree const_intQI_pointer_node
;
318 tree const_intHI_pointer_node
;
319 tree const_intSI_pointer_node
;
320 tree const_intDI_pointer_node
;
321 tree const_float_pointer_node
;
322 tree const_double_pointer_node
;
324 /* Vector type nodes. */
329 tree V16QI_type_node
;
336 /* Scalar unsigned type nodes. */
337 tree intUQI_type_node
;
338 tree intUHI_type_node
;
339 tree intUSI_type_node
;
340 tree intUDI_type_node
;
342 /* Opaque integer types for structures of vectors. */
343 tree intEI_type_node
;
344 tree intOI_type_node
;
345 tree intCI_type_node
;
346 tree intXI_type_node
;
348 /* Pointer to vector type nodes. */
349 tree V8QI_pointer_node
;
350 tree V4HI_pointer_node
;
351 tree V2SI_pointer_node
;
352 tree V2SF_pointer_node
;
353 tree V16QI_pointer_node
;
354 tree V8HI_pointer_node
;
355 tree V4SI_pointer_node
;
356 tree V4SF_pointer_node
;
357 tree V2DI_pointer_node
;
358 tree V2DF_pointer_node
;
360 /* Operations which return results as pairs. */
361 tree void_ftype_pv8qi_v8qi_v8qi
;
362 tree void_ftype_pv4hi_v4hi_v4hi
;
363 tree void_ftype_pv2si_v2si_v2si
;
364 tree void_ftype_pv2sf_v2sf_v2sf
;
365 tree void_ftype_pdi_di_di
;
366 tree void_ftype_pv16qi_v16qi_v16qi
;
367 tree void_ftype_pv8hi_v8hi_v8hi
;
368 tree void_ftype_pv4si_v4si_v4si
;
369 tree void_ftype_pv4sf_v4sf_v4sf
;
370 tree void_ftype_pv2di_v2di_v2di
;
371 tree void_ftype_pv2df_v2df_v2df
;
373 tree reinterp_ftype_dreg
[NUM_DREG_TYPES
][NUM_DREG_TYPES
];
374 tree reinterp_ftype_qreg
[NUM_QREG_TYPES
][NUM_QREG_TYPES
];
375 tree dreg_types
[NUM_DREG_TYPES
], qreg_types
[NUM_QREG_TYPES
];
377 /* Create distinguished type nodes for AARCH64_SIMD vector element types,
378 and pointers to values of such types, so we can detect them later. */
379 aarch64_simd_intQI_type_node
=
380 make_signed_type (GET_MODE_PRECISION (QImode
));
381 aarch64_simd_intHI_type_node
=
382 make_signed_type (GET_MODE_PRECISION (HImode
));
383 aarch64_simd_polyQI_type_node
=
384 make_signed_type (GET_MODE_PRECISION (QImode
));
385 aarch64_simd_polyHI_type_node
=
386 make_signed_type (GET_MODE_PRECISION (HImode
));
387 aarch64_simd_intSI_type_node
=
388 make_signed_type (GET_MODE_PRECISION (SImode
));
389 aarch64_simd_intDI_type_node
=
390 make_signed_type (GET_MODE_PRECISION (DImode
));
391 aarch64_simd_float_type_node
= make_node (REAL_TYPE
);
392 aarch64_simd_double_type_node
= make_node (REAL_TYPE
);
393 TYPE_PRECISION (aarch64_simd_float_type_node
) = FLOAT_TYPE_SIZE
;
394 TYPE_PRECISION (aarch64_simd_double_type_node
) = DOUBLE_TYPE_SIZE
;
395 layout_type (aarch64_simd_float_type_node
);
396 layout_type (aarch64_simd_double_type_node
);
398 /* Define typedefs which exactly correspond to the modes we are basing vector
399 types on. If you change these names you'll need to change
400 the table used by aarch64_mangle_type too. */
401 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intQI_type_node
,
402 "__builtin_aarch64_simd_qi");
403 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intHI_type_node
,
404 "__builtin_aarch64_simd_hi");
405 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intSI_type_node
,
406 "__builtin_aarch64_simd_si");
407 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_float_type_node
,
408 "__builtin_aarch64_simd_sf");
409 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_intDI_type_node
,
410 "__builtin_aarch64_simd_di");
411 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_double_type_node
,
412 "__builtin_aarch64_simd_df");
413 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_polyQI_type_node
,
414 "__builtin_aarch64_simd_poly8");
415 (*lang_hooks
.types
.register_builtin_type
) (aarch64_simd_polyHI_type_node
,
416 "__builtin_aarch64_simd_poly16");
418 intQI_pointer_node
= build_pointer_type (aarch64_simd_intQI_type_node
);
419 intHI_pointer_node
= build_pointer_type (aarch64_simd_intHI_type_node
);
420 intSI_pointer_node
= build_pointer_type (aarch64_simd_intSI_type_node
);
421 intDI_pointer_node
= build_pointer_type (aarch64_simd_intDI_type_node
);
422 float_pointer_node
= build_pointer_type (aarch64_simd_float_type_node
);
423 double_pointer_node
= build_pointer_type (aarch64_simd_double_type_node
);
425 /* Next create constant-qualified versions of the above types. */
426 const_intQI_node
= build_qualified_type (aarch64_simd_intQI_type_node
,
428 const_intHI_node
= build_qualified_type (aarch64_simd_intHI_type_node
,
430 const_intSI_node
= build_qualified_type (aarch64_simd_intSI_type_node
,
432 const_intDI_node
= build_qualified_type (aarch64_simd_intDI_type_node
,
434 const_float_node
= build_qualified_type (aarch64_simd_float_type_node
,
436 const_double_node
= build_qualified_type (aarch64_simd_double_type_node
,
439 const_intQI_pointer_node
= build_pointer_type (const_intQI_node
);
440 const_intHI_pointer_node
= build_pointer_type (const_intHI_node
);
441 const_intSI_pointer_node
= build_pointer_type (const_intSI_node
);
442 const_intDI_pointer_node
= build_pointer_type (const_intDI_node
);
443 const_float_pointer_node
= build_pointer_type (const_float_node
);
444 const_double_pointer_node
= build_pointer_type (const_double_node
);
446 /* Now create vector types based on our AARCH64 SIMD element types. */
447 /* 64-bit vectors. */
449 build_vector_type_for_mode (aarch64_simd_intQI_type_node
, V8QImode
);
451 build_vector_type_for_mode (aarch64_simd_intHI_type_node
, V4HImode
);
453 build_vector_type_for_mode (aarch64_simd_intSI_type_node
, V2SImode
);
455 build_vector_type_for_mode (aarch64_simd_float_type_node
, V2SFmode
);
456 /* 128-bit vectors. */
458 build_vector_type_for_mode (aarch64_simd_intQI_type_node
, V16QImode
);
460 build_vector_type_for_mode (aarch64_simd_intHI_type_node
, V8HImode
);
462 build_vector_type_for_mode (aarch64_simd_intSI_type_node
, V4SImode
);
464 build_vector_type_for_mode (aarch64_simd_float_type_node
, V4SFmode
);
466 build_vector_type_for_mode (aarch64_simd_intDI_type_node
, V2DImode
);
468 build_vector_type_for_mode (aarch64_simd_double_type_node
, V2DFmode
);
470 /* Unsigned integer types for various mode sizes. */
471 intUQI_type_node
= make_unsigned_type (GET_MODE_PRECISION (QImode
));
472 intUHI_type_node
= make_unsigned_type (GET_MODE_PRECISION (HImode
));
473 intUSI_type_node
= make_unsigned_type (GET_MODE_PRECISION (SImode
));
474 intUDI_type_node
= make_unsigned_type (GET_MODE_PRECISION (DImode
));
476 (*lang_hooks
.types
.register_builtin_type
) (intUQI_type_node
,
477 "__builtin_aarch64_simd_uqi");
478 (*lang_hooks
.types
.register_builtin_type
) (intUHI_type_node
,
479 "__builtin_aarch64_simd_uhi");
480 (*lang_hooks
.types
.register_builtin_type
) (intUSI_type_node
,
481 "__builtin_aarch64_simd_usi");
482 (*lang_hooks
.types
.register_builtin_type
) (intUDI_type_node
,
483 "__builtin_aarch64_simd_udi");
485 /* Opaque integer types for structures of vectors. */
486 intEI_type_node
= make_signed_type (GET_MODE_PRECISION (EImode
));
487 intOI_type_node
= make_signed_type (GET_MODE_PRECISION (OImode
));
488 intCI_type_node
= make_signed_type (GET_MODE_PRECISION (CImode
));
489 intXI_type_node
= make_signed_type (GET_MODE_PRECISION (XImode
));
491 (*lang_hooks
.types
.register_builtin_type
) (intTI_type_node
,
492 "__builtin_aarch64_simd_ti");
493 (*lang_hooks
.types
.register_builtin_type
) (intEI_type_node
,
494 "__builtin_aarch64_simd_ei");
495 (*lang_hooks
.types
.register_builtin_type
) (intOI_type_node
,
496 "__builtin_aarch64_simd_oi");
497 (*lang_hooks
.types
.register_builtin_type
) (intCI_type_node
,
498 "__builtin_aarch64_simd_ci");
499 (*lang_hooks
.types
.register_builtin_type
) (intXI_type_node
,
500 "__builtin_aarch64_simd_xi");
502 /* Pointers to vector types. */
503 V8QI_pointer_node
= build_pointer_type (V8QI_type_node
);
504 V4HI_pointer_node
= build_pointer_type (V4HI_type_node
);
505 V2SI_pointer_node
= build_pointer_type (V2SI_type_node
);
506 V2SF_pointer_node
= build_pointer_type (V2SF_type_node
);
507 V16QI_pointer_node
= build_pointer_type (V16QI_type_node
);
508 V8HI_pointer_node
= build_pointer_type (V8HI_type_node
);
509 V4SI_pointer_node
= build_pointer_type (V4SI_type_node
);
510 V4SF_pointer_node
= build_pointer_type (V4SF_type_node
);
511 V2DI_pointer_node
= build_pointer_type (V2DI_type_node
);
512 V2DF_pointer_node
= build_pointer_type (V2DF_type_node
);
514 /* Operations which return results as pairs. */
515 void_ftype_pv8qi_v8qi_v8qi
=
516 build_function_type_list (void_type_node
, V8QI_pointer_node
,
517 V8QI_type_node
, V8QI_type_node
, NULL
);
518 void_ftype_pv4hi_v4hi_v4hi
=
519 build_function_type_list (void_type_node
, V4HI_pointer_node
,
520 V4HI_type_node
, V4HI_type_node
, NULL
);
521 void_ftype_pv2si_v2si_v2si
=
522 build_function_type_list (void_type_node
, V2SI_pointer_node
,
523 V2SI_type_node
, V2SI_type_node
, NULL
);
524 void_ftype_pv2sf_v2sf_v2sf
=
525 build_function_type_list (void_type_node
, V2SF_pointer_node
,
526 V2SF_type_node
, V2SF_type_node
, NULL
);
527 void_ftype_pdi_di_di
=
528 build_function_type_list (void_type_node
, intDI_pointer_node
,
529 aarch64_simd_intDI_type_node
,
530 aarch64_simd_intDI_type_node
, NULL
);
531 void_ftype_pv16qi_v16qi_v16qi
=
532 build_function_type_list (void_type_node
, V16QI_pointer_node
,
533 V16QI_type_node
, V16QI_type_node
, NULL
);
534 void_ftype_pv8hi_v8hi_v8hi
=
535 build_function_type_list (void_type_node
, V8HI_pointer_node
,
536 V8HI_type_node
, V8HI_type_node
, NULL
);
537 void_ftype_pv4si_v4si_v4si
=
538 build_function_type_list (void_type_node
, V4SI_pointer_node
,
539 V4SI_type_node
, V4SI_type_node
, NULL
);
540 void_ftype_pv4sf_v4sf_v4sf
=
541 build_function_type_list (void_type_node
, V4SF_pointer_node
,
542 V4SF_type_node
, V4SF_type_node
, NULL
);
543 void_ftype_pv2di_v2di_v2di
=
544 build_function_type_list (void_type_node
, V2DI_pointer_node
,
545 V2DI_type_node
, V2DI_type_node
, NULL
);
546 void_ftype_pv2df_v2df_v2df
=
547 build_function_type_list (void_type_node
, V2DF_pointer_node
,
548 V2DF_type_node
, V2DF_type_node
, NULL
);
550 dreg_types
[0] = V8QI_type_node
;
551 dreg_types
[1] = V4HI_type_node
;
552 dreg_types
[2] = V2SI_type_node
;
553 dreg_types
[3] = V2SF_type_node
;
554 dreg_types
[4] = aarch64_simd_intDI_type_node
;
555 dreg_types
[5] = aarch64_simd_double_type_node
;
557 qreg_types
[0] = V16QI_type_node
;
558 qreg_types
[1] = V8HI_type_node
;
559 qreg_types
[2] = V4SI_type_node
;
560 qreg_types
[3] = V4SF_type_node
;
561 qreg_types
[4] = V2DI_type_node
;
562 qreg_types
[5] = V2DF_type_node
;
564 /* If NUM_DREG_TYPES != NUM_QREG_TYPES, we will need separate nested loops
565 for qreg and dreg reinterp inits. */
566 for (i
= 0; i
< NUM_DREG_TYPES
; i
++)
569 for (j
= 0; j
< NUM_DREG_TYPES
; j
++)
571 reinterp_ftype_dreg
[i
][j
]
572 = build_function_type_list (dreg_types
[i
], dreg_types
[j
], NULL
);
573 reinterp_ftype_qreg
[i
][j
]
574 = build_function_type_list (qreg_types
[i
], qreg_types
[j
], NULL
);
578 for (i
= 0; i
< ARRAY_SIZE (aarch64_simd_builtin_data
); i
++, fcode
++)
580 aarch64_simd_builtin_datum
*d
= &aarch64_simd_builtin_data
[i
];
581 const char *const modenames
[] =
583 "v8qi", "v4hi", "v2si", "v2sf", "di", "df",
584 "v16qi", "v8hi", "v4si", "v4sf", "v2di", "v2df",
585 "ti", "ei", "oi", "xi", "si", "sf", "hi", "qi"
593 gcc_assert (ARRAY_SIZE (modenames
) == T_MAX
);
599 case AARCH64_SIMD_LOAD1
:
600 case AARCH64_SIMD_LOAD1LANE
:
601 case AARCH64_SIMD_LOADSTRUCT
:
602 case AARCH64_SIMD_LOADSTRUCTLANE
:
605 case AARCH64_SIMD_STORE1
:
606 case AARCH64_SIMD_STORE1LANE
:
607 case AARCH64_SIMD_STORESTRUCT
:
608 case AARCH64_SIMD_STORESTRUCTLANE
:
612 case AARCH64_SIMD_UNOP
:
613 case AARCH64_SIMD_BINOP
:
614 case AARCH64_SIMD_TERNOP
:
615 case AARCH64_SIMD_QUADOP
:
616 case AARCH64_SIMD_COMBINE
:
617 case AARCH64_SIMD_CONVERT
:
618 case AARCH64_SIMD_CREATE
:
619 case AARCH64_SIMD_DUP
:
620 case AARCH64_SIMD_DUPLANE
:
621 case AARCH64_SIMD_FIXCONV
:
622 case AARCH64_SIMD_GETLANE
:
623 case AARCH64_SIMD_LANEMAC
:
624 case AARCH64_SIMD_LANEMUL
:
625 case AARCH64_SIMD_LANEMULH
:
626 case AARCH64_SIMD_LANEMULL
:
627 case AARCH64_SIMD_LOGICBINOP
:
628 case AARCH64_SIMD_SCALARMAC
:
629 case AARCH64_SIMD_SCALARMUL
:
630 case AARCH64_SIMD_SCALARMULH
:
631 case AARCH64_SIMD_SCALARMULL
:
632 case AARCH64_SIMD_SELECT
:
633 case AARCH64_SIMD_SETLANE
:
634 case AARCH64_SIMD_SHIFTACC
:
635 case AARCH64_SIMD_SHIFTIMM
:
636 case AARCH64_SIMD_SHIFTINSERT
:
637 case AARCH64_SIMD_SPLIT
:
638 case AARCH64_SIMD_VTBL
:
639 case AARCH64_SIMD_VTBX
:
642 tree return_type
= void_type_node
, args
= void_list_node
;
644 /* Build a function type directly from the insn_data for this
645 builtin. The build_function_type () function takes care of
646 removing duplicates for us. */
648 for (k
= insn_data
[d
->code
].n_operands
-1; k
>= 0; k
--)
650 /* Skip an internal operand for vget_{low, high}. */
651 if (k
== 2 && d
->itype
== AARCH64_SIMD_SPLIT
)
654 if (is_load
&& k
== 1)
656 /* AdvSIMD load patterns always have the memory operand
657 (a DImode pointer) in the operand 1 position. We
658 want a const pointer to the element type in that
660 gcc_assert (insn_data
[d
->code
].operand
[k
].mode
== DImode
);
666 eltype
= const_intQI_pointer_node
;
671 eltype
= const_intHI_pointer_node
;
676 eltype
= const_intSI_pointer_node
;
681 eltype
= const_float_pointer_node
;
686 eltype
= const_intDI_pointer_node
;
691 eltype
= const_double_pointer_node
;
698 else if (is_store
&& k
== 0)
700 /* Similarly, AdvSIMD store patterns use operand 0 as
701 the memory location to store to (a DImode pointer).
702 Use a pointer to the element type of the store in
704 gcc_assert (insn_data
[d
->code
].operand
[k
].mode
== DImode
);
710 eltype
= intQI_pointer_node
;
715 eltype
= intHI_pointer_node
;
720 eltype
= intSI_pointer_node
;
725 eltype
= float_pointer_node
;
730 eltype
= intDI_pointer_node
;
735 eltype
= double_pointer_node
;
744 switch (insn_data
[d
->code
].operand
[k
].mode
)
747 eltype
= void_type_node
;
751 eltype
= aarch64_simd_intQI_type_node
;
754 eltype
= aarch64_simd_intHI_type_node
;
757 eltype
= aarch64_simd_intSI_type_node
;
760 eltype
= aarch64_simd_float_type_node
;
763 eltype
= aarch64_simd_double_type_node
;
766 eltype
= aarch64_simd_intDI_type_node
;
769 eltype
= intTI_type_node
;
772 eltype
= intEI_type_node
;
775 eltype
= intOI_type_node
;
778 eltype
= intCI_type_node
;
781 eltype
= intXI_type_node
;
783 /* 64-bit vectors. */
785 eltype
= V8QI_type_node
;
788 eltype
= V4HI_type_node
;
791 eltype
= V2SI_type_node
;
794 eltype
= V2SF_type_node
;
796 /* 128-bit vectors. */
798 eltype
= V16QI_type_node
;
801 eltype
= V8HI_type_node
;
804 eltype
= V4SI_type_node
;
807 eltype
= V4SF_type_node
;
810 eltype
= V2DI_type_node
;
813 eltype
= V2DF_type_node
;
820 if (k
== 0 && !is_store
)
821 return_type
= eltype
;
823 args
= tree_cons (NULL_TREE
, eltype
, args
);
825 ftype
= build_function_type (return_type
, args
);
829 case AARCH64_SIMD_RESULTPAIR
:
831 switch (insn_data
[d
->code
].operand
[1].mode
)
834 ftype
= void_ftype_pv8qi_v8qi_v8qi
;
837 ftype
= void_ftype_pv4hi_v4hi_v4hi
;
840 ftype
= void_ftype_pv2si_v2si_v2si
;
843 ftype
= void_ftype_pv2sf_v2sf_v2sf
;
846 ftype
= void_ftype_pdi_di_di
;
849 ftype
= void_ftype_pv16qi_v16qi_v16qi
;
852 ftype
= void_ftype_pv8hi_v8hi_v8hi
;
855 ftype
= void_ftype_pv4si_v4si_v4si
;
858 ftype
= void_ftype_pv4sf_v4sf_v4sf
;
861 ftype
= void_ftype_pv2di_v2di_v2di
;
864 ftype
= void_ftype_pv2df_v2df_v2df
;
872 case AARCH64_SIMD_REINTERP
:
874 /* We iterate over 6 doubleword types, then 6 quadword
876 int rhs_d
= d
->mode
% NUM_DREG_TYPES
;
877 int rhs_q
= (d
->mode
- NUM_DREG_TYPES
) % NUM_QREG_TYPES
;
878 switch (insn_data
[d
->code
].operand
[0].mode
)
881 ftype
= reinterp_ftype_dreg
[0][rhs_d
];
884 ftype
= reinterp_ftype_dreg
[1][rhs_d
];
887 ftype
= reinterp_ftype_dreg
[2][rhs_d
];
890 ftype
= reinterp_ftype_dreg
[3][rhs_d
];
893 ftype
= reinterp_ftype_dreg
[4][rhs_d
];
896 ftype
= reinterp_ftype_dreg
[5][rhs_d
];
899 ftype
= reinterp_ftype_qreg
[0][rhs_q
];
902 ftype
= reinterp_ftype_qreg
[1][rhs_q
];
905 ftype
= reinterp_ftype_qreg
[2][rhs_q
];
908 ftype
= reinterp_ftype_qreg
[3][rhs_q
];
911 ftype
= reinterp_ftype_qreg
[4][rhs_q
];
914 ftype
= reinterp_ftype_qreg
[5][rhs_q
];
925 gcc_assert (ftype
!= NULL
);
927 snprintf (namebuf
, sizeof (namebuf
), "__builtin_aarch64_%s%s",
928 d
->name
, modenames
[d
->mode
]);
930 fndecl
= add_builtin_function (namebuf
, ftype
, fcode
, BUILT_IN_MD
,
932 aarch64_builtin_decls
[fcode
] = fndecl
;
937 aarch64_init_builtins (void)
940 aarch64_init_simd_builtins ();
944 aarch64_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
946 if (code
>= AARCH64_BUILTIN_MAX
)
947 return error_mark_node
;
949 return aarch64_builtin_decls
[code
];
954 SIMD_ARG_COPY_TO_REG
,
959 #define SIMD_MAX_BUILTIN_ARGS 5
962 aarch64_simd_expand_args (rtx target
, int icode
, int have_retval
,
967 tree arg
[SIMD_MAX_BUILTIN_ARGS
];
968 rtx op
[SIMD_MAX_BUILTIN_ARGS
];
969 enum machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
970 enum machine_mode mode
[SIMD_MAX_BUILTIN_ARGS
];
975 || GET_MODE (target
) != tmode
976 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
)))
977 target
= gen_reg_rtx (tmode
);
983 builtin_simd_arg thisarg
= (builtin_simd_arg
) va_arg (ap
, int);
985 if (thisarg
== SIMD_ARG_STOP
)
989 arg
[argc
] = CALL_EXPR_ARG (exp
, argc
);
990 op
[argc
] = expand_normal (arg
[argc
]);
991 mode
[argc
] = insn_data
[icode
].operand
[argc
+ have_retval
].mode
;
995 case SIMD_ARG_COPY_TO_REG
:
996 if (POINTER_TYPE_P (TREE_TYPE (arg
[argc
])))
997 op
[argc
] = convert_memory_address (Pmode
, op
[argc
]);
998 /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
999 if (!(*insn_data
[icode
].operand
[argc
+ have_retval
].predicate
)
1000 (op
[argc
], mode
[argc
]))
1001 op
[argc
] = copy_to_mode_reg (mode
[argc
], op
[argc
]);
1004 case SIMD_ARG_CONSTANT
:
1005 if (!(*insn_data
[icode
].operand
[argc
+ have_retval
].predicate
)
1006 (op
[argc
], mode
[argc
]))
1007 error_at (EXPR_LOCATION (exp
), "incompatible type for argument %d, "
1008 "expected %<const int%>", argc
+ 1);
1025 pat
= GEN_FCN (icode
) (target
, op
[0]);
1029 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
1033 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2]);
1037 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3]);
1041 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3], op
[4]);
1051 pat
= GEN_FCN (icode
) (op
[0]);
1055 pat
= GEN_FCN (icode
) (op
[0], op
[1]);
1059 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2]);
1063 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3]);
1067 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3], op
[4]);
1082 /* Expand an AArch64 AdvSIMD builtin(intrinsic). */
1084 aarch64_simd_expand_builtin (int fcode
, tree exp
, rtx target
)
1086 aarch64_simd_builtin_datum
*d
=
1087 &aarch64_simd_builtin_data
[fcode
- (AARCH64_SIMD_BUILTIN_BASE
+ 1)];
1088 aarch64_simd_itype itype
= d
->itype
;
1089 enum insn_code icode
= d
->code
;
1093 case AARCH64_SIMD_UNOP
:
1094 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1095 SIMD_ARG_COPY_TO_REG
,
1098 case AARCH64_SIMD_BINOP
:
1100 rtx arg2
= expand_normal (CALL_EXPR_ARG (exp
, 1));
1101 /* Handle constants only if the predicate allows it. */
1102 bool op1_const_int_p
=
1104 && (*insn_data
[icode
].operand
[2].predicate
)
1105 (arg2
, insn_data
[icode
].operand
[2].mode
));
1106 return aarch64_simd_expand_args
1107 (target
, icode
, 1, exp
,
1108 SIMD_ARG_COPY_TO_REG
,
1109 op1_const_int_p
? SIMD_ARG_CONSTANT
: SIMD_ARG_COPY_TO_REG
,
1113 case AARCH64_SIMD_TERNOP
:
1114 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1115 SIMD_ARG_COPY_TO_REG
,
1116 SIMD_ARG_COPY_TO_REG
,
1117 SIMD_ARG_COPY_TO_REG
,
1120 case AARCH64_SIMD_QUADOP
:
1121 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1122 SIMD_ARG_COPY_TO_REG
,
1123 SIMD_ARG_COPY_TO_REG
,
1124 SIMD_ARG_COPY_TO_REG
,
1125 SIMD_ARG_COPY_TO_REG
,
1127 case AARCH64_SIMD_LOAD1
:
1128 case AARCH64_SIMD_LOADSTRUCT
:
1129 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1130 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1132 case AARCH64_SIMD_STORE1
:
1133 case AARCH64_SIMD_STORESTRUCT
:
1134 return aarch64_simd_expand_args (target
, icode
, 0, exp
,
1135 SIMD_ARG_COPY_TO_REG
,
1136 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1138 case AARCH64_SIMD_REINTERP
:
1139 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1140 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1142 case AARCH64_SIMD_CREATE
:
1143 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1144 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1146 case AARCH64_SIMD_COMBINE
:
1147 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1148 SIMD_ARG_COPY_TO_REG
,
1149 SIMD_ARG_COPY_TO_REG
, SIMD_ARG_STOP
);
1151 case AARCH64_SIMD_GETLANE
:
1152 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1153 SIMD_ARG_COPY_TO_REG
,
1157 case AARCH64_SIMD_SETLANE
:
1158 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1159 SIMD_ARG_COPY_TO_REG
,
1160 SIMD_ARG_COPY_TO_REG
,
1164 case AARCH64_SIMD_SHIFTIMM
:
1165 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1166 SIMD_ARG_COPY_TO_REG
,
1170 case AARCH64_SIMD_SHIFTACC
:
1171 case AARCH64_SIMD_SHIFTINSERT
:
1172 return aarch64_simd_expand_args (target
, icode
, 1, exp
,
1173 SIMD_ARG_COPY_TO_REG
,
1174 SIMD_ARG_COPY_TO_REG
,
1183 /* Expand an expression EXP that calls a built-in function,
1184 with result going to TARGET if that's convenient. */
1186 aarch64_expand_builtin (tree exp
,
1188 rtx subtarget ATTRIBUTE_UNUSED
,
1189 enum machine_mode mode ATTRIBUTE_UNUSED
,
1190 int ignore ATTRIBUTE_UNUSED
)
1192 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
1193 int fcode
= DECL_FUNCTION_CODE (fndecl
);
1195 if (fcode
>= AARCH64_SIMD_BUILTIN_BASE
)
1196 return aarch64_simd_expand_builtin (fcode
, exp
, target
);
/* NOTE(review): this chunk is a fragmented extraction -- many of the
   original source lines are missing between the numbered fragments, so
   the enclosing braces, the switch statement on FN, and the
   declarations of out_n/in_n are not visible here.  Comments below are
   limited to what the visible fragments establish.  */
/* Given a scalar built-in FNDECL and the desired vector types TYPE_OUT
   and TYPE_IN, return the AArch64 SIMD built-in decl that implements
   the same operation on vectors (the floor/ceil/trunc/round/nearbyint/
   sqrt rounding family, clz, and the lfloor/lceil/lround conversion
   family are handled below).  */
1202 aarch64_builtin_vectorized_function (tree fndecl
, tree type_out
, tree type_in
)
/* Element modes of the output and input vector types.  */
1204 enum machine_mode in_mode
, out_mode
;
/* Only vector-to-vector mappings make sense; presumably the missing
   branch body here bails out -- TODO confirm against the full source.  */
1207 if (TREE_CODE (type_out
) != VECTOR_TYPE
1208 || TREE_CODE (type_in
) != VECTOR_TYPE
)
/* Record element mode and lane count for both sides; these feed the
   AARCH64_CHECK_BUILTIN_MODE tests redefined before each use below.  */
1211 out_mode
= TYPE_MODE (TREE_TYPE (type_out
));
1212 out_n
= TYPE_VECTOR_SUBPARTS (type_out
);
1213 in_mode
= TYPE_MODE (TREE_TYPE (type_in
));
1214 in_n
= TYPE_VECTOR_SUBPARTS (type_in
);
/* AARCH64_FIND_FRINT_VARIANT selects the v2df, v4sf or v2sf variant of
   a floating-point rounding built-in, depending on which mode check
   succeeds.  (The placeholder definition of AARCH64_CHECK_BUILTIN_MODE
   as 1 is replaced before each real use.)  The macro's closing
   alternative is missing from this extraction.  */
1216 #undef AARCH64_CHECK_BUILTIN_MODE
1217 #define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
1218 #define AARCH64_FIND_FRINT_VARIANT(N) \
1219 (AARCH64_CHECK_BUILTIN_MODE (2, D) \
1220 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2df] \
1221 : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
1222 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v4sf] \
1223 : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
1224 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_##N##v2sf] \
1226 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
1228 enum built_in_function fn
= DECL_FUNCTION_CODE (fndecl
);
/* Float -> float, same lane count: the FRINT-style rounding family.
   (These `case' labels belong to a switch on FN whose opening is not
   visible in this fragment.)  */
1231 #undef AARCH64_CHECK_BUILTIN_MODE
1232 #define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1233 (out_mode == N##Fmode && out_n == C \
1234 && in_mode == N##Fmode && in_n == C)
1235 case BUILT_IN_FLOOR
:
1236 case BUILT_IN_FLOORF
:
1237 return AARCH64_FIND_FRINT_VARIANT (floor
);
1239 case BUILT_IN_CEILF
:
1240 return AARCH64_FIND_FRINT_VARIANT (ceil
);
1241 case BUILT_IN_TRUNC
:
1242 case BUILT_IN_TRUNCF
:
1243 return AARCH64_FIND_FRINT_VARIANT (btrunc
);
1244 case BUILT_IN_ROUND
:
1245 case BUILT_IN_ROUNDF
:
1246 return AARCH64_FIND_FRINT_VARIANT (round
);
1247 case BUILT_IN_NEARBYINT
:
1248 case BUILT_IN_NEARBYINTF
:
1249 return AARCH64_FIND_FRINT_VARIANT (nearbyint
);
1251 case BUILT_IN_SQRTF
:
1252 return AARCH64_FIND_FRINT_VARIANT (sqrt
);
/* Integer input, SImode output lanes, same lane count: used for CLZ.  */
1253 #undef AARCH64_CHECK_BUILTIN_MODE
1254 #define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1255 (out_mode == SImode && out_n == C \
1256 && in_mode == N##Imode && in_n == C)
1259 if (AARCH64_CHECK_BUILTIN_MODE (4, S
))
1260 return aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_clzv4si
];
/* Float input -> integer output lanes, same lane count: the
   lfloor/lceil/lround conversion family.  The assignments into
   new_tree appear to have lost their `new_tree =' text in this
   extraction -- TODO confirm against the full source.  */
1263 #undef AARCH64_CHECK_BUILTIN_MODE
1264 #define AARCH64_CHECK_BUILTIN_MODE(C, N) \
1265 (out_mode == N##Imode && out_n == C \
1266 && in_mode == N##Fmode && in_n == C)
1267 case BUILT_IN_LFLOOR
:
1268 case BUILT_IN_IFLOORF
:
1270 tree new_tree
= NULL_TREE
;
1271 if (AARCH64_CHECK_BUILTIN_MODE (2, D
))
1273 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lfloorv2dfv2di
];
1274 else if (AARCH64_CHECK_BUILTIN_MODE (4, S
))
1276 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lfloorv4sfv4si
];
1277 else if (AARCH64_CHECK_BUILTIN_MODE (2, S
))
1279 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lfloorv2sfv2si
];
1282 case BUILT_IN_LCEIL
:
1283 case BUILT_IN_ICEILF
:
1285 tree new_tree
= NULL_TREE
;
1286 if (AARCH64_CHECK_BUILTIN_MODE (2, D
))
1288 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lceilv2dfv2di
];
1289 else if (AARCH64_CHECK_BUILTIN_MODE (4, S
))
1291 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lceilv4sfv4si
];
1292 else if (AARCH64_CHECK_BUILTIN_MODE (2, S
))
1294 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lceilv2sfv2si
];
1297 case BUILT_IN_LROUND
:
1298 case BUILT_IN_IROUNDF
:
1300 tree new_tree
= NULL_TREE
;
1301 if (AARCH64_CHECK_BUILTIN_MODE (2, D
))
1303 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lroundv2dfv2di
];
1304 else if (AARCH64_CHECK_BUILTIN_MODE (4, S
))
1306 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lroundv4sfv4si
];
1307 else if (AARCH64_CHECK_BUILTIN_MODE (2, S
))
1309 aarch64_builtin_decls
[AARCH64_SIMD_BUILTIN_lroundv2sfv2si
];
/* NOTE(review): fragmented extraction -- the enclosing braces, the
   switch on FCODE and its default path are not visible here.  */
/* Fold an AArch64 SIMD built-in call at the tree level when a generic
   tree code expresses the same operation (ABS_EXPR, the vector
   comparisons, FLOAT_EXPR), so later tree passes can optimize it.
   VAR1 and the BUILTIN_* iterators expand to runs of `case' labels,
   one per vector-mode variant of the named built-in.  */
1322 #define VAR1(T, N, MAP, A) \
1323 case AARCH64_SIMD_BUILTIN_##N##A:
1326 aarch64_fold_builtin (tree fndecl
, int n_args ATTRIBUTE_UNUSED
, tree
*args
,
1327 bool ignore ATTRIBUTE_UNUSED
)
/* Function code of the built-in, and the result type of the call taken
   from the decl's return type.  */
1329 int fcode
= DECL_FUNCTION_CODE (fndecl
);
1330 tree type
= TREE_TYPE (TREE_TYPE (fndecl
));
/* vabs* variants fold to generic ABS_EXPR.  */
1334 BUILTIN_VALLDI (UNOP
, abs
, 2)
1335 return fold_build1 (ABS_EXPR
, type
, args
[0]);
/* Vector compares fold to the corresponding generic comparison.  */
1337 BUILTIN_VALLDI (BINOP
, cmge
, 0)
1338 return fold_build2 (GE_EXPR
, type
, args
[0], args
[1]);
1340 BUILTIN_VALLDI (BINOP
, cmgt
, 0)
1341 return fold_build2 (GT_EXPR
, type
, args
[0], args
[1]);
1343 BUILTIN_VALLDI (BINOP
, cmeq
, 0)
1344 return fold_build2 (EQ_EXPR
, type
, args
[0], args
[1]);
/* cmtst (vtst): built as (args[0] & args[1]) != {0,...} from a
   BIT_AND_EXPR and an NE_EXPR against a zero vector.  */
1346 BUILTIN_VSDQ_I_DI (BINOP
, cmtst
, 0)
1348 tree and_node
= fold_build2 (BIT_AND_EXPR
, type
, args
[0], args
[1]);
1349 tree vec_zero_node
= build_zero_cst (type
);
1350 return fold_build2 (NE_EXPR
, type
, and_node
, vec_zero_node
);
/* Signed integer -> float conversions fold to generic FLOAT_EXPR.  */
1353 VAR1 (UNOP
, floatv2si
, 2, v2sf
)
1354 VAR1 (UNOP
, floatv4si
, 2, v4sf
)
1355 VAR1 (UNOP
, floatv2di
, 2, v2df
)
1356 return fold_build1 (FLOAT_EXPR
, type
, args
[0]);
/* NOTE(review): fragmented extraction -- the enclosing braces, the
   switch on FCODE, the reduction-expression operands of the three
   gimple_build_assign_with_ops calls and the final return are missing
   from this view.  */
/* Fold an AArch64 SIMD built-in call in the GIMPLE stream: calls to
   the reduc_splus_/reduc_smax_/reduc_smin_ reduction built-ins are
   replaced in place (via gsi_replace) with an equivalent assignment
   statement.  */
1365 aarch64_gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
/* Whether a replacement was made; presumably reported by the (missing)
   return statement -- TODO confirm against the full source.  */
1367 bool changed
= false;
1368 gimple stmt
= gsi_stmt (*gsi
);
1369 tree call
= gimple_call_fn (stmt
);
1371 gimple new_stmt
= NULL
;
1374 fndecl
= gimple_call_fndecl (stmt
);
1377 int fcode
= DECL_FUNCTION_CODE (fndecl
);
1378 int nargs
= gimple_call_num_args (stmt
);
/* Pointer to the call's first argument, or a dummy when the call has
   no arguments, so the cases below can index args[] unconditionally.  */
1379 tree
*args
= (nargs
> 0
1380 ? gimple_call_arg_ptr (stmt
, 0)
1381 : &error_mark_node
);
/* Sum-reduction built-ins become an assignment to the call's lhs.  */
1385 BUILTIN_VALL (UNOP
, reduc_splus_
, 10)
1386 new_stmt
= gimple_build_assign_with_ops (
1388 gimple_call_lhs (stmt
),
/* Max-reduction.  */
1392 BUILTIN_VDQIF (UNOP
, reduc_smax_
, 10)
1393 new_stmt
= gimple_build_assign_with_ops (
1395 gimple_call_lhs (stmt
),
/* Min-reduction.  */
1399 BUILTIN_VDQIF (UNOP
, reduc_smin_
, 10)
1400 new_stmt
= gimple_build_assign_with_ops (
1402 gimple_call_lhs (stmt
),
/* Swap the original call for the folded assignment in the stream.  */
1415 gsi_replace (gsi
, new_stmt
, true);
/* Undefine the local helper macros (mode checks and the BUILTIN_*
   case-label iterators) now that all uses above are done, so they do
   not leak past this file.  */
1422 #undef AARCH64_CHECK_BUILTIN_MODE
1423 #undef AARCH64_FIND_FRINT_VARIANT
1425 #undef BUILTIN_SDQ_I
1426 #undef BUILTIN_SD_HSI
1437 #undef BUILTIN_VDQHS
1438 #undef BUILTIN_VDQIF
1441 #undef BUILTIN_VDQ_BHSI
1442 #undef BUILTIN_VDQ_I
1444 #undef BUILTIN_VD_BHSI
1445 #undef BUILTIN_VD_HSI
1446 #undef BUILTIN_VD_RE
1450 #undef BUILTIN_VQ_HSI
1452 #undef BUILTIN_VSDQ_HSI
1453 #undef BUILTIN_VSDQ_I
1454 #undef BUILTIN_VSDQ_I_BHSI
1455 #undef BUILTIN_VSDQ_I_DI
1456 #undef BUILTIN_VSD_HSI
1457 #undef BUILTIN_VSQN_HSDI
1458 #undef BUILTIN_VSTRUCT