]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/config/aarch64/aarch64-builtins.c
Make std::mem_fn work with varargs functions.
[thirdparty/gcc.git] / gcc / config / aarch64 / aarch64-builtins.c
CommitLineData
43e9d192 1/* Builtins' description for AArch64 SIMD architecture.
23a5b65a 2 Copyright (C) 2011-2014 Free Software Foundation, Inc.
43e9d192
IB
3 Contributed by ARM Ltd.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful, but
13 WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "rtl.h"
26#include "tree.h"
d8a2d370
DN
27#include "stor-layout.h"
28#include "stringpool.h"
29#include "calls.h"
43e9d192
IB
30#include "expr.h"
31#include "tm_p.h"
32#include "recog.h"
33#include "langhooks.h"
34#include "diagnostic-core.h"
b0710fe1 35#include "insn-codes.h"
43e9d192 36#include "optabs.h"
2fb9a547
AM
37#include "hash-table.h"
38#include "vec.h"
39#include "ggc.h"
60393bbc
AM
40#include "predict.h"
41#include "hashtab.h"
42#include "hash-set.h"
43#include "machmode.h"
44#include "hard-reg-set.h"
45#include "input.h"
46#include "function.h"
47#include "dominance.h"
48#include "cfg.h"
49#include "cfgrtl.h"
50#include "cfganal.h"
51#include "lcm.h"
52#include "cfgbuild.h"
53#include "cfgcleanup.h"
2fb9a547
AM
54#include "basic-block.h"
55#include "tree-ssa-alias.h"
56#include "internal-fn.h"
57#include "gimple-fold.h"
58#include "tree-eh.h"
59#include "gimple-expr.h"
60#include "is-a.h"
0ac198d3 61#include "gimple.h"
5be5c238 62#include "gimple-iterator.h"
43e9d192 63
bc5e395d
JG
/* Map lower-case mode suffixes used in aarch64-simd-builtins.def to the
   corresponding machine_mode enumerator.  UP(X) pastes the _UP suffix so
   that e.g. UP (v8qi) expands to V8QImode.  */
#define v8qi_UP  V8QImode
#define v4hi_UP  V4HImode
#define v2si_UP  V2SImode
#define v2sf_UP  V2SFmode
#define v1df_UP  V1DFmode
#define di_UP    DImode
#define df_UP    DFmode
#define v16qi_UP V16QImode
#define v8hi_UP  V8HImode
#define v4si_UP  V4SImode
#define v4sf_UP  V4SFmode
#define v2di_UP  V2DImode
#define v2df_UP  V2DFmode
#define ti_UP	 TImode
#define ei_UP	 EImode
#define oi_UP	 OImode
#define ci_UP	 CImode
#define xi_UP	 XImode
#define si_UP    SImode
#define sf_UP    SFmode
#define hi_UP    HImode
#define qi_UP    QImode
#define UP(X) X##_UP
87
b5828b4b
JG
/* Maximum number of operands (return value plus arguments) any SIMD
   builtin described in this file can have.  */
#define SIMD_MAX_BUILTIN_ARGS 5

/* Bit flags describing how each operand of a builtin should be typed.
   One qualifier entry exists per operand (index 0 is the return type);
   flags may be OR'd together, as the pre-combined _map_mode values show.  */
enum aarch64_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const | qualifier_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100
};
43e9d192
IB
119
/* Description of one AdvSIMD builtin: its user-visible name, the mode
   used for qualifier_map_mode operands, the insn pattern it expands to,
   the function code assigned at registration time, and the per-operand
   qualifier array.  */
typedef struct
{
  const char *name;		/* Builtin name without the __builtin_aarch64_ prefix.  */
  machine_mode mode;		/* Mode substituted for qualifier_map_mode operands.  */
  const enum insn_code code;	/* Insn pattern this builtin expands to.  */
  unsigned int fcode;		/* Function code; filled in by aarch64_init_simd_builtins.  */
  enum aarch64_type_qualifiers *qualifiers;  /* One entry per operand, index 0 = return.  */
} aarch64_simd_builtin_datum;
128
096c59be
AL
/* Qualifier tables shared by the builtin descriptions in
   aarch64-simd-builtins.def.  Each TYPES_* macro names a table giving the
   qualifiers of the return value (entry 0) and each argument in turn.  */

/* The qualifier_internal allows generation of a unary builtin from
   a pattern with a third pseudo-operand such as a match_scratch.  */
static enum aarch64_type_qualifiers
aarch64_types_unop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_internal };
#define TYPES_UNOP (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned };
#define TYPES_UNOPU (aarch64_types_unopu_qualifiers)
#define TYPES_CREATE (aarch64_types_unop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_maybe_immediate };
#define TYPES_BINOP (aarch64_types_binop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopv_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_none, qualifier_none };
#define TYPES_BINOPV (aarch64_types_binopv_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned };
#define TYPES_BINOPU (aarch64_types_binopu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_uus_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_none };
#define TYPES_BINOP_UUS (aarch64_types_binop_uus_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binop_ssu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_unsigned };
#define TYPES_BINOP_SSU (aarch64_types_binop_ssu_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_binopp_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_poly, qualifier_poly };
#define TYPES_BINOPP (aarch64_types_binopp_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_none };
#define TYPES_TERNOP (aarch64_types_ternop_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_ternopu_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_TERNOPU (aarch64_types_ternopu_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_ternop_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none,
      qualifier_none, qualifier_immediate };
#define TYPES_TERNOP_LANE (aarch64_types_ternop_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_getlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_GETLANE (aarch64_types_getlane_qualifiers)
#define TYPES_SHIFTIMM (aarch64_types_getlane_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_shift_to_unsigned_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_none, qualifier_immediate };
#define TYPES_SHIFTIMM_USS (aarch64_types_shift_to_unsigned_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_unsigned_shift_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_immediate };
#define TYPES_USHIFTIMM (aarch64_types_unsigned_shift_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_setlane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none, qualifier_immediate };
#define TYPES_SETLANE (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTINSERT (aarch64_types_setlane_qualifiers)
#define TYPES_SHIFTACC (aarch64_types_setlane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_unsigned_shiftacc_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned, qualifier_unsigned,
      qualifier_immediate };
#define TYPES_USHIFTACC (aarch64_types_unsigned_shiftacc_qualifiers)


static enum aarch64_type_qualifiers
aarch64_types_combine_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_none, qualifier_none };
#define TYPES_COMBINE (aarch64_types_combine_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_load1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode };
#define TYPES_LOAD1 (aarch64_types_load1_qualifiers)
#define TYPES_LOADSTRUCT (aarch64_types_load1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_loadstruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_const_pointer_map_mode,
      qualifier_none, qualifier_none };
#define TYPES_LOADSTRUCT_LANE (aarch64_types_loadstruct_lane_qualifiers)

static enum aarch64_type_qualifiers
aarch64_types_bsl_p_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_poly, qualifier_unsigned,
      qualifier_poly, qualifier_poly };
#define TYPES_BSL_P (aarch64_types_bsl_p_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_s_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_none, qualifier_unsigned,
      qualifier_none, qualifier_none };
#define TYPES_BSL_S (aarch64_types_bsl_s_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_bsl_u_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_unsigned, qualifier_unsigned,
      qualifier_unsigned, qualifier_unsigned };
#define TYPES_BSL_U (aarch64_types_bsl_u_qualifiers)

/* The first argument (return type) of a store should be void type,
   which we represent with qualifier_void.  Their first operand will be
   a DImode pointer to the location to store to, so we must use
   qualifier_map_mode | qualifier_pointer to build a pointer to the
   element type of the vector.  */
static enum aarch64_type_qualifiers
aarch64_types_store1_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode, qualifier_none };
#define TYPES_STORE1 (aarch64_types_store1_qualifiers)
#define TYPES_STORESTRUCT (aarch64_types_store1_qualifiers)
static enum aarch64_type_qualifiers
aarch64_types_storestruct_lane_qualifiers[SIMD_MAX_BUILTIN_ARGS]
  = { qualifier_void, qualifier_pointer_map_mode,
      qualifier_none, qualifier_none };
#define TYPES_STORESTRUCT_LANE (aarch64_types_storestruct_lane_qualifiers)
b5828b4b 256
0ddec79f
JG
/* CF<MAP> macros build the CODE_FOR_* insn-code name for a builtin from the
   builtin name N and mode suffix A; the MAP digit selects the naming scheme
   used by the corresponding pattern in the .md files.  */
#define CF0(N, X) CODE_FOR_aarch64_##N##X
#define CF1(N, X) CODE_FOR_##N##X##1
#define CF2(N, X) CODE_FOR_##N##X##2
#define CF3(N, X) CODE_FOR_##N##X##3
#define CF4(N, X) CODE_FOR_##N##X##4
#define CF10(N, X) CODE_FOR_##N##X

/* VAR<n> expands to <n> aarch64_simd_builtin_datum initialisers, one per
   mode suffix, for a builtin N with qualifier table TYPES_<T>.  Each VARn
   is defined in terms of VAR(n-1) plus one more VAR1.  */
#define VAR1(T, N, MAP, A) \
  {#N #A, UP (A), CF##MAP (N, A), 0, TYPES_##T},
#define VAR2(T, N, MAP, A, B) \
  VAR1 (T, N, MAP, A) \
  VAR1 (T, N, MAP, B)
#define VAR3(T, N, MAP, A, B, C) \
  VAR2 (T, N, MAP, A, B) \
  VAR1 (T, N, MAP, C)
#define VAR4(T, N, MAP, A, B, C, D) \
  VAR3 (T, N, MAP, A, B, C) \
  VAR1 (T, N, MAP, D)
#define VAR5(T, N, MAP, A, B, C, D, E) \
  VAR4 (T, N, MAP, A, B, C, D) \
  VAR1 (T, N, MAP, E)
#define VAR6(T, N, MAP, A, B, C, D, E, F) \
  VAR5 (T, N, MAP, A, B, C, D, E) \
  VAR1 (T, N, MAP, F)
#define VAR7(T, N, MAP, A, B, C, D, E, F, G) \
  VAR6 (T, N, MAP, A, B, C, D, E, F) \
  VAR1 (T, N, MAP, G)
#define VAR8(T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, MAP, A, B, C, D, E, F, G) \
  VAR1 (T, N, MAP, H)
#define VAR9(T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, MAP, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, MAP, I)
#define VAR10(T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, MAP, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, MAP, J)
#define VAR11(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, MAP, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, MAP, K)
#define VAR12(T, N, MAP, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, MAP, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, MAP, L)
342be7f7 299
f421c516 300#include "aarch64-builtin-iterators.h"
43e9d192
IB
301
/* Table of all AdvSIMD builtins, populated by expanding the VAR* macro
   invocations in the .def file into aarch64_simd_builtin_datum entries.  */
static aarch64_simd_builtin_datum aarch64_simd_builtin_data[] = {
#include "aarch64-simd-builtins.def"
};
305
5d357f26
KT
/* There's only 8 CRC32 builtins.  Probably not worth their own .def file.  */
#define AARCH64_CRC32_BUILTINS \
  CRC32_BUILTIN (crc32b, QI) \
  CRC32_BUILTIN (crc32h, HI) \
  CRC32_BUILTIN (crc32w, SI) \
  CRC32_BUILTIN (crc32x, DI) \
  CRC32_BUILTIN (crc32cb, QI) \
  CRC32_BUILTIN (crc32ch, HI) \
  CRC32_BUILTIN (crc32cw, SI) \
  CRC32_BUILTIN (crc32cx, DI)

/* Description of one CRC32 builtin: name, mode of the data argument,
   insn pattern, and function code.  */
typedef struct
{
  const char *name;
  machine_mode mode;
  const enum insn_code icode;
  unsigned int fcode;
} aarch64_crc_builtin_datum;

/* Expand AARCH64_CRC32_BUILTINS into enumerator names for the
   aarch64_builtins enum below.  */
#define CRC32_BUILTIN(N, M) \
  AARCH64_BUILTIN_##N,

/* Re-point VAR1 so the second inclusion of the .def file (inside the
   aarch64_builtins enum) produces enumerators instead of data entries.  */
#undef VAR1
#define VAR1(T, N, MAP, A) \
  AARCH64_SIMD_BUILTIN_##T##_##N##A,
342be7f7
JG
331
/* Function codes for all AArch64 builtins: the FPCR/FPSR accessors, one
   enumerator per SIMD builtin (from the .def file via the redefined VAR1),
   and one per CRC32 builtin.  The *_BASE/*_MAX markers bracket each group
   so expansion code can classify a code by range.  */
enum aarch64_builtins
{
  AARCH64_BUILTIN_MIN,

  AARCH64_BUILTIN_GET_FPCR,
  AARCH64_BUILTIN_SET_FPCR,
  AARCH64_BUILTIN_GET_FPSR,
  AARCH64_BUILTIN_SET_FPSR,

  AARCH64_SIMD_BUILTIN_BASE,
#include "aarch64-simd-builtins.def"
  AARCH64_SIMD_BUILTIN_MAX = AARCH64_SIMD_BUILTIN_BASE
			      + ARRAY_SIZE (aarch64_simd_builtin_data),
  AARCH64_CRC32_BUILTIN_BASE,
  AARCH64_CRC32_BUILTINS
  AARCH64_CRC32_BUILTIN_MAX,
  AARCH64_BUILTIN_MAX
};
350
5d357f26
KT
/* Re-point CRC32_BUILTIN to emit full data entries, then expand the list
   once more to build the CRC32 builtin table.  */
#undef CRC32_BUILTIN
#define CRC32_BUILTIN(N, M) \
  {"__builtin_aarch64_"#N, M##mode, CODE_FOR_aarch64_##N, AARCH64_BUILTIN_##N},

static aarch64_crc_builtin_datum aarch64_crc_builtin_data[] = {
  AARCH64_CRC32_BUILTINS
};

#undef CRC32_BUILTIN

/* FUNCTION_DECL for every registered builtin, indexed by function code;
   GTY(()) keeps the decls alive across garbage collection.  */
static GTY(()) tree aarch64_builtin_decls[AARCH64_BUILTIN_MAX];
362
43e9d192
IB
#define NUM_DREG_TYPES 6
#define NUM_QREG_TYPES 6

/* Internal scalar builtin types.  These types are used to support
   neon intrinsic builtins.  They are _not_ user-visible types.  Therefore
   the mangling for these types are implementation defined.  */
const char *aarch64_scalar_builtin_types[] = {
  "__builtin_aarch64_simd_qi",
  "__builtin_aarch64_simd_hi",
  "__builtin_aarch64_simd_si",
  "__builtin_aarch64_simd_sf",
  "__builtin_aarch64_simd_di",
  "__builtin_aarch64_simd_df",
  "__builtin_aarch64_simd_poly8",
  "__builtin_aarch64_simd_poly16",
  "__builtin_aarch64_simd_poly64",
  "__builtin_aarch64_simd_poly128",
  "__builtin_aarch64_simd_ti",
  "__builtin_aarch64_simd_uqi",
  "__builtin_aarch64_simd_uhi",
  "__builtin_aarch64_simd_usi",
  "__builtin_aarch64_simd_udi",
  "__builtin_aarch64_simd_ei",
  "__builtin_aarch64_simd_oi",
  "__builtin_aarch64_simd_ci",
  "__builtin_aarch64_simd_xi",
  NULL		/* Sentinel terminating the list.  */
};
b5828b4b 391
f9d53c27
TB
/* Enumerate the AdvSIMD vector types by expanding each ENTRY of the
   types .def file to its type name.  */
#define ENTRY(E, M, Q, G) E,
enum aarch64_simd_type
{
#include "aarch64-simd-builtin-types.def"
  ARM_NEON_H_TYPES_LAST
};
#undef ENTRY

/* Everything recorded about one AdvSIMD vector type.  */
struct aarch64_simd_type_info
{
  enum aarch64_simd_type type;

  /* Internal type name.  */
  const char *name;

  /* Internal type name(mangled).  The mangled names conform to the
     AAPCS64 (see "Procedure Call Standard for the ARM 64-bit Architecture",
     Appendix A).  To qualify for emission with the mangled names defined in
     that document, a vector type must not only be of the correct mode but also
     be of the correct internal AdvSIMD vector type (e.g. __Int8x8_t); these
     types are registered by aarch64_init_simd_builtin_types ().  In other
     words, vector types defined in other ways e.g. via vector_size attribute
     will get default mangled names.  */
  const char *mangle;

  /* Internal type.  */
  tree itype;

  /* Element type.  */
  tree eltype;

  /* Machine mode the internal type maps to.  */
  enum machine_mode mode;

  /* Qualifiers.  */
  enum aarch64_type_qualifiers q;
};

/* Build the type table; itype/eltype start NULL_TREE and are filled in
   by aarch64_init_simd_builtin_types.  */
#define ENTRY(E, M, Q, G)  \
  {E, "__" #E, #G "__" #E, NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
static struct aarch64_simd_type_info aarch64_simd_types [] = {
#include "aarch64-simd-builtin-types.def"
};
#undef ENTRY

/* Opaque large-integer types used for multi-register operands; created
   lazily in aarch64_init_simd_builtin_types.  */
static tree aarch64_simd_intOI_type_node = NULL_TREE;
static tree aarch64_simd_intEI_type_node = NULL_TREE;
static tree aarch64_simd_intCI_type_node = NULL_TREE;
static tree aarch64_simd_intXI_type_node = NULL_TREE;
441
442static const char *
443aarch64_mangle_builtin_scalar_type (const_tree type)
444{
445 int i = 0;
446
447 while (aarch64_scalar_builtin_types[i] != NULL)
b5828b4b 448 {
f9d53c27
TB
449 const char *name = aarch64_scalar_builtin_types[i];
450
451 if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
452 && DECL_NAME (TYPE_NAME (type))
453 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))), name))
454 return aarch64_scalar_builtin_types[i];
455 i++;
456 }
457 return NULL;
b5828b4b
JG
458}
459
f9d53c27
TB
460static const char *
461aarch64_mangle_builtin_vector_type (const_tree type)
b5828b4b 462{
f9d53c27
TB
463 int i;
464 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
465
466 for (i = 0; i < nelts; i++)
467 if (aarch64_simd_types[i].mode == TYPE_MODE (type)
468 && TYPE_NAME (type)
469 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
470 && DECL_NAME (TYPE_NAME (type))
471 && !strcmp
472 (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type))),
473 aarch64_simd_types[i].name))
474 return aarch64_simd_types[i].mangle;
475
476 return NULL;
6db1ec94
JG
477}
478
f9d53c27
TB
479const char *
480aarch64_mangle_builtin_type (const_tree type)
6db1ec94 481{
f9d53c27
TB
482 const char *mangle;
483 /* Walk through all the AArch64 builtins types tables to filter out the
484 incoming type. */
485 if ((mangle = aarch64_mangle_builtin_vector_type (type))
486 || (mangle = aarch64_mangle_builtin_scalar_type (type)))
487 return mangle;
488
489 return NULL;
6db1ec94
JG
490}
491
f9d53c27
TB
492static tree
493aarch64_simd_builtin_std_type (enum machine_mode mode,
494 enum aarch64_type_qualifiers q)
6db1ec94 495{
f9d53c27
TB
496#define QUAL_TYPE(M) \
497 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
498 switch (mode)
499 {
500 case QImode:
501 return QUAL_TYPE (QI);
502 case HImode:
503 return QUAL_TYPE (HI);
504 case SImode:
505 return QUAL_TYPE (SI);
506 case DImode:
507 return QUAL_TYPE (DI);
508 case TImode:
509 return QUAL_TYPE (TI);
510 case OImode:
511 return aarch64_simd_intOI_type_node;
512 case EImode:
513 return aarch64_simd_intEI_type_node;
514 case CImode:
515 return aarch64_simd_intCI_type_node;
516 case XImode:
517 return aarch64_simd_intXI_type_node;
518 case SFmode:
519 return float_type_node;
520 case DFmode:
521 return double_type_node;
522 default:
523 gcc_unreachable ();
524 }
525#undef QUAL_TYPE
6db1ec94
JG
526}
527
f9d53c27
TB
528static tree
529aarch64_lookup_simd_builtin_type (enum machine_mode mode,
530 enum aarch64_type_qualifiers q)
6db1ec94 531{
f9d53c27
TB
532 int i;
533 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
534
535 /* Non-poly scalar modes map to standard types not in the table. */
536 if (q != qualifier_poly && !VECTOR_MODE_P (mode))
537 return aarch64_simd_builtin_std_type (mode, q);
538
539 for (i = 0; i < nelts; i++)
540 if (aarch64_simd_types[i].mode == mode
541 && aarch64_simd_types[i].q == q)
542 return aarch64_simd_types[i].itype;
543
544 return NULL_TREE;
b5828b4b
JG
545}
546
f9d53c27
TB
547static tree
548aarch64_simd_builtin_type (enum machine_mode mode,
549 bool unsigned_p, bool poly_p)
550{
551 if (poly_p)
552 return aarch64_lookup_simd_builtin_type (mode, qualifier_poly);
553 else if (unsigned_p)
554 return aarch64_lookup_simd_builtin_type (mode, qualifier_unsigned);
555 else
556 return aarch64_lookup_simd_builtin_type (mode, qualifier_none);
557}
558
af55e82d 559static void
f9d53c27 560aarch64_init_simd_builtin_types (void)
43e9d192 561{
f9d53c27
TB
562 int i;
563 int nelts = sizeof (aarch64_simd_types) / sizeof (aarch64_simd_types[0]);
564 tree tdecl;
565
566 /* Init all the element types built by the front-end. */
567 aarch64_simd_types[Int8x8_t].eltype = intQI_type_node;
568 aarch64_simd_types[Int8x16_t].eltype = intQI_type_node;
569 aarch64_simd_types[Int16x4_t].eltype = intHI_type_node;
570 aarch64_simd_types[Int16x8_t].eltype = intHI_type_node;
571 aarch64_simd_types[Int32x2_t].eltype = intSI_type_node;
572 aarch64_simd_types[Int32x4_t].eltype = intSI_type_node;
573 aarch64_simd_types[Int64x1_t].eltype = intDI_type_node;
574 aarch64_simd_types[Int64x2_t].eltype = intDI_type_node;
575 aarch64_simd_types[Uint8x8_t].eltype = unsigned_intQI_type_node;
576 aarch64_simd_types[Uint8x16_t].eltype = unsigned_intQI_type_node;
577 aarch64_simd_types[Uint16x4_t].eltype = unsigned_intHI_type_node;
578 aarch64_simd_types[Uint16x8_t].eltype = unsigned_intHI_type_node;
579 aarch64_simd_types[Uint32x2_t].eltype = unsigned_intSI_type_node;
580 aarch64_simd_types[Uint32x4_t].eltype = unsigned_intSI_type_node;
581 aarch64_simd_types[Uint64x1_t].eltype = unsigned_intDI_type_node;
582 aarch64_simd_types[Uint64x2_t].eltype = unsigned_intDI_type_node;
583
584 /* Poly types are a world of their own. */
585 aarch64_simd_types[Poly8_t].eltype = aarch64_simd_types[Poly8_t].itype =
586 build_distinct_type_copy (unsigned_intQI_type_node);
587 aarch64_simd_types[Poly16_t].eltype = aarch64_simd_types[Poly16_t].itype =
588 build_distinct_type_copy (unsigned_intHI_type_node);
589 aarch64_simd_types[Poly64_t].eltype = aarch64_simd_types[Poly64_t].itype =
590 build_distinct_type_copy (unsigned_intDI_type_node);
591 aarch64_simd_types[Poly128_t].eltype = aarch64_simd_types[Poly128_t].itype =
592 build_distinct_type_copy (unsigned_intTI_type_node);
593 /* Init poly vector element types with scalar poly types. */
594 aarch64_simd_types[Poly8x8_t].eltype = aarch64_simd_types[Poly8_t].itype;
595 aarch64_simd_types[Poly8x16_t].eltype = aarch64_simd_types[Poly8_t].itype;
596 aarch64_simd_types[Poly16x4_t].eltype = aarch64_simd_types[Poly16_t].itype;
597 aarch64_simd_types[Poly16x8_t].eltype = aarch64_simd_types[Poly16_t].itype;
598 aarch64_simd_types[Poly64x1_t].eltype = aarch64_simd_types[Poly64_t].itype;
599 aarch64_simd_types[Poly64x2_t].eltype = aarch64_simd_types[Poly64_t].itype;
600
601 /* Continue with standard types. */
602 aarch64_simd_types[Float32x2_t].eltype = float_type_node;
603 aarch64_simd_types[Float32x4_t].eltype = float_type_node;
604 aarch64_simd_types[Float64x1_t].eltype = double_type_node;
605 aarch64_simd_types[Float64x2_t].eltype = double_type_node;
606
607 for (i = 0; i < nelts; i++)
608 {
609 tree eltype = aarch64_simd_types[i].eltype;
610 enum machine_mode mode = aarch64_simd_types[i].mode;
611
612 if (aarch64_simd_types[i].itype == NULL)
613 aarch64_simd_types[i].itype =
614 build_distinct_type_copy
615 (build_vector_type (eltype, GET_MODE_NUNITS (mode)));
616
617 tdecl = add_builtin_type (aarch64_simd_types[i].name,
618 aarch64_simd_types[i].itype);
619 TYPE_NAME (aarch64_simd_types[i].itype) = tdecl;
620 SET_TYPE_STRUCTURAL_EQUALITY (aarch64_simd_types[i].itype);
621 }
43e9d192 622
f9d53c27
TB
623#define AARCH64_BUILD_SIGNED_TYPE(mode) \
624 make_signed_type (GET_MODE_PRECISION (mode));
625 aarch64_simd_intOI_type_node = AARCH64_BUILD_SIGNED_TYPE (OImode);
626 aarch64_simd_intEI_type_node = AARCH64_BUILD_SIGNED_TYPE (EImode);
627 aarch64_simd_intCI_type_node = AARCH64_BUILD_SIGNED_TYPE (CImode);
628 aarch64_simd_intXI_type_node = AARCH64_BUILD_SIGNED_TYPE (XImode);
629#undef AARCH64_BUILD_SIGNED_TYPE
630
631 tdecl = add_builtin_type
632 ("__builtin_aarch64_simd_ei" , aarch64_simd_intEI_type_node);
633 TYPE_NAME (aarch64_simd_intEI_type_node) = tdecl;
634 tdecl = add_builtin_type
635 ("__builtin_aarch64_simd_oi" , aarch64_simd_intOI_type_node);
636 TYPE_NAME (aarch64_simd_intOI_type_node) = tdecl;
637 tdecl = add_builtin_type
638 ("__builtin_aarch64_simd_ci" , aarch64_simd_intCI_type_node);
639 TYPE_NAME (aarch64_simd_intCI_type_node) = tdecl;
640 tdecl = add_builtin_type
641 ("__builtin_aarch64_simd_xi" , aarch64_simd_intXI_type_node);
642 TYPE_NAME (aarch64_simd_intXI_type_node) = tdecl;
643}
644
/* Register the legacy __builtin_aarch64_simd_* scalar typedef names used
   by intrinsics that have not yet been converted to the qualifier
   system.  */
static void
aarch64_init_simd_builtin_scalar_types (void)
{
  /* Define typedefs for all the standard scalar types.  */
  (*lang_hooks.types.register_builtin_type) (intQI_type_node,
					     "__builtin_aarch64_simd_qi");
  (*lang_hooks.types.register_builtin_type) (intHI_type_node,
					     "__builtin_aarch64_simd_hi");
  (*lang_hooks.types.register_builtin_type) (intSI_type_node,
					     "__builtin_aarch64_simd_si");
  (*lang_hooks.types.register_builtin_type) (float_type_node,
					     "__builtin_aarch64_simd_sf");
  (*lang_hooks.types.register_builtin_type) (intDI_type_node,
					     "__builtin_aarch64_simd_di");
  (*lang_hooks.types.register_builtin_type) (double_type_node,
					     "__builtin_aarch64_simd_df");
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_poly8");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_poly16");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_poly64");
  (*lang_hooks.types.register_builtin_type) (unsigned_intTI_type_node,
					     "__builtin_aarch64_simd_poly128");
  (*lang_hooks.types.register_builtin_type) (intTI_type_node,
					     "__builtin_aarch64_simd_ti");
  /* Unsigned integer types for various mode sizes.  */
  (*lang_hooks.types.register_builtin_type) (unsigned_intQI_type_node,
					     "__builtin_aarch64_simd_uqi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intHI_type_node,
					     "__builtin_aarch64_simd_uhi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intSI_type_node,
					     "__builtin_aarch64_simd_usi");
  (*lang_hooks.types.register_builtin_type) (unsigned_intDI_type_node,
					     "__builtin_aarch64_simd_udi");
}
681
/* Register every AdvSIMD builtin in aarch64_simd_builtin_data: initialise
   the internal types, then for each entry derive a function type from the
   insn pattern's operands and qualifiers, register the builtin, and record
   its decl in aarch64_builtin_decls.  */
static void
aarch64_init_simd_builtins (void)
{
  unsigned int i, fcode = AARCH64_SIMD_BUILTIN_BASE + 1;

  aarch64_init_simd_builtin_types ();

  /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
     Therefore we need to preserve the old __builtin scalar types.  It can be
     removed once all the intrinsics become strongly typed using the qualifier
     system.  */
  aarch64_init_simd_builtin_scalar_types ();

  for (i = 0; i < ARRAY_SIZE (aarch64_simd_builtin_data); i++, fcode++)
    {
      bool print_type_signature_p = false;
      /* One 'u'/'p'/'s' character per operand; appended to the builtin
	 name when any operand is unsigned or poly.  */
      char type_signature[SIMD_MAX_BUILTIN_ARGS] = { 0 };
      aarch64_simd_builtin_datum *d = &aarch64_simd_builtin_data[i];
      char namebuf[60];
      tree ftype = NULL;
      tree fndecl = NULL;

      d->fcode = fcode;

      /* We must track two variables here.  op_num is
	 the operand number as in the RTL pattern.  This is
	 required to access the mode (e.g. V4SF mode) of the
	 argument, from which the base type can be derived.
	 arg_num is an index in to the qualifiers data, which
	 gives qualifiers to the type (e.g. const unsigned).
	 The reason these two variables may differ by one is the
	 void return type.  While all return types take the 0th entry
	 in the qualifiers array, there is no operand for them in the
	 RTL pattern.  */
      int op_num = insn_data[d->code].n_operands - 1;
      int arg_num = d->qualifiers[0] & qualifier_void
		      ? op_num + 1
		      : op_num;
      tree return_type = void_type_node, args = void_list_node;
      tree eltype;

      /* Build a function type directly from the insn_data for this
	 builtin.  The build_function_type () function takes care of
	 removing duplicates for us.  */
      for (; op_num >= 0; arg_num--, op_num--)
	{
	  machine_mode op_mode = insn_data[d->code].operand[op_num].mode;
	  enum aarch64_type_qualifiers qualifiers = d->qualifiers[arg_num];

	  if (qualifiers & qualifier_unsigned)
	    {
	      type_signature[arg_num] = 'u';
	      print_type_signature_p = true;
	    }
	  else if (qualifiers & qualifier_poly)
	    {
	      type_signature[arg_num] = 'p';
	      print_type_signature_p = true;
	    }
	  else
	    type_signature[arg_num] = 's';

	  /* Skip an internal operand for vget_{low, high}.  */
	  if (qualifiers & qualifier_internal)
	    continue;

	  /* Some builtins have different user-facing types
	     for certain arguments, encoded in d->mode.  */
	  if (qualifiers & qualifier_map_mode)
	      op_mode = d->mode;

	  /* For pointers, we want a pointer to the basic type
	     of the vector.  */
	  if (qualifiers & qualifier_pointer && VECTOR_MODE_P (op_mode))
	    op_mode = GET_MODE_INNER (op_mode);

	  eltype = aarch64_simd_builtin_type
		     (op_mode,
		      (qualifiers & qualifier_unsigned) != 0,
		      (qualifiers & qualifier_poly) != 0);
	  gcc_assert (eltype != NULL);

	  /* Add qualifiers.  */
	  if (qualifiers & qualifier_const)
	    eltype = build_qualified_type (eltype, TYPE_QUAL_CONST);

	  if (qualifiers & qualifier_pointer)
	      eltype = build_pointer_type (eltype);

	  /* If we have reached arg_num == 0, we are at a non-void
	     return type.  Otherwise, we are still processing
	     arguments.  */
	  if (arg_num == 0)
	    return_type = eltype;
	  else
	    args = tree_cons (NULL_TREE, eltype, args);
	}

      ftype = build_function_type (return_type, args);

      gcc_assert (ftype != NULL);

      if (print_type_signature_p)
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s_%s",
		  d->name, type_signature);
      else
	snprintf (namebuf, sizeof (namebuf), "__builtin_aarch64_%s",
		  d->name);

      fndecl = add_builtin_function (namebuf, ftype, fcode, BUILT_IN_MD,
				     NULL, NULL_TREE);
      aarch64_builtin_decls[fcode] = fndecl;
    }
}
796
5d357f26
KT
/* Register the eight CRC32 builtins from aarch64_crc_builtin_data.  Each
   takes an unsigned SI accumulator plus a data argument of the entry's
   mode and returns unsigned SI.  */
static void
aarch64_init_crc32_builtins ()
{
  tree usi_type = aarch64_simd_builtin_std_type (SImode, qualifier_unsigned);
  unsigned int i = 0;

  for (i = 0; i < ARRAY_SIZE (aarch64_crc_builtin_data); ++i)
    {
      aarch64_crc_builtin_datum* d = &aarch64_crc_builtin_data[i];
      tree argtype = aarch64_simd_builtin_std_type (d->mode,
						    qualifier_unsigned);
      tree ftype = build_function_type_list (usi_type, usi_type, argtype, NULL_TREE);
      tree fndecl = add_builtin_function (d->name, ftype, d->fcode,
					  BUILT_IN_MD, NULL, NULL_TREE);

      aarch64_builtin_decls[d->fcode] = fndecl;
    }
}
815
342be7f7
JG
/* Target hook: register all AArch64 builtins.  The FPCR/FPSR accessors
   are always available; SIMD and CRC32 builtins are gated on the
   corresponding target features.  */
void
aarch64_init_builtins (void)
{
  tree ftype_set_fpr
    = build_function_type_list (void_type_node, unsigned_type_node, NULL);
  tree ftype_get_fpr
    = build_function_type_list (unsigned_type_node, NULL);

  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR]
    = add_builtin_function ("__builtin_aarch64_get_fpcr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR]
    = add_builtin_function ("__builtin_aarch64_set_fpcr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPCR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR]
    = add_builtin_function ("__builtin_aarch64_get_fpsr", ftype_get_fpr,
			    AARCH64_BUILTIN_GET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);
  aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR]
    = add_builtin_function ("__builtin_aarch64_set_fpsr", ftype_set_fpr,
			    AARCH64_BUILTIN_SET_FPSR, BUILT_IN_MD, NULL, NULL_TREE);

  if (TARGET_SIMD)
    aarch64_init_simd_builtins ();
  if (TARGET_CRC32)
    aarch64_init_crc32_builtins ();
}
842
119103ca
JG
843tree
844aarch64_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
845{
846 if (code >= AARCH64_BUILTIN_MAX)
847 return error_mark_node;
848
849 return aarch64_builtin_decls[code];
850}
851
43e9d192
IB
/* How aarch64_simd_expand_args should treat each builtin argument.  */
typedef enum
{
  SIMD_ARG_COPY_TO_REG,	/* Force the operand into a register if the
			   insn predicate rejects it as-is.  */
  SIMD_ARG_CONSTANT,	/* Operand must already satisfy the predicate
			   (typically a const int); error otherwise.  */
  SIMD_ARG_STOP		/* Sentinel terminating the argument list.  */
} builtin_simd_arg;
858
43e9d192
IB
/* Expand a SIMD builtin call EXP using insn pattern ICODE.  ARGS is a
   SIMD_ARG_STOP-terminated list describing how each call argument must
   be prepared (see builtin_simd_arg).  HAVE_RETVAL is 1 when operand 0
   of the pattern is the result, 0 for a void builtin.  Returns TARGET
   (possibly a fresh register), const0_rtx after reporting a bad
   immediate, or NULL_RTX if the generator produced no pattern.  */
static rtx
aarch64_simd_expand_args (rtx target, int icode, int have_retval,
			  tree exp, builtin_simd_arg *args)
{
  rtx pat;
  tree arg[SIMD_MAX_BUILTIN_ARGS];
  rtx op[SIMD_MAX_BUILTIN_ARGS];
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode[SIMD_MAX_BUILTIN_ARGS];
  int argc = 0;

  /* Make sure the result lands somewhere the insn's operand-0
     predicate will accept.  */
  if (have_retval
      && (!target
	  || GET_MODE (target) != tmode
	  || !(*insn_data[icode].operand[0].predicate) (target, tmode)))
    target = gen_reg_rtx (tmode);

  for (;;)
    {
      builtin_simd_arg thisarg = args[argc];

      if (thisarg == SIMD_ARG_STOP)
	break;
      else
	{
	  arg[argc] = CALL_EXPR_ARG (exp, argc);
	  op[argc] = expand_normal (arg[argc]);
	  /* When there is a return value, argument k of the call maps
	     to operand k+1 of the insn pattern.  */
	  mode[argc] = insn_data[icode].operand[argc + have_retval].mode;

	  switch (thisarg)
	    {
	    case SIMD_ARG_COPY_TO_REG:
	      /* Pointer-typed arguments may need converting to the
		 canonical pointer mode first.  */
	      if (POINTER_TYPE_P (TREE_TYPE (arg[argc])))
		op[argc] = convert_memory_address (Pmode, op[argc]);
	      /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		op[argc] = copy_to_mode_reg (mode[argc], op[argc]);
	      break;

	    case SIMD_ARG_CONSTANT:
	      /* An immediate operand that the predicate rejects is a
		 user error, not something we can fix up.  */
	      if (!(*insn_data[icode].operand[argc + have_retval].predicate)
		  (op[argc], mode[argc]))
		{
		  error_at (EXPR_LOCATION (exp), "incompatible type for argument %d, "
			    "expected %<const int%>", argc + 1);
		  return const0_rtx;
		}
	      break;

	    case SIMD_ARG_STOP:
	      gcc_unreachable ();
	    }

	  argc++;
	}
    }

  /* Emit the pattern with however many operands were collected.  */
  if (have_retval)
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (target, op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (target, op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (target, op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }
  else
    switch (argc)
      {
      case 1:
	pat = GEN_FCN (icode) (op[0]);
	break;

      case 2:
	pat = GEN_FCN (icode) (op[0], op[1]);
	break;

      case 3:
	pat = GEN_FCN (icode) (op[0], op[1], op[2]);
	break;

      case 4:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3]);
	break;

      case 5:
	pat = GEN_FCN (icode) (op[0], op[1], op[2], op[3], op[4]);
	break;

      default:
	gcc_unreachable ();
      }

  if (!pat)
    return NULL_RTX;

  emit_insn (pat);

  return target;
}
977
/* Expand an AArch64 AdvSIMD builtin(intrinsic).  FCODE identifies the
   builtin, EXP is the call expression, and TARGET is a suggestion for
   where to put the result.  Classifies each argument from the builtin's
   qualifier data, then hands off to aarch64_simd_expand_args.  */
rtx
aarch64_simd_expand_builtin (int fcode, tree exp, rtx target)
{
  aarch64_simd_builtin_datum *d =
    &aarch64_simd_builtin_data[fcode - (AARCH64_SIMD_BUILTIN_BASE + 1)];
  enum insn_code icode = d->code;
  builtin_simd_arg args[SIMD_MAX_BUILTIN_ARGS];
  int num_args = insn_data[d->code].n_operands;
  int is_void = 0;
  int k;

  is_void = !!(d->qualifiers[0] & qualifier_void);

  /* A void builtin has no result operand, so the insn has one fewer
     operand than a value-returning builtin with the same arguments;
     compensate so NUM_ARGS counts qualifier slots.  */
  num_args += is_void;

  for (k = 1; k < num_args; k++)
    {
      /* We have four arrays of data, each indexed in a different fashion.
	 qualifiers - element 0 always describes the function return type.
	 operands - element 0 is either the operand for return value (if
	   the function has a non-void return type) or the operand for the
	   first argument.
	 expr_args - element 0 always holds the first argument.
	 args - element 0 is always used for the return type.  */
      int qualifiers_k = k;
      int operands_k = k - is_void;
      int expr_args_k = k - 1;

      if (d->qualifiers[qualifiers_k] & qualifier_immediate)
	args[k] = SIMD_ARG_CONSTANT;
      else if (d->qualifiers[qualifiers_k] & qualifier_maybe_immediate)
	{
	  rtx arg
	    = expand_normal (CALL_EXPR_ARG (exp,
					    (expr_args_k)));
	  /* Handle constants only if the predicate allows it. */
	  bool op_const_int_p =
	    (CONST_INT_P (arg)
	     && (*insn_data[icode].operand[operands_k].predicate)
		(arg, insn_data[icode].operand[operands_k].mode));
	  args[k] = op_const_int_p ? SIMD_ARG_CONSTANT : SIMD_ARG_COPY_TO_REG;
	}
      else
	args[k] = SIMD_ARG_COPY_TO_REG;

    }
  args[k] = SIMD_ARG_STOP;

  /* The interface to aarch64_simd_expand_args expects a 0 if
     the function is void, and a 1 if it is not.  Skip args[0],
     which describes the return type rather than an argument.  */
  return aarch64_simd_expand_args
	  (target, icode, !is_void, exp, &args[1]);
}
342be7f7 1032
5d357f26
KT
1033rtx
1034aarch64_crc32_expand_builtin (int fcode, tree exp, rtx target)
1035{
1036 rtx pat;
1037 aarch64_crc_builtin_datum *d
1038 = &aarch64_crc_builtin_data[fcode - (AARCH64_CRC32_BUILTIN_BASE + 1)];
1039 enum insn_code icode = d->icode;
1040 tree arg0 = CALL_EXPR_ARG (exp, 0);
1041 tree arg1 = CALL_EXPR_ARG (exp, 1);
1042 rtx op0 = expand_normal (arg0);
1043 rtx op1 = expand_normal (arg1);
ef4bddc2
RS
1044 machine_mode tmode = insn_data[icode].operand[0].mode;
1045 machine_mode mode0 = insn_data[icode].operand[1].mode;
1046 machine_mode mode1 = insn_data[icode].operand[2].mode;
5d357f26
KT
1047
1048 if (! target
1049 || GET_MODE (target) != tmode
1050 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
1051 target = gen_reg_rtx (tmode);
1052
1053 gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
1054 && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));
1055
1056 if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
1057 op0 = copy_to_mode_reg (mode0, op0);
1058 if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
1059 op1 = copy_to_mode_reg (mode1, op1);
1060
1061 pat = GEN_FCN (icode) (target, op0, op1);
d5a29419
KT
1062 if (!pat)
1063 return NULL_RTX;
1064
5d357f26
KT
1065 emit_insn (pat);
1066 return target;
1067}
1068
342be7f7
JG
1069/* Expand an expression EXP that calls a built-in function,
1070 with result going to TARGET if that's convenient. */
1071rtx
1072aarch64_expand_builtin (tree exp,
1073 rtx target,
1074 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 1075 machine_mode mode ATTRIBUTE_UNUSED,
342be7f7
JG
1076 int ignore ATTRIBUTE_UNUSED)
1077{
1078 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
1079 int fcode = DECL_FUNCTION_CODE (fndecl);
aa87aced
KV
1080 int icode;
1081 rtx pat, op0;
1082 tree arg0;
1083
1084 switch (fcode)
1085 {
1086 case AARCH64_BUILTIN_GET_FPCR:
1087 case AARCH64_BUILTIN_SET_FPCR:
1088 case AARCH64_BUILTIN_GET_FPSR:
1089 case AARCH64_BUILTIN_SET_FPSR:
1090 if ((fcode == AARCH64_BUILTIN_GET_FPCR)
1091 || (fcode == AARCH64_BUILTIN_GET_FPSR))
1092 {
1093 icode = (fcode == AARCH64_BUILTIN_GET_FPSR) ?
1094 CODE_FOR_get_fpsr : CODE_FOR_get_fpcr;
1095 target = gen_reg_rtx (SImode);
1096 pat = GEN_FCN (icode) (target);
1097 }
1098 else
1099 {
1100 target = NULL_RTX;
1101 icode = (fcode == AARCH64_BUILTIN_SET_FPSR) ?
1102 CODE_FOR_set_fpsr : CODE_FOR_set_fpcr;
1103 arg0 = CALL_EXPR_ARG (exp, 0);
1104 op0 = expand_normal (arg0);
1105 pat = GEN_FCN (icode) (op0);
1106 }
1107 emit_insn (pat);
1108 return target;
1109 }
342be7f7 1110
5d357f26 1111 if (fcode >= AARCH64_SIMD_BUILTIN_BASE && fcode <= AARCH64_SIMD_BUILTIN_MAX)
342be7f7 1112 return aarch64_simd_expand_builtin (fcode, exp, target);
5d357f26
KT
1113 else if (fcode >= AARCH64_CRC32_BUILTIN_BASE && fcode <= AARCH64_CRC32_BUILTIN_MAX)
1114 return aarch64_crc32_expand_builtin (fcode, exp, target);
342be7f7 1115
d5a29419 1116 gcc_unreachable ();
342be7f7 1117}
42fc9a7f
JG
1118
/* Implement TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION: return the
   AArch64 SIMD builtin that computes scalar builtin FNDECL elementwise
   on vectors of type TYPE_IN producing TYPE_OUT, or NULL_TREE when no
   such mapping exists.  AARCH64_CHECK_BUILTIN_MODE is redefined before
   each group of cases to express that group's in/out mode constraint.  */
tree
aarch64_builtin_vectorized_function (tree fndecl, tree type_out, tree type_in)
{
  machine_mode in_mode, out_mode;
  int in_n, out_n;

  if (TREE_CODE (type_out) != VECTOR_TYPE
      || TREE_CODE (type_in) != VECTOR_TYPE)
    return NULL_TREE;

  out_mode = TYPE_MODE (TREE_TYPE (type_out));
  out_n = TYPE_VECTOR_SUBPARTS (type_out);
  in_mode = TYPE_MODE (TREE_TYPE (type_in));
  in_n = TYPE_VECTOR_SUBPARTS (type_in);

#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) 1
/* Pick the v2df, v4sf or v2sf variant of unary operation N according
   to whichever mode check succeeds first.  */
#define AARCH64_FIND_FRINT_VARIANT(N) \
  (AARCH64_CHECK_BUILTIN_MODE (2, D) \
   ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2df] \
   : (AARCH64_CHECK_BUILTIN_MODE (4, S) \
      ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v4sf] \
      : (AARCH64_CHECK_BUILTIN_MODE (2, S) \
	 ? aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_##N##v2sf] \
	 : NULL_TREE)))
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
      switch (fn)
	{
/* Float -> float, same element count.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Fmode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_FLOOR:
	case BUILT_IN_FLOORF:
	  return AARCH64_FIND_FRINT_VARIANT (floor);
	case BUILT_IN_CEIL:
	case BUILT_IN_CEILF:
	  return AARCH64_FIND_FRINT_VARIANT (ceil);
	case BUILT_IN_TRUNC:
	case BUILT_IN_TRUNCF:
	  return AARCH64_FIND_FRINT_VARIANT (btrunc);
	case BUILT_IN_ROUND:
	case BUILT_IN_ROUNDF:
	  return AARCH64_FIND_FRINT_VARIANT (round);
	case BUILT_IN_NEARBYINT:
	case BUILT_IN_NEARBYINTF:
	  return AARCH64_FIND_FRINT_VARIANT (nearbyint);
	case BUILT_IN_SQRT:
	case BUILT_IN_SQRTF:
	  return AARCH64_FIND_FRINT_VARIANT (sqrt);
/* Integer -> SImode result, same element count.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == SImode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	case BUILT_IN_CLZ:
	  {
	    if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOP_clzv4si];
	    return NULL_TREE;
	  }
/* Float -> integer, same element count.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Fmode && in_n == C)
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOOR:
	case BUILT_IN_IFLOORF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lfloorv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LCEIL:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEIL:
	case BUILT_IN_ICEILF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lceilv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_LROUND:
	case BUILT_IN_IROUNDF:
	  {
	    enum aarch64_builtins builtin;
	    if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2dfv2di;
	    else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv4sfv4si;
	    else if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	      builtin = AARCH64_SIMD_BUILTIN_UNOP_lroundv2sfv2si;
	    else
	      return NULL_TREE;

	    return aarch64_builtin_decls[builtin];
	  }
	case BUILT_IN_BSWAP16:
/* Integer -> integer, same element count.  */
#undef AARCH64_CHECK_BUILTIN_MODE
#define AARCH64_CHECK_BUILTIN_MODE(C, N) \
  (out_mode == N##Imode && out_n == C \
   && in_mode == N##Imode && in_n == C)
	  if (AARCH64_CHECK_BUILTIN_MODE (4, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4hi];
	  else if (AARCH64_CHECK_BUILTIN_MODE (8, H))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv8hi];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP32:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2si];
	  else if (AARCH64_CHECK_BUILTIN_MODE (4, S))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv4si];
	  else
	    return NULL_TREE;
	case BUILT_IN_BSWAP64:
	  if (AARCH64_CHECK_BUILTIN_MODE (2, D))
	    return aarch64_builtin_decls[AARCH64_SIMD_BUILTIN_UNOPU_bswapv2di];
	  else
	    return NULL_TREE;
	default:
	  return NULL_TREE;
	}
    }

  return NULL_TREE;
}
0ac198d3
JG
1264
/* Redefine VAR1 so BUILTIN_* iterator macros expand to switch case
   labels for each variant of a builtin.  */
#undef VAR1
#define VAR1(T, N, MAP, A) \
  case AARCH64_SIMD_BUILTIN_##T##_##N##A:

/* Implement TARGET_FOLD_BUILTIN: fold a call to builtin FNDECL with
   arguments ARGS at the tree level where a generic tree code exists
   (ABS_EXPR for vector abs, FLOAT_EXPR for int->float conversion).
   Returns the folded tree or NULL_TREE to leave the call alone.  */
tree
aarch64_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
		      bool ignore ATTRIBUTE_UNUSED)
{
  int fcode = DECL_FUNCTION_CODE (fndecl);
  /* The builtin's return type, i.e. the type of the folded result.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  switch (fcode)
    {
      BUILTIN_VALLDI (UNOP, abs, 2)
	return fold_build1 (ABS_EXPR, type, args[0]);
	break;
      VAR1 (UNOP, floatv2si, 2, v2sf)
      VAR1 (UNOP, floatv4si, 2, v4sf)
      VAR1 (UNOP, floatv2di, 2, v2df)
	return fold_build1 (FLOAT_EXPR, type, args[0]);
      default:
	break;
    }

  return NULL_TREE;
}
1291
0ac198d3
JG
/* Implement TARGET_GIMPLE_FOLD_BUILTIN: replace calls to AArch64
   reduction builtins at *GSI with the equivalent generic
   REDUC_{PLUS,MAX,MIN}_EXPR assignment.  Returns true if the
   statement was replaced.  */
bool
aarch64_gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  tree call = gimple_call_fn (stmt);
  tree fndecl;
  gimple new_stmt = NULL;

  if (call)
    {
      fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  int fcode = DECL_FUNCTION_CODE (fndecl);
	  int nargs = gimple_call_num_args (stmt);
	  tree *args = (nargs > 0
			? gimple_call_arg_ptr (stmt, 0)
			: &error_mark_node);

	  /* We use gimple's REDUC_(PLUS|MIN|MAX)_EXPRs for float, signed int
	     and unsigned int; it will distinguish according to the types of
	     the arguments to the __builtin. */
	  switch (fcode)
	    {
	      BUILTIN_VALL (UNOP, reduc_plus_scal_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_PLUS_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smax_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umax_scal_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MAX_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;
	      BUILTIN_VDQIF (UNOP, reduc_smin_scal_, 10)
	      BUILTIN_VDQ_BHSI (UNOPU, reduc_umin_scal_, 10)
		new_stmt = gimple_build_assign_with_ops (
						REDUC_MIN_EXPR,
						gimple_call_lhs (stmt),
						args[0],
						NULL_TREE);
		break;

	      default:
		break;
	    }
	}
    }

  if (new_stmt)
    {
      gsi_replace (gsi, new_stmt, true);
      changed = true;
    }

  return changed;
}
1355
aa87aced
KV
/* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV: build the three tree
   sequences needed to protect the FP environment around an atomic
   compound assignment.  *HOLD saves FPCR/FPSR and disables/clears
   exceptions, *CLEAR re-clears the exception flags, and *UPDATE
   restores the saved state and re-raises any exceptions that occurred
   via __atomic_feraiseexcept.  */
void
aarch64_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
{
  /* FPSR exception-flag bit positions; in the FPCR the corresponding
     trap-enable bits sit AARCH64_FE_EXCEPT_SHIFT bits higher.  */
  const unsigned AARCH64_FE_INVALID = 1;
  const unsigned AARCH64_FE_DIVBYZERO = 2;
  const unsigned AARCH64_FE_OVERFLOW = 4;
  const unsigned AARCH64_FE_UNDERFLOW = 8;
  const unsigned AARCH64_FE_INEXACT = 16;
  const unsigned HOST_WIDE_INT AARCH64_FE_ALL_EXCEPT = (AARCH64_FE_INVALID
							| AARCH64_FE_DIVBYZERO
							| AARCH64_FE_OVERFLOW
							| AARCH64_FE_UNDERFLOW
							| AARCH64_FE_INEXACT);
  const unsigned HOST_WIDE_INT AARCH64_FE_EXCEPT_SHIFT = 8;
  tree fenv_cr, fenv_sr, get_fpcr, set_fpcr, mask_cr, mask_sr;
  tree ld_fenv_cr, ld_fenv_sr, masked_fenv_cr, masked_fenv_sr, hold_fnclex_cr;
  tree hold_fnclex_sr, new_fenv_var, reload_fenv, restore_fnenv, get_fpsr, set_fpsr;
  tree update_call, atomic_feraiseexcept, hold_fnclex, masked_fenv, ld_fenv;

  /* Generate the equivalence of :
       unsigned int fenv_cr;
       fenv_cr = __builtin_aarch64_get_fpcr ();

       unsigned int fenv_sr;
       fenv_sr = __builtin_aarch64_get_fpsr ();

       Now set all exceptions to non-stop
       unsigned int mask_cr
	 = ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT);
       unsigned int masked_cr;
       masked_cr = fenv_cr & mask_cr;

       And clear all exception flags
       unsigned int mask_sr = ~AARCH64_FE_ALL_EXCEPT;
       unsigned int masked_sr;
       masked_sr = fenv_sr & mask_sr;

       __builtin_aarch64_set_fpcr (masked_cr);
       __builtin_aarch64_set_fpsr (masked_sr);  */

  fenv_cr = create_tmp_var (unsigned_type_node, NULL);
  fenv_sr = create_tmp_var (unsigned_type_node, NULL);

  get_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPCR];
  set_fpcr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPCR];
  get_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_GET_FPSR];
  set_fpsr = aarch64_builtin_decls[AARCH64_BUILTIN_SET_FPSR];

  mask_cr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT << AARCH64_FE_EXCEPT_SHIFT));
  mask_sr = build_int_cst (unsigned_type_node,
			   ~(AARCH64_FE_ALL_EXCEPT));

  ld_fenv_cr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_cr, build_call_expr (get_fpcr, 0));
  ld_fenv_sr = build2 (MODIFY_EXPR, unsigned_type_node,
		       fenv_sr, build_call_expr (get_fpsr, 0));

  masked_fenv_cr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_cr, mask_cr);
  masked_fenv_sr = build2 (BIT_AND_EXPR, unsigned_type_node, fenv_sr, mask_sr);

  hold_fnclex_cr = build_call_expr (set_fpcr, 1, masked_fenv_cr);
  hold_fnclex_sr = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  hold_fnclex = build2 (COMPOUND_EXPR, void_type_node, hold_fnclex_cr,
			hold_fnclex_sr);
  masked_fenv = build2 (COMPOUND_EXPR, void_type_node, masked_fenv_cr,
			masked_fenv_sr);
  ld_fenv = build2 (COMPOUND_EXPR, void_type_node, ld_fenv_cr, ld_fenv_sr);

  *hold = build2 (COMPOUND_EXPR, void_type_node,
		  build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
		  hold_fnclex);

  /* Store the value of masked_fenv to clear the exceptions:
     __builtin_aarch64_set_fpsr (masked_fenv_sr);  */

  *clear = build_call_expr (set_fpsr, 1, masked_fenv_sr);

  /* Generate the equivalent of :
       unsigned int new_fenv_var;
       new_fenv_var = __builtin_aarch64_get_fpsr ();

       __builtin_aarch64_set_fpsr (fenv_sr);

       __atomic_feraiseexcept (new_fenv_var);  */

  new_fenv_var = create_tmp_var (unsigned_type_node, NULL);
  reload_fenv = build2 (MODIFY_EXPR, unsigned_type_node,
			new_fenv_var, build_call_expr (get_fpsr, 0));
  restore_fnenv = build_call_expr (set_fpsr, 1, fenv_sr);
  atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
  update_call = build_call_expr (atomic_feraiseexcept, 1,
				 fold_convert (integer_type_node, new_fenv_var));
  *update = build2 (COMPOUND_EXPR, void_type_node,
		    build2 (COMPOUND_EXPR, void_type_node,
			    reload_fenv, restore_fnenv), update_call);
}
1454
1455
42fc9a7f
JG
1456#undef AARCH64_CHECK_BUILTIN_MODE
1457#undef AARCH64_FIND_FRINT_VARIANT
0ddec79f
JG
1458#undef CF0
1459#undef CF1
1460#undef CF2
1461#undef CF3
1462#undef CF4
1463#undef CF10
1464#undef VAR1
1465#undef VAR2
1466#undef VAR3
1467#undef VAR4
1468#undef VAR5
1469#undef VAR6
1470#undef VAR7
1471#undef VAR8
1472#undef VAR9
1473#undef VAR10
1474#undef VAR11
1475