1 /* Description of builtins used by the ARM backend.
2 Copyright (C) 2014-2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "gimple-expr.h"
33 #include "diagnostic-core.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
38 #include "langhooks.h"
39 #include "case-cfn-macros.h"
/* Upper bound on the number of arguments (including the return slot)
   that any builtin's qualifier signature below may describe.  */
#define SIMD_MAX_BUILTIN_ARGS 7
/* Flags describing how an operand of a builtin should be typed and
   expanded.  Values are bit flags so they can be OR-ed together
   (e.g. qualifier_unsigned_immediate == unsigned | immediate).  */
enum arm_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_unsigned_immediate = 0x9,
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be within range of previous argument = a vector.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400
};
79 /* The qualifier_internal allows generation of a unary builtin from
80 a pattern with a third pseudo-operand such as a match_scratch.
82 static enum arm_type_qualifiers
83 arm_unop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
84 = { qualifier_none
, qualifier_none
, qualifier_internal
};
85 #define UNOP_QUALIFIERS (arm_unop_qualifiers)
87 /* unsigned T (unsigned T). */
88 static enum arm_type_qualifiers
89 arm_bswap_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
90 = { qualifier_unsigned
, qualifier_unsigned
};
91 #define BSWAP_QUALIFIERS (arm_bswap_qualifiers)
93 /* T (T, T [maybe_immediate]). */
94 static enum arm_type_qualifiers
95 arm_binop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
96 = { qualifier_none
, qualifier_none
, qualifier_maybe_immediate
};
97 #define BINOP_QUALIFIERS (arm_binop_qualifiers)
100 static enum arm_type_qualifiers
101 arm_ternop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
102 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_none
};
103 #define TERNOP_QUALIFIERS (arm_ternop_qualifiers)
105 /* T (T, immediate). */
106 static enum arm_type_qualifiers
107 arm_binop_imm_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
108 = { qualifier_none
, qualifier_none
, qualifier_immediate
};
109 #define BINOP_IMM_QUALIFIERS (arm_binop_imm_qualifiers)
111 /* T (T, lane index). */
112 static enum arm_type_qualifiers
113 arm_getlane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
114 = { qualifier_none
, qualifier_none
, qualifier_lane_index
};
115 #define GETLANE_QUALIFIERS (arm_getlane_qualifiers)
117 /* T (T, T, T, immediate). */
118 static enum arm_type_qualifiers
119 arm_mac_n_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
120 = { qualifier_none
, qualifier_none
, qualifier_none
,
121 qualifier_none
, qualifier_immediate
};
122 #define MAC_N_QUALIFIERS (arm_mac_n_qualifiers)
124 /* T (T, T, T, lane index). */
125 static enum arm_type_qualifiers
126 arm_mac_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
127 = { qualifier_none
, qualifier_none
, qualifier_none
,
128 qualifier_none
, qualifier_lane_index
};
129 #define MAC_LANE_QUALIFIERS (arm_mac_lane_qualifiers)
131 /* T (T, T, immediate). */
132 static enum arm_type_qualifiers
133 arm_ternop_imm_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
134 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_immediate
};
135 #define TERNOP_IMM_QUALIFIERS (arm_ternop_imm_qualifiers)
137 /* T (T, T, lane index). */
138 static enum arm_type_qualifiers
139 arm_setlane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
140 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_lane_index
};
141 #define SETLANE_QUALIFIERS (arm_setlane_qualifiers)
144 static enum arm_type_qualifiers
145 arm_combine_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
146 = { qualifier_none
, qualifier_none
, qualifier_none
};
147 #define COMBINE_QUALIFIERS (arm_combine_qualifiers)
149 /* T ([T element type] *). */
150 static enum arm_type_qualifiers
151 arm_load1_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
152 = { qualifier_none
, qualifier_const_pointer_map_mode
};
153 #define LOAD1_QUALIFIERS (arm_load1_qualifiers)
155 /* T ([T element type] *, T, immediate). */
156 static enum arm_type_qualifiers
157 arm_load1_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
158 = { qualifier_none
, qualifier_const_pointer_map_mode
,
159 qualifier_none
, qualifier_struct_load_store_lane_index
};
160 #define LOAD1LANE_QUALIFIERS (arm_load1_lane_qualifiers)
162 /* unsigned T (unsigned T, unsigned T, unsigned T). */
163 static enum arm_type_qualifiers
164 arm_unsigned_binop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
165 = { qualifier_unsigned
, qualifier_unsigned
, qualifier_unsigned
,
166 qualifier_unsigned
};
167 #define UBINOP_QUALIFIERS (arm_unsigned_binop_qualifiers)
169 /* void (unsigned immediate, unsigned immediate, unsigned immediate,
170 unsigned immediate, unsigned immediate, unsigned immediate). */
171 static enum arm_type_qualifiers
172 arm_cdp_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
173 = { qualifier_void
, qualifier_unsigned_immediate
,
174 qualifier_unsigned_immediate
,
175 qualifier_unsigned_immediate
,
176 qualifier_unsigned_immediate
,
177 qualifier_unsigned_immediate
,
178 qualifier_unsigned_immediate
};
179 #define CDP_QUALIFIERS \
181 /* The first argument (return type) of a store should be void type,
182 which we represent with qualifier_void. Their first operand will be
183 a DImode pointer to the location to store to, so we must use
184 qualifier_map_mode | qualifier_pointer to build a pointer to the
185 element type of the vector.
187 void ([T element type] *, T). */
188 static enum arm_type_qualifiers
189 arm_store1_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
190 = { qualifier_void
, qualifier_pointer_map_mode
, qualifier_none
};
191 #define STORE1_QUALIFIERS (arm_store1_qualifiers)
193 /* void ([T element type] *, T, immediate). */
194 static enum arm_type_qualifiers
195 arm_storestruct_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
196 = { qualifier_void
, qualifier_pointer_map_mode
,
197 qualifier_none
, qualifier_struct_load_store_lane_index
};
198 #define STORE1LANE_QUALIFIERS (arm_storestruct_lane_qualifiers)
/* Map the lower-case mode suffixes used in the .def files onto the
   corresponding machine modes.  */
#define v8qi_UP  V8QImode
#define v4hi_UP  V4HImode
#define v4hf_UP  V4HFmode
#define v2si_UP  V2SImode
#define v2sf_UP  V2SFmode
#define v16qi_UP V16QImode
#define v8hi_UP  V8HImode
#define v8hf_UP  V8HFmode
#define v4si_UP  V4SImode
#define v4sf_UP  V4SFmode
#define v2di_UP  V2DImode
#define void_UP  VOIDmode
224 const enum insn_code code
;
226 enum arm_type_qualifiers
*qualifiers
;
/* CF pastes a builtin name and mode suffix into the neon insn code;
   VAR1 emits one table entry for builtin N in mode A.  */
#define CF(N,X) CODE_FOR_neon_##N##X
#define VAR1(T, N, A) \
  {#N #A, UP (A), CF (N, A), 0, T##_QUALIFIERS},
233 #define VAR2(T, N, A, B) \
236 #define VAR3(T, N, A, B, C) \
239 #define VAR4(T, N, A, B, C, D) \
240 VAR3 (T, N, A, B, C) \
242 #define VAR5(T, N, A, B, C, D, E) \
243 VAR4 (T, N, A, B, C, D) \
245 #define VAR6(T, N, A, B, C, D, E, F) \
246 VAR5 (T, N, A, B, C, D, E) \
248 #define VAR7(T, N, A, B, C, D, E, F, G) \
249 VAR6 (T, N, A, B, C, D, E, F) \
251 #define VAR8(T, N, A, B, C, D, E, F, G, H) \
252 VAR7 (T, N, A, B, C, D, E, F, G) \
254 #define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
255 VAR8 (T, N, A, B, C, D, E, F, G, H) \
257 #define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
258 VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
260 #define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
261 VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
263 #define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
264 VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
267 /* The builtin data can be found in arm_neon_builtins.def, arm_vfp_builtins.def
268 and arm_acle_builtins.def. The entries in arm_neon_builtins.def require
269 TARGET_NEON to be true. The feature tests are checked when the builtins are
272 The mode entries in the following table correspond to the "key" type of the
273 instruction variant, i.e. equivalent to that which would be specified after
274 the assembler mnemonic for neon instructions, which usually refers to the
275 last vector operand. The modes listed per instruction should be the same as
276 those defined for that instruction's pattern, for instance in neon.md. */
278 static arm_builtin_datum vfp_builtin_data
[] =
280 #include "arm_vfp_builtins.def"
283 static arm_builtin_datum neon_builtin_data
[] =
285 #include "arm_neon_builtins.def"
290 #define VAR1(T, N, A) \
291 {#N, UP (A), CODE_FOR_##N, 0, T##_QUALIFIERS},
293 static arm_builtin_datum acle_builtin_data
[] =
295 #include "arm_acle_builtins.def"
300 #define VAR1(T, N, X) \
301 ARM_BUILTIN_NEON_##N##X,
305 ARM_BUILTIN_GETWCGR0
,
306 ARM_BUILTIN_GETWCGR1
,
307 ARM_BUILTIN_GETWCGR2
,
308 ARM_BUILTIN_GETWCGR3
,
310 ARM_BUILTIN_SETWCGR0
,
311 ARM_BUILTIN_SETWCGR1
,
312 ARM_BUILTIN_SETWCGR2
,
313 ARM_BUILTIN_SETWCGR3
,
337 ARM_BUILTIN_WALIGNR0
,
338 ARM_BUILTIN_WALIGNR1
,
339 ARM_BUILTIN_WALIGNR2
,
340 ARM_BUILTIN_WALIGNR3
,
349 ARM_BUILTIN_TMOVMSKB
,
350 ARM_BUILTIN_TMOVMSKH
,
351 ARM_BUILTIN_TMOVMSKW
,
360 ARM_BUILTIN_WPACKHSS
,
361 ARM_BUILTIN_WPACKWSS
,
362 ARM_BUILTIN_WPACKDSS
,
363 ARM_BUILTIN_WPACKHUS
,
364 ARM_BUILTIN_WPACKWUS
,
365 ARM_BUILTIN_WPACKDUS
,
394 ARM_BUILTIN_WCMPGTUB
,
395 ARM_BUILTIN_WCMPGTUH
,
396 ARM_BUILTIN_WCMPGTUW
,
397 ARM_BUILTIN_WCMPGTSB
,
398 ARM_BUILTIN_WCMPGTSH
,
399 ARM_BUILTIN_WCMPGTSW
,
401 ARM_BUILTIN_TEXTRMSB
,
402 ARM_BUILTIN_TEXTRMSH
,
403 ARM_BUILTIN_TEXTRMSW
,
404 ARM_BUILTIN_TEXTRMUB
,
405 ARM_BUILTIN_TEXTRMUH
,
406 ARM_BUILTIN_TEXTRMUW
,
456 ARM_BUILTIN_WUNPCKIHB
,
457 ARM_BUILTIN_WUNPCKIHH
,
458 ARM_BUILTIN_WUNPCKIHW
,
459 ARM_BUILTIN_WUNPCKILB
,
460 ARM_BUILTIN_WUNPCKILH
,
461 ARM_BUILTIN_WUNPCKILW
,
463 ARM_BUILTIN_WUNPCKEHSB
,
464 ARM_BUILTIN_WUNPCKEHSH
,
465 ARM_BUILTIN_WUNPCKEHSW
,
466 ARM_BUILTIN_WUNPCKEHUB
,
467 ARM_BUILTIN_WUNPCKEHUH
,
468 ARM_BUILTIN_WUNPCKEHUW
,
469 ARM_BUILTIN_WUNPCKELSB
,
470 ARM_BUILTIN_WUNPCKELSH
,
471 ARM_BUILTIN_WUNPCKELSW
,
472 ARM_BUILTIN_WUNPCKELUB
,
473 ARM_BUILTIN_WUNPCKELUH
,
474 ARM_BUILTIN_WUNPCKELUW
,
480 ARM_BUILTIN_WADDSUBHX
,
481 ARM_BUILTIN_WSUBADDHX
,
483 ARM_BUILTIN_WABSDIFFB
,
484 ARM_BUILTIN_WABSDIFFH
,
485 ARM_BUILTIN_WABSDIFFW
,
502 ARM_BUILTIN_WMULWSMR
,
503 ARM_BUILTIN_WMULWUMR
,
514 ARM_BUILTIN_WQMULWMR
,
516 ARM_BUILTIN_WADDBHUSM
,
517 ARM_BUILTIN_WADDBHUSL
,
524 ARM_BUILTIN_WQMIABBN
,
525 ARM_BUILTIN_WQMIABTN
,
526 ARM_BUILTIN_WQMIATBN
,
527 ARM_BUILTIN_WQMIATTN
,
544 ARM_BUILTIN_WMIAWBBN
,
545 ARM_BUILTIN_WMIAWBTN
,
546 ARM_BUILTIN_WMIAWTBN
,
547 ARM_BUILTIN_WMIAWTTN
,
551 ARM_BUILTIN_GET_FPSCR
,
552 ARM_BUILTIN_SET_FPSCR
,
554 ARM_BUILTIN_CMSE_NONSECURE_CALLER
,
560 #define CRYPTO1(L, U, M1, M2) \
561 ARM_BUILTIN_CRYPTO_##U,
562 #define CRYPTO2(L, U, M1, M2, M3) \
563 ARM_BUILTIN_CRYPTO_##U,
564 #define CRYPTO3(L, U, M1, M2, M3, M4) \
565 ARM_BUILTIN_CRYPTO_##U,
567 ARM_BUILTIN_CRYPTO_BASE
,
569 #include "crypto.def"
575 ARM_BUILTIN_VFP_BASE
,
577 #include "arm_vfp_builtins.def"
579 ARM_BUILTIN_NEON_BASE
,
580 ARM_BUILTIN_NEON_LANE_CHECK
= ARM_BUILTIN_NEON_BASE
,
582 #include "arm_neon_builtins.def"
585 #define VAR1(T, N, X) \
588 ARM_BUILTIN_ACLE_BASE
,
590 #include "arm_acle_builtins.def"
595 #define ARM_BUILTIN_VFP_PATTERN_START \
596 (ARM_BUILTIN_VFP_BASE + 1)
598 #define ARM_BUILTIN_NEON_PATTERN_START \
599 (ARM_BUILTIN_NEON_BASE + 1)
601 #define ARM_BUILTIN_ACLE_PATTERN_START \
602 (ARM_BUILTIN_ACLE_BASE + 1)
616 static GTY(()) tree arm_builtin_decls
[ARM_BUILTIN_MAX
];
/* Counts of the D-register and Q-register element types handled below.  */
#define NUM_DREG_TYPES 5
#define NUM_QREG_TYPES 6
621 /* Internal scalar builtin types. These types are used to support
622 neon intrinsic builtins. They are _not_ user-visible types. Therefore
623 the mangling for these types are implementation defined. */
624 const char *arm_scalar_builtin_types
[] = {
632 "__builtin_neon_uqi",
633 "__builtin_neon_uhi",
634 "__builtin_neon_usi",
635 "__builtin_neon_udi",
643 #define ENTRY(E, M, Q, S, T, G) E,
646 #include "arm-simd-builtin-types.def"
651 struct arm_simd_type_info
653 enum arm_simd_type type
;
655 /* Internal type name. */
658 /* Internal type name(mangled). The mangled names conform to the
659 AAPCS (see "Procedure Call Standard for the ARM Architecture",
660 Appendix A). To qualify for emission with the mangled names defined in
661 that document, a vector type must not only be of the correct mode but also
662 be of the correct internal Neon vector type (e.g. __simd64_int8_t);
663 these types are registered by arm_init_simd_builtin_types (). In other
664 words, vector types defined in other ways e.g. via vector_size attribute
665 will get default mangled names. */
674 /* Machine mode the internal type maps to. */
678 enum arm_type_qualifiers q
;
681 #define ENTRY(E, M, Q, S, T, G) \
683 "__simd" #S "_" #T "_t", \
684 #G "__simd" #S "_" #T "_t", \
685 NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
686 static struct arm_simd_type_info arm_simd_types
[] = {
687 #include "arm-simd-builtin-types.def"
691 /* The user-visible __fp16 type. */
692 tree arm_fp16_type_node
= NULL_TREE
;
693 static tree arm_simd_intOI_type_node
= NULL_TREE
;
694 static tree arm_simd_intEI_type_node
= NULL_TREE
;
695 static tree arm_simd_intCI_type_node
= NULL_TREE
;
696 static tree arm_simd_intXI_type_node
= NULL_TREE
;
697 static tree arm_simd_polyQI_type_node
= NULL_TREE
;
698 static tree arm_simd_polyHI_type_node
= NULL_TREE
;
699 static tree arm_simd_polyDI_type_node
= NULL_TREE
;
700 static tree arm_simd_polyTI_type_node
= NULL_TREE
;
703 arm_mangle_builtin_scalar_type (const_tree type
)
707 while (arm_scalar_builtin_types
[i
] != NULL
)
709 const char *name
= arm_scalar_builtin_types
[i
];
711 if (TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
712 && DECL_NAME (TYPE_NAME (type
))
713 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))), name
))
714 return arm_scalar_builtin_types
[i
];
721 arm_mangle_builtin_vector_type (const_tree type
)
724 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
726 for (i
= 0; i
< nelts
; i
++)
727 if (arm_simd_types
[i
].mode
== TYPE_MODE (type
)
729 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
730 && DECL_NAME (TYPE_NAME (type
))
732 (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
733 arm_simd_types
[i
].name
))
734 return arm_simd_types
[i
].mangle
;
740 arm_mangle_builtin_type (const_tree type
)
743 /* Walk through all the AArch64 builtins types tables to filter out the
745 if ((mangle
= arm_mangle_builtin_vector_type (type
))
746 || (mangle
= arm_mangle_builtin_scalar_type (type
)))
753 arm_simd_builtin_std_type (enum machine_mode mode
,
754 enum arm_type_qualifiers q
)
756 #define QUAL_TYPE(M) \
757 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
761 return QUAL_TYPE (QI
);
763 return QUAL_TYPE (HI
);
765 return QUAL_TYPE (SI
);
767 return QUAL_TYPE (DI
);
769 return QUAL_TYPE (TI
);
771 return arm_simd_intOI_type_node
;
773 return arm_simd_intEI_type_node
;
775 return arm_simd_intCI_type_node
;
777 return arm_simd_intXI_type_node
;
779 return arm_fp16_type_node
;
781 return float_type_node
;
783 return double_type_node
;
791 arm_lookup_simd_builtin_type (enum machine_mode mode
,
792 enum arm_type_qualifiers q
)
795 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
797 /* Non-poly scalar modes map to standard types not in the table. */
798 if (q
!= qualifier_poly
&& !VECTOR_MODE_P (mode
))
799 return arm_simd_builtin_std_type (mode
, q
);
801 for (i
= 0; i
< nelts
; i
++)
802 if (arm_simd_types
[i
].mode
== mode
803 && arm_simd_types
[i
].q
== q
)
804 return arm_simd_types
[i
].itype
;
806 /* Note that we won't have caught the underlying type for poly64x2_t
807 in the above table. This gets default mangling. */
813 arm_simd_builtin_type (enum machine_mode mode
,
814 bool unsigned_p
, bool poly_p
)
817 return arm_lookup_simd_builtin_type (mode
, qualifier_poly
);
819 return arm_lookup_simd_builtin_type (mode
, qualifier_unsigned
);
821 return arm_lookup_simd_builtin_type (mode
, qualifier_none
);
825 arm_init_simd_builtin_types (void)
828 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
831 /* Poly types are a world of their own. In order to maintain legacy
832 ABI, they get initialized using the old interface, and don't get
833 an entry in our mangling table, consequently, they get default
834 mangling. As a further gotcha, poly8_t and poly16_t are signed
835 types, poly64_t and poly128_t are unsigned types. */
836 arm_simd_polyQI_type_node
837 = build_distinct_type_copy (intQI_type_node
);
838 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyQI_type_node
,
839 "__builtin_neon_poly8");
840 arm_simd_polyHI_type_node
841 = build_distinct_type_copy (intHI_type_node
);
842 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyHI_type_node
,
843 "__builtin_neon_poly16");
844 arm_simd_polyDI_type_node
845 = build_distinct_type_copy (unsigned_intDI_type_node
);
846 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyDI_type_node
,
847 "__builtin_neon_poly64");
848 arm_simd_polyTI_type_node
849 = build_distinct_type_copy (unsigned_intTI_type_node
);
850 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyTI_type_node
,
851 "__builtin_neon_poly128");
853 /* Init all the element types built by the front-end. */
854 arm_simd_types
[Int8x8_t
].eltype
= intQI_type_node
;
855 arm_simd_types
[Int8x16_t
].eltype
= intQI_type_node
;
856 arm_simd_types
[Int16x4_t
].eltype
= intHI_type_node
;
857 arm_simd_types
[Int16x8_t
].eltype
= intHI_type_node
;
858 arm_simd_types
[Int32x2_t
].eltype
= intSI_type_node
;
859 arm_simd_types
[Int32x4_t
].eltype
= intSI_type_node
;
860 arm_simd_types
[Int64x2_t
].eltype
= intDI_type_node
;
861 arm_simd_types
[Uint8x8_t
].eltype
= unsigned_intQI_type_node
;
862 arm_simd_types
[Uint8x16_t
].eltype
= unsigned_intQI_type_node
;
863 arm_simd_types
[Uint16x4_t
].eltype
= unsigned_intHI_type_node
;
864 arm_simd_types
[Uint16x8_t
].eltype
= unsigned_intHI_type_node
;
865 arm_simd_types
[Uint32x2_t
].eltype
= unsigned_intSI_type_node
;
866 arm_simd_types
[Uint32x4_t
].eltype
= unsigned_intSI_type_node
;
867 arm_simd_types
[Uint64x2_t
].eltype
= unsigned_intDI_type_node
;
869 /* Init poly vector element types with scalar poly types. */
870 arm_simd_types
[Poly8x8_t
].eltype
= arm_simd_polyQI_type_node
;
871 arm_simd_types
[Poly8x16_t
].eltype
= arm_simd_polyQI_type_node
;
872 arm_simd_types
[Poly16x4_t
].eltype
= arm_simd_polyHI_type_node
;
873 arm_simd_types
[Poly16x8_t
].eltype
= arm_simd_polyHI_type_node
;
874 /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
877 /* Continue with standard types. */
878 /* The __builtin_simd{64,128}_float16 types are kept private unless
879 we have a scalar __fp16 type. */
880 arm_simd_types
[Float16x4_t
].eltype
= arm_fp16_type_node
;
881 arm_simd_types
[Float16x8_t
].eltype
= arm_fp16_type_node
;
882 arm_simd_types
[Float32x2_t
].eltype
= float_type_node
;
883 arm_simd_types
[Float32x4_t
].eltype
= float_type_node
;
885 for (i
= 0; i
< nelts
; i
++)
887 tree eltype
= arm_simd_types
[i
].eltype
;
888 enum machine_mode mode
= arm_simd_types
[i
].mode
;
890 if (arm_simd_types
[i
].itype
== NULL
)
891 arm_simd_types
[i
].itype
=
892 build_distinct_type_copy
893 (build_vector_type (eltype
, GET_MODE_NUNITS (mode
)));
895 tdecl
= add_builtin_type (arm_simd_types
[i
].name
,
896 arm_simd_types
[i
].itype
);
897 TYPE_NAME (arm_simd_types
[i
].itype
) = tdecl
;
898 SET_TYPE_STRUCTURAL_EQUALITY (arm_simd_types
[i
].itype
);
901 #define AARCH_BUILD_SIGNED_TYPE(mode) \
902 make_signed_type (GET_MODE_PRECISION (mode));
903 arm_simd_intOI_type_node
= AARCH_BUILD_SIGNED_TYPE (OImode
);
904 arm_simd_intEI_type_node
= AARCH_BUILD_SIGNED_TYPE (EImode
);
905 arm_simd_intCI_type_node
= AARCH_BUILD_SIGNED_TYPE (CImode
);
906 arm_simd_intXI_type_node
= AARCH_BUILD_SIGNED_TYPE (XImode
);
907 #undef AARCH_BUILD_SIGNED_TYPE
909 tdecl
= add_builtin_type
910 ("__builtin_neon_ei" , arm_simd_intEI_type_node
);
911 TYPE_NAME (arm_simd_intEI_type_node
) = tdecl
;
912 tdecl
= add_builtin_type
913 ("__builtin_neon_oi" , arm_simd_intOI_type_node
);
914 TYPE_NAME (arm_simd_intOI_type_node
) = tdecl
;
915 tdecl
= add_builtin_type
916 ("__builtin_neon_ci" , arm_simd_intCI_type_node
);
917 TYPE_NAME (arm_simd_intCI_type_node
) = tdecl
;
918 tdecl
= add_builtin_type
919 ("__builtin_neon_xi" , arm_simd_intXI_type_node
);
920 TYPE_NAME (arm_simd_intXI_type_node
) = tdecl
;
924 arm_init_simd_builtin_scalar_types (void)
926 /* Define typedefs for all the standard scalar types. */
927 (*lang_hooks
.types
.register_builtin_type
) (intQI_type_node
,
928 "__builtin_neon_qi");
929 (*lang_hooks
.types
.register_builtin_type
) (intHI_type_node
,
930 "__builtin_neon_hi");
931 (*lang_hooks
.types
.register_builtin_type
) (intSI_type_node
,
932 "__builtin_neon_si");
933 (*lang_hooks
.types
.register_builtin_type
) (float_type_node
,
934 "__builtin_neon_sf");
935 (*lang_hooks
.types
.register_builtin_type
) (intDI_type_node
,
936 "__builtin_neon_di");
937 (*lang_hooks
.types
.register_builtin_type
) (double_type_node
,
938 "__builtin_neon_df");
939 (*lang_hooks
.types
.register_builtin_type
) (intTI_type_node
,
940 "__builtin_neon_ti");
942 /* Unsigned integer types for various mode sizes. */
943 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intQI_type_node
,
944 "__builtin_neon_uqi");
945 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intHI_type_node
,
946 "__builtin_neon_uhi");
947 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intSI_type_node
,
948 "__builtin_neon_usi");
949 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intDI_type_node
,
950 "__builtin_neon_udi");
951 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intTI_type_node
,
952 "__builtin_neon_uti");
955 /* Set up a builtin. It will use information stored in the argument struct D to
956 derive the builtin's type signature and name. It will append the name in D
957 to the PREFIX passed and use these to create a builtin declaration that is
958 then stored in 'arm_builtin_decls' under index FCODE. This FCODE is also
959 written back to D for future use. */
962 arm_init_builtin (unsigned int fcode
, arm_builtin_datum
*d
,
965 bool print_type_signature_p
= false;
966 char type_signature
[SIMD_MAX_BUILTIN_ARGS
] = { 0 };
973 /* We must track two variables here. op_num is
974 the operand number as in the RTL pattern. This is
975 required to access the mode (e.g. V4SF mode) of the
976 argument, from which the base type can be derived.
977 arg_num is an index in to the qualifiers data, which
978 gives qualifiers to the type (e.g. const unsigned).
979 The reason these two variables may differ by one is the
980 void return type. While all return types take the 0th entry
981 in the qualifiers array, there is no operand for them in the
983 int op_num
= insn_data
[d
->code
].n_operands
- 1;
984 int arg_num
= d
->qualifiers
[0] & qualifier_void
987 tree return_type
= void_type_node
, args
= void_list_node
;
990 /* Build a function type directly from the insn_data for this
991 builtin. The build_function_type () function takes care of
992 removing duplicates for us. */
993 for (; op_num
>= 0; arg_num
--, op_num
--)
995 machine_mode op_mode
= insn_data
[d
->code
].operand
[op_num
].mode
;
996 enum arm_type_qualifiers qualifiers
= d
->qualifiers
[arg_num
];
998 if (qualifiers
& qualifier_unsigned
)
1000 type_signature
[arg_num
] = 'u';
1001 print_type_signature_p
= true;
1003 else if (qualifiers
& qualifier_poly
)
1005 type_signature
[arg_num
] = 'p';
1006 print_type_signature_p
= true;
1009 type_signature
[arg_num
] = 's';
1011 /* Skip an internal operand for vget_{low, high}. */
1012 if (qualifiers
& qualifier_internal
)
1015 /* Some builtins have different user-facing types
1016 for certain arguments, encoded in d->mode. */
1017 if (qualifiers
& qualifier_map_mode
)
1020 /* For pointers, we want a pointer to the basic type
1022 if (qualifiers
& qualifier_pointer
&& VECTOR_MODE_P (op_mode
))
1023 op_mode
= GET_MODE_INNER (op_mode
);
1025 eltype
= arm_simd_builtin_type
1027 (qualifiers
& qualifier_unsigned
) != 0,
1028 (qualifiers
& qualifier_poly
) != 0);
1029 gcc_assert (eltype
!= NULL
);
1031 /* Add qualifiers. */
1032 if (qualifiers
& qualifier_const
)
1033 eltype
= build_qualified_type (eltype
, TYPE_QUAL_CONST
);
1035 if (qualifiers
& qualifier_pointer
)
1036 eltype
= build_pointer_type (eltype
);
1038 /* If we have reached arg_num == 0, we are at a non-void
1039 return type. Otherwise, we are still processing
1042 return_type
= eltype
;
1044 args
= tree_cons (NULL_TREE
, eltype
, args
);
1047 ftype
= build_function_type (return_type
, args
);
1049 gcc_assert (ftype
!= NULL
);
1051 if (print_type_signature_p
1052 && IN_RANGE (fcode
, ARM_BUILTIN_VFP_BASE
, ARM_BUILTIN_ACLE_BASE
- 1))
1053 snprintf (namebuf
, sizeof (namebuf
), "%s_%s_%s",
1054 prefix
, d
->name
, type_signature
);
1056 snprintf (namebuf
, sizeof (namebuf
), "%s_%s",
1059 fndecl
= add_builtin_function (namebuf
, ftype
, fcode
, BUILT_IN_MD
,
1061 arm_builtin_decls
[fcode
] = fndecl
;
1064 /* Set up ACLE builtins, even builtins for instructions that are not
1065 in the current target ISA to allow the user to compile particular modules
1066 with different target specific options that differ from the command line
1067 options. Such builtins will be rejected in arm_expand_builtin. */
1070 arm_init_acle_builtins (void)
1072 unsigned int i
, fcode
= ARM_BUILTIN_ACLE_PATTERN_START
;
1074 for (i
= 0; i
< ARRAY_SIZE (acle_builtin_data
); i
++, fcode
++)
1076 arm_builtin_datum
*d
= &acle_builtin_data
[i
];
1077 arm_init_builtin (fcode
, d
, "__builtin_arm");
1081 /* Set up all the NEON builtins, even builtins for instructions that are not
1082 in the current target ISA to allow the user to compile particular modules
1083 with different target specific options that differ from the command line
1084 options. Such builtins will be rejected in arm_expand_builtin. */
1087 arm_init_neon_builtins (void)
1089 unsigned int i
, fcode
= ARM_BUILTIN_NEON_PATTERN_START
;
1091 arm_init_simd_builtin_types ();
1093 /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
1094 Therefore we need to preserve the old __builtin scalar types. It can be
1095 removed once all the intrinsics become strongly typed using the qualifier
1097 arm_init_simd_builtin_scalar_types ();
1099 tree lane_check_fpr
= build_function_type_list (void_type_node
,
1103 arm_builtin_decls
[ARM_BUILTIN_NEON_LANE_CHECK
] =
1104 add_builtin_function ("__builtin_arm_lane_check", lane_check_fpr
,
1105 ARM_BUILTIN_NEON_LANE_CHECK
, BUILT_IN_MD
,
1108 for (i
= 0; i
< ARRAY_SIZE (neon_builtin_data
); i
++, fcode
++)
1110 arm_builtin_datum
*d
= &neon_builtin_data
[i
];
1111 arm_init_builtin (fcode
, d
, "__builtin_neon");
1115 /* Set up all the scalar floating point builtins. */
1118 arm_init_vfp_builtins (void)
1120 unsigned int i
, fcode
= ARM_BUILTIN_VFP_PATTERN_START
;
1122 for (i
= 0; i
< ARRAY_SIZE (vfp_builtin_data
); i
++, fcode
++)
1124 arm_builtin_datum
*d
= &vfp_builtin_data
[i
];
1125 arm_init_builtin (fcode
, d
, "__builtin_neon");
1130 arm_init_crypto_builtins (void)
1132 tree V16UQI_type_node
1133 = arm_simd_builtin_type (V16QImode
, true, false);
1135 tree V4USI_type_node
1136 = arm_simd_builtin_type (V4SImode
, true, false);
1138 tree v16uqi_ftype_v16uqi
1139 = build_function_type_list (V16UQI_type_node
, V16UQI_type_node
,
1142 tree v16uqi_ftype_v16uqi_v16uqi
1143 = build_function_type_list (V16UQI_type_node
, V16UQI_type_node
,
1144 V16UQI_type_node
, NULL_TREE
);
1146 tree v4usi_ftype_v4usi
1147 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1150 tree v4usi_ftype_v4usi_v4usi
1151 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1152 V4USI_type_node
, NULL_TREE
);
1154 tree v4usi_ftype_v4usi_v4usi_v4usi
1155 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1156 V4USI_type_node
, V4USI_type_node
,
1159 tree uti_ftype_udi_udi
1160 = build_function_type_list (unsigned_intTI_type_node
,
1161 unsigned_intDI_type_node
,
1162 unsigned_intDI_type_node
,
1176 ARM_BUILTIN_CRYPTO_##U
1178 "__builtin_arm_crypto_"#L
1181 #define FT2(R, A1, A2) \
1182 R##_ftype_##A1##_##A2
1183 #define FT3(R, A1, A2, A3) \
1184 R##_ftype_##A1##_##A2##_##A3
1185 #define CRYPTO1(L, U, R, A) \
1186 arm_builtin_decls[C (U)] \
1187 = add_builtin_function (N (L), FT1 (R, A), \
1188 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1189 #define CRYPTO2(L, U, R, A1, A2) \
1190 arm_builtin_decls[C (U)] \
1191 = add_builtin_function (N (L), FT2 (R, A1, A2), \
1192 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1194 #define CRYPTO3(L, U, R, A1, A2, A3) \
1195 arm_builtin_decls[C (U)] \
1196 = add_builtin_function (N (L), FT3 (R, A1, A2, A3), \
1197 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1198 #include "crypto.def"
1210 #undef NUM_DREG_TYPES
1211 #undef NUM_QREG_TYPES
1213 #define def_mbuiltin(FLAG, NAME, TYPE, CODE) \
1216 if (FLAG == isa_nobit \
1217 || bitmap_bit_p (arm_active_target.isa, FLAG)) \
1220 bdecl = add_builtin_function ((NAME), (TYPE), (CODE), \
1221 BUILT_IN_MD, NULL, NULL_TREE); \
1222 arm_builtin_decls[CODE] = bdecl; \
1227 struct builtin_description
1229 const enum isa_feature feature
;
1230 const enum insn_code icode
;
1231 const char * const name
;
1232 const enum arm_builtins code
;
1233 const enum rtx_code comparison
;
1234 const unsigned int flag
;
/* Two-argument builtins.  Each *_BUILTIN macro expands to one
   builtin_description initializer; arm_init_iwmmxt_builtins walks this
   table and registers every entry with a non-NULL name.
   NOTE(review): reconstructed from a garbled extraction -- the array
   braces and some #undef scaffolding lines were missing and have been
   restored to match the visible entries; verify against the original.  */
static const struct builtin_description bdesc_2arg[] =
{
#define IWMMXT_BUILTIN(code, string, builtin) \
  { isa_bit_iwmmxt, CODE_FOR_##code, \
    "__builtin_arm_" string, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

#define IWMMXT2_BUILTIN(code, string, builtin) \
  { isa_bit_iwmmxt2, CODE_FOR_##code, \
    "__builtin_arm_" string, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

  /* Vector add/subtract: wrapping, signed-saturating (ss), and
     unsigned-saturating (us) forms for byte/halfword/word elements.  */
  IWMMXT_BUILTIN (addv8qi3, "waddb", WADDB)
  IWMMXT_BUILTIN (addv4hi3, "waddh", WADDH)
  IWMMXT_BUILTIN (addv2si3, "waddw", WADDW)
  IWMMXT_BUILTIN (subv8qi3, "wsubb", WSUBB)
  IWMMXT_BUILTIN (subv4hi3, "wsubh", WSUBH)
  IWMMXT_BUILTIN (subv2si3, "wsubw", WSUBW)
  IWMMXT_BUILTIN (ssaddv8qi3, "waddbss", WADDSSB)
  IWMMXT_BUILTIN (ssaddv4hi3, "waddhss", WADDSSH)
  IWMMXT_BUILTIN (ssaddv2si3, "waddwss", WADDSSW)
  IWMMXT_BUILTIN (sssubv8qi3, "wsubbss", WSUBSSB)
  IWMMXT_BUILTIN (sssubv4hi3, "wsubhss", WSUBSSH)
  IWMMXT_BUILTIN (sssubv2si3, "wsubwss", WSUBSSW)
  IWMMXT_BUILTIN (usaddv8qi3, "waddbus", WADDUSB)
  IWMMXT_BUILTIN (usaddv4hi3, "waddhus", WADDUSH)
  IWMMXT_BUILTIN (usaddv2si3, "waddwus", WADDUSW)
  IWMMXT_BUILTIN (ussubv8qi3, "wsubbus", WSUBUSB)
  IWMMXT_BUILTIN (ussubv4hi3, "wsubhus", WSUBUSH)
  IWMMXT_BUILTIN (ussubv2si3, "wsubwus", WSUBUSW)
  /* Halfword multiplies: low part, signed/unsigned high part.  */
  IWMMXT_BUILTIN (mulv4hi3, "wmulul", WMULUL)
  IWMMXT_BUILTIN (smulv4hi3_highpart, "wmulsm", WMULSM)
  IWMMXT_BUILTIN (umulv4hi3_highpart, "wmulum", WMULUM)
  /* Element-wise compares.  */
  IWMMXT_BUILTIN (eqv8qi3, "wcmpeqb", WCMPEQB)
  IWMMXT_BUILTIN (eqv4hi3, "wcmpeqh", WCMPEQH)
  IWMMXT_BUILTIN (eqv2si3, "wcmpeqw", WCMPEQW)
  IWMMXT_BUILTIN (gtuv8qi3, "wcmpgtub", WCMPGTUB)
  IWMMXT_BUILTIN (gtuv4hi3, "wcmpgtuh", WCMPGTUH)
  IWMMXT_BUILTIN (gtuv2si3, "wcmpgtuw", WCMPGTUW)
  IWMMXT_BUILTIN (gtv8qi3, "wcmpgtsb", WCMPGTSB)
  IWMMXT_BUILTIN (gtv4hi3, "wcmpgtsh", WCMPGTSH)
  IWMMXT_BUILTIN (gtv2si3, "wcmpgtsw", WCMPGTSW)
  /* Min/max, signed and unsigned.  */
  IWMMXT_BUILTIN (umaxv8qi3, "wmaxub", WMAXUB)
  IWMMXT_BUILTIN (smaxv8qi3, "wmaxsb", WMAXSB)
  IWMMXT_BUILTIN (umaxv4hi3, "wmaxuh", WMAXUH)
  IWMMXT_BUILTIN (smaxv4hi3, "wmaxsh", WMAXSH)
  IWMMXT_BUILTIN (umaxv2si3, "wmaxuw", WMAXUW)
  IWMMXT_BUILTIN (smaxv2si3, "wmaxsw", WMAXSW)
  IWMMXT_BUILTIN (uminv8qi3, "wminub", WMINUB)
  IWMMXT_BUILTIN (sminv8qi3, "wminsb", WMINSB)
  IWMMXT_BUILTIN (uminv4hi3, "wminuh", WMINUH)
  IWMMXT_BUILTIN (sminv4hi3, "wminsh", WMINSH)
  IWMMXT_BUILTIN (uminv2si3, "wminuw", WMINUW)
  IWMMXT_BUILTIN (sminv2si3, "wminsw", WMINSW)
  /* 64-bit logical operations and averages.  */
  IWMMXT_BUILTIN (iwmmxt_anddi3, "wand", WAND)
  IWMMXT_BUILTIN (iwmmxt_nanddi3, "wandn", WANDN)
  IWMMXT_BUILTIN (iwmmxt_iordi3, "wor", WOR)
  IWMMXT_BUILTIN (iwmmxt_xordi3, "wxor", WXOR)
  IWMMXT_BUILTIN (iwmmxt_uavgv8qi3, "wavg2b", WAVG2B)
  IWMMXT_BUILTIN (iwmmxt_uavgv4hi3, "wavg2h", WAVG2H)
  IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3, "wavg2br", WAVG2BR)
  IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3, "wavg2hr", WAVG2HR)
  /* Interleave low/high halves.  */
  IWMMXT_BUILTIN (iwmmxt_wunpckilb, "wunpckilb", WUNPCKILB)
  IWMMXT_BUILTIN (iwmmxt_wunpckilh, "wunpckilh", WUNPCKILH)
  IWMMXT_BUILTIN (iwmmxt_wunpckilw, "wunpckilw", WUNPCKILW)
  IWMMXT_BUILTIN (iwmmxt_wunpckihb, "wunpckihb", WUNPCKIHB)
  IWMMXT_BUILTIN (iwmmxt_wunpckihh, "wunpckihh", WUNPCKIHH)
  IWMMXT_BUILTIN (iwmmxt_wunpckihw, "wunpckihw", WUNPCKIHW)
  /* iWMMXt2-only operations.  */
  IWMMXT2_BUILTIN (iwmmxt_waddsubhx, "waddsubhx", WADDSUBHX)
  IWMMXT2_BUILTIN (iwmmxt_wsubaddhx, "wsubaddhx", WSUBADDHX)
  IWMMXT2_BUILTIN (iwmmxt_wabsdiffb, "wabsdiffb", WABSDIFFB)
  IWMMXT2_BUILTIN (iwmmxt_wabsdiffh, "wabsdiffh", WABSDIFFH)
  IWMMXT2_BUILTIN (iwmmxt_wabsdiffw, "wabsdiffw", WABSDIFFW)
  IWMMXT2_BUILTIN (iwmmxt_avg4, "wavg4", WAVG4)
  IWMMXT2_BUILTIN (iwmmxt_avg4r, "wavg4r", WAVG4R)
  IWMMXT2_BUILTIN (iwmmxt_wmulwsm, "wmulwsm", WMULWSM)
  IWMMXT2_BUILTIN (iwmmxt_wmulwum, "wmulwum", WMULWUM)
  IWMMXT2_BUILTIN (iwmmxt_wmulwsmr, "wmulwsmr", WMULWSMR)
  IWMMXT2_BUILTIN (iwmmxt_wmulwumr, "wmulwumr", WMULWUMR)
  IWMMXT2_BUILTIN (iwmmxt_wmulwl, "wmulwl", WMULWL)
  IWMMXT2_BUILTIN (iwmmxt_wmulsmr, "wmulsmr", WMULSMR)
  IWMMXT2_BUILTIN (iwmmxt_wmulumr, "wmulumr", WMULUMR)
  IWMMXT2_BUILTIN (iwmmxt_wqmulm, "wqmulm", WQMULM)
  IWMMXT2_BUILTIN (iwmmxt_wqmulmr, "wqmulmr", WQMULMR)
  IWMMXT2_BUILTIN (iwmmxt_wqmulwm, "wqmulwm", WQMULWM)
  IWMMXT2_BUILTIN (iwmmxt_wqmulwmr, "wqmulwmr", WQMULWMR)
  IWMMXT_BUILTIN (iwmmxt_walignr0, "walignr0", WALIGNR0)
  IWMMXT_BUILTIN (iwmmxt_walignr1, "walignr1", WALIGNR1)
  IWMMXT_BUILTIN (iwmmxt_walignr2, "walignr2", WALIGNR2)
  IWMMXT_BUILTIN (iwmmxt_walignr3, "walignr3", WALIGNR3)

  /* Entries with no user-visible name (name == NULL); skipped by the
     name-driven registration loop, reached via other expansion paths.  */
#define IWMMXT_BUILTIN2(code, builtin) \
  { isa_bit_iwmmxt, CODE_FOR_##code, NULL, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

#define IWMMXT2_BUILTIN2(code, builtin) \
  { isa_bit_iwmmxt2, CODE_FOR_##code, NULL, \
    ARM_BUILTIN_##builtin, UNKNOWN, 0 },

  IWMMXT2_BUILTIN2 (iwmmxt_waddbhusm, WADDBHUSM)
  IWMMXT2_BUILTIN2 (iwmmxt_waddbhusl, WADDBHUSL)
  IWMMXT_BUILTIN2 (iwmmxt_wpackhss, WPACKHSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackwss, WPACKWSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackdss, WPACKDSS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackhus, WPACKHUS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackwus, WPACKWUS)
  IWMMXT_BUILTIN2 (iwmmxt_wpackdus, WPACKDUS)
  IWMMXT_BUILTIN2 (iwmmxt_wmacuz, WMACUZ)
  IWMMXT_BUILTIN2 (iwmmxt_wmacsz, WMACSZ)

  /* FPSCR access builtins (no ISA bit required).
     NOTE(review): the trailing "UNKNOWN, 0}," continuation of FP_BUILTIN
     and the #undef were dropped by the extraction; reconstructed.  */
#define FP_BUILTIN(L, U) \
  {isa_nobit, CODE_FOR_##L, "__builtin_arm_"#L, ARM_BUILTIN_##U, \
   UNKNOWN, 0},

  FP_BUILTIN (get_fpscr, GET_FPSCR)
  FP_BUILTIN (set_fpscr, SET_FPSCR)
#undef FP_BUILTIN

  /* Binary crypto builtins: CRYPTO2 maps to a table entry while
     CRYPTO1/CRYPTO3 are defined empty, so including crypto.def emits
     only the two-operand intrinsics here.  */
#define CRYPTO_BUILTIN(L, U) \
  {isa_nobit, CODE_FOR_crypto_##L, "__builtin_arm_crypto_"#L, \
   ARM_BUILTIN_CRYPTO_##U, UNKNOWN, 0},
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
#define CRYPTO2(L, U, R, A1, A2) CRYPTO_BUILTIN (L, U)
#define CRYPTO1(L, U, R, A)
#define CRYPTO3(L, U, R, A1, A2, A3)
#include "crypto.def"
};
/* One-argument builtins: iWMMXt mask/accumulate/unpack/abs/broadcast
   operations plus the unary crypto intrinsics from crypto.def.
   NOTE(review): reconstructed from a garbled extraction; braces and
   #undef scaffolding restored -- verify against the original file.  */
static const struct builtin_description bdesc_1arg[] =
{
  /* Mask extraction and accumulate-across-vector.  */
  IWMMXT_BUILTIN (iwmmxt_tmovmskb, "tmovmskb", TMOVMSKB)
  IWMMXT_BUILTIN (iwmmxt_tmovmskh, "tmovmskh", TMOVMSKH)
  IWMMXT_BUILTIN (iwmmxt_tmovmskw, "tmovmskw", TMOVMSKW)
  IWMMXT_BUILTIN (iwmmxt_waccb, "waccb", WACCB)
  IWMMXT_BUILTIN (iwmmxt_wacch, "wacch", WACCH)
  IWMMXT_BUILTIN (iwmmxt_waccw, "waccw", WACCW)
  /* Widening unpacks: {high,low} x {unsigned,signed} x element size.  */
  IWMMXT_BUILTIN (iwmmxt_wunpckehub, "wunpckehub", WUNPCKEHUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuh, "wunpckehuh", WUNPCKEHUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehuw, "wunpckehuw", WUNPCKEHUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsb, "wunpckehsb", WUNPCKEHSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsh, "wunpckehsh", WUNPCKEHSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckehsw, "wunpckehsw", WUNPCKEHSW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelub, "wunpckelub", WUNPCKELUB)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluh, "wunpckeluh", WUNPCKELUH)
  IWMMXT_BUILTIN (iwmmxt_wunpckeluw, "wunpckeluw", WUNPCKELUW)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsb, "wunpckelsb", WUNPCKELSB)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsh, "wunpckelsh", WUNPCKELSH)
  IWMMXT_BUILTIN (iwmmxt_wunpckelsw, "wunpckelsw", WUNPCKELSW)
  /* iWMMXt2 absolute value; iWMMXt scalar-to-vector broadcast.  */
  IWMMXT2_BUILTIN (iwmmxt_wabsv8qi3, "wabsb", WABSB)
  IWMMXT2_BUILTIN (iwmmxt_wabsv4hi3, "wabsh", WABSH)
  IWMMXT2_BUILTIN (iwmmxt_wabsv2si3, "wabsw", WABSW)
  IWMMXT_BUILTIN (tbcstv8qi, "tbcstb", TBCSTB)
  IWMMXT_BUILTIN (tbcstv4hi, "tbcsth", TBCSTH)
  IWMMXT_BUILTIN (tbcstv2si, "tbcstw", TBCSTW)

  /* Unary crypto builtins: only CRYPTO1 emits entries this time.  */
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
#define CRYPTO1(L, U, R, A) CRYPTO_BUILTIN (L, U)
#define CRYPTO2(L, U, R, A1, A2)
#define CRYPTO3(L, U, R, A1, A2, A3)
#include "crypto.def"
};
/* Three-argument builtins: only the ternary crypto intrinsics from
   crypto.def (CRYPTO3 emits entries; CRYPTO1/CRYPTO2 are empty).
   NOTE(review): braces/#undef scaffolding reconstructed -- verify.  */
static const struct builtin_description bdesc_3arg[] =
{
#undef CRYPTO1
#undef CRYPTO2
#undef CRYPTO3
#define CRYPTO3(L, U, R, A1, A2, A3) CRYPTO_BUILTIN (L, U)
#define CRYPTO1(L, U, R, A)
#define CRYPTO2(L, U, R, A1, A2)
#include "crypto.def"
};
#undef CRYPTO_BUILTIN
/* Set up all the iWMMXt builtins.  This is not called if
   TARGET_IWMMXT is zero.

   First builds the vector and function-type tree nodes the builtins
   need, then registers the simple two-operand builtins by walking
   bdesc_2arg, then registers the remaining builtins explicitly.

   NOTE(review): reconstructed from a garbled extraction.  Lines the
   extraction dropped (function header, braces, several "tree ..._ftype"
   declaration heads, trailing NULL_TREE arguments, loop/switch keywords)
   have been restored from the surrounding visible uses; verify against
   the original file before relying on exact text.  */

static void
arm_init_iwmmxt_builtins (void)
{
  const struct builtin_description * d;
  size_t i;

  /* 64-bit iWMMXt vector types: 2 x SI, 4 x HI, 8 x QI.  */
  tree V2SI_type_node = build_vector_type_for_mode (intSI_type_node, V2SImode);
  tree V4HI_type_node = build_vector_type_for_mode (intHI_type_node, V4HImode);
  tree V8QI_type_node = build_vector_type_for_mode (intQI_type_node, V8QImode);

  /* Function types, named <return>_ftype_<arguments>.  */
  tree v8qi_ftype_v8qi_v8qi_int
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, V8QI_type_node,
				integer_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_int
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, integer_type_node, NULL_TREE);
  tree v2si_ftype_v2si_int
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, integer_type_node, NULL_TREE);
  tree v2si_ftype_di_di
    = build_function_type_list (V2SI_type_node,
				long_long_integer_type_node,
				long_long_integer_type_node,
				NULL_TREE);
  tree di_ftype_di_int
    = build_function_type_list (long_long_integer_type_node,
				long_long_integer_type_node,
				integer_type_node, NULL_TREE);
  tree di_ftype_di_int_int
    = build_function_type_list (long_long_integer_type_node,
				long_long_integer_type_node,
				integer_type_node,
				integer_type_node, NULL_TREE);
  tree int_ftype_v8qi
    = build_function_type_list (integer_type_node,
				V8QI_type_node, NULL_TREE);
  tree int_ftype_v4hi
    = build_function_type_list (integer_type_node,
				V4HI_type_node, NULL_TREE);
  tree int_ftype_v2si
    = build_function_type_list (integer_type_node,
				V2SI_type_node, NULL_TREE);
  tree int_ftype_v8qi_int
    = build_function_type_list (integer_type_node,
				V8QI_type_node, integer_type_node, NULL_TREE);
  tree int_ftype_v4hi_int
    = build_function_type_list (integer_type_node,
				V4HI_type_node, integer_type_node, NULL_TREE);
  tree int_ftype_v2si_int
    = build_function_type_list (integer_type_node,
				V2SI_type_node, integer_type_node, NULL_TREE);
  tree v8qi_ftype_v8qi_int_int
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_int_int
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  tree v2si_ftype_v2si_int_int
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, integer_type_node,
				integer_type_node, NULL_TREE);
  /* Miscellaneous.  */
  tree v8qi_ftype_v4hi_v4hi
    = build_function_type_list (V8QI_type_node,
				V4HI_type_node, V4HI_type_node, NULL_TREE);
  tree v4hi_ftype_v2si_v2si
    = build_function_type_list (V4HI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);
  tree v8qi_ftype_v4hi_v8qi
    = build_function_type_list (V8QI_type_node,
				V4HI_type_node, V8QI_type_node, NULL_TREE);
  tree v2si_ftype_v4hi_v4hi
    = build_function_type_list (V2SI_type_node,
				V4HI_type_node, V4HI_type_node, NULL_TREE);
  tree v2si_ftype_v8qi_v8qi
    = build_function_type_list (V2SI_type_node,
				V8QI_type_node, V8QI_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_di
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, long_long_integer_type_node,
				NULL_TREE);
  tree v2si_ftype_v2si_di
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, long_long_integer_type_node,
				NULL_TREE);
  tree di_ftype_void
    = build_function_type_list (long_long_unsigned_type_node, NULL_TREE);
  tree int_ftype_void
    = build_function_type_list (integer_type_node, NULL_TREE);
  tree di_ftype_v8qi
    = build_function_type_list (long_long_integer_type_node,
				V8QI_type_node, NULL_TREE);
  tree di_ftype_v4hi
    = build_function_type_list (long_long_integer_type_node,
				V4HI_type_node, NULL_TREE);
  tree di_ftype_v2si
    = build_function_type_list (long_long_integer_type_node,
				V2SI_type_node, NULL_TREE);
  tree v2si_ftype_v4hi
    = build_function_type_list (V2SI_type_node,
				V4HI_type_node, NULL_TREE);
  tree v4hi_ftype_v8qi
    = build_function_type_list (V4HI_type_node,
				V8QI_type_node, NULL_TREE);
  tree v8qi_ftype_v8qi
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node, NULL_TREE);
  tree v2si_ftype_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, NULL_TREE);

  tree di_ftype_di_v4hi_v4hi
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				V4HI_type_node, V4HI_type_node,
				NULL_TREE);

  tree di_ftype_v4hi_v4hi
    = build_function_type_list (long_long_unsigned_type_node,
				V4HI_type_node,V4HI_type_node,
				NULL_TREE);

  tree v2si_ftype_v2si_v4hi_v4hi
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V4HI_type_node,
				V4HI_type_node, NULL_TREE);

  tree v2si_ftype_v2si_v8qi_v8qi
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V8QI_type_node,
				V8QI_type_node, NULL_TREE);

  tree di_ftype_di_v2si_v2si
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				V2SI_type_node, V2SI_type_node,
				NULL_TREE);

  tree di_ftype_di_di_int
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				integer_type_node, NULL_TREE);

  tree void_ftype_int
    = build_function_type_list (void_type_node,
				integer_type_node, NULL_TREE);

  tree v8qi_ftype_char
    = build_function_type_list (V8QI_type_node,
				signed_char_type_node, NULL_TREE);

  tree v4hi_ftype_short
    = build_function_type_list (V4HI_type_node,
				short_integer_type_node, NULL_TREE);

  tree v2si_ftype_int
    = build_function_type_list (V2SI_type_node,
				integer_type_node, NULL_TREE);

  /* Normal vector binops.  */
  tree v8qi_ftype_v8qi_v8qi
    = build_function_type_list (V8QI_type_node,
				V8QI_type_node, V8QI_type_node, NULL_TREE);
  tree v4hi_ftype_v4hi_v4hi
    = build_function_type_list (V4HI_type_node,
				V4HI_type_node,V4HI_type_node, NULL_TREE);
  tree v2si_ftype_v2si_v2si
    = build_function_type_list (V2SI_type_node,
				V2SI_type_node, V2SI_type_node, NULL_TREE);
  tree di_ftype_di_di
    = build_function_type_list (long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				long_long_unsigned_type_node,
				NULL_TREE);

  /* Add all builtins that are more or less simple operations on two
     operands.  */
  for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
    {
      /* Use one of the operands; the target can have a different mode for
	 mask-generating compares.  */
      machine_mode mode;
      tree type;

      /* Skip unnamed entries and anything not gated on an iWMMXt bit
	 (the table also contains FP/crypto entries).  */
      if (d->name == 0
	  || !(d->feature == isa_bit_iwmmxt
	       || d->feature == isa_bit_iwmmxt2))
	continue;

      mode = insn_data[d->icode].operand[1].mode;

      /* Select the function type from the operand's vector mode.  */
      switch (mode)
	{
	case V8QImode:
	  type = v8qi_ftype_v8qi_v8qi;
	  break;
	case V4HImode:
	  type = v4hi_ftype_v4hi_v4hi;
	  break;
	case V2SImode:
	  type = v2si_ftype_v2si_v2si;
	  break;
	case DImode:
	  type = di_ftype_di_di;
	  break;

	default:
	  gcc_unreachable ();
	}

      def_mbuiltin (d->feature, d->name, type, d->code);
    }

  /* Add the remaining MMX insns with somewhat more complicated types.  */
#define iwmmx_mbuiltin(NAME, TYPE, CODE)			\
  def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_" NAME,		\
		(TYPE), ARM_BUILTIN_ ## CODE)

#define iwmmx2_mbuiltin(NAME, TYPE, CODE)                      \
  def_mbuiltin (isa_bit_iwmmxt2, "__builtin_arm_" NAME,        \
		(TYPE),	ARM_BUILTIN_ ## CODE)

  /* Control-register access.  */
  iwmmx_mbuiltin ("wzero", di_ftype_void, WZERO);
  iwmmx_mbuiltin ("setwcgr0", void_ftype_int, SETWCGR0);
  iwmmx_mbuiltin ("setwcgr1", void_ftype_int, SETWCGR1);
  iwmmx_mbuiltin ("setwcgr2", void_ftype_int, SETWCGR2);
  iwmmx_mbuiltin ("setwcgr3", void_ftype_int, SETWCGR3);
  iwmmx_mbuiltin ("getwcgr0", int_ftype_void, GETWCGR0);
  iwmmx_mbuiltin ("getwcgr1", int_ftype_void, GETWCGR1);
  iwmmx_mbuiltin ("getwcgr2", int_ftype_void, GETWCGR2);
  iwmmx_mbuiltin ("getwcgr3", int_ftype_void, GETWCGR3);

  /* Shifts and rotates: register-count and immediate-count forms.  */
  iwmmx_mbuiltin ("wsllh", v4hi_ftype_v4hi_di, WSLLH);
  iwmmx_mbuiltin ("wsllw", v2si_ftype_v2si_di, WSLLW);
  iwmmx_mbuiltin ("wslld", di_ftype_di_di, WSLLD);
  iwmmx_mbuiltin ("wsllhi", v4hi_ftype_v4hi_int, WSLLHI);
  iwmmx_mbuiltin ("wsllwi", v2si_ftype_v2si_int, WSLLWI);
  iwmmx_mbuiltin ("wslldi", di_ftype_di_int, WSLLDI);

  iwmmx_mbuiltin ("wsrlh", v4hi_ftype_v4hi_di, WSRLH);
  iwmmx_mbuiltin ("wsrlw", v2si_ftype_v2si_di, WSRLW);
  iwmmx_mbuiltin ("wsrld", di_ftype_di_di, WSRLD);
  iwmmx_mbuiltin ("wsrlhi", v4hi_ftype_v4hi_int, WSRLHI);
  iwmmx_mbuiltin ("wsrlwi", v2si_ftype_v2si_int, WSRLWI);
  iwmmx_mbuiltin ("wsrldi", di_ftype_di_int, WSRLDI);

  iwmmx_mbuiltin ("wsrah", v4hi_ftype_v4hi_di, WSRAH);
  iwmmx_mbuiltin ("wsraw", v2si_ftype_v2si_di, WSRAW);
  iwmmx_mbuiltin ("wsrad", di_ftype_di_di, WSRAD);
  iwmmx_mbuiltin ("wsrahi", v4hi_ftype_v4hi_int, WSRAHI);
  iwmmx_mbuiltin ("wsrawi", v2si_ftype_v2si_int, WSRAWI);
  iwmmx_mbuiltin ("wsradi", di_ftype_di_int, WSRADI);

  iwmmx_mbuiltin ("wrorh", v4hi_ftype_v4hi_di, WRORH);
  iwmmx_mbuiltin ("wrorw", v2si_ftype_v2si_di, WRORW);
  iwmmx_mbuiltin ("wrord", di_ftype_di_di, WRORD);
  iwmmx_mbuiltin ("wrorhi", v4hi_ftype_v4hi_int, WRORHI);
  iwmmx_mbuiltin ("wrorwi", v2si_ftype_v2si_int, WRORWI);
  iwmmx_mbuiltin ("wrordi", di_ftype_di_int, WRORDI);

  iwmmx_mbuiltin ("wshufh", v4hi_ftype_v4hi_int, WSHUFH);

  /* Sum-of-absolute-differences and multiply-accumulate.  */
  iwmmx_mbuiltin ("wsadb", v2si_ftype_v2si_v8qi_v8qi, WSADB);
  iwmmx_mbuiltin ("wsadh", v2si_ftype_v2si_v4hi_v4hi, WSADH);
  iwmmx_mbuiltin ("wmadds", v2si_ftype_v4hi_v4hi, WMADDS);
  iwmmx2_mbuiltin ("wmaddsx", v2si_ftype_v4hi_v4hi, WMADDSX);
  iwmmx2_mbuiltin ("wmaddsn", v2si_ftype_v4hi_v4hi, WMADDSN);
  iwmmx_mbuiltin ("wmaddu", v2si_ftype_v4hi_v4hi, WMADDU);
  iwmmx2_mbuiltin ("wmaddux", v2si_ftype_v4hi_v4hi, WMADDUX);
  iwmmx2_mbuiltin ("wmaddun", v2si_ftype_v4hi_v4hi, WMADDUN);
  iwmmx_mbuiltin ("wsadbz", v2si_ftype_v8qi_v8qi, WSADBZ);
  iwmmx_mbuiltin ("wsadhz", v2si_ftype_v4hi_v4hi, WSADHZ);

  /* Element extract (signed/unsigned) and insert.  */
  iwmmx_mbuiltin ("textrmsb", int_ftype_v8qi_int, TEXTRMSB);
  iwmmx_mbuiltin ("textrmsh", int_ftype_v4hi_int, TEXTRMSH);
  iwmmx_mbuiltin ("textrmsw", int_ftype_v2si_int, TEXTRMSW);
  iwmmx_mbuiltin ("textrmub", int_ftype_v8qi_int, TEXTRMUB);
  iwmmx_mbuiltin ("textrmuh", int_ftype_v4hi_int, TEXTRMUH);
  iwmmx_mbuiltin ("textrmuw", int_ftype_v2si_int, TEXTRMUW);
  iwmmx_mbuiltin ("tinsrb", v8qi_ftype_v8qi_int_int, TINSRB);
  iwmmx_mbuiltin ("tinsrh", v4hi_ftype_v4hi_int_int, TINSRH);
  iwmmx_mbuiltin ("tinsrw", v2si_ftype_v2si_int_int, TINSRW);

  iwmmx_mbuiltin ("waccb", di_ftype_v8qi, WACCB);
  iwmmx_mbuiltin ("wacch", di_ftype_v4hi, WACCH);
  iwmmx_mbuiltin ("waccw", di_ftype_v2si, WACCW);

  iwmmx_mbuiltin ("tmovmskb", int_ftype_v8qi, TMOVMSKB);
  iwmmx_mbuiltin ("tmovmskh", int_ftype_v4hi, TMOVMSKH);
  iwmmx_mbuiltin ("tmovmskw", int_ftype_v2si, TMOVMSKW);

  iwmmx2_mbuiltin ("waddbhusm", v8qi_ftype_v4hi_v8qi, WADDBHUSM);
  iwmmx2_mbuiltin ("waddbhusl", v8qi_ftype_v4hi_v8qi, WADDBHUSL);

  /* Narrowing packs with saturation.  */
  iwmmx_mbuiltin ("wpackhss", v8qi_ftype_v4hi_v4hi, WPACKHSS);
  iwmmx_mbuiltin ("wpackhus", v8qi_ftype_v4hi_v4hi, WPACKHUS);
  iwmmx_mbuiltin ("wpackwus", v4hi_ftype_v2si_v2si, WPACKWUS);
  iwmmx_mbuiltin ("wpackwss", v4hi_ftype_v2si_v2si, WPACKWSS);
  iwmmx_mbuiltin ("wpackdus", v2si_ftype_di_di, WPACKDUS);
  iwmmx_mbuiltin ("wpackdss", v2si_ftype_di_di, WPACKDSS);

  /* Widening unpacks.  */
  iwmmx_mbuiltin ("wunpckehub", v4hi_ftype_v8qi, WUNPCKEHUB);
  iwmmx_mbuiltin ("wunpckehuh", v2si_ftype_v4hi, WUNPCKEHUH);
  iwmmx_mbuiltin ("wunpckehuw", di_ftype_v2si, WUNPCKEHUW);
  iwmmx_mbuiltin ("wunpckehsb", v4hi_ftype_v8qi, WUNPCKEHSB);
  iwmmx_mbuiltin ("wunpckehsh", v2si_ftype_v4hi, WUNPCKEHSH);
  iwmmx_mbuiltin ("wunpckehsw", di_ftype_v2si, WUNPCKEHSW);
  iwmmx_mbuiltin ("wunpckelub", v4hi_ftype_v8qi, WUNPCKELUB);
  iwmmx_mbuiltin ("wunpckeluh", v2si_ftype_v4hi, WUNPCKELUH);
  iwmmx_mbuiltin ("wunpckeluw", di_ftype_v2si, WUNPCKELUW);
  iwmmx_mbuiltin ("wunpckelsb", v4hi_ftype_v8qi, WUNPCKELSB);
  iwmmx_mbuiltin ("wunpckelsh", v2si_ftype_v4hi, WUNPCKELSH);
  iwmmx_mbuiltin ("wunpckelsw", di_ftype_v2si, WUNPCKELSW);

  iwmmx_mbuiltin ("wmacs", di_ftype_di_v4hi_v4hi, WMACS);
  iwmmx_mbuiltin ("wmacsz", di_ftype_v4hi_v4hi, WMACSZ);
  iwmmx_mbuiltin ("wmacu", di_ftype_di_v4hi_v4hi, WMACU);
  iwmmx_mbuiltin ("wmacuz", di_ftype_v4hi_v4hi, WMACUZ);

  iwmmx_mbuiltin ("walign", v8qi_ftype_v8qi_v8qi_int, WALIGNI);
  iwmmx_mbuiltin ("tmia", di_ftype_di_int_int, TMIA);
  iwmmx_mbuiltin ("tmiaph", di_ftype_di_int_int, TMIAPH);
  iwmmx_mbuiltin ("tmiabb", di_ftype_di_int_int, TMIABB);
  iwmmx_mbuiltin ("tmiabt", di_ftype_di_int_int, TMIABT);
  iwmmx_mbuiltin ("tmiatb", di_ftype_di_int_int, TMIATB);
  iwmmx_mbuiltin ("tmiatt", di_ftype_di_int_int, TMIATT);

  iwmmx2_mbuiltin ("wabsb", v8qi_ftype_v8qi, WABSB);
  iwmmx2_mbuiltin ("wabsh", v4hi_ftype_v4hi, WABSH);
  iwmmx2_mbuiltin ("wabsw", v2si_ftype_v2si, WABSW);

  /* iWMMXt2 multiply-internal-accumulate variants.  */
  iwmmx2_mbuiltin ("wqmiabb", v2si_ftype_v2si_v4hi_v4hi, WQMIABB);
  iwmmx2_mbuiltin ("wqmiabt", v2si_ftype_v2si_v4hi_v4hi, WQMIABT);
  iwmmx2_mbuiltin ("wqmiatb", v2si_ftype_v2si_v4hi_v4hi, WQMIATB);
  iwmmx2_mbuiltin ("wqmiatt", v2si_ftype_v2si_v4hi_v4hi, WQMIATT);

  iwmmx2_mbuiltin ("wqmiabbn", v2si_ftype_v2si_v4hi_v4hi, WQMIABBN);
  iwmmx2_mbuiltin ("wqmiabtn", v2si_ftype_v2si_v4hi_v4hi, WQMIABTN);
  iwmmx2_mbuiltin ("wqmiatbn", v2si_ftype_v2si_v4hi_v4hi, WQMIATBN);
  iwmmx2_mbuiltin ("wqmiattn", v2si_ftype_v2si_v4hi_v4hi, WQMIATTN);

  iwmmx2_mbuiltin ("wmiabb", di_ftype_di_v4hi_v4hi, WMIABB);
  iwmmx2_mbuiltin ("wmiabt", di_ftype_di_v4hi_v4hi, WMIABT);
  iwmmx2_mbuiltin ("wmiatb", di_ftype_di_v4hi_v4hi, WMIATB);
  iwmmx2_mbuiltin ("wmiatt", di_ftype_di_v4hi_v4hi, WMIATT);

  iwmmx2_mbuiltin ("wmiabbn", di_ftype_di_v4hi_v4hi, WMIABBN);
  iwmmx2_mbuiltin ("wmiabtn", di_ftype_di_v4hi_v4hi, WMIABTN);
  iwmmx2_mbuiltin ("wmiatbn", di_ftype_di_v4hi_v4hi, WMIATBN);
  iwmmx2_mbuiltin ("wmiattn", di_ftype_di_v4hi_v4hi, WMIATTN);

  iwmmx2_mbuiltin ("wmiawbb", di_ftype_di_v2si_v2si, WMIAWBB);
  iwmmx2_mbuiltin ("wmiawbt", di_ftype_di_v2si_v2si, WMIAWBT);
  iwmmx2_mbuiltin ("wmiawtb", di_ftype_di_v2si_v2si, WMIAWTB);
  iwmmx2_mbuiltin ("wmiawtt", di_ftype_di_v2si_v2si, WMIAWTT);

  iwmmx2_mbuiltin ("wmiawbbn", di_ftype_di_v2si_v2si, WMIAWBBN);
  iwmmx2_mbuiltin ("wmiawbtn", di_ftype_di_v2si_v2si, WMIAWBTN);
  iwmmx2_mbuiltin ("wmiawtbn", di_ftype_di_v2si_v2si, WMIAWTBN);
  iwmmx2_mbuiltin ("wmiawttn", di_ftype_di_v2si_v2si, WMIAWTTN);

  iwmmx2_mbuiltin ("wmerge", di_ftype_di_di_int, WMERGE);

  /* Scalar-to-vector broadcasts.  */
  iwmmx_mbuiltin ("tbcstb", v8qi_ftype_char, TBCSTB);
  iwmmx_mbuiltin ("tbcsth", v4hi_ftype_short, TBCSTH);
  iwmmx_mbuiltin ("tbcstw", v2si_ftype_int, TBCSTW);

#undef iwmmx_mbuiltin
#undef iwmmx2_mbuiltin
}
/* Create and lay out the half-precision REAL_TYPE node, and register it
   with the language front end when an FP16 format is in effect.
   NOTE(review): function header/braces and the registered type-name
   string argument were dropped by the extraction; "__fp16" is assumed
   from the ARM ABI convention -- verify against the original file.  */
static void
arm_init_fp16_builtins (void)
{
  arm_fp16_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (arm_fp16_type_node) = GET_MODE_PRECISION (HFmode);
  layout_type (arm_fp16_type_node);
  /* Only expose the type to the front end when -mfp16-format selects
     a representation.  */
  if (arm_fp16_format)
    (*lang_hooks.types.register_builtin_type) (arm_fp16_type_node,
					       "__fp16");
}
/* Top-level TARGET_INIT_BUILTINS hook: registers all ARM builtin
   families in dependency order.
   NOTE(review): function header/braces and the guard before the CMSE
   registration were dropped by the extraction; the guard is assumed to
   test use_cmse -- verify against the original file.  */
static void
arm_init_builtins (void)
{
  if (TARGET_REALLY_IWMMXT)
    arm_init_iwmmxt_builtins ();

  /* This creates the arm_simd_floatHF_type_node so must come before
     arm_init_neon_builtins which uses it.  */
  arm_init_fp16_builtins ();

  if (TARGET_HARD_FLOAT)
    {
      arm_init_neon_builtins ();
      arm_init_vfp_builtins ();
      arm_init_crypto_builtins ();
    }

  arm_init_acle_builtins ();

  if (TARGET_HARD_FLOAT)
    {
      /* FPSCR load/store builtins.  */
      tree ftype_set_fpscr
	= build_function_type_list (void_type_node, unsigned_type_node, NULL);
      tree ftype_get_fpscr
	= build_function_type_list (unsigned_type_node, NULL);

      arm_builtin_decls[ARM_BUILTIN_GET_FPSCR]
	= add_builtin_function ("__builtin_arm_ldfscr", ftype_get_fpscr,
				ARM_BUILTIN_GET_FPSCR, BUILT_IN_MD, NULL,
				NULL_TREE);
      arm_builtin_decls[ARM_BUILTIN_SET_FPSCR]
	= add_builtin_function ("__builtin_arm_stfscr", ftype_set_fpscr,
				ARM_BUILTIN_SET_FPSCR, BUILT_IN_MD, NULL,
				NULL_TREE);
    }

  if (use_cmse)		/* NOTE(review): guard reconstructed.  */
    {
      tree ftype_cmse_nonsecure_caller
	= build_function_type_list (unsigned_type_node, NULL);
      arm_builtin_decls[ARM_BUILTIN_CMSE_NONSECURE_CALLER]
	= add_builtin_function ("__builtin_arm_cmse_nonsecure_caller",
				ftype_cmse_nonsecure_caller,
				ARM_BUILTIN_CMSE_NONSECURE_CALLER, BUILT_IN_MD,
				NULL, NULL_TREE);
    }
}
/* Return the ARM builtin for CODE.  Implements TARGET_BUILTIN_DECL;
   out-of-range codes yield error_mark_node rather than crashing.  */
static tree
arm_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
{
  if (code >= ARM_BUILTIN_MAX)
    return error_mark_node;

  return arm_builtin_decls[code];
}
/* Errors in the source file can cause expand_expr to return const0_rtx
   where we expect a vector.  To avoid crashing, use one of the vector
   clear instructions.
   NOTE(review): the two "return x;" lines were dropped by the
   extraction and have been reconstructed -- verify.  */
static rtx
safe_vector_operand (rtx x, machine_mode mode)
{
  /* Anything other than const0_rtx is already a usable operand.  */
  if (x != const0_rtx)
    return x;
  x = gen_reg_rtx (mode);

  /* Emit a wzero-style clear; non-DImode registers are cleared through
     a DImode subreg view.  */
  emit_insn (gen_iwmmxt_clrdi (mode == DImode ? x
			       : gen_rtx_SUBREG (DImode, x, 0)));
  return x;
}
/* Function to expand ternary builtins.
   NOTE(review): the function header, local declarations for PAT/OP3 and
   the emit/return tail were dropped by the extraction and have been
   reconstructed -- verify against the original file.  */
static rtx
arm_expand_ternop_builtin (enum insn_code icode,
			   tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  tree arg2 = CALL_EXPR_ARG (exp, 2);

  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  rtx op2 = expand_normal (arg2);
  rtx op3 = NULL_RTX;

  /* The sha1c, sha1p, sha1m crypto builtins require a different vec_select
     lane operand depending on endianness.  */
  bool builtin_sha1cpm_p = false;

  /* A 5-operand pattern here can only be one of the SHA1 hash-update
     insns, which take the extra endianness-lane operand.  */
  if (insn_data[icode].n_operands == 5)
    {
      gcc_assert (icode == CODE_FOR_crypto_sha1c
		  || icode == CODE_FOR_crypto_sha1p
		  || icode == CODE_FOR_crypto_sha1m);
      builtin_sha1cpm_p = true;
    }
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;
  machine_mode mode2 = insn_data[icode].operand[3].mode;

  /* Guard against const0_rtx standing in for a vector (error paths).  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);
  if (VECTOR_MODE_P (mode2))
    op2 = safe_vector_operand (op2, mode2);

  /* Make sure we have a valid destination of the right mode.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode)
	      && (GET_MODE (op2) == mode2 || GET_MODE (op2) == VOIDmode));

  /* Force operands that fail the insn predicates into registers.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);
  if (! (*insn_data[icode].operand[3].predicate) (op2, mode2))
    op2 = copy_to_mode_reg (mode2, op2);
  if (builtin_sha1cpm_p)
    op3 = GEN_INT (TARGET_BIG_END ? 1 : 0);

  if (builtin_sha1cpm_p)
    pat = GEN_FCN (icode) (target, op0, op1, op2, op3);
  else
    pat = GEN_FCN (icode) (target, op0, op1, op2);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Subroutine of arm_expand_builtin to take care of binop insns.
   NOTE(review): the function header, PAT declaration and emit/return
   tail were dropped by the extraction; reconstructed -- verify.  */
static rtx
arm_expand_binop_builtin (enum insn_code icode,
			  tree exp, rtx target)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx op0 = expand_normal (arg0);
  rtx op1 = expand_normal (arg1);
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  machine_mode mode1 = insn_data[icode].operand[2].mode;

  /* Guard against const0_rtx standing in for a vector (error paths).  */
  if (VECTOR_MODE_P (mode0))
    op0 = safe_vector_operand (op0, mode0);
  if (VECTOR_MODE_P (mode1))
    op1 = safe_vector_operand (op1, mode1);

  /* Make sure we have a valid destination of the right mode.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);

  gcc_assert ((GET_MODE (op0) == mode0 || GET_MODE (op0) == VOIDmode)
	      && (GET_MODE (op1) == mode1 || GET_MODE (op1) == VOIDmode));

  /* Force operands that fail the insn predicates into registers.  */
  if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
    op0 = copy_to_mode_reg (mode0, op0);
  if (! (*insn_data[icode].operand[2].predicate) (op1, mode1))
    op1 = copy_to_mode_reg (mode1, op1);

  pat = GEN_FCN (icode) (target, op0, op1);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
/* Subroutine of arm_expand_builtin to take care of unop insns.
   DO_LOAD non-zero means the single argument is a pointer and the
   operand is loaded from memory.
   NOTE(review): the function header, PAT/OP1 declarations, the
   do_load/else control lines and the emit/return tail were dropped by
   the extraction; reconstructed -- verify against the original.  */
static rtx
arm_expand_unop_builtin (enum insn_code icode,
			 tree exp, rtx target, int do_load)
{
  rtx pat;
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx op0 = expand_normal (arg0);
  rtx op1 = NULL_RTX;
  machine_mode tmode = insn_data[icode].operand[0].mode;
  machine_mode mode0 = insn_data[icode].operand[1].mode;
  bool builtin_sha1h_p = false;

  /* A 3-operand unop here can only be sha1h, which takes an extra
     endianness-lane operand.  */
  if (insn_data[icode].n_operands == 3)
    {
      gcc_assert (icode == CODE_FOR_crypto_sha1h);
      builtin_sha1h_p = true;
    }

  /* Make sure we have a valid destination of the right mode.  */
  if (! target
      || GET_MODE (target) != tmode
      || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
    target = gen_reg_rtx (tmode);
  if (do_load)
    op0 = gen_rtx_MEM (mode0, copy_to_mode_reg (Pmode, op0));
  else
    {
      /* Guard against const0_rtx standing in for a vector.  */
      if (VECTOR_MODE_P (mode0))
	op0 = safe_vector_operand (op0, mode0);

      if (! (*insn_data[icode].operand[1].predicate) (op0, mode0))
	op0 = copy_to_mode_reg (mode0, op0);
    }
  if (builtin_sha1h_p)
    op1 = GEN_INT (TARGET_BIG_END ? 1 : 0);

  if (builtin_sha1h_p)
    pat = GEN_FCN (icode) (target, op0, op1);
  else
    pat = GEN_FCN (icode) (target, op0);
  if (! pat)
    return 0;
  emit_insn (pat);
  return target;
}
2040 ARG_BUILTIN_COPY_TO_REG
,
2041 ARG_BUILTIN_CONSTANT
,
2042 ARG_BUILTIN_LANE_INDEX
,
2043 ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX
,
2044 ARG_BUILTIN_NEON_MEMORY
,
/* EXP is a pointer argument to a Neon load or store intrinsic.  Derive
   and return an expression for the accessed memory.

   The intrinsic function operates on a block of registers that has
   mode REG_MODE.  This block contains vectors of type TYPE_MODE.  The
   function references the memory at EXP of type TYPE and in mode
   MEM_MODE; this mode may be BLKmode if no more suitable mode is
   available.

   NOTE(review): the function header and the else-branch of the NELEMS
   computation were dropped by the extraction; the else branch is
   assumed to be "nelems = nvectors" (one element per vector for lane
   accesses) -- verify against the original file.  */
static tree
neon_dereference_pointer (tree exp, tree type, machine_mode mem_mode,
			  machine_mode reg_mode,
			  machine_mode vector_mode)
{
  HOST_WIDE_INT reg_size, vector_size, nvectors, nelems;
  tree elem_type, upper_bound, array_type;

  /* Work out the size of the register block in bytes.  */
  reg_size = GET_MODE_SIZE (reg_mode);

  /* Work out the size of each vector in bytes.  */
  vector_size = GET_MODE_SIZE (vector_mode);

  /* Work out how many vectors there are.  */
  gcc_assert (reg_size % vector_size == 0);
  nvectors = reg_size / vector_size;

  /* Work out the type of each element.  */
  gcc_assert (POINTER_TYPE_P (type));
  elem_type = TREE_TYPE (type);

  /* Work out how many elements are being loaded or stored.
     MEM_MODE == REG_MODE implies a one-to-one mapping between register
     and memory elements; anything else implies a lane load or store.  */
  if (mem_mode == reg_mode)
    nelems = vector_size * nvectors / int_size_in_bytes (elem_type);
  else
    nelems = nvectors;	/* NOTE(review): reconstructed.  */

  /* Create a type that describes the full access.  */
  upper_bound = build_int_cst (size_type_node, nelems - 1);
  array_type = build_array_type (elem_type, build_index_type (upper_bound));

  /* Dereference EXP using that type.  */
  return fold_build2 (MEM_REF, array_type, exp,
		      build_int_cst (build_pointer_type (array_type), 0));
}
2098 /* Expand a builtin. */
2100 arm_expand_builtin_args (rtx target
, machine_mode map_mode
, int fcode
,
2101 int icode
, int have_retval
, tree exp
,
2105 tree arg
[SIMD_MAX_BUILTIN_ARGS
];
2106 rtx op
[SIMD_MAX_BUILTIN_ARGS
];
2107 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
2108 machine_mode mode
[SIMD_MAX_BUILTIN_ARGS
];
2115 || GET_MODE (target
) != tmode
2116 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
)))
2117 target
= gen_reg_rtx (tmode
);
2119 formals
= TYPE_ARG_TYPES (TREE_TYPE (arm_builtin_decls
[fcode
]));
2123 builtin_arg thisarg
= args
[argc
];
2125 if (thisarg
== ARG_BUILTIN_STOP
)
2129 int opno
= argc
+ have_retval
;
2130 arg
[argc
] = CALL_EXPR_ARG (exp
, argc
);
2131 mode
[argc
] = insn_data
[icode
].operand
[opno
].mode
;
2132 if (thisarg
== ARG_BUILTIN_NEON_MEMORY
)
2134 machine_mode other_mode
2135 = insn_data
[icode
].operand
[1 - opno
].mode
;
2136 arg
[argc
] = neon_dereference_pointer (arg
[argc
],
2137 TREE_VALUE (formals
),
2138 mode
[argc
], other_mode
,
2142 /* Use EXPAND_MEMORY for ARG_BUILTIN_MEMORY and
2143 ARG_BUILTIN_NEON_MEMORY to ensure a MEM_P be returned. */
2144 op
[argc
] = expand_expr (arg
[argc
], NULL_RTX
, VOIDmode
,
2145 ((thisarg
== ARG_BUILTIN_MEMORY
2146 || thisarg
== ARG_BUILTIN_NEON_MEMORY
)
2147 ? EXPAND_MEMORY
: EXPAND_NORMAL
));
2151 case ARG_BUILTIN_MEMORY
:
2152 case ARG_BUILTIN_COPY_TO_REG
:
2153 if (POINTER_TYPE_P (TREE_TYPE (arg
[argc
])))
2154 op
[argc
] = convert_memory_address (Pmode
, op
[argc
]);
2155 /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
2156 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2157 (op
[argc
], mode
[argc
]))
2158 op
[argc
] = copy_to_mode_reg (mode
[argc
], op
[argc
]);
2161 case ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX
:
2162 gcc_assert (argc
> 1);
2163 if (CONST_INT_P (op
[argc
]))
2165 neon_lane_bounds (op
[argc
], 0,
2166 GET_MODE_NUNITS (map_mode
), exp
);
2167 /* Keep to GCC-vector-extension lane indices in the RTL. */
2169 GEN_INT (NEON_ENDIAN_LANE_N (map_mode
, INTVAL (op
[argc
])));
2173 case ARG_BUILTIN_LANE_INDEX
:
2174 /* Previous argument must be a vector, which this indexes. */
2175 gcc_assert (argc
> 0);
2176 if (CONST_INT_P (op
[argc
]))
2178 enum machine_mode vmode
= mode
[argc
- 1];
2179 neon_lane_bounds (op
[argc
], 0, GET_MODE_NUNITS (vmode
), exp
);
2181 /* If the lane index isn't a constant then the next
2184 case ARG_BUILTIN_CONSTANT
:
2186 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2187 (op
[argc
], mode
[argc
]))
2189 error ("%Kargument %d must be a constant immediate",
2195 case ARG_BUILTIN_NEON_MEMORY
:
2196 /* Check if expand failed. */
2197 if (op
[argc
] == const0_rtx
)
2199 gcc_assert (MEM_P (op
[argc
]));
2200 PUT_MODE (op
[argc
], mode
[argc
]);
2201 /* ??? arm_neon.h uses the same built-in functions for signed
2202 and unsigned accesses, casting where necessary. This isn't
2204 set_mem_alias_set (op
[argc
], 0);
2205 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2206 (op
[argc
], mode
[argc
]))
2207 op
[argc
] = (replace_equiv_address
2209 copy_to_mode_reg (Pmode
, XEXP (op
[argc
], 0))));
2212 case ARG_BUILTIN_STOP
:
2224 pat
= GEN_FCN (icode
) (target
, op
[0]);
2228 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
2232 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2]);
2236 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3]);
2240 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3], op
[4]);
2244 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3], op
[4], op
[5]);
2254 pat
= GEN_FCN (icode
) (op
[0]);
2258 pat
= GEN_FCN (icode
) (op
[0], op
[1]);
2262 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2]);
2266 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3]);
2270 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3], op
[4]);
2274 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3], op
[4], op
[5]);
2284 /* Check whether our current target implements the pattern chosen for this
2285 builtin and error out if not. */
2288 insn
= get_insns ();
2291 if (recog_memoized (insn
) < 0)
2292 error ("this builtin is not supported for this target");
2299 /* Expand a builtin. These builtins are "special" because they don't have
2300 symbolic constants defined per-instruction or per instruction-variant.
2301 Instead, the required info is looked up in the ARM_BUILTIN_DATA record that
2302 is passed into the function. */
2305 arm_expand_builtin_1 (int fcode
, tree exp
, rtx target
,
2306 arm_builtin_datum
*d
)
2308 enum insn_code icode
= d
->code
;
2309 builtin_arg args
[SIMD_MAX_BUILTIN_ARGS
+ 1];
2310 int num_args
= insn_data
[d
->code
].n_operands
;
2315 if (IN_RANGE (fcode
, ARM_BUILTIN_VFP_BASE
, ARM_BUILTIN_ACLE_BASE
- 1))
2318 is_void
= !!(d
->qualifiers
[0] & qualifier_void
);
2320 num_args
+= is_void
;
2322 for (k
= 1; k
< num_args
; k
++)
2324 /* We have four arrays of data, each indexed in a different fashion.
2325 qualifiers - element 0 always describes the function return type.
2326 operands - element 0 is either the operand for return value (if
2327 the function has a non-void return type) or the operand for the
2329 expr_args - element 0 always holds the first argument.
2330 args - element 0 is always used for the return type. */
2331 int qualifiers_k
= k
;
2332 int operands_k
= k
- is_void
;
2333 int expr_args_k
= k
- 1;
2335 if (d
->qualifiers
[qualifiers_k
] & qualifier_lane_index
)
2336 args
[k
] = ARG_BUILTIN_LANE_INDEX
;
2337 else if (d
->qualifiers
[qualifiers_k
] & qualifier_struct_load_store_lane_index
)
2338 args
[k
] = ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX
;
2339 else if (d
->qualifiers
[qualifiers_k
] & qualifier_immediate
)
2340 args
[k
] = ARG_BUILTIN_CONSTANT
;
2341 else if (d
->qualifiers
[qualifiers_k
] & qualifier_maybe_immediate
)
2344 = expand_normal (CALL_EXPR_ARG (exp
,
2346 /* Handle constants only if the predicate allows it. */
2347 bool op_const_int_p
=
2349 && (*insn_data
[icode
].operand
[operands_k
].predicate
)
2350 (arg
, insn_data
[icode
].operand
[operands_k
].mode
));
2351 args
[k
] = op_const_int_p
? ARG_BUILTIN_CONSTANT
: ARG_BUILTIN_COPY_TO_REG
;
2353 else if (d
->qualifiers
[qualifiers_k
] & qualifier_pointer
)
2356 args
[k
] = ARG_BUILTIN_NEON_MEMORY
;
2358 args
[k
] = ARG_BUILTIN_MEMORY
;
2361 args
[k
] = ARG_BUILTIN_COPY_TO_REG
;
2363 args
[k
] = ARG_BUILTIN_STOP
;
2365 /* The interface to arm_expand_builtin_args expects a 0 if
2366 the function is void, and a 1 if it is not. */
2367 return arm_expand_builtin_args
2368 (target
, d
->mode
, fcode
, icode
, !is_void
, exp
,
2372 /* Expand an ACLE builtin, i.e. those registered only if their respective
2373 target constraints are met. This check happens within
2374 arm_expand_builtin_args. */
2377 arm_expand_acle_builtin (int fcode
, tree exp
, rtx target
)
2380 arm_builtin_datum
*d
2381 = &acle_builtin_data
[fcode
- ARM_BUILTIN_ACLE_PATTERN_START
];
2383 return arm_expand_builtin_1 (fcode
, exp
, target
, d
);
2386 /* Expand a Neon builtin, i.e. those registered only if TARGET_NEON holds.
2387 Most of these are "special" because they don't have symbolic
2388 constants defined per-instruction or per instruction-variant. Instead, the
2389 required info is looked up in the table neon_builtin_data. */
2392 arm_expand_neon_builtin (int fcode
, tree exp
, rtx target
)
2394 if (fcode
>= ARM_BUILTIN_NEON_BASE
&& ! TARGET_NEON
)
2396 fatal_error (input_location
,
2397 "You must enable NEON instructions"
2398 " (e.g. -mfloat-abi=softfp -mfpu=neon)"
2399 " to use these intrinsics.");
2403 if (fcode
== ARM_BUILTIN_NEON_LANE_CHECK
)
2405 /* Builtin is only to check bounds of the lane passed to some intrinsics
2406 that are implemented with gcc vector extensions in arm_neon.h. */
2408 tree nlanes
= CALL_EXPR_ARG (exp
, 0);
2409 gcc_assert (TREE_CODE (nlanes
) == INTEGER_CST
);
2410 rtx lane_idx
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2411 if (CONST_INT_P (lane_idx
))
2412 neon_lane_bounds (lane_idx
, 0, TREE_INT_CST_LOW (nlanes
), exp
);
2414 error ("%Klane index must be a constant immediate", exp
);
2415 /* Don't generate any RTL. */
2419 arm_builtin_datum
*d
2420 = &neon_builtin_data
[fcode
- ARM_BUILTIN_NEON_PATTERN_START
];
2422 return arm_expand_builtin_1 (fcode
, exp
, target
, d
);
2425 /* Expand a VFP builtin. These builtins are treated like
2426 neon builtins except that the data is looked up in table
2427 VFP_BUILTIN_DATA. */
2430 arm_expand_vfp_builtin (int fcode
, tree exp
, rtx target
)
2432 if (fcode
>= ARM_BUILTIN_VFP_BASE
&& ! TARGET_HARD_FLOAT
)
2434 fatal_error (input_location
,
2435 "You must enable VFP instructions"
2436 " to use these intrinsics.");
2440 arm_builtin_datum
*d
2441 = &vfp_builtin_data
[fcode
- ARM_BUILTIN_VFP_PATTERN_START
];
2443 return arm_expand_builtin_1 (fcode
, exp
, target
, d
);
2446 /* Expand an expression EXP that calls a built-in function,
2447 with result going to TARGET if that's convenient
2448 (and in mode MODE if that's convenient).
2449 SUBTARGET may be used as the target for computing one of EXP's operands.
2450 IGNORE is nonzero if the value is to be ignored. */
2453 arm_expand_builtin (tree exp
,
2455 rtx subtarget ATTRIBUTE_UNUSED
,
2456 machine_mode mode ATTRIBUTE_UNUSED
,
2457 int ignore ATTRIBUTE_UNUSED
)
2459 const struct builtin_description
* d
;
2460 enum insn_code icode
;
2461 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
2469 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
2480 if (fcode
>= ARM_BUILTIN_ACLE_BASE
)
2481 return arm_expand_acle_builtin (fcode
, exp
, target
);
2483 if (fcode
>= ARM_BUILTIN_NEON_BASE
)
2484 return arm_expand_neon_builtin (fcode
, exp
, target
);
2486 if (fcode
>= ARM_BUILTIN_VFP_BASE
)
2487 return arm_expand_vfp_builtin (fcode
, exp
, target
);
2489 /* Check in the context of the function making the call whether the
2490 builtin is supported. */
2491 if (fcode
>= ARM_BUILTIN_CRYPTO_BASE
2492 && (!TARGET_CRYPTO
|| !TARGET_HARD_FLOAT
))
2494 fatal_error (input_location
,
2495 "You must enable crypto instructions"
2496 " (e.g. include -mfloat-abi=softfp -mfpu=crypto-neon...)"
2497 " to use these intrinsics.");
2503 case ARM_BUILTIN_GET_FPSCR
:
2504 case ARM_BUILTIN_SET_FPSCR
:
2505 if (fcode
== ARM_BUILTIN_GET_FPSCR
)
2507 icode
= CODE_FOR_get_fpscr
;
2508 target
= gen_reg_rtx (SImode
);
2509 pat
= GEN_FCN (icode
) (target
);
2514 icode
= CODE_FOR_set_fpscr
;
2515 arg0
= CALL_EXPR_ARG (exp
, 0);
2516 op0
= expand_normal (arg0
);
2517 pat
= GEN_FCN (icode
) (op0
);
2522 case ARM_BUILTIN_CMSE_NONSECURE_CALLER
:
2523 target
= gen_reg_rtx (SImode
);
2524 op0
= arm_return_addr (0, NULL_RTX
);
2525 emit_insn (gen_addsi3 (target
, op0
, const1_rtx
));
2528 case ARM_BUILTIN_TEXTRMSB
:
2529 case ARM_BUILTIN_TEXTRMUB
:
2530 case ARM_BUILTIN_TEXTRMSH
:
2531 case ARM_BUILTIN_TEXTRMUH
:
2532 case ARM_BUILTIN_TEXTRMSW
:
2533 case ARM_BUILTIN_TEXTRMUW
:
2534 icode
= (fcode
== ARM_BUILTIN_TEXTRMSB
? CODE_FOR_iwmmxt_textrmsb
2535 : fcode
== ARM_BUILTIN_TEXTRMUB
? CODE_FOR_iwmmxt_textrmub
2536 : fcode
== ARM_BUILTIN_TEXTRMSH
? CODE_FOR_iwmmxt_textrmsh
2537 : fcode
== ARM_BUILTIN_TEXTRMUH
? CODE_FOR_iwmmxt_textrmuh
2538 : CODE_FOR_iwmmxt_textrmw
);
2540 arg0
= CALL_EXPR_ARG (exp
, 0);
2541 arg1
= CALL_EXPR_ARG (exp
, 1);
2542 op0
= expand_normal (arg0
);
2543 op1
= expand_normal (arg1
);
2544 tmode
= insn_data
[icode
].operand
[0].mode
;
2545 mode0
= insn_data
[icode
].operand
[1].mode
;
2546 mode1
= insn_data
[icode
].operand
[2].mode
;
2548 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2549 op0
= copy_to_mode_reg (mode0
, op0
);
2550 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2552 /* @@@ better error message */
2553 error ("selector must be an immediate");
2554 return gen_reg_rtx (tmode
);
2557 opint
= INTVAL (op1
);
2558 if (fcode
== ARM_BUILTIN_TEXTRMSB
|| fcode
== ARM_BUILTIN_TEXTRMUB
)
2560 if (opint
> 7 || opint
< 0)
2561 error ("the range of selector should be in 0 to 7");
2563 else if (fcode
== ARM_BUILTIN_TEXTRMSH
|| fcode
== ARM_BUILTIN_TEXTRMUH
)
2565 if (opint
> 3 || opint
< 0)
2566 error ("the range of selector should be in 0 to 3");
2568 else /* ARM_BUILTIN_TEXTRMSW || ARM_BUILTIN_TEXTRMUW. */
2570 if (opint
> 1 || opint
< 0)
2571 error ("the range of selector should be in 0 to 1");
2575 || GET_MODE (target
) != tmode
2576 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2577 target
= gen_reg_rtx (tmode
);
2578 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2584 case ARM_BUILTIN_WALIGNI
:
2585 /* If op2 is immediate, call walighi, else call walighr. */
2586 arg0
= CALL_EXPR_ARG (exp
, 0);
2587 arg1
= CALL_EXPR_ARG (exp
, 1);
2588 arg2
= CALL_EXPR_ARG (exp
, 2);
2589 op0
= expand_normal (arg0
);
2590 op1
= expand_normal (arg1
);
2591 op2
= expand_normal (arg2
);
2592 if (CONST_INT_P (op2
))
2594 icode
= CODE_FOR_iwmmxt_waligni
;
2595 tmode
= insn_data
[icode
].operand
[0].mode
;
2596 mode0
= insn_data
[icode
].operand
[1].mode
;
2597 mode1
= insn_data
[icode
].operand
[2].mode
;
2598 mode2
= insn_data
[icode
].operand
[3].mode
;
2599 if (!(*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2600 op0
= copy_to_mode_reg (mode0
, op0
);
2601 if (!(*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2602 op1
= copy_to_mode_reg (mode1
, op1
);
2603 gcc_assert ((*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
));
2604 selector
= INTVAL (op2
);
2605 if (selector
> 7 || selector
< 0)
2606 error ("the range of selector should be in 0 to 7");
2610 icode
= CODE_FOR_iwmmxt_walignr
;
2611 tmode
= insn_data
[icode
].operand
[0].mode
;
2612 mode0
= insn_data
[icode
].operand
[1].mode
;
2613 mode1
= insn_data
[icode
].operand
[2].mode
;
2614 mode2
= insn_data
[icode
].operand
[3].mode
;
2615 if (!(*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2616 op0
= copy_to_mode_reg (mode0
, op0
);
2617 if (!(*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2618 op1
= copy_to_mode_reg (mode1
, op1
);
2619 if (!(*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2620 op2
= copy_to_mode_reg (mode2
, op2
);
2623 || GET_MODE (target
) != tmode
2624 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2625 target
= gen_reg_rtx (tmode
);
2626 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2632 case ARM_BUILTIN_TINSRB
:
2633 case ARM_BUILTIN_TINSRH
:
2634 case ARM_BUILTIN_TINSRW
:
2635 case ARM_BUILTIN_WMERGE
:
2636 icode
= (fcode
== ARM_BUILTIN_TINSRB
? CODE_FOR_iwmmxt_tinsrb
2637 : fcode
== ARM_BUILTIN_TINSRH
? CODE_FOR_iwmmxt_tinsrh
2638 : fcode
== ARM_BUILTIN_WMERGE
? CODE_FOR_iwmmxt_wmerge
2639 : CODE_FOR_iwmmxt_tinsrw
);
2640 arg0
= CALL_EXPR_ARG (exp
, 0);
2641 arg1
= CALL_EXPR_ARG (exp
, 1);
2642 arg2
= CALL_EXPR_ARG (exp
, 2);
2643 op0
= expand_normal (arg0
);
2644 op1
= expand_normal (arg1
);
2645 op2
= expand_normal (arg2
);
2646 tmode
= insn_data
[icode
].operand
[0].mode
;
2647 mode0
= insn_data
[icode
].operand
[1].mode
;
2648 mode1
= insn_data
[icode
].operand
[2].mode
;
2649 mode2
= insn_data
[icode
].operand
[3].mode
;
2651 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2652 op0
= copy_to_mode_reg (mode0
, op0
);
2653 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2654 op1
= copy_to_mode_reg (mode1
, op1
);
2655 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2657 error ("selector must be an immediate");
2660 if (icode
== CODE_FOR_iwmmxt_wmerge
)
2662 selector
= INTVAL (op2
);
2663 if (selector
> 7 || selector
< 0)
2664 error ("the range of selector should be in 0 to 7");
2666 if ((icode
== CODE_FOR_iwmmxt_tinsrb
)
2667 || (icode
== CODE_FOR_iwmmxt_tinsrh
)
2668 || (icode
== CODE_FOR_iwmmxt_tinsrw
))
2671 selector
= INTVAL (op2
);
2672 if (icode
== CODE_FOR_iwmmxt_tinsrb
&& (selector
< 0 || selector
> 7))
2673 error ("the range of selector should be in 0 to 7");
2674 else if (icode
== CODE_FOR_iwmmxt_tinsrh
&& (selector
< 0 ||selector
> 3))
2675 error ("the range of selector should be in 0 to 3");
2676 else if (icode
== CODE_FOR_iwmmxt_tinsrw
&& (selector
< 0 ||selector
> 1))
2677 error ("the range of selector should be in 0 to 1");
2679 op2
= GEN_INT (mask
);
2682 || GET_MODE (target
) != tmode
2683 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2684 target
= gen_reg_rtx (tmode
);
2685 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2691 case ARM_BUILTIN_SETWCGR0
:
2692 case ARM_BUILTIN_SETWCGR1
:
2693 case ARM_BUILTIN_SETWCGR2
:
2694 case ARM_BUILTIN_SETWCGR3
:
2695 icode
= (fcode
== ARM_BUILTIN_SETWCGR0
? CODE_FOR_iwmmxt_setwcgr0
2696 : fcode
== ARM_BUILTIN_SETWCGR1
? CODE_FOR_iwmmxt_setwcgr1
2697 : fcode
== ARM_BUILTIN_SETWCGR2
? CODE_FOR_iwmmxt_setwcgr2
2698 : CODE_FOR_iwmmxt_setwcgr3
);
2699 arg0
= CALL_EXPR_ARG (exp
, 0);
2700 op0
= expand_normal (arg0
);
2701 mode0
= insn_data
[icode
].operand
[0].mode
;
2702 if (!(*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
2703 op0
= copy_to_mode_reg (mode0
, op0
);
2704 pat
= GEN_FCN (icode
) (op0
);
2710 case ARM_BUILTIN_GETWCGR0
:
2711 case ARM_BUILTIN_GETWCGR1
:
2712 case ARM_BUILTIN_GETWCGR2
:
2713 case ARM_BUILTIN_GETWCGR3
:
2714 icode
= (fcode
== ARM_BUILTIN_GETWCGR0
? CODE_FOR_iwmmxt_getwcgr0
2715 : fcode
== ARM_BUILTIN_GETWCGR1
? CODE_FOR_iwmmxt_getwcgr1
2716 : fcode
== ARM_BUILTIN_GETWCGR2
? CODE_FOR_iwmmxt_getwcgr2
2717 : CODE_FOR_iwmmxt_getwcgr3
);
2718 tmode
= insn_data
[icode
].operand
[0].mode
;
2720 || GET_MODE (target
) != tmode
2721 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2722 target
= gen_reg_rtx (tmode
);
2723 pat
= GEN_FCN (icode
) (target
);
2729 case ARM_BUILTIN_WSHUFH
:
2730 icode
= CODE_FOR_iwmmxt_wshufh
;
2731 arg0
= CALL_EXPR_ARG (exp
, 0);
2732 arg1
= CALL_EXPR_ARG (exp
, 1);
2733 op0
= expand_normal (arg0
);
2734 op1
= expand_normal (arg1
);
2735 tmode
= insn_data
[icode
].operand
[0].mode
;
2736 mode1
= insn_data
[icode
].operand
[1].mode
;
2737 mode2
= insn_data
[icode
].operand
[2].mode
;
2739 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode1
))
2740 op0
= copy_to_mode_reg (mode1
, op0
);
2741 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode2
))
2743 error ("mask must be an immediate");
2746 selector
= INTVAL (op1
);
2747 if (selector
< 0 || selector
> 255)
2748 error ("the range of mask should be in 0 to 255");
2750 || GET_MODE (target
) != tmode
2751 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2752 target
= gen_reg_rtx (tmode
);
2753 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2759 case ARM_BUILTIN_WMADDS
:
2760 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmadds
, exp
, target
);
2761 case ARM_BUILTIN_WMADDSX
:
2762 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsx
, exp
, target
);
2763 case ARM_BUILTIN_WMADDSN
:
2764 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsn
, exp
, target
);
2765 case ARM_BUILTIN_WMADDU
:
2766 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddu
, exp
, target
);
2767 case ARM_BUILTIN_WMADDUX
:
2768 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddux
, exp
, target
);
2769 case ARM_BUILTIN_WMADDUN
:
2770 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddun
, exp
, target
);
2771 case ARM_BUILTIN_WSADBZ
:
2772 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz
, exp
, target
);
2773 case ARM_BUILTIN_WSADHZ
:
2774 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz
, exp
, target
);
2776 /* Several three-argument builtins. */
2777 case ARM_BUILTIN_WMACS
:
2778 case ARM_BUILTIN_WMACU
:
2779 case ARM_BUILTIN_TMIA
:
2780 case ARM_BUILTIN_TMIAPH
:
2781 case ARM_BUILTIN_TMIATT
:
2782 case ARM_BUILTIN_TMIATB
:
2783 case ARM_BUILTIN_TMIABT
:
2784 case ARM_BUILTIN_TMIABB
:
2785 case ARM_BUILTIN_WQMIABB
:
2786 case ARM_BUILTIN_WQMIABT
:
2787 case ARM_BUILTIN_WQMIATB
:
2788 case ARM_BUILTIN_WQMIATT
:
2789 case ARM_BUILTIN_WQMIABBN
:
2790 case ARM_BUILTIN_WQMIABTN
:
2791 case ARM_BUILTIN_WQMIATBN
:
2792 case ARM_BUILTIN_WQMIATTN
:
2793 case ARM_BUILTIN_WMIABB
:
2794 case ARM_BUILTIN_WMIABT
:
2795 case ARM_BUILTIN_WMIATB
:
2796 case ARM_BUILTIN_WMIATT
:
2797 case ARM_BUILTIN_WMIABBN
:
2798 case ARM_BUILTIN_WMIABTN
:
2799 case ARM_BUILTIN_WMIATBN
:
2800 case ARM_BUILTIN_WMIATTN
:
2801 case ARM_BUILTIN_WMIAWBB
:
2802 case ARM_BUILTIN_WMIAWBT
:
2803 case ARM_BUILTIN_WMIAWTB
:
2804 case ARM_BUILTIN_WMIAWTT
:
2805 case ARM_BUILTIN_WMIAWBBN
:
2806 case ARM_BUILTIN_WMIAWBTN
:
2807 case ARM_BUILTIN_WMIAWTBN
:
2808 case ARM_BUILTIN_WMIAWTTN
:
2809 case ARM_BUILTIN_WSADB
:
2810 case ARM_BUILTIN_WSADH
:
2811 icode
= (fcode
== ARM_BUILTIN_WMACS
? CODE_FOR_iwmmxt_wmacs
2812 : fcode
== ARM_BUILTIN_WMACU
? CODE_FOR_iwmmxt_wmacu
2813 : fcode
== ARM_BUILTIN_TMIA
? CODE_FOR_iwmmxt_tmia
2814 : fcode
== ARM_BUILTIN_TMIAPH
? CODE_FOR_iwmmxt_tmiaph
2815 : fcode
== ARM_BUILTIN_TMIABB
? CODE_FOR_iwmmxt_tmiabb
2816 : fcode
== ARM_BUILTIN_TMIABT
? CODE_FOR_iwmmxt_tmiabt
2817 : fcode
== ARM_BUILTIN_TMIATB
? CODE_FOR_iwmmxt_tmiatb
2818 : fcode
== ARM_BUILTIN_TMIATT
? CODE_FOR_iwmmxt_tmiatt
2819 : fcode
== ARM_BUILTIN_WQMIABB
? CODE_FOR_iwmmxt_wqmiabb
2820 : fcode
== ARM_BUILTIN_WQMIABT
? CODE_FOR_iwmmxt_wqmiabt
2821 : fcode
== ARM_BUILTIN_WQMIATB
? CODE_FOR_iwmmxt_wqmiatb
2822 : fcode
== ARM_BUILTIN_WQMIATT
? CODE_FOR_iwmmxt_wqmiatt
2823 : fcode
== ARM_BUILTIN_WQMIABBN
? CODE_FOR_iwmmxt_wqmiabbn
2824 : fcode
== ARM_BUILTIN_WQMIABTN
? CODE_FOR_iwmmxt_wqmiabtn
2825 : fcode
== ARM_BUILTIN_WQMIATBN
? CODE_FOR_iwmmxt_wqmiatbn
2826 : fcode
== ARM_BUILTIN_WQMIATTN
? CODE_FOR_iwmmxt_wqmiattn
2827 : fcode
== ARM_BUILTIN_WMIABB
? CODE_FOR_iwmmxt_wmiabb
2828 : fcode
== ARM_BUILTIN_WMIABT
? CODE_FOR_iwmmxt_wmiabt
2829 : fcode
== ARM_BUILTIN_WMIATB
? CODE_FOR_iwmmxt_wmiatb
2830 : fcode
== ARM_BUILTIN_WMIATT
? CODE_FOR_iwmmxt_wmiatt
2831 : fcode
== ARM_BUILTIN_WMIABBN
? CODE_FOR_iwmmxt_wmiabbn
2832 : fcode
== ARM_BUILTIN_WMIABTN
? CODE_FOR_iwmmxt_wmiabtn
2833 : fcode
== ARM_BUILTIN_WMIATBN
? CODE_FOR_iwmmxt_wmiatbn
2834 : fcode
== ARM_BUILTIN_WMIATTN
? CODE_FOR_iwmmxt_wmiattn
2835 : fcode
== ARM_BUILTIN_WMIAWBB
? CODE_FOR_iwmmxt_wmiawbb
2836 : fcode
== ARM_BUILTIN_WMIAWBT
? CODE_FOR_iwmmxt_wmiawbt
2837 : fcode
== ARM_BUILTIN_WMIAWTB
? CODE_FOR_iwmmxt_wmiawtb
2838 : fcode
== ARM_BUILTIN_WMIAWTT
? CODE_FOR_iwmmxt_wmiawtt
2839 : fcode
== ARM_BUILTIN_WMIAWBBN
? CODE_FOR_iwmmxt_wmiawbbn
2840 : fcode
== ARM_BUILTIN_WMIAWBTN
? CODE_FOR_iwmmxt_wmiawbtn
2841 : fcode
== ARM_BUILTIN_WMIAWTBN
? CODE_FOR_iwmmxt_wmiawtbn
2842 : fcode
== ARM_BUILTIN_WMIAWTTN
? CODE_FOR_iwmmxt_wmiawttn
2843 : fcode
== ARM_BUILTIN_WSADB
? CODE_FOR_iwmmxt_wsadb
2844 : CODE_FOR_iwmmxt_wsadh
);
2845 arg0
= CALL_EXPR_ARG (exp
, 0);
2846 arg1
= CALL_EXPR_ARG (exp
, 1);
2847 arg2
= CALL_EXPR_ARG (exp
, 2);
2848 op0
= expand_normal (arg0
);
2849 op1
= expand_normal (arg1
);
2850 op2
= expand_normal (arg2
);
2851 tmode
= insn_data
[icode
].operand
[0].mode
;
2852 mode0
= insn_data
[icode
].operand
[1].mode
;
2853 mode1
= insn_data
[icode
].operand
[2].mode
;
2854 mode2
= insn_data
[icode
].operand
[3].mode
;
2856 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2857 op0
= copy_to_mode_reg (mode0
, op0
);
2858 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2859 op1
= copy_to_mode_reg (mode1
, op1
);
2860 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2861 op2
= copy_to_mode_reg (mode2
, op2
);
2863 || GET_MODE (target
) != tmode
2864 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2865 target
= gen_reg_rtx (tmode
);
2866 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2872 case ARM_BUILTIN_WZERO
:
2873 target
= gen_reg_rtx (DImode
);
2874 emit_insn (gen_iwmmxt_clrdi (target
));
2877 case ARM_BUILTIN_WSRLHI
:
2878 case ARM_BUILTIN_WSRLWI
:
2879 case ARM_BUILTIN_WSRLDI
:
2880 case ARM_BUILTIN_WSLLHI
:
2881 case ARM_BUILTIN_WSLLWI
:
2882 case ARM_BUILTIN_WSLLDI
:
2883 case ARM_BUILTIN_WSRAHI
:
2884 case ARM_BUILTIN_WSRAWI
:
2885 case ARM_BUILTIN_WSRADI
:
2886 case ARM_BUILTIN_WRORHI
:
2887 case ARM_BUILTIN_WRORWI
:
2888 case ARM_BUILTIN_WRORDI
:
2889 case ARM_BUILTIN_WSRLH
:
2890 case ARM_BUILTIN_WSRLW
:
2891 case ARM_BUILTIN_WSRLD
:
2892 case ARM_BUILTIN_WSLLH
:
2893 case ARM_BUILTIN_WSLLW
:
2894 case ARM_BUILTIN_WSLLD
:
2895 case ARM_BUILTIN_WSRAH
:
2896 case ARM_BUILTIN_WSRAW
:
2897 case ARM_BUILTIN_WSRAD
:
2898 case ARM_BUILTIN_WRORH
:
2899 case ARM_BUILTIN_WRORW
:
2900 case ARM_BUILTIN_WRORD
:
2901 icode
= (fcode
== ARM_BUILTIN_WSRLHI
? CODE_FOR_lshrv4hi3_iwmmxt
2902 : fcode
== ARM_BUILTIN_WSRLWI
? CODE_FOR_lshrv2si3_iwmmxt
2903 : fcode
== ARM_BUILTIN_WSRLDI
? CODE_FOR_lshrdi3_iwmmxt
2904 : fcode
== ARM_BUILTIN_WSLLHI
? CODE_FOR_ashlv4hi3_iwmmxt
2905 : fcode
== ARM_BUILTIN_WSLLWI
? CODE_FOR_ashlv2si3_iwmmxt
2906 : fcode
== ARM_BUILTIN_WSLLDI
? CODE_FOR_ashldi3_iwmmxt
2907 : fcode
== ARM_BUILTIN_WSRAHI
? CODE_FOR_ashrv4hi3_iwmmxt
2908 : fcode
== ARM_BUILTIN_WSRAWI
? CODE_FOR_ashrv2si3_iwmmxt
2909 : fcode
== ARM_BUILTIN_WSRADI
? CODE_FOR_ashrdi3_iwmmxt
2910 : fcode
== ARM_BUILTIN_WRORHI
? CODE_FOR_rorv4hi3
2911 : fcode
== ARM_BUILTIN_WRORWI
? CODE_FOR_rorv2si3
2912 : fcode
== ARM_BUILTIN_WRORDI
? CODE_FOR_rordi3
2913 : fcode
== ARM_BUILTIN_WSRLH
? CODE_FOR_lshrv4hi3_di
2914 : fcode
== ARM_BUILTIN_WSRLW
? CODE_FOR_lshrv2si3_di
2915 : fcode
== ARM_BUILTIN_WSRLD
? CODE_FOR_lshrdi3_di
2916 : fcode
== ARM_BUILTIN_WSLLH
? CODE_FOR_ashlv4hi3_di
2917 : fcode
== ARM_BUILTIN_WSLLW
? CODE_FOR_ashlv2si3_di
2918 : fcode
== ARM_BUILTIN_WSLLD
? CODE_FOR_ashldi3_di
2919 : fcode
== ARM_BUILTIN_WSRAH
? CODE_FOR_ashrv4hi3_di
2920 : fcode
== ARM_BUILTIN_WSRAW
? CODE_FOR_ashrv2si3_di
2921 : fcode
== ARM_BUILTIN_WSRAD
? CODE_FOR_ashrdi3_di
2922 : fcode
== ARM_BUILTIN_WRORH
? CODE_FOR_rorv4hi3_di
2923 : fcode
== ARM_BUILTIN_WRORW
? CODE_FOR_rorv2si3_di
2924 : fcode
== ARM_BUILTIN_WRORD
? CODE_FOR_rordi3_di
2925 : CODE_FOR_nothing
);
2926 arg1
= CALL_EXPR_ARG (exp
, 1);
2927 op1
= expand_normal (arg1
);
2928 if (GET_MODE (op1
) == VOIDmode
)
2931 if ((fcode
== ARM_BUILTIN_WRORHI
|| fcode
== ARM_BUILTIN_WRORWI
2932 || fcode
== ARM_BUILTIN_WRORH
|| fcode
== ARM_BUILTIN_WRORW
)
2933 && (imm
< 0 || imm
> 32))
2935 if (fcode
== ARM_BUILTIN_WRORHI
)
2936 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_rori_pi16 in code.");
2937 else if (fcode
== ARM_BUILTIN_WRORWI
)
2938 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_rori_pi32 in code.");
2939 else if (fcode
== ARM_BUILTIN_WRORH
)
2940 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_ror_pi16 in code.");
2942 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_ror_pi32 in code.");
2944 else if ((fcode
== ARM_BUILTIN_WRORDI
|| fcode
== ARM_BUILTIN_WRORD
)
2945 && (imm
< 0 || imm
> 64))
2947 if (fcode
== ARM_BUILTIN_WRORDI
)
2948 error ("the range of count should be in 0 to 64. please check the intrinsic _mm_rori_si64 in code.");
2950 error ("the range of count should be in 0 to 64. please check the intrinsic _mm_ror_si64 in code.");
2954 if (fcode
== ARM_BUILTIN_WSRLHI
)
2955 error ("the count should be no less than 0. please check the intrinsic _mm_srli_pi16 in code.");
2956 else if (fcode
== ARM_BUILTIN_WSRLWI
)
2957 error ("the count should be no less than 0. please check the intrinsic _mm_srli_pi32 in code.");
2958 else if (fcode
== ARM_BUILTIN_WSRLDI
)
2959 error ("the count should be no less than 0. please check the intrinsic _mm_srli_si64 in code.");
2960 else if (fcode
== ARM_BUILTIN_WSLLHI
)
2961 error ("the count should be no less than 0. please check the intrinsic _mm_slli_pi16 in code.");
2962 else if (fcode
== ARM_BUILTIN_WSLLWI
)
2963 error ("the count should be no less than 0. please check the intrinsic _mm_slli_pi32 in code.");
2964 else if (fcode
== ARM_BUILTIN_WSLLDI
)
2965 error ("the count should be no less than 0. please check the intrinsic _mm_slli_si64 in code.");
2966 else if (fcode
== ARM_BUILTIN_WSRAHI
)
2967 error ("the count should be no less than 0. please check the intrinsic _mm_srai_pi16 in code.");
2968 else if (fcode
== ARM_BUILTIN_WSRAWI
)
2969 error ("the count should be no less than 0. please check the intrinsic _mm_srai_pi32 in code.");
2970 else if (fcode
== ARM_BUILTIN_WSRADI
)
2971 error ("the count should be no less than 0. please check the intrinsic _mm_srai_si64 in code.");
2972 else if (fcode
== ARM_BUILTIN_WSRLH
)
2973 error ("the count should be no less than 0. please check the intrinsic _mm_srl_pi16 in code.");
2974 else if (fcode
== ARM_BUILTIN_WSRLW
)
2975 error ("the count should be no less than 0. please check the intrinsic _mm_srl_pi32 in code.");
2976 else if (fcode
== ARM_BUILTIN_WSRLD
)
2977 error ("the count should be no less than 0. please check the intrinsic _mm_srl_si64 in code.");
2978 else if (fcode
== ARM_BUILTIN_WSLLH
)
2979 error ("the count should be no less than 0. please check the intrinsic _mm_sll_pi16 in code.");
2980 else if (fcode
== ARM_BUILTIN_WSLLW
)
2981 error ("the count should be no less than 0. please check the intrinsic _mm_sll_pi32 in code.");
2982 else if (fcode
== ARM_BUILTIN_WSLLD
)
2983 error ("the count should be no less than 0. please check the intrinsic _mm_sll_si64 in code.");
2984 else if (fcode
== ARM_BUILTIN_WSRAH
)
2985 error ("the count should be no less than 0. please check the intrinsic _mm_sra_pi16 in code.");
2986 else if (fcode
== ARM_BUILTIN_WSRAW
)
2987 error ("the count should be no less than 0. please check the intrinsic _mm_sra_pi32 in code.");
2989 error ("the count should be no less than 0. please check the intrinsic _mm_sra_si64 in code.");
2992 return arm_expand_binop_builtin (icode
, exp
, target
);
2998 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
2999 if (d
->code
== (const enum arm_builtins
) fcode
)
3000 return arm_expand_binop_builtin (d
->icode
, exp
, target
);
3002 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
3003 if (d
->code
== (const enum arm_builtins
) fcode
)
3004 return arm_expand_unop_builtin (d
->icode
, exp
, target
, 0);
3006 for (i
= 0, d
= bdesc_3arg
; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
3007 if (d
->code
== (const enum arm_builtins
) fcode
)
3008 return arm_expand_ternop_builtin (d
->icode
, exp
, target
);
3010 /* @@@ Should really do something sensible here. */
3015 arm_builtin_vectorized_function (unsigned int fn
, tree type_out
, tree type_in
)
3017 machine_mode in_mode
, out_mode
;
3019 bool out_unsigned_p
= TYPE_UNSIGNED (type_out
);
3021 /* Can't provide any vectorized builtins when we can't use NEON. */
3025 if (TREE_CODE (type_out
) != VECTOR_TYPE
3026 || TREE_CODE (type_in
) != VECTOR_TYPE
)
3029 out_mode
= TYPE_MODE (TREE_TYPE (type_out
));
3030 out_n
= TYPE_VECTOR_SUBPARTS (type_out
);
3031 in_mode
= TYPE_MODE (TREE_TYPE (type_in
));
3032 in_n
= TYPE_VECTOR_SUBPARTS (type_in
);
3034 /* ARM_CHECK_BUILTIN_MODE and ARM_FIND_VRINT_VARIANT are used to find the
3035 decl of the vectorized builtin for the appropriate vector mode.
3036 NULL_TREE is returned if no such builtin is available. */
3037 #undef ARM_CHECK_BUILTIN_MODE
3038 #define ARM_CHECK_BUILTIN_MODE(C) \
3040 && flag_unsafe_math_optimizations \
3041 && ARM_CHECK_BUILTIN_MODE_1 (C))
3043 #undef ARM_CHECK_BUILTIN_MODE_1
3044 #define ARM_CHECK_BUILTIN_MODE_1(C) \
3045 (out_mode == SFmode && out_n == C \
3046 && in_mode == SFmode && in_n == C)
3048 #undef ARM_FIND_VRINT_VARIANT
3049 #define ARM_FIND_VRINT_VARIANT(N) \
3050 (ARM_CHECK_BUILTIN_MODE (2) \
3051 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sf, false) \
3052 : (ARM_CHECK_BUILTIN_MODE (4) \
3053 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sf, false) \
3059 return ARM_FIND_VRINT_VARIANT (vrintm
);
3061 return ARM_FIND_VRINT_VARIANT (vrintp
);
3063 return ARM_FIND_VRINT_VARIANT (vrintz
);
3065 return ARM_FIND_VRINT_VARIANT (vrinta
);
3066 #undef ARM_CHECK_BUILTIN_MODE_1
3067 #define ARM_CHECK_BUILTIN_MODE_1(C) \
3068 (out_mode == SImode && out_n == C \
3069 && in_mode == SFmode && in_n == C)
3071 #define ARM_FIND_VCVT_VARIANT(N) \
3072 (ARM_CHECK_BUILTIN_MODE (2) \
3073 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sfv2si, false) \
3074 : (ARM_CHECK_BUILTIN_MODE (4) \
3075 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sfv4si, false) \
3078 #define ARM_FIND_VCVTU_VARIANT(N) \
3079 (ARM_CHECK_BUILTIN_MODE (2) \
3080 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv2sfv2si, false) \
3081 : (ARM_CHECK_BUILTIN_MODE (4) \
3082 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv4sfv4si, false) \
3085 return (out_unsigned_p
3086 ? ARM_FIND_VCVTU_VARIANT (vcvta
)
3087 : ARM_FIND_VCVT_VARIANT (vcvta
));
3089 return (out_unsigned_p
3090 ? ARM_FIND_VCVTU_VARIANT (vcvtp
)
3091 : ARM_FIND_VCVT_VARIANT (vcvtp
));
3093 return (out_unsigned_p
3094 ? ARM_FIND_VCVTU_VARIANT (vcvtm
)
3095 : ARM_FIND_VCVT_VARIANT (vcvtm
));
3096 #undef ARM_CHECK_BUILTIN_MODE
3097 #define ARM_CHECK_BUILTIN_MODE(C, N) \
3098 (out_mode == N##mode && out_n == C \
3099 && in_mode == N##mode && in_n == C)
3100 case CFN_BUILT_IN_BSWAP16
:
3101 if (ARM_CHECK_BUILTIN_MODE (4, HI
))
3102 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4hi
, false);
3103 else if (ARM_CHECK_BUILTIN_MODE (8, HI
))
3104 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv8hi
, false);
3107 case CFN_BUILT_IN_BSWAP32
:
3108 if (ARM_CHECK_BUILTIN_MODE (2, SI
))
3109 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2si
, false);
3110 else if (ARM_CHECK_BUILTIN_MODE (4, SI
))
3111 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4si
, false);
3114 case CFN_BUILT_IN_BSWAP64
:
3115 if (ARM_CHECK_BUILTIN_MODE (2, DI
))
3116 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2di
, false);
3120 if (ARM_CHECK_BUILTIN_MODE (2, SF
))
3121 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv2sf
, false);
3122 else if (ARM_CHECK_BUILTIN_MODE (4, SF
))
3123 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv4sf
, false);
3132 #undef ARM_FIND_VCVT_VARIANT
3133 #undef ARM_FIND_VCVTU_VARIANT
3134 #undef ARM_CHECK_BUILTIN_MODE
3135 #undef ARM_FIND_VRINT_VARIANT
3138 arm_atomic_assign_expand_fenv (tree
*hold
, tree
*clear
, tree
*update
)
3140 const unsigned ARM_FE_INVALID
= 1;
3141 const unsigned ARM_FE_DIVBYZERO
= 2;
3142 const unsigned ARM_FE_OVERFLOW
= 4;
3143 const unsigned ARM_FE_UNDERFLOW
= 8;
3144 const unsigned ARM_FE_INEXACT
= 16;
3145 const unsigned HOST_WIDE_INT ARM_FE_ALL_EXCEPT
= (ARM_FE_INVALID
3150 const unsigned HOST_WIDE_INT ARM_FE_EXCEPT_SHIFT
= 8;
3151 tree fenv_var
, get_fpscr
, set_fpscr
, mask
, ld_fenv
, masked_fenv
;
3152 tree new_fenv_var
, reload_fenv
, restore_fnenv
;
3153 tree update_call
, atomic_feraiseexcept
, hold_fnclex
;
3155 if (!TARGET_HARD_FLOAT
)
3158 /* Generate the equivalent of :
3159 unsigned int fenv_var;
3160 fenv_var = __builtin_arm_get_fpscr ();
3162 unsigned int masked_fenv;
3163 masked_fenv = fenv_var & mask;
3165 __builtin_arm_set_fpscr (masked_fenv); */
3167 fenv_var
= create_tmp_var_raw (unsigned_type_node
);
3168 get_fpscr
= arm_builtin_decls
[ARM_BUILTIN_GET_FPSCR
];
3169 set_fpscr
= arm_builtin_decls
[ARM_BUILTIN_SET_FPSCR
];
3170 mask
= build_int_cst (unsigned_type_node
,
3171 ~((ARM_FE_ALL_EXCEPT
<< ARM_FE_EXCEPT_SHIFT
)
3172 | ARM_FE_ALL_EXCEPT
));
3173 ld_fenv
= build2 (MODIFY_EXPR
, unsigned_type_node
,
3174 fenv_var
, build_call_expr (get_fpscr
, 0));
3175 masked_fenv
= build2 (BIT_AND_EXPR
, unsigned_type_node
, fenv_var
, mask
);
3176 hold_fnclex
= build_call_expr (set_fpscr
, 1, masked_fenv
);
3177 *hold
= build2 (COMPOUND_EXPR
, void_type_node
,
3178 build2 (COMPOUND_EXPR
, void_type_node
, masked_fenv
, ld_fenv
),
3181 /* Store the value of masked_fenv to clear the exceptions:
3182 __builtin_arm_set_fpscr (masked_fenv); */
3184 *clear
= build_call_expr (set_fpscr
, 1, masked_fenv
);
3186 /* Generate the equivalent of :
3187 unsigned int new_fenv_var;
3188 new_fenv_var = __builtin_arm_get_fpscr ();
3190 __builtin_arm_set_fpscr (fenv_var);
3192 __atomic_feraiseexcept (new_fenv_var); */
3194 new_fenv_var
= create_tmp_var_raw (unsigned_type_node
);
3195 reload_fenv
= build2 (MODIFY_EXPR
, unsigned_type_node
, new_fenv_var
,
3196 build_call_expr (get_fpscr
, 0));
3197 restore_fnenv
= build_call_expr (set_fpscr
, 1, fenv_var
);
3198 atomic_feraiseexcept
= builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT
);
3199 update_call
= build_call_expr (atomic_feraiseexcept
, 1,
3200 fold_convert (integer_type_node
, new_fenv_var
));
3201 *update
= build2 (COMPOUND_EXPR
, void_type_node
,
3202 build2 (COMPOUND_EXPR
, void_type_node
,
3203 reload_fenv
, restore_fnenv
), update_call
);
3206 #include "gt-arm-builtins.h"