1 /* Description of builtins used by the ARM backend.
2 Copyright (C) 2014-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
27 #include "gimple-expr.h"
32 #include "diagnostic-core.h"
33 #include "fold-const.h"
34 #include "stor-layout.h"
37 #include "langhooks.h"
39 #define SIMD_MAX_BUILTIN_ARGS 5
41 enum arm_type_qualifiers
46 qualifier_unsigned
= 0x1, /* 1 << 0 */
48 qualifier_const
= 0x2, /* 1 << 1 */
50 qualifier_pointer
= 0x4, /* 1 << 2 */
51 /* Used when expanding arguments if an operand could
53 qualifier_immediate
= 0x8, /* 1 << 3 */
54 qualifier_maybe_immediate
= 0x10, /* 1 << 4 */
56 qualifier_void
= 0x20, /* 1 << 5 */
57 /* Some patterns may have internal operands, this qualifier is an
58 instruction to the initialisation code to skip this operand. */
59 qualifier_internal
= 0x40, /* 1 << 6 */
60 /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
61 rather than using the type of the operand. */
62 qualifier_map_mode
= 0x80, /* 1 << 7 */
63 /* qualifier_pointer | qualifier_map_mode */
64 qualifier_pointer_map_mode
= 0x84,
65 /* qualifier_const_pointer | qualifier_map_mode */
66 qualifier_const_pointer_map_mode
= 0x86,
67 /* Polynomial types. */
68 qualifier_poly
= 0x100,
69 /* Lane indices - must be within range of previous argument = a vector. */
70 qualifier_lane_index
= 0x200
73 /* The qualifier_internal allows generation of a unary builtin from
74 a pattern with a third pseudo-operand such as a match_scratch.
76 static enum arm_type_qualifiers
77 arm_unop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
78 = { qualifier_none
, qualifier_none
, qualifier_internal
};
79 #define UNOP_QUALIFIERS (arm_unop_qualifiers)
81 /* unsigned T (unsigned T). */
82 static enum arm_type_qualifiers
83 arm_bswap_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
84 = { qualifier_unsigned
, qualifier_unsigned
};
85 #define BSWAP_QUALIFIERS (arm_bswap_qualifiers)
87 /* T (T, T [maybe_immediate]). */
88 static enum arm_type_qualifiers
89 arm_binop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
90 = { qualifier_none
, qualifier_none
, qualifier_maybe_immediate
};
91 #define BINOP_QUALIFIERS (arm_binop_qualifiers)
94 static enum arm_type_qualifiers
95 arm_ternop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
96 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_none
};
97 #define TERNOP_QUALIFIERS (arm_ternop_qualifiers)
99 /* T (T, immediate). */
100 static enum arm_type_qualifiers
101 arm_binop_imm_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
102 = { qualifier_none
, qualifier_none
, qualifier_immediate
};
103 #define BINOP_IMM_QUALIFIERS (arm_binop_imm_qualifiers)
105 /* T (T, lane index). */
106 static enum arm_type_qualifiers
107 arm_getlane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
108 = { qualifier_none
, qualifier_none
, qualifier_lane_index
};
109 #define GETLANE_QUALIFIERS (arm_getlane_qualifiers)
111 /* T (T, T, T, immediate). */
112 static enum arm_type_qualifiers
113 arm_mac_n_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
114 = { qualifier_none
, qualifier_none
, qualifier_none
,
115 qualifier_none
, qualifier_immediate
};
116 #define MAC_N_QUALIFIERS (arm_mac_n_qualifiers)
118 /* T (T, T, T, lane index). */
119 static enum arm_type_qualifiers
120 arm_mac_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
121 = { qualifier_none
, qualifier_none
, qualifier_none
,
122 qualifier_none
, qualifier_lane_index
};
123 #define MAC_LANE_QUALIFIERS (arm_mac_lane_qualifiers)
125 /* T (T, T, immediate). */
126 static enum arm_type_qualifiers
127 arm_ternop_imm_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
128 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_immediate
};
129 #define TERNOP_IMM_QUALIFIERS (arm_ternop_imm_qualifiers)
131 /* T (T, T, lane index). */
132 static enum arm_type_qualifiers
133 arm_setlane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
134 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_lane_index
};
135 #define SETLANE_QUALIFIERS (arm_setlane_qualifiers)
138 static enum arm_type_qualifiers
139 arm_combine_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
140 = { qualifier_none
, qualifier_none
, qualifier_none
};
141 #define COMBINE_QUALIFIERS (arm_combine_qualifiers)
143 /* T ([T element type] *). */
144 static enum arm_type_qualifiers
145 arm_load1_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
146 = { qualifier_none
, qualifier_const_pointer_map_mode
};
147 #define LOAD1_QUALIFIERS (arm_load1_qualifiers)
149 /* T ([T element type] *, T, immediate). */
150 static enum arm_type_qualifiers
151 arm_load1_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
152 = { qualifier_none
, qualifier_const_pointer_map_mode
,
153 qualifier_none
, qualifier_immediate
};
154 #define LOAD1LANE_QUALIFIERS (arm_load1_lane_qualifiers)
156 /* The first argument (return type) of a store should be void type,
157 which we represent with qualifier_void. Their first operand will be
158 a DImode pointer to the location to store to, so we must use
159 qualifier_map_mode | qualifier_pointer to build a pointer to the
160 element type of the vector.
162 void ([T element type] *, T). */
163 static enum arm_type_qualifiers
164 arm_store1_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
165 = { qualifier_void
, qualifier_pointer_map_mode
, qualifier_none
};
166 #define STORE1_QUALIFIERS (arm_store1_qualifiers)
168 /* void ([T element type] *, T, immediate). */
169 static enum arm_type_qualifiers
170 arm_storestruct_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
171 = { qualifier_void
, qualifier_pointer_map_mode
,
172 qualifier_none
, qualifier_immediate
};
173 #define STORE1LANE_QUALIFIERS (arm_storestruct_lane_qualifiers)
/* Map the vector-type suffix used in builtin names to the machine mode
   it denotes.  First the 64-bit (D-register) modes, then the 128-bit
   (Q-register) modes.  */
#define v8qi_UP  V8QImode
#define v4hi_UP  V4HImode
#define v4hf_UP  V4HFmode
#define v2si_UP  V2SImode
#define v2sf_UP  V2SFmode

#define v16qi_UP V16QImode
#define v8hi_UP  V8HImode
#define v8hf_UP  V8HFmode
#define v4si_UP  V4SImode
#define v4sf_UP  V4SFmode
#define v2di_UP  V2DImode
196 const enum insn_code code
;
198 enum arm_type_qualifiers
*qualifiers
;
199 } neon_builtin_datum
;
201 #define CF(N,X) CODE_FOR_neon_##N##X
203 #define VAR1(T, N, A) \
204 {#N #A, UP (A), CF (N, A), 0, T##_QUALIFIERS},
205 #define VAR2(T, N, A, B) \
208 #define VAR3(T, N, A, B, C) \
211 #define VAR4(T, N, A, B, C, D) \
212 VAR3 (T, N, A, B, C) \
214 #define VAR5(T, N, A, B, C, D, E) \
215 VAR4 (T, N, A, B, C, D) \
217 #define VAR6(T, N, A, B, C, D, E, F) \
218 VAR5 (T, N, A, B, C, D, E) \
220 #define VAR7(T, N, A, B, C, D, E, F, G) \
221 VAR6 (T, N, A, B, C, D, E, F) \
223 #define VAR8(T, N, A, B, C, D, E, F, G, H) \
224 VAR7 (T, N, A, B, C, D, E, F, G) \
226 #define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
227 VAR8 (T, N, A, B, C, D, E, F, G, H) \
229 #define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
230 VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
232 #define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
233 VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
235 #define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
236 VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
239 /* The NEON builtin data can be found in arm_neon_builtins.def.
240 The mode entries in the following table correspond to the "key" type of the
241 instruction variant, i.e. equivalent to that which would be specified after
242 the assembler mnemonic, which usually refers to the last vector operand.
243 The modes listed per instruction should be the same as those defined for
244 that instruction's pattern in neon.md. */
246 static neon_builtin_datum neon_builtin_data
[] =
248 #include "arm_neon_builtins.def"
254 #define VAR1(T, N, X) \
255 ARM_BUILTIN_NEON_##N##X,
259 ARM_BUILTIN_GETWCGR0
,
260 ARM_BUILTIN_GETWCGR1
,
261 ARM_BUILTIN_GETWCGR2
,
262 ARM_BUILTIN_GETWCGR3
,
264 ARM_BUILTIN_SETWCGR0
,
265 ARM_BUILTIN_SETWCGR1
,
266 ARM_BUILTIN_SETWCGR2
,
267 ARM_BUILTIN_SETWCGR3
,
291 ARM_BUILTIN_WALIGNR0
,
292 ARM_BUILTIN_WALIGNR1
,
293 ARM_BUILTIN_WALIGNR2
,
294 ARM_BUILTIN_WALIGNR3
,
303 ARM_BUILTIN_TMOVMSKB
,
304 ARM_BUILTIN_TMOVMSKH
,
305 ARM_BUILTIN_TMOVMSKW
,
314 ARM_BUILTIN_WPACKHSS
,
315 ARM_BUILTIN_WPACKWSS
,
316 ARM_BUILTIN_WPACKDSS
,
317 ARM_BUILTIN_WPACKHUS
,
318 ARM_BUILTIN_WPACKWUS
,
319 ARM_BUILTIN_WPACKDUS
,
348 ARM_BUILTIN_WCMPGTUB
,
349 ARM_BUILTIN_WCMPGTUH
,
350 ARM_BUILTIN_WCMPGTUW
,
351 ARM_BUILTIN_WCMPGTSB
,
352 ARM_BUILTIN_WCMPGTSH
,
353 ARM_BUILTIN_WCMPGTSW
,
355 ARM_BUILTIN_TEXTRMSB
,
356 ARM_BUILTIN_TEXTRMSH
,
357 ARM_BUILTIN_TEXTRMSW
,
358 ARM_BUILTIN_TEXTRMUB
,
359 ARM_BUILTIN_TEXTRMUH
,
360 ARM_BUILTIN_TEXTRMUW
,
410 ARM_BUILTIN_WUNPCKIHB
,
411 ARM_BUILTIN_WUNPCKIHH
,
412 ARM_BUILTIN_WUNPCKIHW
,
413 ARM_BUILTIN_WUNPCKILB
,
414 ARM_BUILTIN_WUNPCKILH
,
415 ARM_BUILTIN_WUNPCKILW
,
417 ARM_BUILTIN_WUNPCKEHSB
,
418 ARM_BUILTIN_WUNPCKEHSH
,
419 ARM_BUILTIN_WUNPCKEHSW
,
420 ARM_BUILTIN_WUNPCKEHUB
,
421 ARM_BUILTIN_WUNPCKEHUH
,
422 ARM_BUILTIN_WUNPCKEHUW
,
423 ARM_BUILTIN_WUNPCKELSB
,
424 ARM_BUILTIN_WUNPCKELSH
,
425 ARM_BUILTIN_WUNPCKELSW
,
426 ARM_BUILTIN_WUNPCKELUB
,
427 ARM_BUILTIN_WUNPCKELUH
,
428 ARM_BUILTIN_WUNPCKELUW
,
434 ARM_BUILTIN_WADDSUBHX
,
435 ARM_BUILTIN_WSUBADDHX
,
437 ARM_BUILTIN_WABSDIFFB
,
438 ARM_BUILTIN_WABSDIFFH
,
439 ARM_BUILTIN_WABSDIFFW
,
456 ARM_BUILTIN_WMULWSMR
,
457 ARM_BUILTIN_WMULWUMR
,
468 ARM_BUILTIN_WQMULWMR
,
470 ARM_BUILTIN_WADDBHUSM
,
471 ARM_BUILTIN_WADDBHUSL
,
478 ARM_BUILTIN_WQMIABBN
,
479 ARM_BUILTIN_WQMIABTN
,
480 ARM_BUILTIN_WQMIATBN
,
481 ARM_BUILTIN_WQMIATTN
,
498 ARM_BUILTIN_WMIAWBBN
,
499 ARM_BUILTIN_WMIAWBTN
,
500 ARM_BUILTIN_WMIAWTBN
,
501 ARM_BUILTIN_WMIAWTTN
,
512 ARM_BUILTIN_GET_FPSCR
,
513 ARM_BUILTIN_SET_FPSCR
,
519 #define CRYPTO1(L, U, M1, M2) \
520 ARM_BUILTIN_CRYPTO_##U,
521 #define CRYPTO2(L, U, M1, M2, M3) \
522 ARM_BUILTIN_CRYPTO_##U,
523 #define CRYPTO3(L, U, M1, M2, M3, M4) \
524 ARM_BUILTIN_CRYPTO_##U,
526 #include "crypto.def"
532 ARM_BUILTIN_NEON_BASE
,
533 ARM_BUILTIN_NEON_LANE_CHECK
= ARM_BUILTIN_NEON_BASE
,
535 #include "arm_neon_builtins.def"
540 #define ARM_BUILTIN_NEON_PATTERN_START \
541 (ARM_BUILTIN_MAX - ARRAY_SIZE (neon_builtin_data))
555 static GTY(()) tree arm_builtin_decls
[ARM_BUILTIN_MAX
];
557 #define NUM_DREG_TYPES 5
558 #define NUM_QREG_TYPES 6
560 /* Internal scalar builtin types. These types are used to support
561 neon intrinsic builtins. They are _not_ user-visible types. Therefore
562 the mangling for these types are implementation defined. */
563 const char *arm_scalar_builtin_types
[] = {
571 "__builtin_neon_uqi",
572 "__builtin_neon_uhi",
573 "__builtin_neon_usi",
574 "__builtin_neon_udi",
582 #define ENTRY(E, M, Q, S, T, G) E,
585 #include "arm-simd-builtin-types.def"
590 struct arm_simd_type_info
592 enum arm_simd_type type
;
594 /* Internal type name. */
597 /* Internal type name(mangled). The mangled names conform to the
598 AAPCS (see "Procedure Call Standard for the ARM Architecture",
599 Appendix A). To qualify for emission with the mangled names defined in
600 that document, a vector type must not only be of the correct mode but also
601 be of the correct internal Neon vector type (e.g. __simd64_int8_t);
602 these types are registered by arm_init_simd_builtin_types (). In other
603 words, vector types defined in other ways e.g. via vector_size attribute
604 will get default mangled names. */
613 /* Machine mode the internal type maps to. */
617 enum arm_type_qualifiers q
;
620 #define ENTRY(E, M, Q, S, T, G) \
622 "__simd" #S "_" #T "_t", \
623 #G "__simd" #S "_" #T "_t", \
624 NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
625 static struct arm_simd_type_info arm_simd_types
[] = {
626 #include "arm-simd-builtin-types.def"
630 static tree arm_simd_floatHF_type_node
= NULL_TREE
;
631 static tree arm_simd_intOI_type_node
= NULL_TREE
;
632 static tree arm_simd_intEI_type_node
= NULL_TREE
;
633 static tree arm_simd_intCI_type_node
= NULL_TREE
;
634 static tree arm_simd_intXI_type_node
= NULL_TREE
;
635 static tree arm_simd_polyQI_type_node
= NULL_TREE
;
636 static tree arm_simd_polyHI_type_node
= NULL_TREE
;
637 static tree arm_simd_polyDI_type_node
= NULL_TREE
;
638 static tree arm_simd_polyTI_type_node
= NULL_TREE
;
641 arm_mangle_builtin_scalar_type (const_tree type
)
645 while (arm_scalar_builtin_types
[i
] != NULL
)
647 const char *name
= arm_scalar_builtin_types
[i
];
649 if (TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
650 && DECL_NAME (TYPE_NAME (type
))
651 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))), name
))
652 return arm_scalar_builtin_types
[i
];
659 arm_mangle_builtin_vector_type (const_tree type
)
662 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
664 for (i
= 0; i
< nelts
; i
++)
665 if (arm_simd_types
[i
].mode
== TYPE_MODE (type
)
667 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
668 && DECL_NAME (TYPE_NAME (type
))
670 (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
671 arm_simd_types
[i
].name
))
672 return arm_simd_types
[i
].mangle
;
678 arm_mangle_builtin_type (const_tree type
)
681 /* Walk through all the AArch64 builtins types tables to filter out the
683 if ((mangle
= arm_mangle_builtin_vector_type (type
))
684 || (mangle
= arm_mangle_builtin_scalar_type (type
)))
691 arm_simd_builtin_std_type (enum machine_mode mode
,
692 enum arm_type_qualifiers q
)
694 #define QUAL_TYPE(M) \
695 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
699 return QUAL_TYPE (QI
);
701 return QUAL_TYPE (HI
);
703 return QUAL_TYPE (SI
);
705 return QUAL_TYPE (DI
);
707 return QUAL_TYPE (TI
);
709 return arm_simd_intOI_type_node
;
711 return arm_simd_intEI_type_node
;
713 return arm_simd_intCI_type_node
;
715 return arm_simd_intXI_type_node
;
717 return arm_simd_floatHF_type_node
;
719 return float_type_node
;
721 return double_type_node
;
729 arm_lookup_simd_builtin_type (enum machine_mode mode
,
730 enum arm_type_qualifiers q
)
733 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
735 /* Non-poly scalar modes map to standard types not in the table. */
736 if (q
!= qualifier_poly
&& !VECTOR_MODE_P (mode
))
737 return arm_simd_builtin_std_type (mode
, q
);
739 for (i
= 0; i
< nelts
; i
++)
740 if (arm_simd_types
[i
].mode
== mode
741 && arm_simd_types
[i
].q
== q
)
742 return arm_simd_types
[i
].itype
;
744 /* Note that we won't have caught the underlying type for poly64x2_t
745 in the above table. This gets default mangling. */
751 arm_simd_builtin_type (enum machine_mode mode
,
752 bool unsigned_p
, bool poly_p
)
755 return arm_lookup_simd_builtin_type (mode
, qualifier_poly
);
757 return arm_lookup_simd_builtin_type (mode
, qualifier_unsigned
);
759 return arm_lookup_simd_builtin_type (mode
, qualifier_none
);
763 arm_init_simd_builtin_types (void)
766 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
769 /* Poly types are a world of their own. In order to maintain legacy
770 ABI, they get initialized using the old interface, and don't get
771 an entry in our mangling table, consequently, they get default
772 mangling. As a further gotcha, poly8_t and poly16_t are signed
773 types, poly64_t and poly128_t are unsigned types. */
774 arm_simd_polyQI_type_node
775 = build_distinct_type_copy (intQI_type_node
);
776 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyQI_type_node
,
777 "__builtin_neon_poly8");
778 arm_simd_polyHI_type_node
779 = build_distinct_type_copy (intHI_type_node
);
780 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyHI_type_node
,
781 "__builtin_neon_poly16");
782 arm_simd_polyDI_type_node
783 = build_distinct_type_copy (unsigned_intDI_type_node
);
784 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyDI_type_node
,
785 "__builtin_neon_poly64");
786 arm_simd_polyTI_type_node
787 = build_distinct_type_copy (unsigned_intTI_type_node
);
788 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyTI_type_node
,
789 "__builtin_neon_poly128");
791 /* Init all the element types built by the front-end. */
792 arm_simd_types
[Int8x8_t
].eltype
= intQI_type_node
;
793 arm_simd_types
[Int8x16_t
].eltype
= intQI_type_node
;
794 arm_simd_types
[Int16x4_t
].eltype
= intHI_type_node
;
795 arm_simd_types
[Int16x8_t
].eltype
= intHI_type_node
;
796 arm_simd_types
[Int32x2_t
].eltype
= intSI_type_node
;
797 arm_simd_types
[Int32x4_t
].eltype
= intSI_type_node
;
798 arm_simd_types
[Int64x2_t
].eltype
= intDI_type_node
;
799 arm_simd_types
[Uint8x8_t
].eltype
= unsigned_intQI_type_node
;
800 arm_simd_types
[Uint8x16_t
].eltype
= unsigned_intQI_type_node
;
801 arm_simd_types
[Uint16x4_t
].eltype
= unsigned_intHI_type_node
;
802 arm_simd_types
[Uint16x8_t
].eltype
= unsigned_intHI_type_node
;
803 arm_simd_types
[Uint32x2_t
].eltype
= unsigned_intSI_type_node
;
804 arm_simd_types
[Uint32x4_t
].eltype
= unsigned_intSI_type_node
;
805 arm_simd_types
[Uint64x2_t
].eltype
= unsigned_intDI_type_node
;
807 /* Init poly vector element types with scalar poly types. */
808 arm_simd_types
[Poly8x8_t
].eltype
= arm_simd_polyQI_type_node
;
809 arm_simd_types
[Poly8x16_t
].eltype
= arm_simd_polyQI_type_node
;
810 arm_simd_types
[Poly16x4_t
].eltype
= arm_simd_polyHI_type_node
;
811 arm_simd_types
[Poly16x8_t
].eltype
= arm_simd_polyHI_type_node
;
812 /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
815 /* Continue with standard types. */
816 /* The __builtin_simd{64,128}_float16 types are kept private unless
817 we have a scalar __fp16 type. */
818 arm_simd_types
[Float16x4_t
].eltype
= arm_simd_floatHF_type_node
;
819 arm_simd_types
[Float16x8_t
].eltype
= arm_simd_floatHF_type_node
;
820 arm_simd_types
[Float32x2_t
].eltype
= float_type_node
;
821 arm_simd_types
[Float32x4_t
].eltype
= float_type_node
;
823 for (i
= 0; i
< nelts
; i
++)
825 tree eltype
= arm_simd_types
[i
].eltype
;
826 enum machine_mode mode
= arm_simd_types
[i
].mode
;
828 if (arm_simd_types
[i
].itype
== NULL
)
829 arm_simd_types
[i
].itype
=
830 build_distinct_type_copy
831 (build_vector_type (eltype
, GET_MODE_NUNITS (mode
)));
833 tdecl
= add_builtin_type (arm_simd_types
[i
].name
,
834 arm_simd_types
[i
].itype
);
835 TYPE_NAME (arm_simd_types
[i
].itype
) = tdecl
;
836 SET_TYPE_STRUCTURAL_EQUALITY (arm_simd_types
[i
].itype
);
839 #define AARCH_BUILD_SIGNED_TYPE(mode) \
840 make_signed_type (GET_MODE_PRECISION (mode));
841 arm_simd_intOI_type_node
= AARCH_BUILD_SIGNED_TYPE (OImode
);
842 arm_simd_intEI_type_node
= AARCH_BUILD_SIGNED_TYPE (EImode
);
843 arm_simd_intCI_type_node
= AARCH_BUILD_SIGNED_TYPE (CImode
);
844 arm_simd_intXI_type_node
= AARCH_BUILD_SIGNED_TYPE (XImode
);
845 #undef AARCH_BUILD_SIGNED_TYPE
847 tdecl
= add_builtin_type
848 ("__builtin_neon_ei" , arm_simd_intEI_type_node
);
849 TYPE_NAME (arm_simd_intEI_type_node
) = tdecl
;
850 tdecl
= add_builtin_type
851 ("__builtin_neon_oi" , arm_simd_intOI_type_node
);
852 TYPE_NAME (arm_simd_intOI_type_node
) = tdecl
;
853 tdecl
= add_builtin_type
854 ("__builtin_neon_ci" , arm_simd_intCI_type_node
);
855 TYPE_NAME (arm_simd_intCI_type_node
) = tdecl
;
856 tdecl
= add_builtin_type
857 ("__builtin_neon_xi" , arm_simd_intXI_type_node
);
858 TYPE_NAME (arm_simd_intXI_type_node
) = tdecl
;
862 arm_init_simd_builtin_scalar_types (void)
864 /* Define typedefs for all the standard scalar types. */
865 (*lang_hooks
.types
.register_builtin_type
) (intQI_type_node
,
866 "__builtin_neon_qi");
867 (*lang_hooks
.types
.register_builtin_type
) (intHI_type_node
,
868 "__builtin_neon_hi");
869 (*lang_hooks
.types
.register_builtin_type
) (intSI_type_node
,
870 "__builtin_neon_si");
871 (*lang_hooks
.types
.register_builtin_type
) (float_type_node
,
872 "__builtin_neon_sf");
873 (*lang_hooks
.types
.register_builtin_type
) (intDI_type_node
,
874 "__builtin_neon_di");
875 (*lang_hooks
.types
.register_builtin_type
) (double_type_node
,
876 "__builtin_neon_df");
877 (*lang_hooks
.types
.register_builtin_type
) (intTI_type_node
,
878 "__builtin_neon_ti");
880 /* Unsigned integer types for various mode sizes. */
881 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intQI_type_node
,
882 "__builtin_neon_uqi");
883 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intHI_type_node
,
884 "__builtin_neon_uhi");
885 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intSI_type_node
,
886 "__builtin_neon_usi");
887 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intDI_type_node
,
888 "__builtin_neon_udi");
889 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intTI_type_node
,
890 "__builtin_neon_uti");
894 arm_init_neon_builtins (void)
896 unsigned int i
, fcode
= ARM_BUILTIN_NEON_PATTERN_START
;
898 arm_init_simd_builtin_types ();
900 /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
901 Therefore we need to preserve the old __builtin scalar types. It can be
902 removed once all the intrinsics become strongly typed using the qualifier
904 arm_init_simd_builtin_scalar_types ();
906 tree lane_check_fpr
= build_function_type_list (void_type_node
,
910 arm_builtin_decls
[ARM_BUILTIN_NEON_LANE_CHECK
] =
911 add_builtin_function ("__builtin_arm_lane_check", lane_check_fpr
,
912 ARM_BUILTIN_NEON_LANE_CHECK
, BUILT_IN_MD
,
915 for (i
= 0; i
< ARRAY_SIZE (neon_builtin_data
); i
++, fcode
++)
917 bool print_type_signature_p
= false;
918 char type_signature
[SIMD_MAX_BUILTIN_ARGS
] = { 0 };
919 neon_builtin_datum
*d
= &neon_builtin_data
[i
];
926 /* We must track two variables here. op_num is
927 the operand number as in the RTL pattern. This is
928 required to access the mode (e.g. V4SF mode) of the
929 argument, from which the base type can be derived.
930 arg_num is an index in to the qualifiers data, which
931 gives qualifiers to the type (e.g. const unsigned).
932 The reason these two variables may differ by one is the
933 void return type. While all return types take the 0th entry
934 in the qualifiers array, there is no operand for them in the
936 int op_num
= insn_data
[d
->code
].n_operands
- 1;
937 int arg_num
= d
->qualifiers
[0] & qualifier_void
940 tree return_type
= void_type_node
, args
= void_list_node
;
943 /* Build a function type directly from the insn_data for this
944 builtin. The build_function_type () function takes care of
945 removing duplicates for us. */
946 for (; op_num
>= 0; arg_num
--, op_num
--)
948 machine_mode op_mode
= insn_data
[d
->code
].operand
[op_num
].mode
;
949 enum arm_type_qualifiers qualifiers
= d
->qualifiers
[arg_num
];
951 if (qualifiers
& qualifier_unsigned
)
953 type_signature
[arg_num
] = 'u';
954 print_type_signature_p
= true;
956 else if (qualifiers
& qualifier_poly
)
958 type_signature
[arg_num
] = 'p';
959 print_type_signature_p
= true;
962 type_signature
[arg_num
] = 's';
964 /* Skip an internal operand for vget_{low, high}. */
965 if (qualifiers
& qualifier_internal
)
968 /* Some builtins have different user-facing types
969 for certain arguments, encoded in d->mode. */
970 if (qualifiers
& qualifier_map_mode
)
973 /* For pointers, we want a pointer to the basic type
975 if (qualifiers
& qualifier_pointer
&& VECTOR_MODE_P (op_mode
))
976 op_mode
= GET_MODE_INNER (op_mode
);
978 eltype
= arm_simd_builtin_type
980 (qualifiers
& qualifier_unsigned
) != 0,
981 (qualifiers
& qualifier_poly
) != 0);
982 gcc_assert (eltype
!= NULL
);
984 /* Add qualifiers. */
985 if (qualifiers
& qualifier_const
)
986 eltype
= build_qualified_type (eltype
, TYPE_QUAL_CONST
);
988 if (qualifiers
& qualifier_pointer
)
989 eltype
= build_pointer_type (eltype
);
991 /* If we have reached arg_num == 0, we are at a non-void
992 return type. Otherwise, we are still processing
995 return_type
= eltype
;
997 args
= tree_cons (NULL_TREE
, eltype
, args
);
1000 ftype
= build_function_type (return_type
, args
);
1002 gcc_assert (ftype
!= NULL
);
1004 if (print_type_signature_p
)
1005 snprintf (namebuf
, sizeof (namebuf
), "__builtin_neon_%s_%s",
1006 d
->name
, type_signature
);
1008 snprintf (namebuf
, sizeof (namebuf
), "__builtin_neon_%s",
1011 fndecl
= add_builtin_function (namebuf
, ftype
, fcode
, BUILT_IN_MD
,
1013 arm_builtin_decls
[fcode
] = fndecl
;
1016 if (TARGET_CRYPTO
&& TARGET_HARD_FLOAT
)
1018 tree V16UQI_type_node
= arm_simd_builtin_type (V16QImode
,
1022 tree V4USI_type_node
= arm_simd_builtin_type (V4SImode
,
1026 tree v16uqi_ftype_v16uqi
1027 = build_function_type_list (V16UQI_type_node
, V16UQI_type_node
,
1030 tree v16uqi_ftype_v16uqi_v16uqi
1031 = build_function_type_list (V16UQI_type_node
, V16UQI_type_node
,
1032 V16UQI_type_node
, NULL_TREE
);
1034 tree v4usi_ftype_v4usi
1035 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1038 tree v4usi_ftype_v4usi_v4usi
1039 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1040 V4USI_type_node
, NULL_TREE
);
1042 tree v4usi_ftype_v4usi_v4usi_v4usi
1043 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1044 V4USI_type_node
, V4USI_type_node
,
1047 tree uti_ftype_udi_udi
1048 = build_function_type_list (unsigned_intTI_type_node
,
1049 unsigned_intDI_type_node
,
1050 unsigned_intDI_type_node
,
1064 ARM_BUILTIN_CRYPTO_##U
1066 "__builtin_arm_crypto_"#L
1069 #define FT2(R, A1, A2) \
1070 R##_ftype_##A1##_##A2
1071 #define FT3(R, A1, A2, A3) \
1072 R##_ftype_##A1##_##A2##_##A3
1073 #define CRYPTO1(L, U, R, A) \
1074 arm_builtin_decls[C (U)] \
1075 = add_builtin_function (N (L), FT1 (R, A), \
1076 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1077 #define CRYPTO2(L, U, R, A1, A2) \
1078 arm_builtin_decls[C (U)] \
1079 = add_builtin_function (N (L), FT2 (R, A1, A2), \
1080 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1082 #define CRYPTO3(L, U, R, A1, A2, A3) \
1083 arm_builtin_decls[C (U)] \
1084 = add_builtin_function (N (L), FT3 (R, A1, A2, A3), \
1085 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1086 #include "crypto.def"
1099 #undef NUM_DREG_TYPES
1100 #undef NUM_QREG_TYPES
1102 #define def_mbuiltin(FLAGS, NAME, TYPE, CODE) \
1105 const arm_feature_set flags = FLAGS; \
1106 if (ARM_FSET_CPU_SUBSET (flags, insn_flags)) \
1109 bdecl = add_builtin_function ((NAME), (TYPE), (CODE), \
1110 BUILT_IN_MD, NULL, NULL_TREE); \
1111 arm_builtin_decls[CODE] = bdecl; \
1116 struct builtin_description
1118 const arm_feature_set features
;
1119 const enum insn_code icode
;
1120 const char * const name
;
1121 const enum arm_builtins code
;
1122 const enum rtx_code comparison
;
1123 const unsigned int flag
;
1126 static const struct builtin_description bdesc_2arg
[] =
1128 #define IWMMXT_BUILTIN(code, string, builtin) \
1129 { ARM_FSET_MAKE_CPU1 (FL_IWMMXT), CODE_FOR_##code, \
1130 "__builtin_arm_" string, \
1131 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1133 #define IWMMXT2_BUILTIN(code, string, builtin) \
1134 { ARM_FSET_MAKE_CPU1 (FL_IWMMXT2), CODE_FOR_##code, \
1135 "__builtin_arm_" string, \
1136 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1138 IWMMXT_BUILTIN (addv8qi3
, "waddb", WADDB
)
1139 IWMMXT_BUILTIN (addv4hi3
, "waddh", WADDH
)
1140 IWMMXT_BUILTIN (addv2si3
, "waddw", WADDW
)
1141 IWMMXT_BUILTIN (subv8qi3
, "wsubb", WSUBB
)
1142 IWMMXT_BUILTIN (subv4hi3
, "wsubh", WSUBH
)
1143 IWMMXT_BUILTIN (subv2si3
, "wsubw", WSUBW
)
1144 IWMMXT_BUILTIN (ssaddv8qi3
, "waddbss", WADDSSB
)
1145 IWMMXT_BUILTIN (ssaddv4hi3
, "waddhss", WADDSSH
)
1146 IWMMXT_BUILTIN (ssaddv2si3
, "waddwss", WADDSSW
)
1147 IWMMXT_BUILTIN (sssubv8qi3
, "wsubbss", WSUBSSB
)
1148 IWMMXT_BUILTIN (sssubv4hi3
, "wsubhss", WSUBSSH
)
1149 IWMMXT_BUILTIN (sssubv2si3
, "wsubwss", WSUBSSW
)
1150 IWMMXT_BUILTIN (usaddv8qi3
, "waddbus", WADDUSB
)
1151 IWMMXT_BUILTIN (usaddv4hi3
, "waddhus", WADDUSH
)
1152 IWMMXT_BUILTIN (usaddv2si3
, "waddwus", WADDUSW
)
1153 IWMMXT_BUILTIN (ussubv8qi3
, "wsubbus", WSUBUSB
)
1154 IWMMXT_BUILTIN (ussubv4hi3
, "wsubhus", WSUBUSH
)
1155 IWMMXT_BUILTIN (ussubv2si3
, "wsubwus", WSUBUSW
)
1156 IWMMXT_BUILTIN (mulv4hi3
, "wmulul", WMULUL
)
1157 IWMMXT_BUILTIN (smulv4hi3_highpart
, "wmulsm", WMULSM
)
1158 IWMMXT_BUILTIN (umulv4hi3_highpart
, "wmulum", WMULUM
)
1159 IWMMXT_BUILTIN (eqv8qi3
, "wcmpeqb", WCMPEQB
)
1160 IWMMXT_BUILTIN (eqv4hi3
, "wcmpeqh", WCMPEQH
)
1161 IWMMXT_BUILTIN (eqv2si3
, "wcmpeqw", WCMPEQW
)
1162 IWMMXT_BUILTIN (gtuv8qi3
, "wcmpgtub", WCMPGTUB
)
1163 IWMMXT_BUILTIN (gtuv4hi3
, "wcmpgtuh", WCMPGTUH
)
1164 IWMMXT_BUILTIN (gtuv2si3
, "wcmpgtuw", WCMPGTUW
)
1165 IWMMXT_BUILTIN (gtv8qi3
, "wcmpgtsb", WCMPGTSB
)
1166 IWMMXT_BUILTIN (gtv4hi3
, "wcmpgtsh", WCMPGTSH
)
1167 IWMMXT_BUILTIN (gtv2si3
, "wcmpgtsw", WCMPGTSW
)
1168 IWMMXT_BUILTIN (umaxv8qi3
, "wmaxub", WMAXUB
)
1169 IWMMXT_BUILTIN (smaxv8qi3
, "wmaxsb", WMAXSB
)
1170 IWMMXT_BUILTIN (umaxv4hi3
, "wmaxuh", WMAXUH
)
1171 IWMMXT_BUILTIN (smaxv4hi3
, "wmaxsh", WMAXSH
)
1172 IWMMXT_BUILTIN (umaxv2si3
, "wmaxuw", WMAXUW
)
1173 IWMMXT_BUILTIN (smaxv2si3
, "wmaxsw", WMAXSW
)
1174 IWMMXT_BUILTIN (uminv8qi3
, "wminub", WMINUB
)
1175 IWMMXT_BUILTIN (sminv8qi3
, "wminsb", WMINSB
)
1176 IWMMXT_BUILTIN (uminv4hi3
, "wminuh", WMINUH
)
1177 IWMMXT_BUILTIN (sminv4hi3
, "wminsh", WMINSH
)
1178 IWMMXT_BUILTIN (uminv2si3
, "wminuw", WMINUW
)
1179 IWMMXT_BUILTIN (sminv2si3
, "wminsw", WMINSW
)
1180 IWMMXT_BUILTIN (iwmmxt_anddi3
, "wand", WAND
)
1181 IWMMXT_BUILTIN (iwmmxt_nanddi3
, "wandn", WANDN
)
1182 IWMMXT_BUILTIN (iwmmxt_iordi3
, "wor", WOR
)
1183 IWMMXT_BUILTIN (iwmmxt_xordi3
, "wxor", WXOR
)
1184 IWMMXT_BUILTIN (iwmmxt_uavgv8qi3
, "wavg2b", WAVG2B
)
1185 IWMMXT_BUILTIN (iwmmxt_uavgv4hi3
, "wavg2h", WAVG2H
)
1186 IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3
, "wavg2br", WAVG2BR
)
1187 IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3
, "wavg2hr", WAVG2HR
)
1188 IWMMXT_BUILTIN (iwmmxt_wunpckilb
, "wunpckilb", WUNPCKILB
)
1189 IWMMXT_BUILTIN (iwmmxt_wunpckilh
, "wunpckilh", WUNPCKILH
)
1190 IWMMXT_BUILTIN (iwmmxt_wunpckilw
, "wunpckilw", WUNPCKILW
)
1191 IWMMXT_BUILTIN (iwmmxt_wunpckihb
, "wunpckihb", WUNPCKIHB
)
1192 IWMMXT_BUILTIN (iwmmxt_wunpckihh
, "wunpckihh", WUNPCKIHH
)
1193 IWMMXT_BUILTIN (iwmmxt_wunpckihw
, "wunpckihw", WUNPCKIHW
)
1194 IWMMXT2_BUILTIN (iwmmxt_waddsubhx
, "waddsubhx", WADDSUBHX
)
1195 IWMMXT2_BUILTIN (iwmmxt_wsubaddhx
, "wsubaddhx", WSUBADDHX
)
1196 IWMMXT2_BUILTIN (iwmmxt_wabsdiffb
, "wabsdiffb", WABSDIFFB
)
1197 IWMMXT2_BUILTIN (iwmmxt_wabsdiffh
, "wabsdiffh", WABSDIFFH
)
1198 IWMMXT2_BUILTIN (iwmmxt_wabsdiffw
, "wabsdiffw", WABSDIFFW
)
1199 IWMMXT2_BUILTIN (iwmmxt_avg4
, "wavg4", WAVG4
)
1200 IWMMXT2_BUILTIN (iwmmxt_avg4r
, "wavg4r", WAVG4R
)
1201 IWMMXT2_BUILTIN (iwmmxt_wmulwsm
, "wmulwsm", WMULWSM
)
1202 IWMMXT2_BUILTIN (iwmmxt_wmulwum
, "wmulwum", WMULWUM
)
1203 IWMMXT2_BUILTIN (iwmmxt_wmulwsmr
, "wmulwsmr", WMULWSMR
)
1204 IWMMXT2_BUILTIN (iwmmxt_wmulwumr
, "wmulwumr", WMULWUMR
)
1205 IWMMXT2_BUILTIN (iwmmxt_wmulwl
, "wmulwl", WMULWL
)
1206 IWMMXT2_BUILTIN (iwmmxt_wmulsmr
, "wmulsmr", WMULSMR
)
1207 IWMMXT2_BUILTIN (iwmmxt_wmulumr
, "wmulumr", WMULUMR
)
1208 IWMMXT2_BUILTIN (iwmmxt_wqmulm
, "wqmulm", WQMULM
)
1209 IWMMXT2_BUILTIN (iwmmxt_wqmulmr
, "wqmulmr", WQMULMR
)
1210 IWMMXT2_BUILTIN (iwmmxt_wqmulwm
, "wqmulwm", WQMULWM
)
1211 IWMMXT2_BUILTIN (iwmmxt_wqmulwmr
, "wqmulwmr", WQMULWMR
)
1212 IWMMXT_BUILTIN (iwmmxt_walignr0
, "walignr0", WALIGNR0
)
1213 IWMMXT_BUILTIN (iwmmxt_walignr1
, "walignr1", WALIGNR1
)
1214 IWMMXT_BUILTIN (iwmmxt_walignr2
, "walignr2", WALIGNR2
)
1215 IWMMXT_BUILTIN (iwmmxt_walignr3
, "walignr3", WALIGNR3
)
1217 #define IWMMXT_BUILTIN2(code, builtin) \
1218 { ARM_FSET_MAKE_CPU1 (FL_IWMMXT), CODE_FOR_##code, NULL, \
1219 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1221 #define IWMMXT2_BUILTIN2(code, builtin) \
1222 { ARM_FSET_MAKE_CPU2 (FL_IWMMXT2), CODE_FOR_##code, NULL, \
1223 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1225 IWMMXT2_BUILTIN2 (iwmmxt_waddbhusm
, WADDBHUSM
)
1226 IWMMXT2_BUILTIN2 (iwmmxt_waddbhusl
, WADDBHUSL
)
1227 IWMMXT_BUILTIN2 (iwmmxt_wpackhss
, WPACKHSS
)
1228 IWMMXT_BUILTIN2 (iwmmxt_wpackwss
, WPACKWSS
)
1229 IWMMXT_BUILTIN2 (iwmmxt_wpackdss
, WPACKDSS
)
1230 IWMMXT_BUILTIN2 (iwmmxt_wpackhus
, WPACKHUS
)
1231 IWMMXT_BUILTIN2 (iwmmxt_wpackwus
, WPACKWUS
)
1232 IWMMXT_BUILTIN2 (iwmmxt_wpackdus
, WPACKDUS
)
1233 IWMMXT_BUILTIN2 (iwmmxt_wmacuz
, WMACUZ
)
1234 IWMMXT_BUILTIN2 (iwmmxt_wmacsz
, WMACSZ
)
1237 #define FP_BUILTIN(L, U) \
1238 {ARM_FSET_EMPTY, CODE_FOR_##L, "__builtin_arm_"#L, ARM_BUILTIN_##U, \
1241 FP_BUILTIN (get_fpscr
, GET_FPSCR
)
1242 FP_BUILTIN (set_fpscr
, SET_FPSCR
)
1245 #define CRC32_BUILTIN(L, U) \
1246 {ARM_FSET_EMPTY, CODE_FOR_##L, "__builtin_arm_"#L, \
1247 ARM_BUILTIN_##U, UNKNOWN, 0},
1248 CRC32_BUILTIN (crc32b
, CRC32B
)
1249 CRC32_BUILTIN (crc32h
, CRC32H
)
1250 CRC32_BUILTIN (crc32w
, CRC32W
)
1251 CRC32_BUILTIN (crc32cb
, CRC32CB
)
1252 CRC32_BUILTIN (crc32ch
, CRC32CH
)
1253 CRC32_BUILTIN (crc32cw
, CRC32CW
)
1254 #undef CRC32_BUILTIN
1257 #define CRYPTO_BUILTIN(L, U) \
1258 {ARM_FSET_EMPTY, CODE_FOR_crypto_##L, "__builtin_arm_crypto_"#L, \
1259 ARM_BUILTIN_CRYPTO_##U, UNKNOWN, 0},
1263 #define CRYPTO2(L, U, R, A1, A2) CRYPTO_BUILTIN (L, U)
1264 #define CRYPTO1(L, U, R, A)
1265 #define CRYPTO3(L, U, R, A1, A2, A3)
1266 #include "crypto.def"
1273 static const struct builtin_description bdesc_1arg
[] =
1275 IWMMXT_BUILTIN (iwmmxt_tmovmskb
, "tmovmskb", TMOVMSKB
)
1276 IWMMXT_BUILTIN (iwmmxt_tmovmskh
, "tmovmskh", TMOVMSKH
)
1277 IWMMXT_BUILTIN (iwmmxt_tmovmskw
, "tmovmskw", TMOVMSKW
)
1278 IWMMXT_BUILTIN (iwmmxt_waccb
, "waccb", WACCB
)
1279 IWMMXT_BUILTIN (iwmmxt_wacch
, "wacch", WACCH
)
1280 IWMMXT_BUILTIN (iwmmxt_waccw
, "waccw", WACCW
)
1281 IWMMXT_BUILTIN (iwmmxt_wunpckehub
, "wunpckehub", WUNPCKEHUB
)
1282 IWMMXT_BUILTIN (iwmmxt_wunpckehuh
, "wunpckehuh", WUNPCKEHUH
)
1283 IWMMXT_BUILTIN (iwmmxt_wunpckehuw
, "wunpckehuw", WUNPCKEHUW
)
1284 IWMMXT_BUILTIN (iwmmxt_wunpckehsb
, "wunpckehsb", WUNPCKEHSB
)
1285 IWMMXT_BUILTIN (iwmmxt_wunpckehsh
, "wunpckehsh", WUNPCKEHSH
)
1286 IWMMXT_BUILTIN (iwmmxt_wunpckehsw
, "wunpckehsw", WUNPCKEHSW
)
1287 IWMMXT_BUILTIN (iwmmxt_wunpckelub
, "wunpckelub", WUNPCKELUB
)
1288 IWMMXT_BUILTIN (iwmmxt_wunpckeluh
, "wunpckeluh", WUNPCKELUH
)
1289 IWMMXT_BUILTIN (iwmmxt_wunpckeluw
, "wunpckeluw", WUNPCKELUW
)
1290 IWMMXT_BUILTIN (iwmmxt_wunpckelsb
, "wunpckelsb", WUNPCKELSB
)
1291 IWMMXT_BUILTIN (iwmmxt_wunpckelsh
, "wunpckelsh", WUNPCKELSH
)
1292 IWMMXT_BUILTIN (iwmmxt_wunpckelsw
, "wunpckelsw", WUNPCKELSW
)
1293 IWMMXT2_BUILTIN (iwmmxt_wabsv8qi3
, "wabsb", WABSB
)
1294 IWMMXT2_BUILTIN (iwmmxt_wabsv4hi3
, "wabsh", WABSH
)
1295 IWMMXT2_BUILTIN (iwmmxt_wabsv2si3
, "wabsw", WABSW
)
1296 IWMMXT_BUILTIN (tbcstv8qi
, "tbcstb", TBCSTB
)
1297 IWMMXT_BUILTIN (tbcstv4hi
, "tbcsth", TBCSTH
)
1298 IWMMXT_BUILTIN (tbcstv2si
, "tbcstw", TBCSTW
)
1300 #define CRYPTO1(L, U, R, A) CRYPTO_BUILTIN (L, U)
1301 #define CRYPTO2(L, U, R, A1, A2)
1302 #define CRYPTO3(L, U, R, A1, A2, A3)
1303 #include "crypto.def"
1309 static const struct builtin_description bdesc_3arg
[] =
1311 #define CRYPTO3(L, U, R, A1, A2, A3) CRYPTO_BUILTIN (L, U)
1312 #define CRYPTO1(L, U, R, A)
1313 #define CRYPTO2(L, U, R, A1, A2)
1314 #include "crypto.def"
1319 #undef CRYPTO_BUILTIN
1321 /* Set up all the iWMMXt builtins. This is not called if
1322 TARGET_IWMMXT is zero. */
1325 arm_init_iwmmxt_builtins (void)
1327 const struct builtin_description
* d
;
1330 tree V2SI_type_node
= build_vector_type_for_mode (intSI_type_node
, V2SImode
);
1331 tree V4HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V4HImode
);
1332 tree V8QI_type_node
= build_vector_type_for_mode (intQI_type_node
, V8QImode
);
1334 tree v8qi_ftype_v8qi_v8qi_int
1335 = build_function_type_list (V8QI_type_node
,
1336 V8QI_type_node
, V8QI_type_node
,
1337 integer_type_node
, NULL_TREE
);
1338 tree v4hi_ftype_v4hi_int
1339 = build_function_type_list (V4HI_type_node
,
1340 V4HI_type_node
, integer_type_node
, NULL_TREE
);
1341 tree v2si_ftype_v2si_int
1342 = build_function_type_list (V2SI_type_node
,
1343 V2SI_type_node
, integer_type_node
, NULL_TREE
);
1344 tree v2si_ftype_di_di
1345 = build_function_type_list (V2SI_type_node
,
1346 long_long_integer_type_node
,
1347 long_long_integer_type_node
,
1349 tree di_ftype_di_int
1350 = build_function_type_list (long_long_integer_type_node
,
1351 long_long_integer_type_node
,
1352 integer_type_node
, NULL_TREE
);
1353 tree di_ftype_di_int_int
1354 = build_function_type_list (long_long_integer_type_node
,
1355 long_long_integer_type_node
,
1357 integer_type_node
, NULL_TREE
);
1359 = build_function_type_list (integer_type_node
,
1360 V8QI_type_node
, NULL_TREE
);
1362 = build_function_type_list (integer_type_node
,
1363 V4HI_type_node
, NULL_TREE
);
1365 = build_function_type_list (integer_type_node
,
1366 V2SI_type_node
, NULL_TREE
);
1367 tree int_ftype_v8qi_int
1368 = build_function_type_list (integer_type_node
,
1369 V8QI_type_node
, integer_type_node
, NULL_TREE
);
1370 tree int_ftype_v4hi_int
1371 = build_function_type_list (integer_type_node
,
1372 V4HI_type_node
, integer_type_node
, NULL_TREE
);
1373 tree int_ftype_v2si_int
1374 = build_function_type_list (integer_type_node
,
1375 V2SI_type_node
, integer_type_node
, NULL_TREE
);
1376 tree v8qi_ftype_v8qi_int_int
1377 = build_function_type_list (V8QI_type_node
,
1378 V8QI_type_node
, integer_type_node
,
1379 integer_type_node
, NULL_TREE
);
1380 tree v4hi_ftype_v4hi_int_int
1381 = build_function_type_list (V4HI_type_node
,
1382 V4HI_type_node
, integer_type_node
,
1383 integer_type_node
, NULL_TREE
);
1384 tree v2si_ftype_v2si_int_int
1385 = build_function_type_list (V2SI_type_node
,
1386 V2SI_type_node
, integer_type_node
,
1387 integer_type_node
, NULL_TREE
);
1388 /* Miscellaneous. */
1389 tree v8qi_ftype_v4hi_v4hi
1390 = build_function_type_list (V8QI_type_node
,
1391 V4HI_type_node
, V4HI_type_node
, NULL_TREE
);
1392 tree v4hi_ftype_v2si_v2si
1393 = build_function_type_list (V4HI_type_node
,
1394 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
1395 tree v8qi_ftype_v4hi_v8qi
1396 = build_function_type_list (V8QI_type_node
,
1397 V4HI_type_node
, V8QI_type_node
, NULL_TREE
);
1398 tree v2si_ftype_v4hi_v4hi
1399 = build_function_type_list (V2SI_type_node
,
1400 V4HI_type_node
, V4HI_type_node
, NULL_TREE
);
1401 tree v2si_ftype_v8qi_v8qi
1402 = build_function_type_list (V2SI_type_node
,
1403 V8QI_type_node
, V8QI_type_node
, NULL_TREE
);
1404 tree v4hi_ftype_v4hi_di
1405 = build_function_type_list (V4HI_type_node
,
1406 V4HI_type_node
, long_long_integer_type_node
,
1408 tree v2si_ftype_v2si_di
1409 = build_function_type_list (V2SI_type_node
,
1410 V2SI_type_node
, long_long_integer_type_node
,
1413 = build_function_type_list (long_long_unsigned_type_node
, NULL_TREE
);
1415 = build_function_type_list (integer_type_node
, NULL_TREE
);
1417 = build_function_type_list (long_long_integer_type_node
,
1418 V8QI_type_node
, NULL_TREE
);
1420 = build_function_type_list (long_long_integer_type_node
,
1421 V4HI_type_node
, NULL_TREE
);
1423 = build_function_type_list (long_long_integer_type_node
,
1424 V2SI_type_node
, NULL_TREE
);
1425 tree v2si_ftype_v4hi
1426 = build_function_type_list (V2SI_type_node
,
1427 V4HI_type_node
, NULL_TREE
);
1428 tree v4hi_ftype_v8qi
1429 = build_function_type_list (V4HI_type_node
,
1430 V8QI_type_node
, NULL_TREE
);
1431 tree v8qi_ftype_v8qi
1432 = build_function_type_list (V8QI_type_node
,
1433 V8QI_type_node
, NULL_TREE
);
1434 tree v4hi_ftype_v4hi
1435 = build_function_type_list (V4HI_type_node
,
1436 V4HI_type_node
, NULL_TREE
);
1437 tree v2si_ftype_v2si
1438 = build_function_type_list (V2SI_type_node
,
1439 V2SI_type_node
, NULL_TREE
);
1441 tree di_ftype_di_v4hi_v4hi
1442 = build_function_type_list (long_long_unsigned_type_node
,
1443 long_long_unsigned_type_node
,
1444 V4HI_type_node
, V4HI_type_node
,
1447 tree di_ftype_v4hi_v4hi
1448 = build_function_type_list (long_long_unsigned_type_node
,
1449 V4HI_type_node
,V4HI_type_node
,
1452 tree v2si_ftype_v2si_v4hi_v4hi
1453 = build_function_type_list (V2SI_type_node
,
1454 V2SI_type_node
, V4HI_type_node
,
1455 V4HI_type_node
, NULL_TREE
);
1457 tree v2si_ftype_v2si_v8qi_v8qi
1458 = build_function_type_list (V2SI_type_node
,
1459 V2SI_type_node
, V8QI_type_node
,
1460 V8QI_type_node
, NULL_TREE
);
1462 tree di_ftype_di_v2si_v2si
1463 = build_function_type_list (long_long_unsigned_type_node
,
1464 long_long_unsigned_type_node
,
1465 V2SI_type_node
, V2SI_type_node
,
1468 tree di_ftype_di_di_int
1469 = build_function_type_list (long_long_unsigned_type_node
,
1470 long_long_unsigned_type_node
,
1471 long_long_unsigned_type_node
,
1472 integer_type_node
, NULL_TREE
);
1475 = build_function_type_list (void_type_node
,
1476 integer_type_node
, NULL_TREE
);
1478 tree v8qi_ftype_char
1479 = build_function_type_list (V8QI_type_node
,
1480 signed_char_type_node
, NULL_TREE
);
1482 tree v4hi_ftype_short
1483 = build_function_type_list (V4HI_type_node
,
1484 short_integer_type_node
, NULL_TREE
);
1487 = build_function_type_list (V2SI_type_node
,
1488 integer_type_node
, NULL_TREE
);
1490 /* Normal vector binops. */
1491 tree v8qi_ftype_v8qi_v8qi
1492 = build_function_type_list (V8QI_type_node
,
1493 V8QI_type_node
, V8QI_type_node
, NULL_TREE
);
1494 tree v4hi_ftype_v4hi_v4hi
1495 = build_function_type_list (V4HI_type_node
,
1496 V4HI_type_node
,V4HI_type_node
, NULL_TREE
);
1497 tree v2si_ftype_v2si_v2si
1498 = build_function_type_list (V2SI_type_node
,
1499 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
1501 = build_function_type_list (long_long_unsigned_type_node
,
1502 long_long_unsigned_type_node
,
1503 long_long_unsigned_type_node
,
1506 /* Add all builtins that are more or less simple operations on two
1508 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
1510 /* Use one of the operands; the target can have a different mode for
1511 mask-generating compares. */
1516 !(ARM_FSET_HAS_CPU1 (d
->features
, FL_IWMMXT
) ||
1517 ARM_FSET_HAS_CPU1 (d
->features
, FL_IWMMXT2
)))
1520 mode
= insn_data
[d
->icode
].operand
[1].mode
;
1525 type
= v8qi_ftype_v8qi_v8qi
;
1528 type
= v4hi_ftype_v4hi_v4hi
;
1531 type
= v2si_ftype_v2si_v2si
;
1534 type
= di_ftype_di_di
;
1541 def_mbuiltin (d
->features
, d
->name
, type
, d
->code
);
1544 /* Add the remaining MMX insns with somewhat more complicated types. */
1545 #define iwmmx_mbuiltin(NAME, TYPE, CODE) \
1546 def_mbuiltin (ARM_FSET_MAKE_CPU1 (FL_IWMMXT), "__builtin_arm_" NAME, \
1547 (TYPE), ARM_BUILTIN_ ## CODE)
1549 #define iwmmx2_mbuiltin(NAME, TYPE, CODE) \
1550 def_mbuiltin (ARM_FSET_MAKE_CPU1 (FL_IWMMXT2), "__builtin_arm_" NAME, \
1551 (TYPE), ARM_BUILTIN_ ## CODE)
1553 iwmmx_mbuiltin ("wzero", di_ftype_void
, WZERO
);
1554 iwmmx_mbuiltin ("setwcgr0", void_ftype_int
, SETWCGR0
);
1555 iwmmx_mbuiltin ("setwcgr1", void_ftype_int
, SETWCGR1
);
1556 iwmmx_mbuiltin ("setwcgr2", void_ftype_int
, SETWCGR2
);
1557 iwmmx_mbuiltin ("setwcgr3", void_ftype_int
, SETWCGR3
);
1558 iwmmx_mbuiltin ("getwcgr0", int_ftype_void
, GETWCGR0
);
1559 iwmmx_mbuiltin ("getwcgr1", int_ftype_void
, GETWCGR1
);
1560 iwmmx_mbuiltin ("getwcgr2", int_ftype_void
, GETWCGR2
);
1561 iwmmx_mbuiltin ("getwcgr3", int_ftype_void
, GETWCGR3
);
1563 iwmmx_mbuiltin ("wsllh", v4hi_ftype_v4hi_di
, WSLLH
);
1564 iwmmx_mbuiltin ("wsllw", v2si_ftype_v2si_di
, WSLLW
);
1565 iwmmx_mbuiltin ("wslld", di_ftype_di_di
, WSLLD
);
1566 iwmmx_mbuiltin ("wsllhi", v4hi_ftype_v4hi_int
, WSLLHI
);
1567 iwmmx_mbuiltin ("wsllwi", v2si_ftype_v2si_int
, WSLLWI
);
1568 iwmmx_mbuiltin ("wslldi", di_ftype_di_int
, WSLLDI
);
1570 iwmmx_mbuiltin ("wsrlh", v4hi_ftype_v4hi_di
, WSRLH
);
1571 iwmmx_mbuiltin ("wsrlw", v2si_ftype_v2si_di
, WSRLW
);
1572 iwmmx_mbuiltin ("wsrld", di_ftype_di_di
, WSRLD
);
1573 iwmmx_mbuiltin ("wsrlhi", v4hi_ftype_v4hi_int
, WSRLHI
);
1574 iwmmx_mbuiltin ("wsrlwi", v2si_ftype_v2si_int
, WSRLWI
);
1575 iwmmx_mbuiltin ("wsrldi", di_ftype_di_int
, WSRLDI
);
1577 iwmmx_mbuiltin ("wsrah", v4hi_ftype_v4hi_di
, WSRAH
);
1578 iwmmx_mbuiltin ("wsraw", v2si_ftype_v2si_di
, WSRAW
);
1579 iwmmx_mbuiltin ("wsrad", di_ftype_di_di
, WSRAD
);
1580 iwmmx_mbuiltin ("wsrahi", v4hi_ftype_v4hi_int
, WSRAHI
);
1581 iwmmx_mbuiltin ("wsrawi", v2si_ftype_v2si_int
, WSRAWI
);
1582 iwmmx_mbuiltin ("wsradi", di_ftype_di_int
, WSRADI
);
1584 iwmmx_mbuiltin ("wrorh", v4hi_ftype_v4hi_di
, WRORH
);
1585 iwmmx_mbuiltin ("wrorw", v2si_ftype_v2si_di
, WRORW
);
1586 iwmmx_mbuiltin ("wrord", di_ftype_di_di
, WRORD
);
1587 iwmmx_mbuiltin ("wrorhi", v4hi_ftype_v4hi_int
, WRORHI
);
1588 iwmmx_mbuiltin ("wrorwi", v2si_ftype_v2si_int
, WRORWI
);
1589 iwmmx_mbuiltin ("wrordi", di_ftype_di_int
, WRORDI
);
1591 iwmmx_mbuiltin ("wshufh", v4hi_ftype_v4hi_int
, WSHUFH
);
1593 iwmmx_mbuiltin ("wsadb", v2si_ftype_v2si_v8qi_v8qi
, WSADB
);
1594 iwmmx_mbuiltin ("wsadh", v2si_ftype_v2si_v4hi_v4hi
, WSADH
);
1595 iwmmx_mbuiltin ("wmadds", v2si_ftype_v4hi_v4hi
, WMADDS
);
1596 iwmmx2_mbuiltin ("wmaddsx", v2si_ftype_v4hi_v4hi
, WMADDSX
);
1597 iwmmx2_mbuiltin ("wmaddsn", v2si_ftype_v4hi_v4hi
, WMADDSN
);
1598 iwmmx_mbuiltin ("wmaddu", v2si_ftype_v4hi_v4hi
, WMADDU
);
1599 iwmmx2_mbuiltin ("wmaddux", v2si_ftype_v4hi_v4hi
, WMADDUX
);
1600 iwmmx2_mbuiltin ("wmaddun", v2si_ftype_v4hi_v4hi
, WMADDUN
);
1601 iwmmx_mbuiltin ("wsadbz", v2si_ftype_v8qi_v8qi
, WSADBZ
);
1602 iwmmx_mbuiltin ("wsadhz", v2si_ftype_v4hi_v4hi
, WSADHZ
);
1604 iwmmx_mbuiltin ("textrmsb", int_ftype_v8qi_int
, TEXTRMSB
);
1605 iwmmx_mbuiltin ("textrmsh", int_ftype_v4hi_int
, TEXTRMSH
);
1606 iwmmx_mbuiltin ("textrmsw", int_ftype_v2si_int
, TEXTRMSW
);
1607 iwmmx_mbuiltin ("textrmub", int_ftype_v8qi_int
, TEXTRMUB
);
1608 iwmmx_mbuiltin ("textrmuh", int_ftype_v4hi_int
, TEXTRMUH
);
1609 iwmmx_mbuiltin ("textrmuw", int_ftype_v2si_int
, TEXTRMUW
);
1610 iwmmx_mbuiltin ("tinsrb", v8qi_ftype_v8qi_int_int
, TINSRB
);
1611 iwmmx_mbuiltin ("tinsrh", v4hi_ftype_v4hi_int_int
, TINSRH
);
1612 iwmmx_mbuiltin ("tinsrw", v2si_ftype_v2si_int_int
, TINSRW
);
1614 iwmmx_mbuiltin ("waccb", di_ftype_v8qi
, WACCB
);
1615 iwmmx_mbuiltin ("wacch", di_ftype_v4hi
, WACCH
);
1616 iwmmx_mbuiltin ("waccw", di_ftype_v2si
, WACCW
);
1618 iwmmx_mbuiltin ("tmovmskb", int_ftype_v8qi
, TMOVMSKB
);
1619 iwmmx_mbuiltin ("tmovmskh", int_ftype_v4hi
, TMOVMSKH
);
1620 iwmmx_mbuiltin ("tmovmskw", int_ftype_v2si
, TMOVMSKW
);
1622 iwmmx2_mbuiltin ("waddbhusm", v8qi_ftype_v4hi_v8qi
, WADDBHUSM
);
1623 iwmmx2_mbuiltin ("waddbhusl", v8qi_ftype_v4hi_v8qi
, WADDBHUSL
);
1625 iwmmx_mbuiltin ("wpackhss", v8qi_ftype_v4hi_v4hi
, WPACKHSS
);
1626 iwmmx_mbuiltin ("wpackhus", v8qi_ftype_v4hi_v4hi
, WPACKHUS
);
1627 iwmmx_mbuiltin ("wpackwus", v4hi_ftype_v2si_v2si
, WPACKWUS
);
1628 iwmmx_mbuiltin ("wpackwss", v4hi_ftype_v2si_v2si
, WPACKWSS
);
1629 iwmmx_mbuiltin ("wpackdus", v2si_ftype_di_di
, WPACKDUS
);
1630 iwmmx_mbuiltin ("wpackdss", v2si_ftype_di_di
, WPACKDSS
);
1632 iwmmx_mbuiltin ("wunpckehub", v4hi_ftype_v8qi
, WUNPCKEHUB
);
1633 iwmmx_mbuiltin ("wunpckehuh", v2si_ftype_v4hi
, WUNPCKEHUH
);
1634 iwmmx_mbuiltin ("wunpckehuw", di_ftype_v2si
, WUNPCKEHUW
);
1635 iwmmx_mbuiltin ("wunpckehsb", v4hi_ftype_v8qi
, WUNPCKEHSB
);
1636 iwmmx_mbuiltin ("wunpckehsh", v2si_ftype_v4hi
, WUNPCKEHSH
);
1637 iwmmx_mbuiltin ("wunpckehsw", di_ftype_v2si
, WUNPCKEHSW
);
1638 iwmmx_mbuiltin ("wunpckelub", v4hi_ftype_v8qi
, WUNPCKELUB
);
1639 iwmmx_mbuiltin ("wunpckeluh", v2si_ftype_v4hi
, WUNPCKELUH
);
1640 iwmmx_mbuiltin ("wunpckeluw", di_ftype_v2si
, WUNPCKELUW
);
1641 iwmmx_mbuiltin ("wunpckelsb", v4hi_ftype_v8qi
, WUNPCKELSB
);
1642 iwmmx_mbuiltin ("wunpckelsh", v2si_ftype_v4hi
, WUNPCKELSH
);
1643 iwmmx_mbuiltin ("wunpckelsw", di_ftype_v2si
, WUNPCKELSW
);
1645 iwmmx_mbuiltin ("wmacs", di_ftype_di_v4hi_v4hi
, WMACS
);
1646 iwmmx_mbuiltin ("wmacsz", di_ftype_v4hi_v4hi
, WMACSZ
);
1647 iwmmx_mbuiltin ("wmacu", di_ftype_di_v4hi_v4hi
, WMACU
);
1648 iwmmx_mbuiltin ("wmacuz", di_ftype_v4hi_v4hi
, WMACUZ
);
1650 iwmmx_mbuiltin ("walign", v8qi_ftype_v8qi_v8qi_int
, WALIGNI
);
1651 iwmmx_mbuiltin ("tmia", di_ftype_di_int_int
, TMIA
);
1652 iwmmx_mbuiltin ("tmiaph", di_ftype_di_int_int
, TMIAPH
);
1653 iwmmx_mbuiltin ("tmiabb", di_ftype_di_int_int
, TMIABB
);
1654 iwmmx_mbuiltin ("tmiabt", di_ftype_di_int_int
, TMIABT
);
1655 iwmmx_mbuiltin ("tmiatb", di_ftype_di_int_int
, TMIATB
);
1656 iwmmx_mbuiltin ("tmiatt", di_ftype_di_int_int
, TMIATT
);
1658 iwmmx2_mbuiltin ("wabsb", v8qi_ftype_v8qi
, WABSB
);
1659 iwmmx2_mbuiltin ("wabsh", v4hi_ftype_v4hi
, WABSH
);
1660 iwmmx2_mbuiltin ("wabsw", v2si_ftype_v2si
, WABSW
);
1662 iwmmx2_mbuiltin ("wqmiabb", v2si_ftype_v2si_v4hi_v4hi
, WQMIABB
);
1663 iwmmx2_mbuiltin ("wqmiabt", v2si_ftype_v2si_v4hi_v4hi
, WQMIABT
);
1664 iwmmx2_mbuiltin ("wqmiatb", v2si_ftype_v2si_v4hi_v4hi
, WQMIATB
);
1665 iwmmx2_mbuiltin ("wqmiatt", v2si_ftype_v2si_v4hi_v4hi
, WQMIATT
);
1667 iwmmx2_mbuiltin ("wqmiabbn", v2si_ftype_v2si_v4hi_v4hi
, WQMIABBN
);
1668 iwmmx2_mbuiltin ("wqmiabtn", v2si_ftype_v2si_v4hi_v4hi
, WQMIABTN
);
1669 iwmmx2_mbuiltin ("wqmiatbn", v2si_ftype_v2si_v4hi_v4hi
, WQMIATBN
);
1670 iwmmx2_mbuiltin ("wqmiattn", v2si_ftype_v2si_v4hi_v4hi
, WQMIATTN
);
1672 iwmmx2_mbuiltin ("wmiabb", di_ftype_di_v4hi_v4hi
, WMIABB
);
1673 iwmmx2_mbuiltin ("wmiabt", di_ftype_di_v4hi_v4hi
, WMIABT
);
1674 iwmmx2_mbuiltin ("wmiatb", di_ftype_di_v4hi_v4hi
, WMIATB
);
1675 iwmmx2_mbuiltin ("wmiatt", di_ftype_di_v4hi_v4hi
, WMIATT
);
1677 iwmmx2_mbuiltin ("wmiabbn", di_ftype_di_v4hi_v4hi
, WMIABBN
);
1678 iwmmx2_mbuiltin ("wmiabtn", di_ftype_di_v4hi_v4hi
, WMIABTN
);
1679 iwmmx2_mbuiltin ("wmiatbn", di_ftype_di_v4hi_v4hi
, WMIATBN
);
1680 iwmmx2_mbuiltin ("wmiattn", di_ftype_di_v4hi_v4hi
, WMIATTN
);
1682 iwmmx2_mbuiltin ("wmiawbb", di_ftype_di_v2si_v2si
, WMIAWBB
);
1683 iwmmx2_mbuiltin ("wmiawbt", di_ftype_di_v2si_v2si
, WMIAWBT
);
1684 iwmmx2_mbuiltin ("wmiawtb", di_ftype_di_v2si_v2si
, WMIAWTB
);
1685 iwmmx2_mbuiltin ("wmiawtt", di_ftype_di_v2si_v2si
, WMIAWTT
);
1687 iwmmx2_mbuiltin ("wmiawbbn", di_ftype_di_v2si_v2si
, WMIAWBBN
);
1688 iwmmx2_mbuiltin ("wmiawbtn", di_ftype_di_v2si_v2si
, WMIAWBTN
);
1689 iwmmx2_mbuiltin ("wmiawtbn", di_ftype_di_v2si_v2si
, WMIAWTBN
);
1690 iwmmx2_mbuiltin ("wmiawttn", di_ftype_di_v2si_v2si
, WMIAWTTN
);
1692 iwmmx2_mbuiltin ("wmerge", di_ftype_di_di_int
, WMERGE
);
1694 iwmmx_mbuiltin ("tbcstb", v8qi_ftype_char
, TBCSTB
);
1695 iwmmx_mbuiltin ("tbcsth", v4hi_ftype_short
, TBCSTH
);
1696 iwmmx_mbuiltin ("tbcstw", v2si_ftype_int
, TBCSTW
);
1698 #undef iwmmx_mbuiltin
1699 #undef iwmmx2_mbuiltin
1703 arm_init_fp16_builtins (void)
1705 arm_simd_floatHF_type_node
= make_node (REAL_TYPE
);
1706 TYPE_PRECISION (arm_simd_floatHF_type_node
) = GET_MODE_PRECISION (HFmode
);
1707 layout_type (arm_simd_floatHF_type_node
);
1708 if (arm_fp16_format
)
1709 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_floatHF_type_node
,
1714 arm_init_crc32_builtins ()
1717 = build_function_type_list (unsigned_intSI_type_node
,
1718 unsigned_intSI_type_node
,
1719 unsigned_intQI_type_node
, NULL_TREE
);
1721 = build_function_type_list (unsigned_intSI_type_node
,
1722 unsigned_intSI_type_node
,
1723 unsigned_intHI_type_node
, NULL_TREE
);
1725 = build_function_type_list (unsigned_intSI_type_node
,
1726 unsigned_intSI_type_node
,
1727 unsigned_intSI_type_node
, NULL_TREE
);
1729 arm_builtin_decls
[ARM_BUILTIN_CRC32B
]
1730 = add_builtin_function ("__builtin_arm_crc32b", si_ftype_si_qi
,
1731 ARM_BUILTIN_CRC32B
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1732 arm_builtin_decls
[ARM_BUILTIN_CRC32H
]
1733 = add_builtin_function ("__builtin_arm_crc32h", si_ftype_si_hi
,
1734 ARM_BUILTIN_CRC32H
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1735 arm_builtin_decls
[ARM_BUILTIN_CRC32W
]
1736 = add_builtin_function ("__builtin_arm_crc32w", si_ftype_si_si
,
1737 ARM_BUILTIN_CRC32W
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1738 arm_builtin_decls
[ARM_BUILTIN_CRC32CB
]
1739 = add_builtin_function ("__builtin_arm_crc32cb", si_ftype_si_qi
,
1740 ARM_BUILTIN_CRC32CB
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1741 arm_builtin_decls
[ARM_BUILTIN_CRC32CH
]
1742 = add_builtin_function ("__builtin_arm_crc32ch", si_ftype_si_hi
,
1743 ARM_BUILTIN_CRC32CH
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1744 arm_builtin_decls
[ARM_BUILTIN_CRC32CW
]
1745 = add_builtin_function ("__builtin_arm_crc32cw", si_ftype_si_si
,
1746 ARM_BUILTIN_CRC32CW
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1750 arm_init_builtins (void)
1752 if (TARGET_REALLY_IWMMXT
)
1753 arm_init_iwmmxt_builtins ();
1755 /* This creates the arm_simd_floatHF_type_node so must come before
1756 arm_init_neon_builtins which uses it. */
1757 arm_init_fp16_builtins ();
1760 arm_init_neon_builtins ();
1763 arm_init_crc32_builtins ();
1765 if (TARGET_VFP
&& TARGET_HARD_FLOAT
)
1767 tree ftype_set_fpscr
1768 = build_function_type_list (void_type_node
, unsigned_type_node
, NULL
);
1769 tree ftype_get_fpscr
1770 = build_function_type_list (unsigned_type_node
, NULL
);
1772 arm_builtin_decls
[ARM_BUILTIN_GET_FPSCR
]
1773 = add_builtin_function ("__builtin_arm_ldfscr", ftype_get_fpscr
,
1774 ARM_BUILTIN_GET_FPSCR
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1775 arm_builtin_decls
[ARM_BUILTIN_SET_FPSCR
]
1776 = add_builtin_function ("__builtin_arm_stfscr", ftype_set_fpscr
,
1777 ARM_BUILTIN_SET_FPSCR
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1781 /* Return the ARM builtin for CODE. */
1784 arm_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
1786 if (code
>= ARM_BUILTIN_MAX
)
1787 return error_mark_node
;
1789 return arm_builtin_decls
[code
];
1792 /* Errors in the source file can cause expand_expr to return const0_rtx
1793 where we expect a vector. To avoid crashing, use one of the vector
1794 clear instructions. */
1797 safe_vector_operand (rtx x
, machine_mode mode
)
1799 if (x
!= const0_rtx
)
1801 x
= gen_reg_rtx (mode
);
1803 emit_insn (gen_iwmmxt_clrdi (mode
== DImode
? x
1804 : gen_rtx_SUBREG (DImode
, x
, 0)));
1808 /* Function to expand ternary builtins. */
1810 arm_expand_ternop_builtin (enum insn_code icode
,
1811 tree exp
, rtx target
)
1814 tree arg0
= CALL_EXPR_ARG (exp
, 0);
1815 tree arg1
= CALL_EXPR_ARG (exp
, 1);
1816 tree arg2
= CALL_EXPR_ARG (exp
, 2);
1818 rtx op0
= expand_normal (arg0
);
1819 rtx op1
= expand_normal (arg1
);
1820 rtx op2
= expand_normal (arg2
);
1823 /* The sha1c, sha1p, sha1m crypto builtins require a different vec_select
1824 lane operand depending on endianness. */
1825 bool builtin_sha1cpm_p
= false;
1827 if (insn_data
[icode
].n_operands
== 5)
1829 gcc_assert (icode
== CODE_FOR_crypto_sha1c
1830 || icode
== CODE_FOR_crypto_sha1p
1831 || icode
== CODE_FOR_crypto_sha1m
);
1832 builtin_sha1cpm_p
= true;
1834 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
1835 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
1836 machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
1837 machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
1840 if (VECTOR_MODE_P (mode0
))
1841 op0
= safe_vector_operand (op0
, mode0
);
1842 if (VECTOR_MODE_P (mode1
))
1843 op1
= safe_vector_operand (op1
, mode1
);
1844 if (VECTOR_MODE_P (mode2
))
1845 op2
= safe_vector_operand (op2
, mode2
);
1848 || GET_MODE (target
) != tmode
1849 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
1850 target
= gen_reg_rtx (tmode
);
1852 gcc_assert ((GET_MODE (op0
) == mode0
|| GET_MODE (op0
) == VOIDmode
)
1853 && (GET_MODE (op1
) == mode1
|| GET_MODE (op1
) == VOIDmode
)
1854 && (GET_MODE (op2
) == mode2
|| GET_MODE (op2
) == VOIDmode
));
1856 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
1857 op0
= copy_to_mode_reg (mode0
, op0
);
1858 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
1859 op1
= copy_to_mode_reg (mode1
, op1
);
1860 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
1861 op2
= copy_to_mode_reg (mode2
, op2
);
1862 if (builtin_sha1cpm_p
)
1863 op3
= GEN_INT (TARGET_BIG_END
? 1 : 0);
1865 if (builtin_sha1cpm_p
)
1866 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
, op3
);
1868 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
1875 /* Subroutine of arm_expand_builtin to take care of binop insns. */
1878 arm_expand_binop_builtin (enum insn_code icode
,
1879 tree exp
, rtx target
)
1882 tree arg0
= CALL_EXPR_ARG (exp
, 0);
1883 tree arg1
= CALL_EXPR_ARG (exp
, 1);
1884 rtx op0
= expand_normal (arg0
);
1885 rtx op1
= expand_normal (arg1
);
1886 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
1887 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
1888 machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
1890 if (VECTOR_MODE_P (mode0
))
1891 op0
= safe_vector_operand (op0
, mode0
);
1892 if (VECTOR_MODE_P (mode1
))
1893 op1
= safe_vector_operand (op1
, mode1
);
1896 || GET_MODE (target
) != tmode
1897 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
1898 target
= gen_reg_rtx (tmode
);
1900 gcc_assert ((GET_MODE (op0
) == mode0
|| GET_MODE (op0
) == VOIDmode
)
1901 && (GET_MODE (op1
) == mode1
|| GET_MODE (op1
) == VOIDmode
));
1903 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
1904 op0
= copy_to_mode_reg (mode0
, op0
);
1905 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
1906 op1
= copy_to_mode_reg (mode1
, op1
);
1908 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
1915 /* Subroutine of arm_expand_builtin to take care of unop insns. */
1918 arm_expand_unop_builtin (enum insn_code icode
,
1919 tree exp
, rtx target
, int do_load
)
1922 tree arg0
= CALL_EXPR_ARG (exp
, 0);
1923 rtx op0
= expand_normal (arg0
);
1925 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
1926 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
1927 bool builtin_sha1h_p
= false;
1929 if (insn_data
[icode
].n_operands
== 3)
1931 gcc_assert (icode
== CODE_FOR_crypto_sha1h
);
1932 builtin_sha1h_p
= true;
1936 || GET_MODE (target
) != tmode
1937 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
1938 target
= gen_reg_rtx (tmode
);
1940 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
1943 if (VECTOR_MODE_P (mode0
))
1944 op0
= safe_vector_operand (op0
, mode0
);
1946 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
1947 op0
= copy_to_mode_reg (mode0
, op0
);
1949 if (builtin_sha1h_p
)
1950 op1
= GEN_INT (TARGET_BIG_END
? 1 : 0);
1952 if (builtin_sha1h_p
)
1953 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
1955 pat
= GEN_FCN (icode
) (target
, op0
);
1963 NEON_ARG_COPY_TO_REG
,
1965 NEON_ARG_LANE_INDEX
,
1970 #define NEON_MAX_BUILTIN_ARGS 5
1972 /* EXP is a pointer argument to a Neon load or store intrinsic. Derive
1973 and return an expression for the accessed memory.
1975 The intrinsic function operates on a block of registers that has
1976 mode REG_MODE. This block contains vectors of type TYPE_MODE. The
1977 function references the memory at EXP of type TYPE and in mode
1978 MEM_MODE; this mode may be BLKmode if no more suitable mode is
1982 neon_dereference_pointer (tree exp
, tree type
, machine_mode mem_mode
,
1983 machine_mode reg_mode
,
1984 machine_mode vector_mode
)
1986 HOST_WIDE_INT reg_size
, vector_size
, nvectors
, nelems
;
1987 tree elem_type
, upper_bound
, array_type
;
1989 /* Work out the size of the register block in bytes. */
1990 reg_size
= GET_MODE_SIZE (reg_mode
);
1992 /* Work out the size of each vector in bytes. */
1993 vector_size
= GET_MODE_SIZE (vector_mode
);
1995 /* Work out how many vectors there are. */
1996 gcc_assert (reg_size
% vector_size
== 0);
1997 nvectors
= reg_size
/ vector_size
;
1999 /* Work out the type of each element. */
2000 gcc_assert (POINTER_TYPE_P (type
));
2001 elem_type
= TREE_TYPE (type
);
2003 /* Work out how many elements are being loaded or stored.
2004 MEM_MODE == REG_MODE implies a one-to-one mapping between register
2005 and memory elements; anything else implies a lane load or store. */
2006 if (mem_mode
== reg_mode
)
2007 nelems
= vector_size
* nvectors
/ int_size_in_bytes (elem_type
);
2011 /* Create a type that describes the full access. */
2012 upper_bound
= build_int_cst (size_type_node
, nelems
- 1);
2013 array_type
= build_array_type (elem_type
, build_index_type (upper_bound
));
2015 /* Dereference EXP using that type. */
2016 return fold_build2 (MEM_REF
, array_type
, exp
,
2017 build_int_cst (build_pointer_type (array_type
), 0));
2020 /* Expand a Neon builtin. */
2022 arm_expand_neon_args (rtx target
, machine_mode map_mode
, int fcode
,
2023 int icode
, int have_retval
, tree exp
, ...)
2027 tree arg
[SIMD_MAX_BUILTIN_ARGS
];
2028 rtx op
[SIMD_MAX_BUILTIN_ARGS
];
2029 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
2030 machine_mode mode
[SIMD_MAX_BUILTIN_ARGS
];
2036 || GET_MODE (target
) != tmode
2037 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
)))
2038 target
= gen_reg_rtx (tmode
);
2042 formals
= TYPE_ARG_TYPES (TREE_TYPE (arm_builtin_decls
[fcode
]));
2046 builtin_arg thisarg
= (builtin_arg
) va_arg (ap
, int);
2048 if (thisarg
== NEON_ARG_STOP
)
2052 int opno
= argc
+ have_retval
;
2053 arg
[argc
] = CALL_EXPR_ARG (exp
, argc
);
2054 mode
[argc
] = insn_data
[icode
].operand
[opno
].mode
;
2055 if (thisarg
== NEON_ARG_MEMORY
)
2057 machine_mode other_mode
2058 = insn_data
[icode
].operand
[1 - opno
].mode
;
2059 arg
[argc
] = neon_dereference_pointer (arg
[argc
],
2060 TREE_VALUE (formals
),
2061 mode
[argc
], other_mode
,
2065 /* Use EXPAND_MEMORY for NEON_ARG_MEMORY to ensure a MEM_P
2067 op
[argc
] = expand_expr (arg
[argc
], NULL_RTX
, VOIDmode
,
2068 (thisarg
== NEON_ARG_MEMORY
2069 ? EXPAND_MEMORY
: EXPAND_NORMAL
));
2073 case NEON_ARG_COPY_TO_REG
:
2074 if (POINTER_TYPE_P (TREE_TYPE (arg
[argc
])))
2075 op
[argc
] = convert_memory_address (Pmode
, op
[argc
]);
2076 /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
2077 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2078 (op
[argc
], mode
[argc
]))
2079 op
[argc
] = copy_to_mode_reg (mode
[argc
], op
[argc
]);
2082 case NEON_ARG_LANE_INDEX
:
2083 /* Previous argument must be a vector, which this indexes. */
2084 gcc_assert (argc
> 0);
2085 if (CONST_INT_P (op
[argc
]))
2087 enum machine_mode vmode
= mode
[argc
- 1];
2088 neon_lane_bounds (op
[argc
], 0, GET_MODE_NUNITS (vmode
), exp
);
2090 /* Fall through - if the lane index isn't a constant then
2091 the next case will error. */
2092 case NEON_ARG_CONSTANT
:
2093 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2094 (op
[argc
], mode
[argc
]))
2095 error_at (EXPR_LOCATION (exp
), "incompatible type for argument %d, "
2096 "expected %<const int%>", argc
+ 1);
2098 case NEON_ARG_MEMORY
:
2099 /* Check if expand failed. */
2100 if (op
[argc
] == const0_rtx
)
2105 gcc_assert (MEM_P (op
[argc
]));
2106 PUT_MODE (op
[argc
], mode
[argc
]);
2107 /* ??? arm_neon.h uses the same built-in functions for signed
2108 and unsigned accesses, casting where necessary. This isn't
2110 set_mem_alias_set (op
[argc
], 0);
2111 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2112 (op
[argc
], mode
[argc
]))
2113 op
[argc
] = (replace_equiv_address
2114 (op
[argc
], force_reg (Pmode
, XEXP (op
[argc
], 0))));
2131 pat
= GEN_FCN (icode
) (target
, op
[0]);
2135 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
2139 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2]);
2143 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3]);
2147 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3], op
[4]);
2157 pat
= GEN_FCN (icode
) (op
[0]);
2161 pat
= GEN_FCN (icode
) (op
[0], op
[1]);
2165 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2]);
2169 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3]);
2173 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3], op
[4]);
2188 /* Expand a Neon builtin, i.e. those registered only if TARGET_NEON holds.
2189 Most of these are "special" because they don't have symbolic
2190 constants defined per-instruction or per instruction-variant. Instead, the
2191 required info is looked up in the table neon_builtin_data. */
2193 arm_expand_neon_builtin (int fcode
, tree exp
, rtx target
)
2195 if (fcode
== ARM_BUILTIN_NEON_LANE_CHECK
)
2197 /* Builtin is only to check bounds of the lane passed to some intrinsics
2198 that are implemented with gcc vector extensions in arm_neon.h. */
2200 tree nlanes
= CALL_EXPR_ARG (exp
, 0);
2201 gcc_assert (TREE_CODE (nlanes
) == INTEGER_CST
);
2202 rtx lane_idx
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2203 if (CONST_INT_P (lane_idx
))
2204 neon_lane_bounds (lane_idx
, 0, TREE_INT_CST_LOW (nlanes
), exp
);
2206 error ("%Klane index must be a constant immediate", exp
);
2207 /* Don't generate any RTL. */
2211 neon_builtin_datum
*d
=
2212 &neon_builtin_data
[fcode
- ARM_BUILTIN_NEON_PATTERN_START
];
2213 enum insn_code icode
= d
->code
;
2214 builtin_arg args
[SIMD_MAX_BUILTIN_ARGS
];
2215 int num_args
= insn_data
[d
->code
].n_operands
;
2219 is_void
= !!(d
->qualifiers
[0] & qualifier_void
);
2221 num_args
+= is_void
;
2223 for (k
= 1; k
< num_args
; k
++)
2225 /* We have four arrays of data, each indexed in a different fashion.
2226 qualifiers - element 0 always describes the function return type.
2227 operands - element 0 is either the operand for return value (if
2228 the function has a non-void return type) or the operand for the
2230 expr_args - element 0 always holds the first argument.
2231 args - element 0 is always used for the return type. */
2232 int qualifiers_k
= k
;
2233 int operands_k
= k
- is_void
;
2234 int expr_args_k
= k
- 1;
2236 if (d
->qualifiers
[qualifiers_k
] & qualifier_lane_index
)
2237 args
[k
] = NEON_ARG_LANE_INDEX
;
2238 else if (d
->qualifiers
[qualifiers_k
] & qualifier_immediate
)
2239 args
[k
] = NEON_ARG_CONSTANT
;
2240 else if (d
->qualifiers
[qualifiers_k
] & qualifier_maybe_immediate
)
2243 = expand_normal (CALL_EXPR_ARG (exp
,
2245 /* Handle constants only if the predicate allows it. */
2246 bool op_const_int_p
=
2248 && (*insn_data
[icode
].operand
[operands_k
].predicate
)
2249 (arg
, insn_data
[icode
].operand
[operands_k
].mode
));
2250 args
[k
] = op_const_int_p
? NEON_ARG_CONSTANT
: NEON_ARG_COPY_TO_REG
;
2252 else if (d
->qualifiers
[qualifiers_k
] & qualifier_pointer
)
2253 args
[k
] = NEON_ARG_MEMORY
;
2255 args
[k
] = NEON_ARG_COPY_TO_REG
;
2257 args
[k
] = NEON_ARG_STOP
;
2259 /* The interface to arm_expand_neon_args expects a 0 if
2260 the function is void, and a 1 if it is not. */
2261 return arm_expand_neon_args
2262 (target
, d
->mode
, fcode
, icode
, !is_void
, exp
,
2270 /* Expand an expression EXP that calls a built-in function,
2271 with result going to TARGET if that's convenient
2272 (and in mode MODE if that's convenient).
2273 SUBTARGET may be used as the target for computing one of EXP's operands.
2274 IGNORE is nonzero if the value is to be ignored. */
2277 arm_expand_builtin (tree exp
,
2279 rtx subtarget ATTRIBUTE_UNUSED
,
2280 machine_mode mode ATTRIBUTE_UNUSED
,
2281 int ignore ATTRIBUTE_UNUSED
)
2283 const struct builtin_description
* d
;
2284 enum insn_code icode
;
2285 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
2293 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
2304 if (fcode
>= ARM_BUILTIN_NEON_BASE
)
2305 return arm_expand_neon_builtin (fcode
, exp
, target
);
2309 case ARM_BUILTIN_GET_FPSCR
:
2310 case ARM_BUILTIN_SET_FPSCR
:
2311 if (fcode
== ARM_BUILTIN_GET_FPSCR
)
2313 icode
= CODE_FOR_get_fpscr
;
2314 target
= gen_reg_rtx (SImode
);
2315 pat
= GEN_FCN (icode
) (target
);
2320 icode
= CODE_FOR_set_fpscr
;
2321 arg0
= CALL_EXPR_ARG (exp
, 0);
2322 op0
= expand_normal (arg0
);
2323 pat
= GEN_FCN (icode
) (op0
);
2328 case ARM_BUILTIN_TEXTRMSB
:
2329 case ARM_BUILTIN_TEXTRMUB
:
2330 case ARM_BUILTIN_TEXTRMSH
:
2331 case ARM_BUILTIN_TEXTRMUH
:
2332 case ARM_BUILTIN_TEXTRMSW
:
2333 case ARM_BUILTIN_TEXTRMUW
:
2334 icode
= (fcode
== ARM_BUILTIN_TEXTRMSB
? CODE_FOR_iwmmxt_textrmsb
2335 : fcode
== ARM_BUILTIN_TEXTRMUB
? CODE_FOR_iwmmxt_textrmub
2336 : fcode
== ARM_BUILTIN_TEXTRMSH
? CODE_FOR_iwmmxt_textrmsh
2337 : fcode
== ARM_BUILTIN_TEXTRMUH
? CODE_FOR_iwmmxt_textrmuh
2338 : CODE_FOR_iwmmxt_textrmw
);
2340 arg0
= CALL_EXPR_ARG (exp
, 0);
2341 arg1
= CALL_EXPR_ARG (exp
, 1);
2342 op0
= expand_normal (arg0
);
2343 op1
= expand_normal (arg1
);
2344 tmode
= insn_data
[icode
].operand
[0].mode
;
2345 mode0
= insn_data
[icode
].operand
[1].mode
;
2346 mode1
= insn_data
[icode
].operand
[2].mode
;
2348 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2349 op0
= copy_to_mode_reg (mode0
, op0
);
2350 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2352 /* @@@ better error message */
2353 error ("selector must be an immediate");
2354 return gen_reg_rtx (tmode
);
2357 opint
= INTVAL (op1
);
2358 if (fcode
== ARM_BUILTIN_TEXTRMSB
|| fcode
== ARM_BUILTIN_TEXTRMUB
)
2360 if (opint
> 7 || opint
< 0)
2361 error ("the range of selector should be in 0 to 7");
2363 else if (fcode
== ARM_BUILTIN_TEXTRMSH
|| fcode
== ARM_BUILTIN_TEXTRMUH
)
2365 if (opint
> 3 || opint
< 0)
2366 error ("the range of selector should be in 0 to 3");
2368 else /* ARM_BUILTIN_TEXTRMSW || ARM_BUILTIN_TEXTRMUW. */
2370 if (opint
> 1 || opint
< 0)
2371 error ("the range of selector should be in 0 to 1");
2375 || GET_MODE (target
) != tmode
2376 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2377 target
= gen_reg_rtx (tmode
);
2378 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2384 case ARM_BUILTIN_WALIGNI
:
2385 /* If op2 is immediate, call walighi, else call walighr. */
2386 arg0
= CALL_EXPR_ARG (exp
, 0);
2387 arg1
= CALL_EXPR_ARG (exp
, 1);
2388 arg2
= CALL_EXPR_ARG (exp
, 2);
2389 op0
= expand_normal (arg0
);
2390 op1
= expand_normal (arg1
);
2391 op2
= expand_normal (arg2
);
2392 if (CONST_INT_P (op2
))
2394 icode
= CODE_FOR_iwmmxt_waligni
;
2395 tmode
= insn_data
[icode
].operand
[0].mode
;
2396 mode0
= insn_data
[icode
].operand
[1].mode
;
2397 mode1
= insn_data
[icode
].operand
[2].mode
;
2398 mode2
= insn_data
[icode
].operand
[3].mode
;
2399 if (!(*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2400 op0
= copy_to_mode_reg (mode0
, op0
);
2401 if (!(*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2402 op1
= copy_to_mode_reg (mode1
, op1
);
2403 gcc_assert ((*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
));
2404 selector
= INTVAL (op2
);
2405 if (selector
> 7 || selector
< 0)
2406 error ("the range of selector should be in 0 to 7");
2410 icode
= CODE_FOR_iwmmxt_walignr
;
2411 tmode
= insn_data
[icode
].operand
[0].mode
;
2412 mode0
= insn_data
[icode
].operand
[1].mode
;
2413 mode1
= insn_data
[icode
].operand
[2].mode
;
2414 mode2
= insn_data
[icode
].operand
[3].mode
;
2415 if (!(*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2416 op0
= copy_to_mode_reg (mode0
, op0
);
2417 if (!(*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2418 op1
= copy_to_mode_reg (mode1
, op1
);
2419 if (!(*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2420 op2
= copy_to_mode_reg (mode2
, op2
);
2423 || GET_MODE (target
) != tmode
2424 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2425 target
= gen_reg_rtx (tmode
);
2426 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2432 case ARM_BUILTIN_TINSRB
:
2433 case ARM_BUILTIN_TINSRH
:
2434 case ARM_BUILTIN_TINSRW
:
2435 case ARM_BUILTIN_WMERGE
:
2436 icode
= (fcode
== ARM_BUILTIN_TINSRB
? CODE_FOR_iwmmxt_tinsrb
2437 : fcode
== ARM_BUILTIN_TINSRH
? CODE_FOR_iwmmxt_tinsrh
2438 : fcode
== ARM_BUILTIN_WMERGE
? CODE_FOR_iwmmxt_wmerge
2439 : CODE_FOR_iwmmxt_tinsrw
);
2440 arg0
= CALL_EXPR_ARG (exp
, 0);
2441 arg1
= CALL_EXPR_ARG (exp
, 1);
2442 arg2
= CALL_EXPR_ARG (exp
, 2);
2443 op0
= expand_normal (arg0
);
2444 op1
= expand_normal (arg1
);
2445 op2
= expand_normal (arg2
);
2446 tmode
= insn_data
[icode
].operand
[0].mode
;
2447 mode0
= insn_data
[icode
].operand
[1].mode
;
2448 mode1
= insn_data
[icode
].operand
[2].mode
;
2449 mode2
= insn_data
[icode
].operand
[3].mode
;
2451 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2452 op0
= copy_to_mode_reg (mode0
, op0
);
2453 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2454 op1
= copy_to_mode_reg (mode1
, op1
);
2455 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2457 error ("selector must be an immediate");
2460 if (icode
== CODE_FOR_iwmmxt_wmerge
)
2462 selector
= INTVAL (op2
);
2463 if (selector
> 7 || selector
< 0)
2464 error ("the range of selector should be in 0 to 7");
2466 if ((icode
== CODE_FOR_iwmmxt_tinsrb
)
2467 || (icode
== CODE_FOR_iwmmxt_tinsrh
)
2468 || (icode
== CODE_FOR_iwmmxt_tinsrw
))
2471 selector
= INTVAL (op2
);
2472 if (icode
== CODE_FOR_iwmmxt_tinsrb
&& (selector
< 0 || selector
> 7))
2473 error ("the range of selector should be in 0 to 7");
2474 else if (icode
== CODE_FOR_iwmmxt_tinsrh
&& (selector
< 0 ||selector
> 3))
2475 error ("the range of selector should be in 0 to 3");
2476 else if (icode
== CODE_FOR_iwmmxt_tinsrw
&& (selector
< 0 ||selector
> 1))
2477 error ("the range of selector should be in 0 to 1");
2479 op2
= GEN_INT (mask
);
2482 || GET_MODE (target
) != tmode
2483 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2484 target
= gen_reg_rtx (tmode
);
2485 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2491 case ARM_BUILTIN_SETWCGR0
:
2492 case ARM_BUILTIN_SETWCGR1
:
2493 case ARM_BUILTIN_SETWCGR2
:
2494 case ARM_BUILTIN_SETWCGR3
:
2495 icode
= (fcode
== ARM_BUILTIN_SETWCGR0
? CODE_FOR_iwmmxt_setwcgr0
2496 : fcode
== ARM_BUILTIN_SETWCGR1
? CODE_FOR_iwmmxt_setwcgr1
2497 : fcode
== ARM_BUILTIN_SETWCGR2
? CODE_FOR_iwmmxt_setwcgr2
2498 : CODE_FOR_iwmmxt_setwcgr3
);
2499 arg0
= CALL_EXPR_ARG (exp
, 0);
2500 op0
= expand_normal (arg0
);
2501 mode0
= insn_data
[icode
].operand
[0].mode
;
2502 if (!(*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
2503 op0
= copy_to_mode_reg (mode0
, op0
);
2504 pat
= GEN_FCN (icode
) (op0
);
2510 case ARM_BUILTIN_GETWCGR0
:
2511 case ARM_BUILTIN_GETWCGR1
:
2512 case ARM_BUILTIN_GETWCGR2
:
2513 case ARM_BUILTIN_GETWCGR3
:
2514 icode
= (fcode
== ARM_BUILTIN_GETWCGR0
? CODE_FOR_iwmmxt_getwcgr0
2515 : fcode
== ARM_BUILTIN_GETWCGR1
? CODE_FOR_iwmmxt_getwcgr1
2516 : fcode
== ARM_BUILTIN_GETWCGR2
? CODE_FOR_iwmmxt_getwcgr2
2517 : CODE_FOR_iwmmxt_getwcgr3
);
2518 tmode
= insn_data
[icode
].operand
[0].mode
;
2520 || GET_MODE (target
) != tmode
2521 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2522 target
= gen_reg_rtx (tmode
);
2523 pat
= GEN_FCN (icode
) (target
);
2529 case ARM_BUILTIN_WSHUFH
:
2530 icode
= CODE_FOR_iwmmxt_wshufh
;
2531 arg0
= CALL_EXPR_ARG (exp
, 0);
2532 arg1
= CALL_EXPR_ARG (exp
, 1);
2533 op0
= expand_normal (arg0
);
2534 op1
= expand_normal (arg1
);
2535 tmode
= insn_data
[icode
].operand
[0].mode
;
2536 mode1
= insn_data
[icode
].operand
[1].mode
;
2537 mode2
= insn_data
[icode
].operand
[2].mode
;
2539 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode1
))
2540 op0
= copy_to_mode_reg (mode1
, op0
);
2541 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode2
))
2543 error ("mask must be an immediate");
2546 selector
= INTVAL (op1
);
2547 if (selector
< 0 || selector
> 255)
2548 error ("the range of mask should be in 0 to 255");
2550 || GET_MODE (target
) != tmode
2551 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2552 target
= gen_reg_rtx (tmode
);
2553 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2559 case ARM_BUILTIN_WMADDS
:
2560 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmadds
, exp
, target
);
2561 case ARM_BUILTIN_WMADDSX
:
2562 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsx
, exp
, target
);
2563 case ARM_BUILTIN_WMADDSN
:
2564 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsn
, exp
, target
);
2565 case ARM_BUILTIN_WMADDU
:
2566 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddu
, exp
, target
);
2567 case ARM_BUILTIN_WMADDUX
:
2568 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddux
, exp
, target
);
2569 case ARM_BUILTIN_WMADDUN
:
2570 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddun
, exp
, target
);
2571 case ARM_BUILTIN_WSADBZ
:
2572 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz
, exp
, target
);
2573 case ARM_BUILTIN_WSADHZ
:
2574 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz
, exp
, target
);
2576 /* Several three-argument builtins. */
2577 case ARM_BUILTIN_WMACS
:
2578 case ARM_BUILTIN_WMACU
:
2579 case ARM_BUILTIN_TMIA
:
2580 case ARM_BUILTIN_TMIAPH
:
2581 case ARM_BUILTIN_TMIATT
:
2582 case ARM_BUILTIN_TMIATB
:
2583 case ARM_BUILTIN_TMIABT
:
2584 case ARM_BUILTIN_TMIABB
:
2585 case ARM_BUILTIN_WQMIABB
:
2586 case ARM_BUILTIN_WQMIABT
:
2587 case ARM_BUILTIN_WQMIATB
:
2588 case ARM_BUILTIN_WQMIATT
:
2589 case ARM_BUILTIN_WQMIABBN
:
2590 case ARM_BUILTIN_WQMIABTN
:
2591 case ARM_BUILTIN_WQMIATBN
:
2592 case ARM_BUILTIN_WQMIATTN
:
2593 case ARM_BUILTIN_WMIABB
:
2594 case ARM_BUILTIN_WMIABT
:
2595 case ARM_BUILTIN_WMIATB
:
2596 case ARM_BUILTIN_WMIATT
:
2597 case ARM_BUILTIN_WMIABBN
:
2598 case ARM_BUILTIN_WMIABTN
:
2599 case ARM_BUILTIN_WMIATBN
:
2600 case ARM_BUILTIN_WMIATTN
:
2601 case ARM_BUILTIN_WMIAWBB
:
2602 case ARM_BUILTIN_WMIAWBT
:
2603 case ARM_BUILTIN_WMIAWTB
:
2604 case ARM_BUILTIN_WMIAWTT
:
2605 case ARM_BUILTIN_WMIAWBBN
:
2606 case ARM_BUILTIN_WMIAWBTN
:
2607 case ARM_BUILTIN_WMIAWTBN
:
2608 case ARM_BUILTIN_WMIAWTTN
:
2609 case ARM_BUILTIN_WSADB
:
2610 case ARM_BUILTIN_WSADH
:
2611 icode
= (fcode
== ARM_BUILTIN_WMACS
? CODE_FOR_iwmmxt_wmacs
2612 : fcode
== ARM_BUILTIN_WMACU
? CODE_FOR_iwmmxt_wmacu
2613 : fcode
== ARM_BUILTIN_TMIA
? CODE_FOR_iwmmxt_tmia
2614 : fcode
== ARM_BUILTIN_TMIAPH
? CODE_FOR_iwmmxt_tmiaph
2615 : fcode
== ARM_BUILTIN_TMIABB
? CODE_FOR_iwmmxt_tmiabb
2616 : fcode
== ARM_BUILTIN_TMIABT
? CODE_FOR_iwmmxt_tmiabt
2617 : fcode
== ARM_BUILTIN_TMIATB
? CODE_FOR_iwmmxt_tmiatb
2618 : fcode
== ARM_BUILTIN_TMIATT
? CODE_FOR_iwmmxt_tmiatt
2619 : fcode
== ARM_BUILTIN_WQMIABB
? CODE_FOR_iwmmxt_wqmiabb
2620 : fcode
== ARM_BUILTIN_WQMIABT
? CODE_FOR_iwmmxt_wqmiabt
2621 : fcode
== ARM_BUILTIN_WQMIATB
? CODE_FOR_iwmmxt_wqmiatb
2622 : fcode
== ARM_BUILTIN_WQMIATT
? CODE_FOR_iwmmxt_wqmiatt
2623 : fcode
== ARM_BUILTIN_WQMIABBN
? CODE_FOR_iwmmxt_wqmiabbn
2624 : fcode
== ARM_BUILTIN_WQMIABTN
? CODE_FOR_iwmmxt_wqmiabtn
2625 : fcode
== ARM_BUILTIN_WQMIATBN
? CODE_FOR_iwmmxt_wqmiatbn
2626 : fcode
== ARM_BUILTIN_WQMIATTN
? CODE_FOR_iwmmxt_wqmiattn
2627 : fcode
== ARM_BUILTIN_WMIABB
? CODE_FOR_iwmmxt_wmiabb
2628 : fcode
== ARM_BUILTIN_WMIABT
? CODE_FOR_iwmmxt_wmiabt
2629 : fcode
== ARM_BUILTIN_WMIATB
? CODE_FOR_iwmmxt_wmiatb
2630 : fcode
== ARM_BUILTIN_WMIATT
? CODE_FOR_iwmmxt_wmiatt
2631 : fcode
== ARM_BUILTIN_WMIABBN
? CODE_FOR_iwmmxt_wmiabbn
2632 : fcode
== ARM_BUILTIN_WMIABTN
? CODE_FOR_iwmmxt_wmiabtn
2633 : fcode
== ARM_BUILTIN_WMIATBN
? CODE_FOR_iwmmxt_wmiatbn
2634 : fcode
== ARM_BUILTIN_WMIATTN
? CODE_FOR_iwmmxt_wmiattn
2635 : fcode
== ARM_BUILTIN_WMIAWBB
? CODE_FOR_iwmmxt_wmiawbb
2636 : fcode
== ARM_BUILTIN_WMIAWBT
? CODE_FOR_iwmmxt_wmiawbt
2637 : fcode
== ARM_BUILTIN_WMIAWTB
? CODE_FOR_iwmmxt_wmiawtb
2638 : fcode
== ARM_BUILTIN_WMIAWTT
? CODE_FOR_iwmmxt_wmiawtt
2639 : fcode
== ARM_BUILTIN_WMIAWBBN
? CODE_FOR_iwmmxt_wmiawbbn
2640 : fcode
== ARM_BUILTIN_WMIAWBTN
? CODE_FOR_iwmmxt_wmiawbtn
2641 : fcode
== ARM_BUILTIN_WMIAWTBN
? CODE_FOR_iwmmxt_wmiawtbn
2642 : fcode
== ARM_BUILTIN_WMIAWTTN
? CODE_FOR_iwmmxt_wmiawttn
2643 : fcode
== ARM_BUILTIN_WSADB
? CODE_FOR_iwmmxt_wsadb
2644 : CODE_FOR_iwmmxt_wsadh
);
2645 arg0
= CALL_EXPR_ARG (exp
, 0);
2646 arg1
= CALL_EXPR_ARG (exp
, 1);
2647 arg2
= CALL_EXPR_ARG (exp
, 2);
2648 op0
= expand_normal (arg0
);
2649 op1
= expand_normal (arg1
);
2650 op2
= expand_normal (arg2
);
2651 tmode
= insn_data
[icode
].operand
[0].mode
;
2652 mode0
= insn_data
[icode
].operand
[1].mode
;
2653 mode1
= insn_data
[icode
].operand
[2].mode
;
2654 mode2
= insn_data
[icode
].operand
[3].mode
;
2656 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2657 op0
= copy_to_mode_reg (mode0
, op0
);
2658 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2659 op1
= copy_to_mode_reg (mode1
, op1
);
2660 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2661 op2
= copy_to_mode_reg (mode2
, op2
);
2663 || GET_MODE (target
) != tmode
2664 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2665 target
= gen_reg_rtx (tmode
);
2666 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2672 case ARM_BUILTIN_WZERO
:
2673 target
= gen_reg_rtx (DImode
);
2674 emit_insn (gen_iwmmxt_clrdi (target
));
2677 case ARM_BUILTIN_WSRLHI
:
2678 case ARM_BUILTIN_WSRLWI
:
2679 case ARM_BUILTIN_WSRLDI
:
2680 case ARM_BUILTIN_WSLLHI
:
2681 case ARM_BUILTIN_WSLLWI
:
2682 case ARM_BUILTIN_WSLLDI
:
2683 case ARM_BUILTIN_WSRAHI
:
2684 case ARM_BUILTIN_WSRAWI
:
2685 case ARM_BUILTIN_WSRADI
:
2686 case ARM_BUILTIN_WRORHI
:
2687 case ARM_BUILTIN_WRORWI
:
2688 case ARM_BUILTIN_WRORDI
:
2689 case ARM_BUILTIN_WSRLH
:
2690 case ARM_BUILTIN_WSRLW
:
2691 case ARM_BUILTIN_WSRLD
:
2692 case ARM_BUILTIN_WSLLH
:
2693 case ARM_BUILTIN_WSLLW
:
2694 case ARM_BUILTIN_WSLLD
:
2695 case ARM_BUILTIN_WSRAH
:
2696 case ARM_BUILTIN_WSRAW
:
2697 case ARM_BUILTIN_WSRAD
:
2698 case ARM_BUILTIN_WRORH
:
2699 case ARM_BUILTIN_WRORW
:
2700 case ARM_BUILTIN_WRORD
:
2701 icode
= (fcode
== ARM_BUILTIN_WSRLHI
? CODE_FOR_lshrv4hi3_iwmmxt
2702 : fcode
== ARM_BUILTIN_WSRLWI
? CODE_FOR_lshrv2si3_iwmmxt
2703 : fcode
== ARM_BUILTIN_WSRLDI
? CODE_FOR_lshrdi3_iwmmxt
2704 : fcode
== ARM_BUILTIN_WSLLHI
? CODE_FOR_ashlv4hi3_iwmmxt
2705 : fcode
== ARM_BUILTIN_WSLLWI
? CODE_FOR_ashlv2si3_iwmmxt
2706 : fcode
== ARM_BUILTIN_WSLLDI
? CODE_FOR_ashldi3_iwmmxt
2707 : fcode
== ARM_BUILTIN_WSRAHI
? CODE_FOR_ashrv4hi3_iwmmxt
2708 : fcode
== ARM_BUILTIN_WSRAWI
? CODE_FOR_ashrv2si3_iwmmxt
2709 : fcode
== ARM_BUILTIN_WSRADI
? CODE_FOR_ashrdi3_iwmmxt
2710 : fcode
== ARM_BUILTIN_WRORHI
? CODE_FOR_rorv4hi3
2711 : fcode
== ARM_BUILTIN_WRORWI
? CODE_FOR_rorv2si3
2712 : fcode
== ARM_BUILTIN_WRORDI
? CODE_FOR_rordi3
2713 : fcode
== ARM_BUILTIN_WSRLH
? CODE_FOR_lshrv4hi3_di
2714 : fcode
== ARM_BUILTIN_WSRLW
? CODE_FOR_lshrv2si3_di
2715 : fcode
== ARM_BUILTIN_WSRLD
? CODE_FOR_lshrdi3_di
2716 : fcode
== ARM_BUILTIN_WSLLH
? CODE_FOR_ashlv4hi3_di
2717 : fcode
== ARM_BUILTIN_WSLLW
? CODE_FOR_ashlv2si3_di
2718 : fcode
== ARM_BUILTIN_WSLLD
? CODE_FOR_ashldi3_di
2719 : fcode
== ARM_BUILTIN_WSRAH
? CODE_FOR_ashrv4hi3_di
2720 : fcode
== ARM_BUILTIN_WSRAW
? CODE_FOR_ashrv2si3_di
2721 : fcode
== ARM_BUILTIN_WSRAD
? CODE_FOR_ashrdi3_di
2722 : fcode
== ARM_BUILTIN_WRORH
? CODE_FOR_rorv4hi3_di
2723 : fcode
== ARM_BUILTIN_WRORW
? CODE_FOR_rorv2si3_di
2724 : fcode
== ARM_BUILTIN_WRORD
? CODE_FOR_rordi3_di
2725 : CODE_FOR_nothing
);
2726 arg1
= CALL_EXPR_ARG (exp
, 1);
2727 op1
= expand_normal (arg1
);
2728 if (GET_MODE (op1
) == VOIDmode
)
2731 if ((fcode
== ARM_BUILTIN_WRORHI
|| fcode
== ARM_BUILTIN_WRORWI
2732 || fcode
== ARM_BUILTIN_WRORH
|| fcode
== ARM_BUILTIN_WRORW
)
2733 && (imm
< 0 || imm
> 32))
2735 if (fcode
== ARM_BUILTIN_WRORHI
)
2736 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_rori_pi16 in code.");
2737 else if (fcode
== ARM_BUILTIN_WRORWI
)
2738 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_rori_pi32 in code.");
2739 else if (fcode
== ARM_BUILTIN_WRORH
)
2740 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_ror_pi16 in code.");
2742 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_ror_pi32 in code.");
2744 else if ((fcode
== ARM_BUILTIN_WRORDI
|| fcode
== ARM_BUILTIN_WRORD
)
2745 && (imm
< 0 || imm
> 64))
2747 if (fcode
== ARM_BUILTIN_WRORDI
)
2748 error ("the range of count should be in 0 to 64. please check the intrinsic _mm_rori_si64 in code.");
2750 error ("the range of count should be in 0 to 64. please check the intrinsic _mm_ror_si64 in code.");
2754 if (fcode
== ARM_BUILTIN_WSRLHI
)
2755 error ("the count should be no less than 0. please check the intrinsic _mm_srli_pi16 in code.");
2756 else if (fcode
== ARM_BUILTIN_WSRLWI
)
2757 error ("the count should be no less than 0. please check the intrinsic _mm_srli_pi32 in code.");
2758 else if (fcode
== ARM_BUILTIN_WSRLDI
)
2759 error ("the count should be no less than 0. please check the intrinsic _mm_srli_si64 in code.");
2760 else if (fcode
== ARM_BUILTIN_WSLLHI
)
2761 error ("the count should be no less than 0. please check the intrinsic _mm_slli_pi16 in code.");
2762 else if (fcode
== ARM_BUILTIN_WSLLWI
)
2763 error ("the count should be no less than 0. please check the intrinsic _mm_slli_pi32 in code.");
2764 else if (fcode
== ARM_BUILTIN_WSLLDI
)
2765 error ("the count should be no less than 0. please check the intrinsic _mm_slli_si64 in code.");
2766 else if (fcode
== ARM_BUILTIN_WSRAHI
)
2767 error ("the count should be no less than 0. please check the intrinsic _mm_srai_pi16 in code.");
2768 else if (fcode
== ARM_BUILTIN_WSRAWI
)
2769 error ("the count should be no less than 0. please check the intrinsic _mm_srai_pi32 in code.");
2770 else if (fcode
== ARM_BUILTIN_WSRADI
)
2771 error ("the count should be no less than 0. please check the intrinsic _mm_srai_si64 in code.");
2772 else if (fcode
== ARM_BUILTIN_WSRLH
)
2773 error ("the count should be no less than 0. please check the intrinsic _mm_srl_pi16 in code.");
2774 else if (fcode
== ARM_BUILTIN_WSRLW
)
2775 error ("the count should be no less than 0. please check the intrinsic _mm_srl_pi32 in code.");
2776 else if (fcode
== ARM_BUILTIN_WSRLD
)
2777 error ("the count should be no less than 0. please check the intrinsic _mm_srl_si64 in code.");
2778 else if (fcode
== ARM_BUILTIN_WSLLH
)
2779 error ("the count should be no less than 0. please check the intrinsic _mm_sll_pi16 in code.");
2780 else if (fcode
== ARM_BUILTIN_WSLLW
)
2781 error ("the count should be no less than 0. please check the intrinsic _mm_sll_pi32 in code.");
2782 else if (fcode
== ARM_BUILTIN_WSLLD
)
2783 error ("the count should be no less than 0. please check the intrinsic _mm_sll_si64 in code.");
2784 else if (fcode
== ARM_BUILTIN_WSRAH
)
2785 error ("the count should be no less than 0. please check the intrinsic _mm_sra_pi16 in code.");
2786 else if (fcode
== ARM_BUILTIN_WSRAW
)
2787 error ("the count should be no less than 0. please check the intrinsic _mm_sra_pi32 in code.");
2789 error ("the count should be no less than 0. please check the intrinsic _mm_sra_si64 in code.");
2792 return arm_expand_binop_builtin (icode
, exp
, target
);
2798 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
2799 if (d
->code
== (const enum arm_builtins
) fcode
)
2800 return arm_expand_binop_builtin (d
->icode
, exp
, target
);
2802 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
2803 if (d
->code
== (const enum arm_builtins
) fcode
)
2804 return arm_expand_unop_builtin (d
->icode
, exp
, target
, 0);
2806 for (i
= 0, d
= bdesc_3arg
; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
2807 if (d
->code
== (const enum arm_builtins
) fcode
)
2808 return arm_expand_ternop_builtin (d
->icode
, exp
, target
);
2810 /* @@@ Should really do something sensible here. */
2815 arm_builtin_vectorized_function (tree fndecl
, tree type_out
, tree type_in
)
2817 machine_mode in_mode
, out_mode
;
2819 bool out_unsigned_p
= TYPE_UNSIGNED (type_out
);
2821 if (TREE_CODE (type_out
) != VECTOR_TYPE
2822 || TREE_CODE (type_in
) != VECTOR_TYPE
)
2825 out_mode
= TYPE_MODE (TREE_TYPE (type_out
));
2826 out_n
= TYPE_VECTOR_SUBPARTS (type_out
);
2827 in_mode
= TYPE_MODE (TREE_TYPE (type_in
));
2828 in_n
= TYPE_VECTOR_SUBPARTS (type_in
);
2830 /* ARM_CHECK_BUILTIN_MODE and ARM_FIND_VRINT_VARIANT are used to find the
2831 decl of the vectorized builtin for the appropriate vector mode.
2832 NULL_TREE is returned if no such builtin is available. */
2833 #undef ARM_CHECK_BUILTIN_MODE
2834 #define ARM_CHECK_BUILTIN_MODE(C) \
2835 (TARGET_NEON && TARGET_FPU_ARMV8 \
2836 && flag_unsafe_math_optimizations \
2837 && ARM_CHECK_BUILTIN_MODE_1 (C))
2839 #undef ARM_CHECK_BUILTIN_MODE_1
2840 #define ARM_CHECK_BUILTIN_MODE_1(C) \
2841 (out_mode == SFmode && out_n == C \
2842 && in_mode == SFmode && in_n == C)
2844 #undef ARM_FIND_VRINT_VARIANT
2845 #define ARM_FIND_VRINT_VARIANT(N) \
2846 (ARM_CHECK_BUILTIN_MODE (2) \
2847 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sf, false) \
2848 : (ARM_CHECK_BUILTIN_MODE (4) \
2849 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sf, false) \
2852 if (DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
2854 enum built_in_function fn
= DECL_FUNCTION_CODE (fndecl
);
2857 case BUILT_IN_FLOORF
:
2858 return ARM_FIND_VRINT_VARIANT (vrintm
);
2859 case BUILT_IN_CEILF
:
2860 return ARM_FIND_VRINT_VARIANT (vrintp
);
2861 case BUILT_IN_TRUNCF
:
2862 return ARM_FIND_VRINT_VARIANT (vrintz
);
2863 case BUILT_IN_ROUNDF
:
2864 return ARM_FIND_VRINT_VARIANT (vrinta
);
2865 #undef ARM_CHECK_BUILTIN_MODE_1
2866 #define ARM_CHECK_BUILTIN_MODE_1(C) \
2867 (out_mode == SImode && out_n == C \
2868 && in_mode == SFmode && in_n == C)
2870 #define ARM_FIND_VCVT_VARIANT(N) \
2871 (ARM_CHECK_BUILTIN_MODE (2) \
2872 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sfv2si, false) \
2873 : (ARM_CHECK_BUILTIN_MODE (4) \
2874 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sfv4si, false) \
2877 #define ARM_FIND_VCVTU_VARIANT(N) \
2878 (ARM_CHECK_BUILTIN_MODE (2) \
2879 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv2sfv2si, false) \
2880 : (ARM_CHECK_BUILTIN_MODE (4) \
2881 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv4sfv4si, false) \
2883 case BUILT_IN_LROUNDF
:
2884 return out_unsigned_p
2885 ? ARM_FIND_VCVTU_VARIANT (vcvta
)
2886 : ARM_FIND_VCVT_VARIANT (vcvta
);
2887 case BUILT_IN_LCEILF
:
2888 return out_unsigned_p
2889 ? ARM_FIND_VCVTU_VARIANT (vcvtp
)
2890 : ARM_FIND_VCVT_VARIANT (vcvtp
);
2891 case BUILT_IN_LFLOORF
:
2892 return out_unsigned_p
2893 ? ARM_FIND_VCVTU_VARIANT (vcvtm
)
2894 : ARM_FIND_VCVT_VARIANT (vcvtm
);
2895 #undef ARM_CHECK_BUILTIN_MODE
2896 #define ARM_CHECK_BUILTIN_MODE(C, N) \
2897 (out_mode == N##mode && out_n == C \
2898 && in_mode == N##mode && in_n == C)
2899 case BUILT_IN_BSWAP16
:
2900 if (ARM_CHECK_BUILTIN_MODE (4, HI
))
2901 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4hi
, false);
2902 else if (ARM_CHECK_BUILTIN_MODE (8, HI
))
2903 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv8hi
, false);
2906 case BUILT_IN_BSWAP32
:
2907 if (ARM_CHECK_BUILTIN_MODE (2, SI
))
2908 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2si
, false);
2909 else if (ARM_CHECK_BUILTIN_MODE (4, SI
))
2910 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4si
, false);
2913 case BUILT_IN_BSWAP64
:
2914 if (ARM_CHECK_BUILTIN_MODE (2, DI
))
2915 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2di
, false);
2918 case BUILT_IN_COPYSIGNF
:
2919 if (ARM_CHECK_BUILTIN_MODE (2, SF
))
2920 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv2sf
, false);
2921 else if (ARM_CHECK_BUILTIN_MODE (4, SF
))
2922 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv4sf
, false);
2932 #undef ARM_FIND_VCVT_VARIANT
2933 #undef ARM_FIND_VCVTU_VARIANT
2934 #undef ARM_CHECK_BUILTIN_MODE
2935 #undef ARM_FIND_VRINT_VARIANT
2938 arm_atomic_assign_expand_fenv (tree
*hold
, tree
*clear
, tree
*update
)
2940 const unsigned ARM_FE_INVALID
= 1;
2941 const unsigned ARM_FE_DIVBYZERO
= 2;
2942 const unsigned ARM_FE_OVERFLOW
= 4;
2943 const unsigned ARM_FE_UNDERFLOW
= 8;
2944 const unsigned ARM_FE_INEXACT
= 16;
2945 const unsigned HOST_WIDE_INT ARM_FE_ALL_EXCEPT
= (ARM_FE_INVALID
2950 const unsigned HOST_WIDE_INT ARM_FE_EXCEPT_SHIFT
= 8;
2951 tree fenv_var
, get_fpscr
, set_fpscr
, mask
, ld_fenv
, masked_fenv
;
2952 tree new_fenv_var
, reload_fenv
, restore_fnenv
;
2953 tree update_call
, atomic_feraiseexcept
, hold_fnclex
;
2955 if (!TARGET_VFP
|| !TARGET_HARD_FLOAT
)
2958 /* Generate the equivalent of :
2959 unsigned int fenv_var;
2960 fenv_var = __builtin_arm_get_fpscr ();
2962 unsigned int masked_fenv;
2963 masked_fenv = fenv_var & mask;
2965 __builtin_arm_set_fpscr (masked_fenv); */
2967 fenv_var
= create_tmp_var_raw (unsigned_type_node
);
2968 get_fpscr
= arm_builtin_decls
[ARM_BUILTIN_GET_FPSCR
];
2969 set_fpscr
= arm_builtin_decls
[ARM_BUILTIN_SET_FPSCR
];
2970 mask
= build_int_cst (unsigned_type_node
,
2971 ~((ARM_FE_ALL_EXCEPT
<< ARM_FE_EXCEPT_SHIFT
)
2972 | ARM_FE_ALL_EXCEPT
));
2973 ld_fenv
= build2 (MODIFY_EXPR
, unsigned_type_node
,
2974 fenv_var
, build_call_expr (get_fpscr
, 0));
2975 masked_fenv
= build2 (BIT_AND_EXPR
, unsigned_type_node
, fenv_var
, mask
);
2976 hold_fnclex
= build_call_expr (set_fpscr
, 1, masked_fenv
);
2977 *hold
= build2 (COMPOUND_EXPR
, void_type_node
,
2978 build2 (COMPOUND_EXPR
, void_type_node
, masked_fenv
, ld_fenv
),
2981 /* Store the value of masked_fenv to clear the exceptions:
2982 __builtin_arm_set_fpscr (masked_fenv); */
2984 *clear
= build_call_expr (set_fpscr
, 1, masked_fenv
);
2986 /* Generate the equivalent of :
2987 unsigned int new_fenv_var;
2988 new_fenv_var = __builtin_arm_get_fpscr ();
2990 __builtin_arm_set_fpscr (fenv_var);
2992 __atomic_feraiseexcept (new_fenv_var); */
2994 new_fenv_var
= create_tmp_var_raw (unsigned_type_node
);
2995 reload_fenv
= build2 (MODIFY_EXPR
, unsigned_type_node
, new_fenv_var
,
2996 build_call_expr (get_fpscr
, 0));
2997 restore_fnenv
= build_call_expr (set_fpscr
, 1, fenv_var
);
2998 atomic_feraiseexcept
= builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT
);
2999 update_call
= build_call_expr (atomic_feraiseexcept
, 1,
3000 fold_convert (integer_type_node
, new_fenv_var
));
3001 *update
= build2 (COMPOUND_EXPR
, void_type_node
,
3002 build2 (COMPOUND_EXPR
, void_type_node
,
3003 reload_fenv
, restore_fnenv
), update_call
);
3006 #include "gt-arm-builtins.h"