1 /* Description of builtins used by the ARM backend.
2 Copyright (C) 2014-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published
8 by the Free Software Foundation; either version 3, or (at your
9 option) any later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
13 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
14 License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #define IN_TARGET_CODE 1
24 #include "coretypes.h"
29 #include "gimple-expr.h"
32 #include "profile-count.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
41 #include "langhooks.h"
42 #include "case-cfn-macros.h"
/* Maximum number of type-qualifier entries a builtin signature may
   carry: the return value plus up to six arguments (the *_qualifiers
   tables below are all sized by this).  */
#define SIMD_MAX_BUILTIN_ARGS 7
/* Qualifier flags describing each operand (including the return value,
   which always occupies slot 0) of a builtin; combined bitwise in the
   *_qualifiers tables below.  */
enum arm_type_qualifiers
{
  /* T foo.  */
  qualifier_none = 0x0,
  /* unsigned T foo.  */
  qualifier_unsigned = 0x1, /* 1 << 0  */
  /* const T foo.  */
  qualifier_const = 0x2, /* 1 << 1  */
  /* T *foo.  */
  qualifier_pointer = 0x4, /* 1 << 2  */
  /* const T *foo.  */
  qualifier_const_pointer = 0x6,
  /* Used when expanding arguments if an operand could
     be an immediate.  */
  qualifier_immediate = 0x8, /* 1 << 3  */
  qualifier_unsigned_immediate = 0x9,
  qualifier_maybe_immediate = 0x10, /* 1 << 4  */
  /* void foo (...).  */
  qualifier_void = 0x20, /* 1 << 5  */
  /* Some patterns may have internal operands, this qualifier is an
     instruction to the initialisation code to skip this operand.  */
  qualifier_internal = 0x40, /* 1 << 6  */
  /* Some builtins should use the T_*mode* encoded in a simd_builtin_datum
     rather than using the type of the operand.  */
  qualifier_map_mode = 0x80, /* 1 << 7  */
  /* qualifier_pointer | qualifier_map_mode  */
  qualifier_pointer_map_mode = 0x84,
  /* qualifier_const_pointer | qualifier_map_mode  */
  qualifier_const_pointer_map_mode = 0x86,
  /* Polynomial types.  */
  qualifier_poly = 0x100,
  /* Lane indices - must be within range of previous argument = a vector.  */
  qualifier_lane_index = 0x200,
  /* Lane indices for single lane structure loads and stores.  */
  qualifier_struct_load_store_lane_index = 0x400,
  /* A void pointer.  */
  qualifier_void_pointer = 0x800,
  /* A const void pointer.  */
  qualifier_const_void_pointer = 0x802
};
88 /* The qualifier_internal allows generation of a unary builtin from
89 a pattern with a third pseudo-operand such as a match_scratch.
91 static enum arm_type_qualifiers
92 arm_unop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
93 = { qualifier_none
, qualifier_none
, qualifier_internal
};
94 #define UNOP_QUALIFIERS (arm_unop_qualifiers)
96 /* unsigned T (unsigned T). */
97 static enum arm_type_qualifiers
98 arm_bswap_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
99 = { qualifier_unsigned
, qualifier_unsigned
};
100 #define BSWAP_QUALIFIERS (arm_bswap_qualifiers)
102 /* T (T, T [maybe_immediate]). */
103 static enum arm_type_qualifiers
104 arm_binop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
105 = { qualifier_none
, qualifier_none
, qualifier_maybe_immediate
};
106 #define BINOP_QUALIFIERS (arm_binop_qualifiers)
109 static enum arm_type_qualifiers
110 arm_ternop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
111 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_none
};
112 #define TERNOP_QUALIFIERS (arm_ternop_qualifiers)
114 /* unsigned T (unsigned T, unsigned T, unsigned T). */
115 static enum arm_type_qualifiers
116 arm_unsigned_uternop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
117 = { qualifier_unsigned
, qualifier_unsigned
, qualifier_unsigned
,
118 qualifier_unsigned
};
119 #define UTERNOP_QUALIFIERS (arm_unsigned_uternop_qualifiers)
121 /* T (T, immediate). */
122 static enum arm_type_qualifiers
123 arm_binop_imm_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
124 = { qualifier_none
, qualifier_none
, qualifier_immediate
};
125 #define BINOP_IMM_QUALIFIERS (arm_binop_imm_qualifiers)
127 /* T (T, lane index). */
128 static enum arm_type_qualifiers
129 arm_getlane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
130 = { qualifier_none
, qualifier_none
, qualifier_lane_index
};
131 #define GETLANE_QUALIFIERS (arm_getlane_qualifiers)
133 /* T (T, T, T, immediate). */
134 static enum arm_type_qualifiers
135 arm_mac_n_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
136 = { qualifier_none
, qualifier_none
, qualifier_none
,
137 qualifier_none
, qualifier_immediate
};
138 #define MAC_N_QUALIFIERS (arm_mac_n_qualifiers)
140 /* T (T, T, T, lane index). */
141 static enum arm_type_qualifiers
142 arm_mac_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
143 = { qualifier_none
, qualifier_none
, qualifier_none
,
144 qualifier_none
, qualifier_lane_index
};
145 #define MAC_LANE_QUALIFIERS (arm_mac_lane_qualifiers)
147 /* unsigned T (unsigned T, unsigned T, unsigend T, lane index). */
148 static enum arm_type_qualifiers
149 arm_umac_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
150 = { qualifier_unsigned
, qualifier_unsigned
, qualifier_unsigned
,
151 qualifier_unsigned
, qualifier_lane_index
};
152 #define UMAC_LANE_QUALIFIERS (arm_umac_lane_qualifiers)
154 /* T (T, T, immediate). */
155 static enum arm_type_qualifiers
156 arm_ternop_imm_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
157 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_immediate
};
158 #define TERNOP_IMM_QUALIFIERS (arm_ternop_imm_qualifiers)
160 /* T (T, T, lane index). */
161 static enum arm_type_qualifiers
162 arm_setlane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
163 = { qualifier_none
, qualifier_none
, qualifier_none
, qualifier_lane_index
};
164 #define SETLANE_QUALIFIERS (arm_setlane_qualifiers)
167 static enum arm_type_qualifiers
168 arm_combine_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
169 = { qualifier_none
, qualifier_none
, qualifier_none
};
170 #define COMBINE_QUALIFIERS (arm_combine_qualifiers)
172 /* T ([T element type] *). */
173 static enum arm_type_qualifiers
174 arm_load1_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
175 = { qualifier_none
, qualifier_const_pointer_map_mode
};
176 #define LOAD1_QUALIFIERS (arm_load1_qualifiers)
178 /* T ([T element type] *, T, immediate). */
179 static enum arm_type_qualifiers
180 arm_load1_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
181 = { qualifier_none
, qualifier_const_pointer_map_mode
,
182 qualifier_none
, qualifier_struct_load_store_lane_index
};
183 #define LOAD1LANE_QUALIFIERS (arm_load1_lane_qualifiers)
185 /* unsigned T (unsigned T, unsigned T, unsigned T). */
186 static enum arm_type_qualifiers
187 arm_unsigned_binop_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
188 = { qualifier_unsigned
, qualifier_unsigned
, qualifier_unsigned
,
189 qualifier_unsigned
};
190 #define UBINOP_QUALIFIERS (arm_unsigned_binop_qualifiers)
192 /* void (unsigned immediate, unsigned immediate, unsigned immediate,
193 unsigned immediate, unsigned immediate, unsigned immediate). */
194 static enum arm_type_qualifiers
195 arm_cdp_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
196 = { qualifier_void
, qualifier_unsigned_immediate
,
197 qualifier_unsigned_immediate
,
198 qualifier_unsigned_immediate
,
199 qualifier_unsigned_immediate
,
200 qualifier_unsigned_immediate
,
201 qualifier_unsigned_immediate
};
202 #define CDP_QUALIFIERS \
205 /* void (unsigned immediate, unsigned immediate, const void *). */
206 static enum arm_type_qualifiers
207 arm_ldc_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
208 = { qualifier_void
, qualifier_unsigned_immediate
,
209 qualifier_unsigned_immediate
, qualifier_const_void_pointer
};
210 #define LDC_QUALIFIERS \
213 /* void (unsigned immediate, unsigned immediate, void *). */
214 static enum arm_type_qualifiers
215 arm_stc_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
216 = { qualifier_void
, qualifier_unsigned_immediate
,
217 qualifier_unsigned_immediate
, qualifier_void_pointer
};
218 #define STC_QUALIFIERS \
221 /* void (unsigned immediate, unsigned immediate, T, unsigned immediate,
222 unsigned immediate, unsigned immediate). */
223 static enum arm_type_qualifiers
224 arm_mcr_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
225 = { qualifier_void
, qualifier_unsigned_immediate
,
226 qualifier_unsigned_immediate
, qualifier_none
,
227 qualifier_unsigned_immediate
, qualifier_unsigned_immediate
,
228 qualifier_unsigned_immediate
};
229 #define MCR_QUALIFIERS \
232 /* T (unsigned immediate, unsigned immediate, unsigned immediate,
233 unsigned immediate, unsigned immediate). */
234 static enum arm_type_qualifiers
235 arm_mrc_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
236 = { qualifier_none
, qualifier_unsigned_immediate
,
237 qualifier_unsigned_immediate
, qualifier_unsigned_immediate
,
238 qualifier_unsigned_immediate
, qualifier_unsigned_immediate
};
239 #define MRC_QUALIFIERS \
242 /* void (unsigned immediate, unsigned immediate, T, unsigned immediate). */
243 static enum arm_type_qualifiers
244 arm_mcrr_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
245 = { qualifier_void
, qualifier_unsigned_immediate
,
246 qualifier_unsigned_immediate
, qualifier_none
,
247 qualifier_unsigned_immediate
};
248 #define MCRR_QUALIFIERS \
249 (arm_mcrr_qualifiers)
251 /* T (unsigned immediate, unsigned immediate, unsigned immediate). */
252 static enum arm_type_qualifiers
253 arm_mrrc_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
254 = { qualifier_none
, qualifier_unsigned_immediate
,
255 qualifier_unsigned_immediate
, qualifier_unsigned_immediate
};
256 #define MRRC_QUALIFIERS \
257 (arm_mrrc_qualifiers)
259 /* The first argument (return type) of a store should be void type,
260 which we represent with qualifier_void. Their first operand will be
261 a DImode pointer to the location to store to, so we must use
262 qualifier_map_mode | qualifier_pointer to build a pointer to the
263 element type of the vector.
265 void ([T element type] *, T). */
266 static enum arm_type_qualifiers
267 arm_store1_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
268 = { qualifier_void
, qualifier_pointer_map_mode
, qualifier_none
};
269 #define STORE1_QUALIFIERS (arm_store1_qualifiers)
271 /* void ([T element type] *, T, immediate). */
272 static enum arm_type_qualifiers
273 arm_storestruct_lane_qualifiers
[SIMD_MAX_BUILTIN_ARGS
]
274 = { qualifier_void
, qualifier_pointer_map_mode
,
275 qualifier_none
, qualifier_struct_load_store_lane_index
};
276 #define STORE1LANE_QUALIFIERS (arm_storestruct_lane_qualifiers)
/* Map type abbreviations used in the builtin tables below to their
   machine modes; UP (X) pastes the suffix on.  */
#define v8qi_UP  E_V8QImode
#define v4hi_UP  E_V4HImode
#define v4hf_UP  E_V4HFmode
#define v2si_UP  E_V2SImode
#define v2sf_UP  E_V2SFmode
#define di_UP    E_DImode
#define v16qi_UP E_V16QImode
#define v8hi_UP  E_V8HImode
#define v8hf_UP  E_V8HFmode
#define v4si_UP  E_V4SImode
#define v4sf_UP  E_V4SFmode
#define v2di_UP  E_V2DImode
#define ti_UP    E_TImode
#define ei_UP    E_EImode
#define oi_UP    E_OImode
#define hf_UP    E_HFmode
#define si_UP    E_SImode
#define void_UP  E_VOIDmode

#define UP(X) X##_UP
302 const enum insn_code code
;
304 enum arm_type_qualifiers
*qualifiers
;
#define CF(N,X) CODE_FOR_neon_##N##X

/* VARn expands to n table entries for the same builtin N, one per
   listed mode suffix; each chains down to VAR1.  */
#define VAR1(T, N, A) \
  {#N #A, UP (A), CF (N, A), 0, T##_QUALIFIERS},
#define VAR2(T, N, A, B) \
  VAR1 (T, N, A) \
  VAR1 (T, N, B)
#define VAR3(T, N, A, B, C) \
  VAR2 (T, N, A, B) \
  VAR1 (T, N, C)
#define VAR4(T, N, A, B, C, D) \
  VAR3 (T, N, A, B, C) \
  VAR1 (T, N, D)
#define VAR5(T, N, A, B, C, D, E) \
  VAR4 (T, N, A, B, C, D) \
  VAR1 (T, N, E)
#define VAR6(T, N, A, B, C, D, E, F) \
  VAR5 (T, N, A, B, C, D, E) \
  VAR1 (T, N, F)
#define VAR7(T, N, A, B, C, D, E, F, G) \
  VAR6 (T, N, A, B, C, D, E, F) \
  VAR1 (T, N, G)
#define VAR8(T, N, A, B, C, D, E, F, G, H) \
  VAR7 (T, N, A, B, C, D, E, F, G) \
  VAR1 (T, N, H)
#define VAR9(T, N, A, B, C, D, E, F, G, H, I) \
  VAR8 (T, N, A, B, C, D, E, F, G, H) \
  VAR1 (T, N, I)
#define VAR10(T, N, A, B, C, D, E, F, G, H, I, J) \
  VAR9 (T, N, A, B, C, D, E, F, G, H, I) \
  VAR1 (T, N, J)
#define VAR11(T, N, A, B, C, D, E, F, G, H, I, J, K) \
  VAR10 (T, N, A, B, C, D, E, F, G, H, I, J) \
  VAR1 (T, N, K)
#define VAR12(T, N, A, B, C, D, E, F, G, H, I, J, K, L) \
  VAR11 (T, N, A, B, C, D, E, F, G, H, I, J, K) \
  VAR1 (T, N, L)
345 /* The builtin data can be found in arm_neon_builtins.def, arm_vfp_builtins.def
346 and arm_acle_builtins.def. The entries in arm_neon_builtins.def require
347 TARGET_NEON to be true. The feature tests are checked when the builtins are
350 The mode entries in the following table correspond to the "key" type of the
351 instruction variant, i.e. equivalent to that which would be specified after
352 the assembler mnemonic for neon instructions, which usually refers to the
353 last vector operand. The modes listed per instruction should be the same as
354 those defined for that instruction's pattern, for instance in neon.md. */
356 static arm_builtin_datum vfp_builtin_data
[] =
358 #include "arm_vfp_builtins.def"
361 static arm_builtin_datum neon_builtin_data
[] =
363 #include "arm_neon_builtins.def"
368 #define VAR1(T, N, A) \
369 {#N, UP (A), CODE_FOR_##N, 0, T##_QUALIFIERS},
371 static arm_builtin_datum acle_builtin_data
[] =
373 #include "arm_acle_builtins.def"
378 #define VAR1(T, N, X) \
379 ARM_BUILTIN_NEON_##N##X,
383 ARM_BUILTIN_GETWCGR0
,
384 ARM_BUILTIN_GETWCGR1
,
385 ARM_BUILTIN_GETWCGR2
,
386 ARM_BUILTIN_GETWCGR3
,
388 ARM_BUILTIN_SETWCGR0
,
389 ARM_BUILTIN_SETWCGR1
,
390 ARM_BUILTIN_SETWCGR2
,
391 ARM_BUILTIN_SETWCGR3
,
415 ARM_BUILTIN_WALIGNR0
,
416 ARM_BUILTIN_WALIGNR1
,
417 ARM_BUILTIN_WALIGNR2
,
418 ARM_BUILTIN_WALIGNR3
,
427 ARM_BUILTIN_TMOVMSKB
,
428 ARM_BUILTIN_TMOVMSKH
,
429 ARM_BUILTIN_TMOVMSKW
,
438 ARM_BUILTIN_WPACKHSS
,
439 ARM_BUILTIN_WPACKWSS
,
440 ARM_BUILTIN_WPACKDSS
,
441 ARM_BUILTIN_WPACKHUS
,
442 ARM_BUILTIN_WPACKWUS
,
443 ARM_BUILTIN_WPACKDUS
,
472 ARM_BUILTIN_WCMPGTUB
,
473 ARM_BUILTIN_WCMPGTUH
,
474 ARM_BUILTIN_WCMPGTUW
,
475 ARM_BUILTIN_WCMPGTSB
,
476 ARM_BUILTIN_WCMPGTSH
,
477 ARM_BUILTIN_WCMPGTSW
,
479 ARM_BUILTIN_TEXTRMSB
,
480 ARM_BUILTIN_TEXTRMSH
,
481 ARM_BUILTIN_TEXTRMSW
,
482 ARM_BUILTIN_TEXTRMUB
,
483 ARM_BUILTIN_TEXTRMUH
,
484 ARM_BUILTIN_TEXTRMUW
,
534 ARM_BUILTIN_WUNPCKIHB
,
535 ARM_BUILTIN_WUNPCKIHH
,
536 ARM_BUILTIN_WUNPCKIHW
,
537 ARM_BUILTIN_WUNPCKILB
,
538 ARM_BUILTIN_WUNPCKILH
,
539 ARM_BUILTIN_WUNPCKILW
,
541 ARM_BUILTIN_WUNPCKEHSB
,
542 ARM_BUILTIN_WUNPCKEHSH
,
543 ARM_BUILTIN_WUNPCKEHSW
,
544 ARM_BUILTIN_WUNPCKEHUB
,
545 ARM_BUILTIN_WUNPCKEHUH
,
546 ARM_BUILTIN_WUNPCKEHUW
,
547 ARM_BUILTIN_WUNPCKELSB
,
548 ARM_BUILTIN_WUNPCKELSH
,
549 ARM_BUILTIN_WUNPCKELSW
,
550 ARM_BUILTIN_WUNPCKELUB
,
551 ARM_BUILTIN_WUNPCKELUH
,
552 ARM_BUILTIN_WUNPCKELUW
,
558 ARM_BUILTIN_WADDSUBHX
,
559 ARM_BUILTIN_WSUBADDHX
,
561 ARM_BUILTIN_WABSDIFFB
,
562 ARM_BUILTIN_WABSDIFFH
,
563 ARM_BUILTIN_WABSDIFFW
,
580 ARM_BUILTIN_WMULWSMR
,
581 ARM_BUILTIN_WMULWUMR
,
592 ARM_BUILTIN_WQMULWMR
,
594 ARM_BUILTIN_WADDBHUSM
,
595 ARM_BUILTIN_WADDBHUSL
,
602 ARM_BUILTIN_WQMIABBN
,
603 ARM_BUILTIN_WQMIABTN
,
604 ARM_BUILTIN_WQMIATBN
,
605 ARM_BUILTIN_WQMIATTN
,
622 ARM_BUILTIN_WMIAWBBN
,
623 ARM_BUILTIN_WMIAWBTN
,
624 ARM_BUILTIN_WMIAWTBN
,
625 ARM_BUILTIN_WMIAWTTN
,
629 ARM_BUILTIN_GET_FPSCR
,
630 ARM_BUILTIN_SET_FPSCR
,
632 ARM_BUILTIN_CMSE_NONSECURE_CALLER
,
638 #define CRYPTO1(L, U, M1, M2) \
639 ARM_BUILTIN_CRYPTO_##U,
640 #define CRYPTO2(L, U, M1, M2, M3) \
641 ARM_BUILTIN_CRYPTO_##U,
642 #define CRYPTO3(L, U, M1, M2, M3, M4) \
643 ARM_BUILTIN_CRYPTO_##U,
645 ARM_BUILTIN_CRYPTO_BASE
,
647 #include "crypto.def"
653 ARM_BUILTIN_VFP_BASE
,
655 #include "arm_vfp_builtins.def"
657 ARM_BUILTIN_NEON_BASE
,
658 ARM_BUILTIN_NEON_LANE_CHECK
= ARM_BUILTIN_NEON_BASE
,
660 #include "arm_neon_builtins.def"
663 #define VAR1(T, N, X) \
666 ARM_BUILTIN_ACLE_BASE
,
668 #include "arm_acle_builtins.def"
/* First function code of each table-driven builtin range; the BASE
   slot itself is reserved (for NEON it holds __builtin_arm_lane_check).  */
#define ARM_BUILTIN_VFP_PATTERN_START \
  (ARM_BUILTIN_VFP_BASE + 1)

#define ARM_BUILTIN_NEON_PATTERN_START \
  (ARM_BUILTIN_NEON_BASE + 1)

#define ARM_BUILTIN_ACLE_PATTERN_START \
  (ARM_BUILTIN_ACLE_BASE + 1)
694 static GTY(()) tree arm_builtin_decls
[ARM_BUILTIN_MAX
];
/* Counts of D- and Q-register type variants; #undef'd once the builtin
   initialisation code below is done with them.  */
#define NUM_DREG_TYPES 5
#define NUM_QREG_TYPES 6
699 /* Internal scalar builtin types. These types are used to support
700 neon intrinsic builtins. They are _not_ user-visible types. Therefore
701 the mangling for these types are implementation defined. */
702 const char *arm_scalar_builtin_types
[] = {
710 "__builtin_neon_uqi",
711 "__builtin_neon_uhi",
712 "__builtin_neon_usi",
713 "__builtin_neon_udi",
721 #define ENTRY(E, M, Q, S, T, G) E,
724 #include "arm-simd-builtin-types.def"
729 struct arm_simd_type_info
731 enum arm_simd_type type
;
733 /* Internal type name. */
736 /* Internal type name(mangled). The mangled names conform to the
737 AAPCS (see "Procedure Call Standard for the ARM Architecture",
738 Appendix A). To qualify for emission with the mangled names defined in
739 that document, a vector type must not only be of the correct mode but also
740 be of the correct internal Neon vector type (e.g. __simd64_int8_t);
741 these types are registered by arm_init_simd_builtin_types (). In other
742 words, vector types defined in other ways e.g. via vector_size attribute
743 will get default mangled names. */
752 /* Machine mode the internal type maps to. */
756 enum arm_type_qualifiers q
;
759 #define ENTRY(E, M, Q, S, T, G) \
761 "__simd" #S "_" #T "_t", \
762 #G "__simd" #S "_" #T "_t", \
763 NULL_TREE, NULL_TREE, M##mode, qualifier_##Q},
764 static struct arm_simd_type_info arm_simd_types
[] = {
765 #include "arm-simd-builtin-types.def"
769 /* The user-visible __fp16 type. */
770 tree arm_fp16_type_node
= NULL_TREE
;
771 static tree arm_simd_intOI_type_node
= NULL_TREE
;
772 static tree arm_simd_intEI_type_node
= NULL_TREE
;
773 static tree arm_simd_intCI_type_node
= NULL_TREE
;
774 static tree arm_simd_intXI_type_node
= NULL_TREE
;
775 static tree arm_simd_polyQI_type_node
= NULL_TREE
;
776 static tree arm_simd_polyHI_type_node
= NULL_TREE
;
777 static tree arm_simd_polyDI_type_node
= NULL_TREE
;
778 static tree arm_simd_polyTI_type_node
= NULL_TREE
;
781 arm_mangle_builtin_scalar_type (const_tree type
)
785 while (arm_scalar_builtin_types
[i
] != NULL
)
787 const char *name
= arm_scalar_builtin_types
[i
];
789 if (TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
790 && DECL_NAME (TYPE_NAME (type
))
791 && !strcmp (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))), name
))
792 return arm_scalar_builtin_types
[i
];
799 arm_mangle_builtin_vector_type (const_tree type
)
802 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
804 for (i
= 0; i
< nelts
; i
++)
805 if (arm_simd_types
[i
].mode
== TYPE_MODE (type
)
807 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
808 && DECL_NAME (TYPE_NAME (type
))
810 (IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type
))),
811 arm_simd_types
[i
].name
))
812 return arm_simd_types
[i
].mangle
;
818 arm_mangle_builtin_type (const_tree type
)
821 /* Walk through all the AArch64 builtins types tables to filter out the
823 if ((mangle
= arm_mangle_builtin_vector_type (type
))
824 || (mangle
= arm_mangle_builtin_scalar_type (type
)))
831 arm_simd_builtin_std_type (machine_mode mode
,
832 enum arm_type_qualifiers q
)
834 #define QUAL_TYPE(M) \
835 ((q == qualifier_none) ? int##M##_type_node : unsigned_int##M##_type_node);
839 return QUAL_TYPE (QI
);
841 return QUAL_TYPE (HI
);
843 return QUAL_TYPE (SI
);
845 return QUAL_TYPE (DI
);
847 return QUAL_TYPE (TI
);
849 return arm_simd_intOI_type_node
;
851 return arm_simd_intEI_type_node
;
853 return arm_simd_intCI_type_node
;
855 return arm_simd_intXI_type_node
;
857 return arm_fp16_type_node
;
859 return float_type_node
;
861 return double_type_node
;
869 arm_lookup_simd_builtin_type (machine_mode mode
,
870 enum arm_type_qualifiers q
)
873 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
875 /* Non-poly scalar modes map to standard types not in the table. */
876 if (q
!= qualifier_poly
&& !VECTOR_MODE_P (mode
))
877 return arm_simd_builtin_std_type (mode
, q
);
879 for (i
= 0; i
< nelts
; i
++)
880 if (arm_simd_types
[i
].mode
== mode
881 && arm_simd_types
[i
].q
== q
)
882 return arm_simd_types
[i
].itype
;
884 /* Note that we won't have caught the underlying type for poly64x2_t
885 in the above table. This gets default mangling. */
891 arm_simd_builtin_type (machine_mode mode
, bool unsigned_p
, bool poly_p
)
894 return arm_lookup_simd_builtin_type (mode
, qualifier_poly
);
896 return arm_lookup_simd_builtin_type (mode
, qualifier_unsigned
);
898 return arm_lookup_simd_builtin_type (mode
, qualifier_none
);
902 arm_init_simd_builtin_types (void)
905 int nelts
= sizeof (arm_simd_types
) / sizeof (arm_simd_types
[0]);
908 /* Poly types are a world of their own. In order to maintain legacy
909 ABI, they get initialized using the old interface, and don't get
910 an entry in our mangling table, consequently, they get default
911 mangling. As a further gotcha, poly8_t and poly16_t are signed
912 types, poly64_t and poly128_t are unsigned types. */
913 arm_simd_polyQI_type_node
914 = build_distinct_type_copy (intQI_type_node
);
915 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyQI_type_node
,
916 "__builtin_neon_poly8");
917 arm_simd_polyHI_type_node
918 = build_distinct_type_copy (intHI_type_node
);
919 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyHI_type_node
,
920 "__builtin_neon_poly16");
921 arm_simd_polyDI_type_node
922 = build_distinct_type_copy (unsigned_intDI_type_node
);
923 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyDI_type_node
,
924 "__builtin_neon_poly64");
925 arm_simd_polyTI_type_node
926 = build_distinct_type_copy (unsigned_intTI_type_node
);
927 (*lang_hooks
.types
.register_builtin_type
) (arm_simd_polyTI_type_node
,
928 "__builtin_neon_poly128");
930 /* Prevent front-ends from transforming poly vectors into string
932 TYPE_STRING_FLAG (arm_simd_polyQI_type_node
) = false;
933 TYPE_STRING_FLAG (arm_simd_polyHI_type_node
) = false;
935 /* Init all the element types built by the front-end. */
936 arm_simd_types
[Int8x8_t
].eltype
= intQI_type_node
;
937 arm_simd_types
[Int8x16_t
].eltype
= intQI_type_node
;
938 arm_simd_types
[Int16x4_t
].eltype
= intHI_type_node
;
939 arm_simd_types
[Int16x8_t
].eltype
= intHI_type_node
;
940 arm_simd_types
[Int32x2_t
].eltype
= intSI_type_node
;
941 arm_simd_types
[Int32x4_t
].eltype
= intSI_type_node
;
942 arm_simd_types
[Int64x2_t
].eltype
= intDI_type_node
;
943 arm_simd_types
[Uint8x8_t
].eltype
= unsigned_intQI_type_node
;
944 arm_simd_types
[Uint8x16_t
].eltype
= unsigned_intQI_type_node
;
945 arm_simd_types
[Uint16x4_t
].eltype
= unsigned_intHI_type_node
;
946 arm_simd_types
[Uint16x8_t
].eltype
= unsigned_intHI_type_node
;
947 arm_simd_types
[Uint32x2_t
].eltype
= unsigned_intSI_type_node
;
948 arm_simd_types
[Uint32x4_t
].eltype
= unsigned_intSI_type_node
;
949 arm_simd_types
[Uint64x2_t
].eltype
= unsigned_intDI_type_node
;
951 /* Init poly vector element types with scalar poly types. */
952 arm_simd_types
[Poly8x8_t
].eltype
= arm_simd_polyQI_type_node
;
953 arm_simd_types
[Poly8x16_t
].eltype
= arm_simd_polyQI_type_node
;
954 arm_simd_types
[Poly16x4_t
].eltype
= arm_simd_polyHI_type_node
;
955 arm_simd_types
[Poly16x8_t
].eltype
= arm_simd_polyHI_type_node
;
956 /* Note: poly64x2_t is defined in arm_neon.h, to ensure it gets default
959 /* Continue with standard types. */
960 /* The __builtin_simd{64,128}_float16 types are kept private unless
961 we have a scalar __fp16 type. */
962 arm_simd_types
[Float16x4_t
].eltype
= arm_fp16_type_node
;
963 arm_simd_types
[Float16x8_t
].eltype
= arm_fp16_type_node
;
964 arm_simd_types
[Float32x2_t
].eltype
= float_type_node
;
965 arm_simd_types
[Float32x4_t
].eltype
= float_type_node
;
967 for (i
= 0; i
< nelts
; i
++)
969 tree eltype
= arm_simd_types
[i
].eltype
;
970 machine_mode mode
= arm_simd_types
[i
].mode
;
972 if (arm_simd_types
[i
].itype
== NULL
)
973 arm_simd_types
[i
].itype
=
974 build_distinct_type_copy
975 (build_vector_type (eltype
, GET_MODE_NUNITS (mode
)));
977 tdecl
= add_builtin_type (arm_simd_types
[i
].name
,
978 arm_simd_types
[i
].itype
);
979 TYPE_NAME (arm_simd_types
[i
].itype
) = tdecl
;
980 SET_TYPE_STRUCTURAL_EQUALITY (arm_simd_types
[i
].itype
);
983 #define AARCH_BUILD_SIGNED_TYPE(mode) \
984 make_signed_type (GET_MODE_PRECISION (mode));
985 arm_simd_intOI_type_node
= AARCH_BUILD_SIGNED_TYPE (OImode
);
986 arm_simd_intEI_type_node
= AARCH_BUILD_SIGNED_TYPE (EImode
);
987 arm_simd_intCI_type_node
= AARCH_BUILD_SIGNED_TYPE (CImode
);
988 arm_simd_intXI_type_node
= AARCH_BUILD_SIGNED_TYPE (XImode
);
989 #undef AARCH_BUILD_SIGNED_TYPE
991 tdecl
= add_builtin_type
992 ("__builtin_neon_ei" , arm_simd_intEI_type_node
);
993 TYPE_NAME (arm_simd_intEI_type_node
) = tdecl
;
994 tdecl
= add_builtin_type
995 ("__builtin_neon_oi" , arm_simd_intOI_type_node
);
996 TYPE_NAME (arm_simd_intOI_type_node
) = tdecl
;
997 tdecl
= add_builtin_type
998 ("__builtin_neon_ci" , arm_simd_intCI_type_node
);
999 TYPE_NAME (arm_simd_intCI_type_node
) = tdecl
;
1000 tdecl
= add_builtin_type
1001 ("__builtin_neon_xi" , arm_simd_intXI_type_node
);
1002 TYPE_NAME (arm_simd_intXI_type_node
) = tdecl
;
1006 arm_init_simd_builtin_scalar_types (void)
1008 /* Define typedefs for all the standard scalar types. */
1009 (*lang_hooks
.types
.register_builtin_type
) (intQI_type_node
,
1010 "__builtin_neon_qi");
1011 (*lang_hooks
.types
.register_builtin_type
) (intHI_type_node
,
1012 "__builtin_neon_hi");
1013 (*lang_hooks
.types
.register_builtin_type
) (intSI_type_node
,
1014 "__builtin_neon_si");
1015 (*lang_hooks
.types
.register_builtin_type
) (float_type_node
,
1016 "__builtin_neon_sf");
1017 (*lang_hooks
.types
.register_builtin_type
) (intDI_type_node
,
1018 "__builtin_neon_di");
1019 (*lang_hooks
.types
.register_builtin_type
) (double_type_node
,
1020 "__builtin_neon_df");
1021 (*lang_hooks
.types
.register_builtin_type
) (intTI_type_node
,
1022 "__builtin_neon_ti");
1024 /* Unsigned integer types for various mode sizes. */
1025 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intQI_type_node
,
1026 "__builtin_neon_uqi");
1027 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intHI_type_node
,
1028 "__builtin_neon_uhi");
1029 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intSI_type_node
,
1030 "__builtin_neon_usi");
1031 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intDI_type_node
,
1032 "__builtin_neon_udi");
1033 (*lang_hooks
.types
.register_builtin_type
) (unsigned_intTI_type_node
,
1034 "__builtin_neon_uti");
1037 /* Set up a builtin. It will use information stored in the argument struct D to
1038 derive the builtin's type signature and name. It will append the name in D
1039 to the PREFIX passed and use these to create a builtin declaration that is
1040 then stored in 'arm_builtin_decls' under index FCODE. This FCODE is also
1041 written back to D for future use. */
1044 arm_init_builtin (unsigned int fcode
, arm_builtin_datum
*d
,
1045 const char * prefix
)
1047 bool print_type_signature_p
= false;
1048 char type_signature
[SIMD_MAX_BUILTIN_ARGS
] = { 0 };
1055 /* We must track two variables here. op_num is
1056 the operand number as in the RTL pattern. This is
1057 required to access the mode (e.g. V4SF mode) of the
1058 argument, from which the base type can be derived.
1059 arg_num is an index in to the qualifiers data, which
1060 gives qualifiers to the type (e.g. const unsigned).
1061 The reason these two variables may differ by one is the
1062 void return type. While all return types take the 0th entry
1063 in the qualifiers array, there is no operand for them in the
1065 int op_num
= insn_data
[d
->code
].n_operands
- 1;
1066 int arg_num
= d
->qualifiers
[0] & qualifier_void
1069 tree return_type
= void_type_node
, args
= void_list_node
;
1072 /* Build a function type directly from the insn_data for this
1073 builtin. The build_function_type () function takes care of
1074 removing duplicates for us. */
1075 for (; op_num
>= 0; arg_num
--, op_num
--)
1077 machine_mode op_mode
= insn_data
[d
->code
].operand
[op_num
].mode
;
1078 enum arm_type_qualifiers qualifiers
= d
->qualifiers
[arg_num
];
1080 if (qualifiers
& qualifier_unsigned
)
1082 type_signature
[arg_num
] = 'u';
1083 print_type_signature_p
= true;
1085 else if (qualifiers
& qualifier_poly
)
1087 type_signature
[arg_num
] = 'p';
1088 print_type_signature_p
= true;
1091 type_signature
[arg_num
] = 's';
1093 /* Skip an internal operand for vget_{low, high}. */
1094 if (qualifiers
& qualifier_internal
)
1097 /* Some builtins have different user-facing types
1098 for certain arguments, encoded in d->mode. */
1099 if (qualifiers
& qualifier_map_mode
)
1102 /* For pointers, we want a pointer to the basic type
1104 if (qualifiers
& qualifier_pointer
&& VECTOR_MODE_P (op_mode
))
1105 op_mode
= GET_MODE_INNER (op_mode
);
1107 /* For void pointers we already have nodes constructed by the midend. */
1108 if (qualifiers
& qualifier_void_pointer
)
1109 eltype
= qualifiers
& qualifier_const
1110 ? const_ptr_type_node
: ptr_type_node
;
1114 = arm_simd_builtin_type (op_mode
,
1115 (qualifiers
& qualifier_unsigned
) != 0,
1116 (qualifiers
& qualifier_poly
) != 0);
1117 gcc_assert (eltype
!= NULL
);
1119 /* Add qualifiers. */
1120 if (qualifiers
& qualifier_const
)
1121 eltype
= build_qualified_type (eltype
, TYPE_QUAL_CONST
);
1123 if (qualifiers
& qualifier_pointer
)
1124 eltype
= build_pointer_type (eltype
);
1126 /* If we have reached arg_num == 0, we are at a non-void
1127 return type. Otherwise, we are still processing
1130 return_type
= eltype
;
1132 args
= tree_cons (NULL_TREE
, eltype
, args
);
1135 ftype
= build_function_type (return_type
, args
);
1137 gcc_assert (ftype
!= NULL
);
1139 if (print_type_signature_p
1140 && IN_RANGE (fcode
, ARM_BUILTIN_VFP_BASE
, ARM_BUILTIN_ACLE_BASE
- 1))
1141 snprintf (namebuf
, sizeof (namebuf
), "%s_%s_%s",
1142 prefix
, d
->name
, type_signature
);
1144 snprintf (namebuf
, sizeof (namebuf
), "%s_%s",
1147 fndecl
= add_builtin_function (namebuf
, ftype
, fcode
, BUILT_IN_MD
,
1149 arm_builtin_decls
[fcode
] = fndecl
;
1152 /* Set up ACLE builtins, even builtins for instructions that are not
1153 in the current target ISA to allow the user to compile particular modules
1154 with different target specific options that differ from the command line
1155 options. Such builtins will be rejected in arm_expand_builtin. */
1158 arm_init_acle_builtins (void)
1160 unsigned int i
, fcode
= ARM_BUILTIN_ACLE_PATTERN_START
;
1162 for (i
= 0; i
< ARRAY_SIZE (acle_builtin_data
); i
++, fcode
++)
1164 arm_builtin_datum
*d
= &acle_builtin_data
[i
];
1165 arm_init_builtin (fcode
, d
, "__builtin_arm");
1169 /* Set up all the NEON builtins, even builtins for instructions that are not
1170 in the current target ISA to allow the user to compile particular modules
1171 with different target specific options that differ from the command line
1172 options. Such builtins will be rejected in arm_expand_builtin. */
1175 arm_init_neon_builtins (void)
1177 unsigned int i
, fcode
= ARM_BUILTIN_NEON_PATTERN_START
;
1179 arm_init_simd_builtin_types ();
1181 /* Strong-typing hasn't been implemented for all AdvSIMD builtin intrinsics.
1182 Therefore we need to preserve the old __builtin scalar types. It can be
1183 removed once all the intrinsics become strongly typed using the qualifier
1185 arm_init_simd_builtin_scalar_types ();
1187 tree lane_check_fpr
= build_function_type_list (void_type_node
,
1191 arm_builtin_decls
[ARM_BUILTIN_NEON_LANE_CHECK
] =
1192 add_builtin_function ("__builtin_arm_lane_check", lane_check_fpr
,
1193 ARM_BUILTIN_NEON_LANE_CHECK
, BUILT_IN_MD
,
1196 for (i
= 0; i
< ARRAY_SIZE (neon_builtin_data
); i
++, fcode
++)
1198 arm_builtin_datum
*d
= &neon_builtin_data
[i
];
1199 arm_init_builtin (fcode
, d
, "__builtin_neon");
1203 /* Set up all the scalar floating point builtins. */
1206 arm_init_vfp_builtins (void)
1208 unsigned int i
, fcode
= ARM_BUILTIN_VFP_PATTERN_START
;
1210 for (i
= 0; i
< ARRAY_SIZE (vfp_builtin_data
); i
++, fcode
++)
1212 arm_builtin_datum
*d
= &vfp_builtin_data
[i
];
1213 arm_init_builtin (fcode
, d
, "__builtin_neon");
1218 arm_init_crypto_builtins (void)
1220 tree V16UQI_type_node
1221 = arm_simd_builtin_type (V16QImode
, true, false);
1223 tree V4USI_type_node
1224 = arm_simd_builtin_type (V4SImode
, true, false);
1226 tree v16uqi_ftype_v16uqi
1227 = build_function_type_list (V16UQI_type_node
, V16UQI_type_node
,
1230 tree v16uqi_ftype_v16uqi_v16uqi
1231 = build_function_type_list (V16UQI_type_node
, V16UQI_type_node
,
1232 V16UQI_type_node
, NULL_TREE
);
1234 tree v4usi_ftype_v4usi
1235 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1238 tree v4usi_ftype_v4usi_v4usi
1239 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1240 V4USI_type_node
, NULL_TREE
);
1242 tree v4usi_ftype_v4usi_v4usi_v4usi
1243 = build_function_type_list (V4USI_type_node
, V4USI_type_node
,
1244 V4USI_type_node
, V4USI_type_node
,
1247 tree uti_ftype_udi_udi
1248 = build_function_type_list (unsigned_intTI_type_node
,
1249 unsigned_intDI_type_node
,
1250 unsigned_intDI_type_node
,
1264 ARM_BUILTIN_CRYPTO_##U
1266 "__builtin_arm_crypto_"#L
1269 #define FT2(R, A1, A2) \
1270 R##_ftype_##A1##_##A2
1271 #define FT3(R, A1, A2, A3) \
1272 R##_ftype_##A1##_##A2##_##A3
1273 #define CRYPTO1(L, U, R, A) \
1274 arm_builtin_decls[C (U)] \
1275 = add_builtin_function (N (L), FT1 (R, A), \
1276 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1277 #define CRYPTO2(L, U, R, A1, A2) \
1278 arm_builtin_decls[C (U)] \
1279 = add_builtin_function (N (L), FT2 (R, A1, A2), \
1280 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1282 #define CRYPTO3(L, U, R, A1, A2, A3) \
1283 arm_builtin_decls[C (U)] \
1284 = add_builtin_function (N (L), FT3 (R, A1, A2, A3), \
1285 C (U), BUILT_IN_MD, NULL, NULL_TREE);
1286 #include "crypto.def"
1298 #undef NUM_DREG_TYPES
1299 #undef NUM_QREG_TYPES
/* Register NAME/TYPE/CODE as a machine-dependent builtin, but only when
   FLAG is isa_nobit (unconditional) or the feature bit is enabled in the
   active target ISA; record the decl in arm_builtin_decls.  */
#define def_mbuiltin(FLAG, NAME, TYPE, CODE)				\
  do									\
    {									\
      if (FLAG == isa_nobit						\
	  || bitmap_bit_p (arm_active_target.isa, FLAG))		\
	{								\
	  tree bdecl;							\
	  bdecl = add_builtin_function ((NAME), (TYPE), (CODE),		\
					BUILT_IN_MD, NULL, NULL_TREE);	\
	  arm_builtin_decls[CODE] = bdecl;				\
	}								\
    }									\
  while (0)
1315 struct builtin_description
1317 const enum isa_feature feature
;
1318 const enum insn_code icode
;
1319 const char * const name
;
1320 const enum arm_builtins code
;
1321 const enum rtx_code comparison
;
1322 const unsigned int flag
;
1325 static const struct builtin_description bdesc_2arg
[] =
1327 #define IWMMXT_BUILTIN(code, string, builtin) \
1328 { isa_bit_iwmmxt, CODE_FOR_##code, \
1329 "__builtin_arm_" string, \
1330 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1332 #define IWMMXT2_BUILTIN(code, string, builtin) \
1333 { isa_bit_iwmmxt2, CODE_FOR_##code, \
1334 "__builtin_arm_" string, \
1335 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1337 IWMMXT_BUILTIN (addv8qi3
, "waddb", WADDB
)
1338 IWMMXT_BUILTIN (addv4hi3
, "waddh", WADDH
)
1339 IWMMXT_BUILTIN (addv2si3
, "waddw", WADDW
)
1340 IWMMXT_BUILTIN (subv8qi3
, "wsubb", WSUBB
)
1341 IWMMXT_BUILTIN (subv4hi3
, "wsubh", WSUBH
)
1342 IWMMXT_BUILTIN (subv2si3
, "wsubw", WSUBW
)
1343 IWMMXT_BUILTIN (ssaddv8qi3
, "waddbss", WADDSSB
)
1344 IWMMXT_BUILTIN (ssaddv4hi3
, "waddhss", WADDSSH
)
1345 IWMMXT_BUILTIN (ssaddv2si3
, "waddwss", WADDSSW
)
1346 IWMMXT_BUILTIN (sssubv8qi3
, "wsubbss", WSUBSSB
)
1347 IWMMXT_BUILTIN (sssubv4hi3
, "wsubhss", WSUBSSH
)
1348 IWMMXT_BUILTIN (sssubv2si3
, "wsubwss", WSUBSSW
)
1349 IWMMXT_BUILTIN (usaddv8qi3
, "waddbus", WADDUSB
)
1350 IWMMXT_BUILTIN (usaddv4hi3
, "waddhus", WADDUSH
)
1351 IWMMXT_BUILTIN (usaddv2si3
, "waddwus", WADDUSW
)
1352 IWMMXT_BUILTIN (ussubv8qi3
, "wsubbus", WSUBUSB
)
1353 IWMMXT_BUILTIN (ussubv4hi3
, "wsubhus", WSUBUSH
)
1354 IWMMXT_BUILTIN (ussubv2si3
, "wsubwus", WSUBUSW
)
1355 IWMMXT_BUILTIN (mulv4hi3
, "wmulul", WMULUL
)
1356 IWMMXT_BUILTIN (smulv4hi3_highpart
, "wmulsm", WMULSM
)
1357 IWMMXT_BUILTIN (umulv4hi3_highpart
, "wmulum", WMULUM
)
1358 IWMMXT_BUILTIN (eqv8qi3
, "wcmpeqb", WCMPEQB
)
1359 IWMMXT_BUILTIN (eqv4hi3
, "wcmpeqh", WCMPEQH
)
1360 IWMMXT_BUILTIN (eqv2si3
, "wcmpeqw", WCMPEQW
)
1361 IWMMXT_BUILTIN (gtuv8qi3
, "wcmpgtub", WCMPGTUB
)
1362 IWMMXT_BUILTIN (gtuv4hi3
, "wcmpgtuh", WCMPGTUH
)
1363 IWMMXT_BUILTIN (gtuv2si3
, "wcmpgtuw", WCMPGTUW
)
1364 IWMMXT_BUILTIN (gtv8qi3
, "wcmpgtsb", WCMPGTSB
)
1365 IWMMXT_BUILTIN (gtv4hi3
, "wcmpgtsh", WCMPGTSH
)
1366 IWMMXT_BUILTIN (gtv2si3
, "wcmpgtsw", WCMPGTSW
)
1367 IWMMXT_BUILTIN (umaxv8qi3
, "wmaxub", WMAXUB
)
1368 IWMMXT_BUILTIN (smaxv8qi3
, "wmaxsb", WMAXSB
)
1369 IWMMXT_BUILTIN (umaxv4hi3
, "wmaxuh", WMAXUH
)
1370 IWMMXT_BUILTIN (smaxv4hi3
, "wmaxsh", WMAXSH
)
1371 IWMMXT_BUILTIN (umaxv2si3
, "wmaxuw", WMAXUW
)
1372 IWMMXT_BUILTIN (smaxv2si3
, "wmaxsw", WMAXSW
)
1373 IWMMXT_BUILTIN (uminv8qi3
, "wminub", WMINUB
)
1374 IWMMXT_BUILTIN (sminv8qi3
, "wminsb", WMINSB
)
1375 IWMMXT_BUILTIN (uminv4hi3
, "wminuh", WMINUH
)
1376 IWMMXT_BUILTIN (sminv4hi3
, "wminsh", WMINSH
)
1377 IWMMXT_BUILTIN (uminv2si3
, "wminuw", WMINUW
)
1378 IWMMXT_BUILTIN (sminv2si3
, "wminsw", WMINSW
)
1379 IWMMXT_BUILTIN (iwmmxt_anddi3
, "wand", WAND
)
1380 IWMMXT_BUILTIN (iwmmxt_nanddi3
, "wandn", WANDN
)
1381 IWMMXT_BUILTIN (iwmmxt_iordi3
, "wor", WOR
)
1382 IWMMXT_BUILTIN (iwmmxt_xordi3
, "wxor", WXOR
)
1383 IWMMXT_BUILTIN (iwmmxt_uavgv8qi3
, "wavg2b", WAVG2B
)
1384 IWMMXT_BUILTIN (iwmmxt_uavgv4hi3
, "wavg2h", WAVG2H
)
1385 IWMMXT_BUILTIN (iwmmxt_uavgrndv8qi3
, "wavg2br", WAVG2BR
)
1386 IWMMXT_BUILTIN (iwmmxt_uavgrndv4hi3
, "wavg2hr", WAVG2HR
)
1387 IWMMXT_BUILTIN (iwmmxt_wunpckilb
, "wunpckilb", WUNPCKILB
)
1388 IWMMXT_BUILTIN (iwmmxt_wunpckilh
, "wunpckilh", WUNPCKILH
)
1389 IWMMXT_BUILTIN (iwmmxt_wunpckilw
, "wunpckilw", WUNPCKILW
)
1390 IWMMXT_BUILTIN (iwmmxt_wunpckihb
, "wunpckihb", WUNPCKIHB
)
1391 IWMMXT_BUILTIN (iwmmxt_wunpckihh
, "wunpckihh", WUNPCKIHH
)
1392 IWMMXT_BUILTIN (iwmmxt_wunpckihw
, "wunpckihw", WUNPCKIHW
)
1393 IWMMXT2_BUILTIN (iwmmxt_waddsubhx
, "waddsubhx", WADDSUBHX
)
1394 IWMMXT2_BUILTIN (iwmmxt_wsubaddhx
, "wsubaddhx", WSUBADDHX
)
1395 IWMMXT2_BUILTIN (iwmmxt_wabsdiffb
, "wabsdiffb", WABSDIFFB
)
1396 IWMMXT2_BUILTIN (iwmmxt_wabsdiffh
, "wabsdiffh", WABSDIFFH
)
1397 IWMMXT2_BUILTIN (iwmmxt_wabsdiffw
, "wabsdiffw", WABSDIFFW
)
1398 IWMMXT2_BUILTIN (iwmmxt_avg4
, "wavg4", WAVG4
)
1399 IWMMXT2_BUILTIN (iwmmxt_avg4r
, "wavg4r", WAVG4R
)
1400 IWMMXT2_BUILTIN (iwmmxt_wmulwsm
, "wmulwsm", WMULWSM
)
1401 IWMMXT2_BUILTIN (iwmmxt_wmulwum
, "wmulwum", WMULWUM
)
1402 IWMMXT2_BUILTIN (iwmmxt_wmulwsmr
, "wmulwsmr", WMULWSMR
)
1403 IWMMXT2_BUILTIN (iwmmxt_wmulwumr
, "wmulwumr", WMULWUMR
)
1404 IWMMXT2_BUILTIN (iwmmxt_wmulwl
, "wmulwl", WMULWL
)
1405 IWMMXT2_BUILTIN (iwmmxt_wmulsmr
, "wmulsmr", WMULSMR
)
1406 IWMMXT2_BUILTIN (iwmmxt_wmulumr
, "wmulumr", WMULUMR
)
1407 IWMMXT2_BUILTIN (iwmmxt_wqmulm
, "wqmulm", WQMULM
)
1408 IWMMXT2_BUILTIN (iwmmxt_wqmulmr
, "wqmulmr", WQMULMR
)
1409 IWMMXT2_BUILTIN (iwmmxt_wqmulwm
, "wqmulwm", WQMULWM
)
1410 IWMMXT2_BUILTIN (iwmmxt_wqmulwmr
, "wqmulwmr", WQMULWMR
)
1411 IWMMXT_BUILTIN (iwmmxt_walignr0
, "walignr0", WALIGNR0
)
1412 IWMMXT_BUILTIN (iwmmxt_walignr1
, "walignr1", WALIGNR1
)
1413 IWMMXT_BUILTIN (iwmmxt_walignr2
, "walignr2", WALIGNR2
)
1414 IWMMXT_BUILTIN (iwmmxt_walignr3
, "walignr3", WALIGNR3
)
1416 #define IWMMXT_BUILTIN2(code, builtin) \
1417 { isa_bit_iwmmxt, CODE_FOR_##code, NULL, \
1418 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1420 #define IWMMXT2_BUILTIN2(code, builtin) \
1421 { isa_bit_iwmmxt2, CODE_FOR_##code, NULL, \
1422 ARM_BUILTIN_##builtin, UNKNOWN, 0 },
1424 IWMMXT2_BUILTIN2 (iwmmxt_waddbhusm
, WADDBHUSM
)
1425 IWMMXT2_BUILTIN2 (iwmmxt_waddbhusl
, WADDBHUSL
)
1426 IWMMXT_BUILTIN2 (iwmmxt_wpackhss
, WPACKHSS
)
1427 IWMMXT_BUILTIN2 (iwmmxt_wpackwss
, WPACKWSS
)
1428 IWMMXT_BUILTIN2 (iwmmxt_wpackdss
, WPACKDSS
)
1429 IWMMXT_BUILTIN2 (iwmmxt_wpackhus
, WPACKHUS
)
1430 IWMMXT_BUILTIN2 (iwmmxt_wpackwus
, WPACKWUS
)
1431 IWMMXT_BUILTIN2 (iwmmxt_wpackdus
, WPACKDUS
)
1432 IWMMXT_BUILTIN2 (iwmmxt_wmacuz
, WMACUZ
)
1433 IWMMXT_BUILTIN2 (iwmmxt_wmacsz
, WMACSZ
)
1436 #define FP_BUILTIN(L, U) \
1437 {isa_nobit, CODE_FOR_##L, "__builtin_arm_"#L, ARM_BUILTIN_##U, \
1440 FP_BUILTIN (get_fpscr
, GET_FPSCR
)
1441 FP_BUILTIN (set_fpscr
, SET_FPSCR
)
1444 #define CRYPTO_BUILTIN(L, U) \
1445 {isa_nobit, CODE_FOR_crypto_##L, "__builtin_arm_crypto_"#L, \
1446 ARM_BUILTIN_CRYPTO_##U, UNKNOWN, 0},
1450 #define CRYPTO2(L, U, R, A1, A2) CRYPTO_BUILTIN (L, U)
1451 #define CRYPTO1(L, U, R, A)
1452 #define CRYPTO3(L, U, R, A1, A2, A3)
1453 #include "crypto.def"
1460 static const struct builtin_description bdesc_1arg
[] =
1462 IWMMXT_BUILTIN (iwmmxt_tmovmskb
, "tmovmskb", TMOVMSKB
)
1463 IWMMXT_BUILTIN (iwmmxt_tmovmskh
, "tmovmskh", TMOVMSKH
)
1464 IWMMXT_BUILTIN (iwmmxt_tmovmskw
, "tmovmskw", TMOVMSKW
)
1465 IWMMXT_BUILTIN (iwmmxt_waccb
, "waccb", WACCB
)
1466 IWMMXT_BUILTIN (iwmmxt_wacch
, "wacch", WACCH
)
1467 IWMMXT_BUILTIN (iwmmxt_waccw
, "waccw", WACCW
)
1468 IWMMXT_BUILTIN (iwmmxt_wunpckehub
, "wunpckehub", WUNPCKEHUB
)
1469 IWMMXT_BUILTIN (iwmmxt_wunpckehuh
, "wunpckehuh", WUNPCKEHUH
)
1470 IWMMXT_BUILTIN (iwmmxt_wunpckehuw
, "wunpckehuw", WUNPCKEHUW
)
1471 IWMMXT_BUILTIN (iwmmxt_wunpckehsb
, "wunpckehsb", WUNPCKEHSB
)
1472 IWMMXT_BUILTIN (iwmmxt_wunpckehsh
, "wunpckehsh", WUNPCKEHSH
)
1473 IWMMXT_BUILTIN (iwmmxt_wunpckehsw
, "wunpckehsw", WUNPCKEHSW
)
1474 IWMMXT_BUILTIN (iwmmxt_wunpckelub
, "wunpckelub", WUNPCKELUB
)
1475 IWMMXT_BUILTIN (iwmmxt_wunpckeluh
, "wunpckeluh", WUNPCKELUH
)
1476 IWMMXT_BUILTIN (iwmmxt_wunpckeluw
, "wunpckeluw", WUNPCKELUW
)
1477 IWMMXT_BUILTIN (iwmmxt_wunpckelsb
, "wunpckelsb", WUNPCKELSB
)
1478 IWMMXT_BUILTIN (iwmmxt_wunpckelsh
, "wunpckelsh", WUNPCKELSH
)
1479 IWMMXT_BUILTIN (iwmmxt_wunpckelsw
, "wunpckelsw", WUNPCKELSW
)
1480 IWMMXT2_BUILTIN (iwmmxt_wabsv8qi3
, "wabsb", WABSB
)
1481 IWMMXT2_BUILTIN (iwmmxt_wabsv4hi3
, "wabsh", WABSH
)
1482 IWMMXT2_BUILTIN (iwmmxt_wabsv2si3
, "wabsw", WABSW
)
1483 IWMMXT_BUILTIN (tbcstv8qi
, "tbcstb", TBCSTB
)
1484 IWMMXT_BUILTIN (tbcstv4hi
, "tbcsth", TBCSTH
)
1485 IWMMXT_BUILTIN (tbcstv2si
, "tbcstw", TBCSTW
)
1487 #define CRYPTO1(L, U, R, A) CRYPTO_BUILTIN (L, U)
1488 #define CRYPTO2(L, U, R, A1, A2)
1489 #define CRYPTO3(L, U, R, A1, A2, A3)
1490 #include "crypto.def"
1496 static const struct builtin_description bdesc_3arg
[] =
1498 #define CRYPTO3(L, U, R, A1, A2, A3) CRYPTO_BUILTIN (L, U)
1499 #define CRYPTO1(L, U, R, A)
1500 #define CRYPTO2(L, U, R, A1, A2)
1501 #include "crypto.def"
1506 #undef CRYPTO_BUILTIN
1508 /* Set up all the iWMMXt builtins. This is not called if
1509 TARGET_IWMMXT is zero. */
1512 arm_init_iwmmxt_builtins (void)
1514 const struct builtin_description
* d
;
1517 tree V2SI_type_node
= build_vector_type_for_mode (intSI_type_node
, V2SImode
);
1518 tree V4HI_type_node
= build_vector_type_for_mode (intHI_type_node
, V4HImode
);
1519 tree V8QI_type_node
= build_vector_type_for_mode (intQI_type_node
, V8QImode
);
1521 tree v8qi_ftype_v8qi_v8qi_int
1522 = build_function_type_list (V8QI_type_node
,
1523 V8QI_type_node
, V8QI_type_node
,
1524 integer_type_node
, NULL_TREE
);
1525 tree v4hi_ftype_v4hi_int
1526 = build_function_type_list (V4HI_type_node
,
1527 V4HI_type_node
, integer_type_node
, NULL_TREE
);
1528 tree v2si_ftype_v2si_int
1529 = build_function_type_list (V2SI_type_node
,
1530 V2SI_type_node
, integer_type_node
, NULL_TREE
);
1531 tree v2si_ftype_di_di
1532 = build_function_type_list (V2SI_type_node
,
1533 long_long_integer_type_node
,
1534 long_long_integer_type_node
,
1536 tree di_ftype_di_int
1537 = build_function_type_list (long_long_integer_type_node
,
1538 long_long_integer_type_node
,
1539 integer_type_node
, NULL_TREE
);
1540 tree di_ftype_di_int_int
1541 = build_function_type_list (long_long_integer_type_node
,
1542 long_long_integer_type_node
,
1544 integer_type_node
, NULL_TREE
);
1546 = build_function_type_list (integer_type_node
,
1547 V8QI_type_node
, NULL_TREE
);
1549 = build_function_type_list (integer_type_node
,
1550 V4HI_type_node
, NULL_TREE
);
1552 = build_function_type_list (integer_type_node
,
1553 V2SI_type_node
, NULL_TREE
);
1554 tree int_ftype_v8qi_int
1555 = build_function_type_list (integer_type_node
,
1556 V8QI_type_node
, integer_type_node
, NULL_TREE
);
1557 tree int_ftype_v4hi_int
1558 = build_function_type_list (integer_type_node
,
1559 V4HI_type_node
, integer_type_node
, NULL_TREE
);
1560 tree int_ftype_v2si_int
1561 = build_function_type_list (integer_type_node
,
1562 V2SI_type_node
, integer_type_node
, NULL_TREE
);
1563 tree v8qi_ftype_v8qi_int_int
1564 = build_function_type_list (V8QI_type_node
,
1565 V8QI_type_node
, integer_type_node
,
1566 integer_type_node
, NULL_TREE
);
1567 tree v4hi_ftype_v4hi_int_int
1568 = build_function_type_list (V4HI_type_node
,
1569 V4HI_type_node
, integer_type_node
,
1570 integer_type_node
, NULL_TREE
);
1571 tree v2si_ftype_v2si_int_int
1572 = build_function_type_list (V2SI_type_node
,
1573 V2SI_type_node
, integer_type_node
,
1574 integer_type_node
, NULL_TREE
);
1575 /* Miscellaneous. */
1576 tree v8qi_ftype_v4hi_v4hi
1577 = build_function_type_list (V8QI_type_node
,
1578 V4HI_type_node
, V4HI_type_node
, NULL_TREE
);
1579 tree v4hi_ftype_v2si_v2si
1580 = build_function_type_list (V4HI_type_node
,
1581 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
1582 tree v8qi_ftype_v4hi_v8qi
1583 = build_function_type_list (V8QI_type_node
,
1584 V4HI_type_node
, V8QI_type_node
, NULL_TREE
);
1585 tree v2si_ftype_v4hi_v4hi
1586 = build_function_type_list (V2SI_type_node
,
1587 V4HI_type_node
, V4HI_type_node
, NULL_TREE
);
1588 tree v2si_ftype_v8qi_v8qi
1589 = build_function_type_list (V2SI_type_node
,
1590 V8QI_type_node
, V8QI_type_node
, NULL_TREE
);
1591 tree v4hi_ftype_v4hi_di
1592 = build_function_type_list (V4HI_type_node
,
1593 V4HI_type_node
, long_long_integer_type_node
,
1595 tree v2si_ftype_v2si_di
1596 = build_function_type_list (V2SI_type_node
,
1597 V2SI_type_node
, long_long_integer_type_node
,
1600 = build_function_type_list (long_long_unsigned_type_node
, NULL_TREE
);
1602 = build_function_type_list (integer_type_node
, NULL_TREE
);
1604 = build_function_type_list (long_long_integer_type_node
,
1605 V8QI_type_node
, NULL_TREE
);
1607 = build_function_type_list (long_long_integer_type_node
,
1608 V4HI_type_node
, NULL_TREE
);
1610 = build_function_type_list (long_long_integer_type_node
,
1611 V2SI_type_node
, NULL_TREE
);
1612 tree v2si_ftype_v4hi
1613 = build_function_type_list (V2SI_type_node
,
1614 V4HI_type_node
, NULL_TREE
);
1615 tree v4hi_ftype_v8qi
1616 = build_function_type_list (V4HI_type_node
,
1617 V8QI_type_node
, NULL_TREE
);
1618 tree v8qi_ftype_v8qi
1619 = build_function_type_list (V8QI_type_node
,
1620 V8QI_type_node
, NULL_TREE
);
1621 tree v4hi_ftype_v4hi
1622 = build_function_type_list (V4HI_type_node
,
1623 V4HI_type_node
, NULL_TREE
);
1624 tree v2si_ftype_v2si
1625 = build_function_type_list (V2SI_type_node
,
1626 V2SI_type_node
, NULL_TREE
);
1628 tree di_ftype_di_v4hi_v4hi
1629 = build_function_type_list (long_long_unsigned_type_node
,
1630 long_long_unsigned_type_node
,
1631 V4HI_type_node
, V4HI_type_node
,
1634 tree di_ftype_v4hi_v4hi
1635 = build_function_type_list (long_long_unsigned_type_node
,
1636 V4HI_type_node
,V4HI_type_node
,
1639 tree v2si_ftype_v2si_v4hi_v4hi
1640 = build_function_type_list (V2SI_type_node
,
1641 V2SI_type_node
, V4HI_type_node
,
1642 V4HI_type_node
, NULL_TREE
);
1644 tree v2si_ftype_v2si_v8qi_v8qi
1645 = build_function_type_list (V2SI_type_node
,
1646 V2SI_type_node
, V8QI_type_node
,
1647 V8QI_type_node
, NULL_TREE
);
1649 tree di_ftype_di_v2si_v2si
1650 = build_function_type_list (long_long_unsigned_type_node
,
1651 long_long_unsigned_type_node
,
1652 V2SI_type_node
, V2SI_type_node
,
1655 tree di_ftype_di_di_int
1656 = build_function_type_list (long_long_unsigned_type_node
,
1657 long_long_unsigned_type_node
,
1658 long_long_unsigned_type_node
,
1659 integer_type_node
, NULL_TREE
);
1662 = build_function_type_list (void_type_node
,
1663 integer_type_node
, NULL_TREE
);
1665 tree v8qi_ftype_char
1666 = build_function_type_list (V8QI_type_node
,
1667 signed_char_type_node
, NULL_TREE
);
1669 tree v4hi_ftype_short
1670 = build_function_type_list (V4HI_type_node
,
1671 short_integer_type_node
, NULL_TREE
);
1674 = build_function_type_list (V2SI_type_node
,
1675 integer_type_node
, NULL_TREE
);
1677 /* Normal vector binops. */
1678 tree v8qi_ftype_v8qi_v8qi
1679 = build_function_type_list (V8QI_type_node
,
1680 V8QI_type_node
, V8QI_type_node
, NULL_TREE
);
1681 tree v4hi_ftype_v4hi_v4hi
1682 = build_function_type_list (V4HI_type_node
,
1683 V4HI_type_node
,V4HI_type_node
, NULL_TREE
);
1684 tree v2si_ftype_v2si_v2si
1685 = build_function_type_list (V2SI_type_node
,
1686 V2SI_type_node
, V2SI_type_node
, NULL_TREE
);
1688 = build_function_type_list (long_long_unsigned_type_node
,
1689 long_long_unsigned_type_node
,
1690 long_long_unsigned_type_node
,
1693 /* Add all builtins that are more or less simple operations on two
1695 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
1697 /* Use one of the operands; the target can have a different mode for
1698 mask-generating compares. */
1703 || !(d
->feature
== isa_bit_iwmmxt
1704 || d
->feature
== isa_bit_iwmmxt2
))
1707 mode
= insn_data
[d
->icode
].operand
[1].mode
;
1712 type
= v8qi_ftype_v8qi_v8qi
;
1715 type
= v4hi_ftype_v4hi_v4hi
;
1718 type
= v2si_ftype_v2si_v2si
;
1721 type
= di_ftype_di_di
;
1728 def_mbuiltin (d
->feature
, d
->name
, type
, d
->code
);
1731 /* Add the remaining MMX insns with somewhat more complicated types. */
1732 #define iwmmx_mbuiltin(NAME, TYPE, CODE) \
1733 def_mbuiltin (isa_bit_iwmmxt, "__builtin_arm_" NAME, \
1734 (TYPE), ARM_BUILTIN_ ## CODE)
1736 #define iwmmx2_mbuiltin(NAME, TYPE, CODE) \
1737 def_mbuiltin (isa_bit_iwmmxt2, "__builtin_arm_" NAME, \
1738 (TYPE), ARM_BUILTIN_ ## CODE)
1740 iwmmx_mbuiltin ("wzero", di_ftype_void
, WZERO
);
1741 iwmmx_mbuiltin ("setwcgr0", void_ftype_int
, SETWCGR0
);
1742 iwmmx_mbuiltin ("setwcgr1", void_ftype_int
, SETWCGR1
);
1743 iwmmx_mbuiltin ("setwcgr2", void_ftype_int
, SETWCGR2
);
1744 iwmmx_mbuiltin ("setwcgr3", void_ftype_int
, SETWCGR3
);
1745 iwmmx_mbuiltin ("getwcgr0", int_ftype_void
, GETWCGR0
);
1746 iwmmx_mbuiltin ("getwcgr1", int_ftype_void
, GETWCGR1
);
1747 iwmmx_mbuiltin ("getwcgr2", int_ftype_void
, GETWCGR2
);
1748 iwmmx_mbuiltin ("getwcgr3", int_ftype_void
, GETWCGR3
);
1750 iwmmx_mbuiltin ("wsllh", v4hi_ftype_v4hi_di
, WSLLH
);
1751 iwmmx_mbuiltin ("wsllw", v2si_ftype_v2si_di
, WSLLW
);
1752 iwmmx_mbuiltin ("wslld", di_ftype_di_di
, WSLLD
);
1753 iwmmx_mbuiltin ("wsllhi", v4hi_ftype_v4hi_int
, WSLLHI
);
1754 iwmmx_mbuiltin ("wsllwi", v2si_ftype_v2si_int
, WSLLWI
);
1755 iwmmx_mbuiltin ("wslldi", di_ftype_di_int
, WSLLDI
);
1757 iwmmx_mbuiltin ("wsrlh", v4hi_ftype_v4hi_di
, WSRLH
);
1758 iwmmx_mbuiltin ("wsrlw", v2si_ftype_v2si_di
, WSRLW
);
1759 iwmmx_mbuiltin ("wsrld", di_ftype_di_di
, WSRLD
);
1760 iwmmx_mbuiltin ("wsrlhi", v4hi_ftype_v4hi_int
, WSRLHI
);
1761 iwmmx_mbuiltin ("wsrlwi", v2si_ftype_v2si_int
, WSRLWI
);
1762 iwmmx_mbuiltin ("wsrldi", di_ftype_di_int
, WSRLDI
);
1764 iwmmx_mbuiltin ("wsrah", v4hi_ftype_v4hi_di
, WSRAH
);
1765 iwmmx_mbuiltin ("wsraw", v2si_ftype_v2si_di
, WSRAW
);
1766 iwmmx_mbuiltin ("wsrad", di_ftype_di_di
, WSRAD
);
1767 iwmmx_mbuiltin ("wsrahi", v4hi_ftype_v4hi_int
, WSRAHI
);
1768 iwmmx_mbuiltin ("wsrawi", v2si_ftype_v2si_int
, WSRAWI
);
1769 iwmmx_mbuiltin ("wsradi", di_ftype_di_int
, WSRADI
);
1771 iwmmx_mbuiltin ("wrorh", v4hi_ftype_v4hi_di
, WRORH
);
1772 iwmmx_mbuiltin ("wrorw", v2si_ftype_v2si_di
, WRORW
);
1773 iwmmx_mbuiltin ("wrord", di_ftype_di_di
, WRORD
);
1774 iwmmx_mbuiltin ("wrorhi", v4hi_ftype_v4hi_int
, WRORHI
);
1775 iwmmx_mbuiltin ("wrorwi", v2si_ftype_v2si_int
, WRORWI
);
1776 iwmmx_mbuiltin ("wrordi", di_ftype_di_int
, WRORDI
);
1778 iwmmx_mbuiltin ("wshufh", v4hi_ftype_v4hi_int
, WSHUFH
);
1780 iwmmx_mbuiltin ("wsadb", v2si_ftype_v2si_v8qi_v8qi
, WSADB
);
1781 iwmmx_mbuiltin ("wsadh", v2si_ftype_v2si_v4hi_v4hi
, WSADH
);
1782 iwmmx_mbuiltin ("wmadds", v2si_ftype_v4hi_v4hi
, WMADDS
);
1783 iwmmx2_mbuiltin ("wmaddsx", v2si_ftype_v4hi_v4hi
, WMADDSX
);
1784 iwmmx2_mbuiltin ("wmaddsn", v2si_ftype_v4hi_v4hi
, WMADDSN
);
1785 iwmmx_mbuiltin ("wmaddu", v2si_ftype_v4hi_v4hi
, WMADDU
);
1786 iwmmx2_mbuiltin ("wmaddux", v2si_ftype_v4hi_v4hi
, WMADDUX
);
1787 iwmmx2_mbuiltin ("wmaddun", v2si_ftype_v4hi_v4hi
, WMADDUN
);
1788 iwmmx_mbuiltin ("wsadbz", v2si_ftype_v8qi_v8qi
, WSADBZ
);
1789 iwmmx_mbuiltin ("wsadhz", v2si_ftype_v4hi_v4hi
, WSADHZ
);
1791 iwmmx_mbuiltin ("textrmsb", int_ftype_v8qi_int
, TEXTRMSB
);
1792 iwmmx_mbuiltin ("textrmsh", int_ftype_v4hi_int
, TEXTRMSH
);
1793 iwmmx_mbuiltin ("textrmsw", int_ftype_v2si_int
, TEXTRMSW
);
1794 iwmmx_mbuiltin ("textrmub", int_ftype_v8qi_int
, TEXTRMUB
);
1795 iwmmx_mbuiltin ("textrmuh", int_ftype_v4hi_int
, TEXTRMUH
);
1796 iwmmx_mbuiltin ("textrmuw", int_ftype_v2si_int
, TEXTRMUW
);
1797 iwmmx_mbuiltin ("tinsrb", v8qi_ftype_v8qi_int_int
, TINSRB
);
1798 iwmmx_mbuiltin ("tinsrh", v4hi_ftype_v4hi_int_int
, TINSRH
);
1799 iwmmx_mbuiltin ("tinsrw", v2si_ftype_v2si_int_int
, TINSRW
);
1801 iwmmx_mbuiltin ("waccb", di_ftype_v8qi
, WACCB
);
1802 iwmmx_mbuiltin ("wacch", di_ftype_v4hi
, WACCH
);
1803 iwmmx_mbuiltin ("waccw", di_ftype_v2si
, WACCW
);
1805 iwmmx_mbuiltin ("tmovmskb", int_ftype_v8qi
, TMOVMSKB
);
1806 iwmmx_mbuiltin ("tmovmskh", int_ftype_v4hi
, TMOVMSKH
);
1807 iwmmx_mbuiltin ("tmovmskw", int_ftype_v2si
, TMOVMSKW
);
1809 iwmmx2_mbuiltin ("waddbhusm", v8qi_ftype_v4hi_v8qi
, WADDBHUSM
);
1810 iwmmx2_mbuiltin ("waddbhusl", v8qi_ftype_v4hi_v8qi
, WADDBHUSL
);
1812 iwmmx_mbuiltin ("wpackhss", v8qi_ftype_v4hi_v4hi
, WPACKHSS
);
1813 iwmmx_mbuiltin ("wpackhus", v8qi_ftype_v4hi_v4hi
, WPACKHUS
);
1814 iwmmx_mbuiltin ("wpackwus", v4hi_ftype_v2si_v2si
, WPACKWUS
);
1815 iwmmx_mbuiltin ("wpackwss", v4hi_ftype_v2si_v2si
, WPACKWSS
);
1816 iwmmx_mbuiltin ("wpackdus", v2si_ftype_di_di
, WPACKDUS
);
1817 iwmmx_mbuiltin ("wpackdss", v2si_ftype_di_di
, WPACKDSS
);
1819 iwmmx_mbuiltin ("wunpckehub", v4hi_ftype_v8qi
, WUNPCKEHUB
);
1820 iwmmx_mbuiltin ("wunpckehuh", v2si_ftype_v4hi
, WUNPCKEHUH
);
1821 iwmmx_mbuiltin ("wunpckehuw", di_ftype_v2si
, WUNPCKEHUW
);
1822 iwmmx_mbuiltin ("wunpckehsb", v4hi_ftype_v8qi
, WUNPCKEHSB
);
1823 iwmmx_mbuiltin ("wunpckehsh", v2si_ftype_v4hi
, WUNPCKEHSH
);
1824 iwmmx_mbuiltin ("wunpckehsw", di_ftype_v2si
, WUNPCKEHSW
);
1825 iwmmx_mbuiltin ("wunpckelub", v4hi_ftype_v8qi
, WUNPCKELUB
);
1826 iwmmx_mbuiltin ("wunpckeluh", v2si_ftype_v4hi
, WUNPCKELUH
);
1827 iwmmx_mbuiltin ("wunpckeluw", di_ftype_v2si
, WUNPCKELUW
);
1828 iwmmx_mbuiltin ("wunpckelsb", v4hi_ftype_v8qi
, WUNPCKELSB
);
1829 iwmmx_mbuiltin ("wunpckelsh", v2si_ftype_v4hi
, WUNPCKELSH
);
1830 iwmmx_mbuiltin ("wunpckelsw", di_ftype_v2si
, WUNPCKELSW
);
1832 iwmmx_mbuiltin ("wmacs", di_ftype_di_v4hi_v4hi
, WMACS
);
1833 iwmmx_mbuiltin ("wmacsz", di_ftype_v4hi_v4hi
, WMACSZ
);
1834 iwmmx_mbuiltin ("wmacu", di_ftype_di_v4hi_v4hi
, WMACU
);
1835 iwmmx_mbuiltin ("wmacuz", di_ftype_v4hi_v4hi
, WMACUZ
);
1837 iwmmx_mbuiltin ("walign", v8qi_ftype_v8qi_v8qi_int
, WALIGNI
);
1838 iwmmx_mbuiltin ("tmia", di_ftype_di_int_int
, TMIA
);
1839 iwmmx_mbuiltin ("tmiaph", di_ftype_di_int_int
, TMIAPH
);
1840 iwmmx_mbuiltin ("tmiabb", di_ftype_di_int_int
, TMIABB
);
1841 iwmmx_mbuiltin ("tmiabt", di_ftype_di_int_int
, TMIABT
);
1842 iwmmx_mbuiltin ("tmiatb", di_ftype_di_int_int
, TMIATB
);
1843 iwmmx_mbuiltin ("tmiatt", di_ftype_di_int_int
, TMIATT
);
1845 iwmmx2_mbuiltin ("wabsb", v8qi_ftype_v8qi
, WABSB
);
1846 iwmmx2_mbuiltin ("wabsh", v4hi_ftype_v4hi
, WABSH
);
1847 iwmmx2_mbuiltin ("wabsw", v2si_ftype_v2si
, WABSW
);
1849 iwmmx2_mbuiltin ("wqmiabb", v2si_ftype_v2si_v4hi_v4hi
, WQMIABB
);
1850 iwmmx2_mbuiltin ("wqmiabt", v2si_ftype_v2si_v4hi_v4hi
, WQMIABT
);
1851 iwmmx2_mbuiltin ("wqmiatb", v2si_ftype_v2si_v4hi_v4hi
, WQMIATB
);
1852 iwmmx2_mbuiltin ("wqmiatt", v2si_ftype_v2si_v4hi_v4hi
, WQMIATT
);
1854 iwmmx2_mbuiltin ("wqmiabbn", v2si_ftype_v2si_v4hi_v4hi
, WQMIABBN
);
1855 iwmmx2_mbuiltin ("wqmiabtn", v2si_ftype_v2si_v4hi_v4hi
, WQMIABTN
);
1856 iwmmx2_mbuiltin ("wqmiatbn", v2si_ftype_v2si_v4hi_v4hi
, WQMIATBN
);
1857 iwmmx2_mbuiltin ("wqmiattn", v2si_ftype_v2si_v4hi_v4hi
, WQMIATTN
);
1859 iwmmx2_mbuiltin ("wmiabb", di_ftype_di_v4hi_v4hi
, WMIABB
);
1860 iwmmx2_mbuiltin ("wmiabt", di_ftype_di_v4hi_v4hi
, WMIABT
);
1861 iwmmx2_mbuiltin ("wmiatb", di_ftype_di_v4hi_v4hi
, WMIATB
);
1862 iwmmx2_mbuiltin ("wmiatt", di_ftype_di_v4hi_v4hi
, WMIATT
);
1864 iwmmx2_mbuiltin ("wmiabbn", di_ftype_di_v4hi_v4hi
, WMIABBN
);
1865 iwmmx2_mbuiltin ("wmiabtn", di_ftype_di_v4hi_v4hi
, WMIABTN
);
1866 iwmmx2_mbuiltin ("wmiatbn", di_ftype_di_v4hi_v4hi
, WMIATBN
);
1867 iwmmx2_mbuiltin ("wmiattn", di_ftype_di_v4hi_v4hi
, WMIATTN
);
1869 iwmmx2_mbuiltin ("wmiawbb", di_ftype_di_v2si_v2si
, WMIAWBB
);
1870 iwmmx2_mbuiltin ("wmiawbt", di_ftype_di_v2si_v2si
, WMIAWBT
);
1871 iwmmx2_mbuiltin ("wmiawtb", di_ftype_di_v2si_v2si
, WMIAWTB
);
1872 iwmmx2_mbuiltin ("wmiawtt", di_ftype_di_v2si_v2si
, WMIAWTT
);
1874 iwmmx2_mbuiltin ("wmiawbbn", di_ftype_di_v2si_v2si
, WMIAWBBN
);
1875 iwmmx2_mbuiltin ("wmiawbtn", di_ftype_di_v2si_v2si
, WMIAWBTN
);
1876 iwmmx2_mbuiltin ("wmiawtbn", di_ftype_di_v2si_v2si
, WMIAWTBN
);
1877 iwmmx2_mbuiltin ("wmiawttn", di_ftype_di_v2si_v2si
, WMIAWTTN
);
1879 iwmmx2_mbuiltin ("wmerge", di_ftype_di_di_int
, WMERGE
);
1881 iwmmx_mbuiltin ("tbcstb", v8qi_ftype_char
, TBCSTB
);
1882 iwmmx_mbuiltin ("tbcsth", v4hi_ftype_short
, TBCSTH
);
1883 iwmmx_mbuiltin ("tbcstw", v2si_ftype_int
, TBCSTW
);
1885 #undef iwmmx_mbuiltin
1886 #undef iwmmx2_mbuiltin
1890 arm_init_fp16_builtins (void)
1892 arm_fp16_type_node
= make_node (REAL_TYPE
);
1893 TYPE_PRECISION (arm_fp16_type_node
) = GET_MODE_PRECISION (HFmode
);
1894 layout_type (arm_fp16_type_node
);
1895 if (arm_fp16_format
)
1896 (*lang_hooks
.types
.register_builtin_type
) (arm_fp16_type_node
,
1901 arm_init_builtins (void)
1903 if (TARGET_REALLY_IWMMXT
)
1904 arm_init_iwmmxt_builtins ();
1906 /* This creates the arm_simd_floatHF_type_node so must come before
1907 arm_init_neon_builtins which uses it. */
1908 arm_init_fp16_builtins ();
1910 if (TARGET_MAYBE_HARD_FLOAT
)
1912 arm_init_neon_builtins ();
1913 arm_init_vfp_builtins ();
1914 arm_init_crypto_builtins ();
1917 arm_init_acle_builtins ();
1919 if (TARGET_MAYBE_HARD_FLOAT
)
1921 tree ftype_set_fpscr
1922 = build_function_type_list (void_type_node
, unsigned_type_node
, NULL
);
1923 tree ftype_get_fpscr
1924 = build_function_type_list (unsigned_type_node
, NULL
);
1926 arm_builtin_decls
[ARM_BUILTIN_GET_FPSCR
]
1927 = add_builtin_function ("__builtin_arm_get_fpscr", ftype_get_fpscr
,
1928 ARM_BUILTIN_GET_FPSCR
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1929 arm_builtin_decls
[ARM_BUILTIN_SET_FPSCR
]
1930 = add_builtin_function ("__builtin_arm_set_fpscr", ftype_set_fpscr
,
1931 ARM_BUILTIN_SET_FPSCR
, BUILT_IN_MD
, NULL
, NULL_TREE
);
1936 tree ftype_cmse_nonsecure_caller
1937 = build_function_type_list (unsigned_type_node
, NULL
);
1938 arm_builtin_decls
[ARM_BUILTIN_CMSE_NONSECURE_CALLER
]
1939 = add_builtin_function ("__builtin_arm_cmse_nonsecure_caller",
1940 ftype_cmse_nonsecure_caller
,
1941 ARM_BUILTIN_CMSE_NONSECURE_CALLER
, BUILT_IN_MD
,
1946 /* Return the ARM builtin for CODE. */
1949 arm_builtin_decl (unsigned code
, bool initialize_p ATTRIBUTE_UNUSED
)
1951 if (code
>= ARM_BUILTIN_MAX
)
1952 return error_mark_node
;
1954 return arm_builtin_decls
[code
];
1957 /* Errors in the source file can cause expand_expr to return const0_rtx
1958 where we expect a vector. To avoid crashing, use one of the vector
1959 clear instructions. */
1962 safe_vector_operand (rtx x
, machine_mode mode
)
1964 if (x
!= const0_rtx
)
1966 x
= gen_reg_rtx (mode
);
1968 emit_insn (gen_iwmmxt_clrdi (mode
== DImode
? x
1969 : gen_rtx_SUBREG (DImode
, x
, 0)));
1973 /* Function to expand ternary builtins. */
1975 arm_expand_ternop_builtin (enum insn_code icode
,
1976 tree exp
, rtx target
)
1979 tree arg0
= CALL_EXPR_ARG (exp
, 0);
1980 tree arg1
= CALL_EXPR_ARG (exp
, 1);
1981 tree arg2
= CALL_EXPR_ARG (exp
, 2);
1983 rtx op0
= expand_normal (arg0
);
1984 rtx op1
= expand_normal (arg1
);
1985 rtx op2
= expand_normal (arg2
);
1988 /* The sha1c, sha1p, sha1m crypto builtins require a different vec_select
1989 lane operand depending on endianness. */
1990 bool builtin_sha1cpm_p
= false;
1992 if (insn_data
[icode
].n_operands
== 5)
1994 gcc_assert (icode
== CODE_FOR_crypto_sha1c
1995 || icode
== CODE_FOR_crypto_sha1p
1996 || icode
== CODE_FOR_crypto_sha1m
);
1997 builtin_sha1cpm_p
= true;
1999 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
2000 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
2001 machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
2002 machine_mode mode2
= insn_data
[icode
].operand
[3].mode
;
2005 if (VECTOR_MODE_P (mode0
))
2006 op0
= safe_vector_operand (op0
, mode0
);
2007 if (VECTOR_MODE_P (mode1
))
2008 op1
= safe_vector_operand (op1
, mode1
);
2009 if (VECTOR_MODE_P (mode2
))
2010 op2
= safe_vector_operand (op2
, mode2
);
2013 || GET_MODE (target
) != tmode
2014 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2015 target
= gen_reg_rtx (tmode
);
2017 gcc_assert ((GET_MODE (op0
) == mode0
|| GET_MODE (op0
) == VOIDmode
)
2018 && (GET_MODE (op1
) == mode1
|| GET_MODE (op1
) == VOIDmode
)
2019 && (GET_MODE (op2
) == mode2
|| GET_MODE (op2
) == VOIDmode
));
2021 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2022 op0
= copy_to_mode_reg (mode0
, op0
);
2023 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2024 op1
= copy_to_mode_reg (mode1
, op1
);
2025 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2026 op2
= copy_to_mode_reg (mode2
, op2
);
2027 if (builtin_sha1cpm_p
)
2028 op3
= GEN_INT (TARGET_BIG_END
? 1 : 0);
2030 if (builtin_sha1cpm_p
)
2031 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
, op3
);
2033 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2040 /* Subroutine of arm_expand_builtin to take care of binop insns. */
2043 arm_expand_binop_builtin (enum insn_code icode
,
2044 tree exp
, rtx target
)
2047 tree arg0
= CALL_EXPR_ARG (exp
, 0);
2048 tree arg1
= CALL_EXPR_ARG (exp
, 1);
2049 rtx op0
= expand_normal (arg0
);
2050 rtx op1
= expand_normal (arg1
);
2051 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
2052 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
2053 machine_mode mode1
= insn_data
[icode
].operand
[2].mode
;
2055 if (VECTOR_MODE_P (mode0
))
2056 op0
= safe_vector_operand (op0
, mode0
);
2057 if (VECTOR_MODE_P (mode1
))
2058 op1
= safe_vector_operand (op1
, mode1
);
2061 || GET_MODE (target
) != tmode
2062 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2063 target
= gen_reg_rtx (tmode
);
2065 gcc_assert ((GET_MODE (op0
) == mode0
|| GET_MODE (op0
) == VOIDmode
)
2066 && (GET_MODE (op1
) == mode1
|| GET_MODE (op1
) == VOIDmode
));
2068 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2069 op0
= copy_to_mode_reg (mode0
, op0
);
2070 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2071 op1
= copy_to_mode_reg (mode1
, op1
);
2073 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2080 /* Subroutine of arm_expand_builtin to take care of unop insns. */
2083 arm_expand_unop_builtin (enum insn_code icode
,
2084 tree exp
, rtx target
, int do_load
)
2087 tree arg0
= CALL_EXPR_ARG (exp
, 0);
2088 rtx op0
= expand_normal (arg0
);
2090 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
2091 machine_mode mode0
= insn_data
[icode
].operand
[1].mode
;
2092 bool builtin_sha1h_p
= false;
2094 if (insn_data
[icode
].n_operands
== 3)
2096 gcc_assert (icode
== CODE_FOR_crypto_sha1h
);
2097 builtin_sha1h_p
= true;
2101 || GET_MODE (target
) != tmode
2102 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2103 target
= gen_reg_rtx (tmode
);
2105 op0
= gen_rtx_MEM (mode0
, copy_to_mode_reg (Pmode
, op0
));
2108 if (VECTOR_MODE_P (mode0
))
2109 op0
= safe_vector_operand (op0
, mode0
);
2111 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2112 op0
= copy_to_mode_reg (mode0
, op0
);
2114 if (builtin_sha1h_p
)
2115 op1
= GEN_INT (TARGET_BIG_END
? 1 : 0);
2117 if (builtin_sha1h_p
)
2118 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2120 pat
= GEN_FCN (icode
) (target
, op0
);
2128 ARG_BUILTIN_COPY_TO_REG
,
2129 ARG_BUILTIN_CONSTANT
,
2130 ARG_BUILTIN_LANE_INDEX
,
2131 ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX
,
2132 ARG_BUILTIN_NEON_MEMORY
,
2138 /* EXP is a pointer argument to a Neon load or store intrinsic. Derive
2139 and return an expression for the accessed memory.
2141 The intrinsic function operates on a block of registers that has
2142 mode REG_MODE. This block contains vectors of type TYPE_MODE. The
2143 function references the memory at EXP of type TYPE and in mode
2144 MEM_MODE; this mode may be BLKmode if no more suitable mode is
2148 neon_dereference_pointer (tree exp
, tree type
, machine_mode mem_mode
,
2149 machine_mode reg_mode
,
2150 machine_mode vector_mode
)
2152 HOST_WIDE_INT reg_size
, vector_size
, nvectors
, nelems
;
2153 tree elem_type
, upper_bound
, array_type
;
2155 /* Work out the size of the register block in bytes. */
2156 reg_size
= GET_MODE_SIZE (reg_mode
);
2158 /* Work out the size of each vector in bytes. */
2159 vector_size
= GET_MODE_SIZE (vector_mode
);
2161 /* Work out how many vectors there are. */
2162 gcc_assert (reg_size
% vector_size
== 0);
2163 nvectors
= reg_size
/ vector_size
;
2165 /* Work out the type of each element. */
2166 gcc_assert (POINTER_TYPE_P (type
));
2167 elem_type
= TREE_TYPE (type
);
2169 /* Work out how many elements are being loaded or stored.
2170 MEM_MODE == REG_MODE implies a one-to-one mapping between register
2171 and memory elements; anything else implies a lane load or store. */
2172 if (mem_mode
== reg_mode
)
2173 nelems
= vector_size
* nvectors
/ int_size_in_bytes (elem_type
);
2177 /* Create a type that describes the full access. */
2178 upper_bound
= build_int_cst (size_type_node
, nelems
- 1);
2179 array_type
= build_array_type (elem_type
, build_index_type (upper_bound
));
2181 /* Dereference EXP using that type. */
2182 return fold_build2 (MEM_REF
, array_type
, exp
,
2183 build_int_cst (build_pointer_type (array_type
), 0));
2186 /* Expand a builtin. */
2188 arm_expand_builtin_args (rtx target
, machine_mode map_mode
, int fcode
,
2189 int icode
, int have_retval
, tree exp
,
2193 tree arg
[SIMD_MAX_BUILTIN_ARGS
];
2194 rtx op
[SIMD_MAX_BUILTIN_ARGS
];
2195 machine_mode tmode
= insn_data
[icode
].operand
[0].mode
;
2196 machine_mode mode
[SIMD_MAX_BUILTIN_ARGS
];
2203 || GET_MODE (target
) != tmode
2204 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
)))
2205 target
= gen_reg_rtx (tmode
);
2207 formals
= TYPE_ARG_TYPES (TREE_TYPE (arm_builtin_decls
[fcode
]));
2211 builtin_arg thisarg
= args
[argc
];
2213 if (thisarg
== ARG_BUILTIN_STOP
)
2217 int opno
= argc
+ have_retval
;
2218 arg
[argc
] = CALL_EXPR_ARG (exp
, argc
);
2219 mode
[argc
] = insn_data
[icode
].operand
[opno
].mode
;
2220 if (thisarg
== ARG_BUILTIN_NEON_MEMORY
)
2222 machine_mode other_mode
2223 = insn_data
[icode
].operand
[1 - opno
].mode
;
2224 arg
[argc
] = neon_dereference_pointer (arg
[argc
],
2225 TREE_VALUE (formals
),
2226 mode
[argc
], other_mode
,
2230 /* Use EXPAND_MEMORY for ARG_BUILTIN_MEMORY and
2231 ARG_BUILTIN_NEON_MEMORY to ensure a MEM_P be returned. */
2232 op
[argc
] = expand_expr (arg
[argc
], NULL_RTX
, VOIDmode
,
2233 ((thisarg
== ARG_BUILTIN_MEMORY
2234 || thisarg
== ARG_BUILTIN_NEON_MEMORY
)
2235 ? EXPAND_MEMORY
: EXPAND_NORMAL
));
2239 case ARG_BUILTIN_MEMORY
:
2240 case ARG_BUILTIN_COPY_TO_REG
:
2241 if (POINTER_TYPE_P (TREE_TYPE (arg
[argc
])))
2242 op
[argc
] = convert_memory_address (Pmode
, op
[argc
]);
2243 /*gcc_assert (GET_MODE (op[argc]) == mode[argc]); */
2244 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2245 (op
[argc
], mode
[argc
]))
2246 op
[argc
] = copy_to_mode_reg (mode
[argc
], op
[argc
]);
2249 case ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX
:
2250 gcc_assert (argc
> 1);
2251 if (CONST_INT_P (op
[argc
]))
2253 neon_lane_bounds (op
[argc
], 0,
2254 GET_MODE_NUNITS (map_mode
), exp
);
2255 /* Keep to GCC-vector-extension lane indices in the RTL. */
2257 GEN_INT (NEON_ENDIAN_LANE_N (map_mode
, INTVAL (op
[argc
])));
2261 case ARG_BUILTIN_LANE_INDEX
:
2262 /* Previous argument must be a vector, which this indexes. */
2263 gcc_assert (argc
> 0);
2264 if (CONST_INT_P (op
[argc
]))
2266 machine_mode vmode
= mode
[argc
- 1];
2267 neon_lane_bounds (op
[argc
], 0, GET_MODE_NUNITS (vmode
), exp
);
2269 /* If the lane index isn't a constant then the next
2272 case ARG_BUILTIN_CONSTANT
:
2274 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2275 (op
[argc
], mode
[argc
]))
2277 error ("%Kargument %d must be a constant immediate",
2279 /* We have failed to expand the pattern, and are safely
2280 in to invalid code. But the mid-end will still try to
2281 build an assignment for this node while it expands,
2282 before stopping for the error, just pass it back
2283 TARGET to ensure a valid assignment. */
2288 case ARG_BUILTIN_NEON_MEMORY
:
2289 /* Check if expand failed. */
2290 if (op
[argc
] == const0_rtx
)
2292 gcc_assert (MEM_P (op
[argc
]));
2293 PUT_MODE (op
[argc
], mode
[argc
]);
2294 /* ??? arm_neon.h uses the same built-in functions for signed
2295 and unsigned accesses, casting where necessary. This isn't
2297 set_mem_alias_set (op
[argc
], 0);
2298 if (!(*insn_data
[icode
].operand
[opno
].predicate
)
2299 (op
[argc
], mode
[argc
]))
2300 op
[argc
] = (replace_equiv_address
2302 copy_to_mode_reg (Pmode
, XEXP (op
[argc
], 0))));
2305 case ARG_BUILTIN_STOP
:
2317 pat
= GEN_FCN (icode
) (target
, op
[0]);
2321 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1]);
2325 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2]);
2329 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3]);
2333 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3], op
[4]);
2337 pat
= GEN_FCN (icode
) (target
, op
[0], op
[1], op
[2], op
[3], op
[4], op
[5]);
2347 pat
= GEN_FCN (icode
) (op
[0]);
2351 pat
= GEN_FCN (icode
) (op
[0], op
[1]);
2355 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2]);
2359 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3]);
2363 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3], op
[4]);
2367 pat
= GEN_FCN (icode
) (op
[0], op
[1], op
[2], op
[3], op
[4], op
[5]);
2377 /* Check whether our current target implements the pattern chosen for this
2378 builtin and error out if not. */
2381 insn
= get_insns ();
2384 if (recog_memoized (insn
) < 0)
2385 error ("this builtin is not supported for this target");
2392 /* Expand a builtin. These builtins are "special" because they don't have
2393 symbolic constants defined per-instruction or per instruction-variant.
2394 Instead, the required info is looked up in the ARM_BUILTIN_DATA record that
2395 is passed into the function. */
2398 arm_expand_builtin_1 (int fcode
, tree exp
, rtx target
,
2399 arm_builtin_datum
*d
)
2401 enum insn_code icode
= d
->code
;
2402 builtin_arg args
[SIMD_MAX_BUILTIN_ARGS
+ 1];
2403 int num_args
= insn_data
[d
->code
].n_operands
;
2408 if (IN_RANGE (fcode
, ARM_BUILTIN_VFP_BASE
, ARM_BUILTIN_ACLE_BASE
- 1))
2411 is_void
= !!(d
->qualifiers
[0] & qualifier_void
);
2413 num_args
+= is_void
;
2415 for (k
= 1; k
< num_args
; k
++)
2417 /* We have four arrays of data, each indexed in a different fashion.
2418 qualifiers - element 0 always describes the function return type.
2419 operands - element 0 is either the operand for return value (if
2420 the function has a non-void return type) or the operand for the
2422 expr_args - element 0 always holds the first argument.
2423 args - element 0 is always used for the return type. */
2424 int qualifiers_k
= k
;
2425 int operands_k
= k
- is_void
;
2426 int expr_args_k
= k
- 1;
2428 if (d
->qualifiers
[qualifiers_k
] & qualifier_lane_index
)
2429 args
[k
] = ARG_BUILTIN_LANE_INDEX
;
2430 else if (d
->qualifiers
[qualifiers_k
] & qualifier_struct_load_store_lane_index
)
2431 args
[k
] = ARG_BUILTIN_STRUCT_LOAD_STORE_LANE_INDEX
;
2432 else if (d
->qualifiers
[qualifiers_k
] & qualifier_immediate
)
2433 args
[k
] = ARG_BUILTIN_CONSTANT
;
2434 else if (d
->qualifiers
[qualifiers_k
] & qualifier_maybe_immediate
)
2437 = expand_normal (CALL_EXPR_ARG (exp
,
2439 /* Handle constants only if the predicate allows it. */
2440 bool op_const_int_p
=
2442 && (*insn_data
[icode
].operand
[operands_k
].predicate
)
2443 (arg
, insn_data
[icode
].operand
[operands_k
].mode
));
2444 args
[k
] = op_const_int_p
? ARG_BUILTIN_CONSTANT
: ARG_BUILTIN_COPY_TO_REG
;
2446 else if (d
->qualifiers
[qualifiers_k
] & qualifier_pointer
)
2449 args
[k
] = ARG_BUILTIN_NEON_MEMORY
;
2451 args
[k
] = ARG_BUILTIN_MEMORY
;
2454 args
[k
] = ARG_BUILTIN_COPY_TO_REG
;
2456 args
[k
] = ARG_BUILTIN_STOP
;
2458 /* The interface to arm_expand_builtin_args expects a 0 if
2459 the function is void, and a 1 if it is not. */
2460 return arm_expand_builtin_args
2461 (target
, d
->mode
, fcode
, icode
, !is_void
, exp
,
2465 /* Expand an ACLE builtin, i.e. those registered only if their respective
2466 target constraints are met. This check happens within
2467 arm_expand_builtin_args. */
2470 arm_expand_acle_builtin (int fcode
, tree exp
, rtx target
)
2473 arm_builtin_datum
*d
2474 = &acle_builtin_data
[fcode
- ARM_BUILTIN_ACLE_PATTERN_START
];
2476 return arm_expand_builtin_1 (fcode
, exp
, target
, d
);
2479 /* Expand a Neon builtin, i.e. those registered only if TARGET_NEON holds.
2480 Most of these are "special" because they don't have symbolic
2481 constants defined per-instruction or per instruction-variant. Instead, the
2482 required info is looked up in the table neon_builtin_data. */
2485 arm_expand_neon_builtin (int fcode
, tree exp
, rtx target
)
2487 if (fcode
>= ARM_BUILTIN_NEON_BASE
&& ! TARGET_NEON
)
2489 fatal_error (input_location
,
2490 "You must enable NEON instructions"
2491 " (e.g. -mfloat-abi=softfp -mfpu=neon)"
2492 " to use these intrinsics.");
2496 if (fcode
== ARM_BUILTIN_NEON_LANE_CHECK
)
2498 /* Builtin is only to check bounds of the lane passed to some intrinsics
2499 that are implemented with gcc vector extensions in arm_neon.h. */
2501 tree nlanes
= CALL_EXPR_ARG (exp
, 0);
2502 gcc_assert (TREE_CODE (nlanes
) == INTEGER_CST
);
2503 rtx lane_idx
= expand_normal (CALL_EXPR_ARG (exp
, 1));
2504 if (CONST_INT_P (lane_idx
))
2505 neon_lane_bounds (lane_idx
, 0, TREE_INT_CST_LOW (nlanes
), exp
);
2507 error ("%Klane index must be a constant immediate", exp
);
2508 /* Don't generate any RTL. */
2512 arm_builtin_datum
*d
2513 = &neon_builtin_data
[fcode
- ARM_BUILTIN_NEON_PATTERN_START
];
2515 return arm_expand_builtin_1 (fcode
, exp
, target
, d
);
2518 /* Expand a VFP builtin. These builtins are treated like
2519 neon builtins except that the data is looked up in table
2520 VFP_BUILTIN_DATA. */
2523 arm_expand_vfp_builtin (int fcode
, tree exp
, rtx target
)
2525 if (fcode
>= ARM_BUILTIN_VFP_BASE
&& ! TARGET_HARD_FLOAT
)
2527 fatal_error (input_location
,
2528 "You must enable VFP instructions"
2529 " to use these intrinsics.");
2533 arm_builtin_datum
*d
2534 = &vfp_builtin_data
[fcode
- ARM_BUILTIN_VFP_PATTERN_START
];
2536 return arm_expand_builtin_1 (fcode
, exp
, target
, d
);
2539 /* Expand an expression EXP that calls a built-in function,
2540 with result going to TARGET if that's convenient
2541 (and in mode MODE if that's convenient).
2542 SUBTARGET may be used as the target for computing one of EXP's operands.
2543 IGNORE is nonzero if the value is to be ignored. */
2546 arm_expand_builtin (tree exp
,
2548 rtx subtarget ATTRIBUTE_UNUSED
,
2549 machine_mode mode ATTRIBUTE_UNUSED
,
2550 int ignore ATTRIBUTE_UNUSED
)
2552 const struct builtin_description
* d
;
2553 enum insn_code icode
;
2554 tree fndecl
= TREE_OPERAND (CALL_EXPR_FN (exp
), 0);
2562 unsigned int fcode
= DECL_FUNCTION_CODE (fndecl
);
2573 if (fcode
>= ARM_BUILTIN_ACLE_BASE
)
2574 return arm_expand_acle_builtin (fcode
, exp
, target
);
2576 if (fcode
>= ARM_BUILTIN_NEON_BASE
)
2577 return arm_expand_neon_builtin (fcode
, exp
, target
);
2579 if (fcode
>= ARM_BUILTIN_VFP_BASE
)
2580 return arm_expand_vfp_builtin (fcode
, exp
, target
);
2582 /* Check in the context of the function making the call whether the
2583 builtin is supported. */
2584 if (fcode
>= ARM_BUILTIN_CRYPTO_BASE
2585 && (!TARGET_CRYPTO
|| !TARGET_HARD_FLOAT
))
2587 fatal_error (input_location
,
2588 "You must enable crypto instructions"
2589 " (e.g. include -mfloat-abi=softfp -mfpu=crypto-neon...)"
2590 " to use these intrinsics.");
2596 case ARM_BUILTIN_GET_FPSCR
:
2597 case ARM_BUILTIN_SET_FPSCR
:
2598 if (fcode
== ARM_BUILTIN_GET_FPSCR
)
2600 icode
= CODE_FOR_get_fpscr
;
2601 target
= gen_reg_rtx (SImode
);
2602 pat
= GEN_FCN (icode
) (target
);
2607 icode
= CODE_FOR_set_fpscr
;
2608 arg0
= CALL_EXPR_ARG (exp
, 0);
2609 op0
= expand_normal (arg0
);
2610 pat
= GEN_FCN (icode
) (force_reg (SImode
, op0
));
2615 case ARM_BUILTIN_CMSE_NONSECURE_CALLER
:
2616 target
= gen_reg_rtx (SImode
);
2617 op0
= arm_return_addr (0, NULL_RTX
);
2618 emit_insn (gen_andsi3 (target
, op0
, const1_rtx
));
2619 op1
= gen_rtx_EQ (SImode
, target
, const0_rtx
);
2620 emit_insn (gen_cstoresi4 (target
, op1
, target
, const0_rtx
));
2623 case ARM_BUILTIN_TEXTRMSB
:
2624 case ARM_BUILTIN_TEXTRMUB
:
2625 case ARM_BUILTIN_TEXTRMSH
:
2626 case ARM_BUILTIN_TEXTRMUH
:
2627 case ARM_BUILTIN_TEXTRMSW
:
2628 case ARM_BUILTIN_TEXTRMUW
:
2629 icode
= (fcode
== ARM_BUILTIN_TEXTRMSB
? CODE_FOR_iwmmxt_textrmsb
2630 : fcode
== ARM_BUILTIN_TEXTRMUB
? CODE_FOR_iwmmxt_textrmub
2631 : fcode
== ARM_BUILTIN_TEXTRMSH
? CODE_FOR_iwmmxt_textrmsh
2632 : fcode
== ARM_BUILTIN_TEXTRMUH
? CODE_FOR_iwmmxt_textrmuh
2633 : CODE_FOR_iwmmxt_textrmw
);
2635 arg0
= CALL_EXPR_ARG (exp
, 0);
2636 arg1
= CALL_EXPR_ARG (exp
, 1);
2637 op0
= expand_normal (arg0
);
2638 op1
= expand_normal (arg1
);
2639 tmode
= insn_data
[icode
].operand
[0].mode
;
2640 mode0
= insn_data
[icode
].operand
[1].mode
;
2641 mode1
= insn_data
[icode
].operand
[2].mode
;
2643 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2644 op0
= copy_to_mode_reg (mode0
, op0
);
2645 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2647 /* @@@ better error message */
2648 error ("selector must be an immediate");
2649 return gen_reg_rtx (tmode
);
2652 opint
= INTVAL (op1
);
2653 if (fcode
== ARM_BUILTIN_TEXTRMSB
|| fcode
== ARM_BUILTIN_TEXTRMUB
)
2655 if (opint
> 7 || opint
< 0)
2656 error ("the range of selector should be in 0 to 7");
2658 else if (fcode
== ARM_BUILTIN_TEXTRMSH
|| fcode
== ARM_BUILTIN_TEXTRMUH
)
2660 if (opint
> 3 || opint
< 0)
2661 error ("the range of selector should be in 0 to 3");
2663 else /* ARM_BUILTIN_TEXTRMSW || ARM_BUILTIN_TEXTRMUW. */
2665 if (opint
> 1 || opint
< 0)
2666 error ("the range of selector should be in 0 to 1");
2670 || GET_MODE (target
) != tmode
2671 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2672 target
= gen_reg_rtx (tmode
);
2673 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2679 case ARM_BUILTIN_WALIGNI
:
2680 /* If op2 is immediate, call walighi, else call walighr. */
2681 arg0
= CALL_EXPR_ARG (exp
, 0);
2682 arg1
= CALL_EXPR_ARG (exp
, 1);
2683 arg2
= CALL_EXPR_ARG (exp
, 2);
2684 op0
= expand_normal (arg0
);
2685 op1
= expand_normal (arg1
);
2686 op2
= expand_normal (arg2
);
2687 if (CONST_INT_P (op2
))
2689 icode
= CODE_FOR_iwmmxt_waligni
;
2690 tmode
= insn_data
[icode
].operand
[0].mode
;
2691 mode0
= insn_data
[icode
].operand
[1].mode
;
2692 mode1
= insn_data
[icode
].operand
[2].mode
;
2693 mode2
= insn_data
[icode
].operand
[3].mode
;
2694 if (!(*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2695 op0
= copy_to_mode_reg (mode0
, op0
);
2696 if (!(*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2697 op1
= copy_to_mode_reg (mode1
, op1
);
2698 gcc_assert ((*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
));
2699 selector
= INTVAL (op2
);
2700 if (selector
> 7 || selector
< 0)
2701 error ("the range of selector should be in 0 to 7");
2705 icode
= CODE_FOR_iwmmxt_walignr
;
2706 tmode
= insn_data
[icode
].operand
[0].mode
;
2707 mode0
= insn_data
[icode
].operand
[1].mode
;
2708 mode1
= insn_data
[icode
].operand
[2].mode
;
2709 mode2
= insn_data
[icode
].operand
[3].mode
;
2710 if (!(*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2711 op0
= copy_to_mode_reg (mode0
, op0
);
2712 if (!(*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2713 op1
= copy_to_mode_reg (mode1
, op1
);
2714 if (!(*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2715 op2
= copy_to_mode_reg (mode2
, op2
);
2718 || GET_MODE (target
) != tmode
2719 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2720 target
= gen_reg_rtx (tmode
);
2721 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2727 case ARM_BUILTIN_TINSRB
:
2728 case ARM_BUILTIN_TINSRH
:
2729 case ARM_BUILTIN_TINSRW
:
2730 case ARM_BUILTIN_WMERGE
:
2731 icode
= (fcode
== ARM_BUILTIN_TINSRB
? CODE_FOR_iwmmxt_tinsrb
2732 : fcode
== ARM_BUILTIN_TINSRH
? CODE_FOR_iwmmxt_tinsrh
2733 : fcode
== ARM_BUILTIN_WMERGE
? CODE_FOR_iwmmxt_wmerge
2734 : CODE_FOR_iwmmxt_tinsrw
);
2735 arg0
= CALL_EXPR_ARG (exp
, 0);
2736 arg1
= CALL_EXPR_ARG (exp
, 1);
2737 arg2
= CALL_EXPR_ARG (exp
, 2);
2738 op0
= expand_normal (arg0
);
2739 op1
= expand_normal (arg1
);
2740 op2
= expand_normal (arg2
);
2741 tmode
= insn_data
[icode
].operand
[0].mode
;
2742 mode0
= insn_data
[icode
].operand
[1].mode
;
2743 mode1
= insn_data
[icode
].operand
[2].mode
;
2744 mode2
= insn_data
[icode
].operand
[3].mode
;
2746 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2747 op0
= copy_to_mode_reg (mode0
, op0
);
2748 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2749 op1
= copy_to_mode_reg (mode1
, op1
);
2750 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2752 error ("selector must be an immediate");
2755 if (icode
== CODE_FOR_iwmmxt_wmerge
)
2757 selector
= INTVAL (op2
);
2758 if (selector
> 7 || selector
< 0)
2759 error ("the range of selector should be in 0 to 7");
2761 if ((icode
== CODE_FOR_iwmmxt_tinsrb
)
2762 || (icode
== CODE_FOR_iwmmxt_tinsrh
)
2763 || (icode
== CODE_FOR_iwmmxt_tinsrw
))
2766 selector
= INTVAL (op2
);
2767 if (icode
== CODE_FOR_iwmmxt_tinsrb
&& (selector
< 0 || selector
> 7))
2768 error ("the range of selector should be in 0 to 7");
2769 else if (icode
== CODE_FOR_iwmmxt_tinsrh
&& (selector
< 0 ||selector
> 3))
2770 error ("the range of selector should be in 0 to 3");
2771 else if (icode
== CODE_FOR_iwmmxt_tinsrw
&& (selector
< 0 ||selector
> 1))
2772 error ("the range of selector should be in 0 to 1");
2774 op2
= GEN_INT (mask
);
2777 || GET_MODE (target
) != tmode
2778 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2779 target
= gen_reg_rtx (tmode
);
2780 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2786 case ARM_BUILTIN_SETWCGR0
:
2787 case ARM_BUILTIN_SETWCGR1
:
2788 case ARM_BUILTIN_SETWCGR2
:
2789 case ARM_BUILTIN_SETWCGR3
:
2790 icode
= (fcode
== ARM_BUILTIN_SETWCGR0
? CODE_FOR_iwmmxt_setwcgr0
2791 : fcode
== ARM_BUILTIN_SETWCGR1
? CODE_FOR_iwmmxt_setwcgr1
2792 : fcode
== ARM_BUILTIN_SETWCGR2
? CODE_FOR_iwmmxt_setwcgr2
2793 : CODE_FOR_iwmmxt_setwcgr3
);
2794 arg0
= CALL_EXPR_ARG (exp
, 0);
2795 op0
= expand_normal (arg0
);
2796 mode0
= insn_data
[icode
].operand
[0].mode
;
2797 if (!(*insn_data
[icode
].operand
[0].predicate
) (op0
, mode0
))
2798 op0
= copy_to_mode_reg (mode0
, op0
);
2799 pat
= GEN_FCN (icode
) (op0
);
2805 case ARM_BUILTIN_GETWCGR0
:
2806 case ARM_BUILTIN_GETWCGR1
:
2807 case ARM_BUILTIN_GETWCGR2
:
2808 case ARM_BUILTIN_GETWCGR3
:
2809 icode
= (fcode
== ARM_BUILTIN_GETWCGR0
? CODE_FOR_iwmmxt_getwcgr0
2810 : fcode
== ARM_BUILTIN_GETWCGR1
? CODE_FOR_iwmmxt_getwcgr1
2811 : fcode
== ARM_BUILTIN_GETWCGR2
? CODE_FOR_iwmmxt_getwcgr2
2812 : CODE_FOR_iwmmxt_getwcgr3
);
2813 tmode
= insn_data
[icode
].operand
[0].mode
;
2815 || GET_MODE (target
) != tmode
2816 || !(*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2817 target
= gen_reg_rtx (tmode
);
2818 pat
= GEN_FCN (icode
) (target
);
2824 case ARM_BUILTIN_WSHUFH
:
2825 icode
= CODE_FOR_iwmmxt_wshufh
;
2826 arg0
= CALL_EXPR_ARG (exp
, 0);
2827 arg1
= CALL_EXPR_ARG (exp
, 1);
2828 op0
= expand_normal (arg0
);
2829 op1
= expand_normal (arg1
);
2830 tmode
= insn_data
[icode
].operand
[0].mode
;
2831 mode1
= insn_data
[icode
].operand
[1].mode
;
2832 mode2
= insn_data
[icode
].operand
[2].mode
;
2834 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode1
))
2835 op0
= copy_to_mode_reg (mode1
, op0
);
2836 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode2
))
2838 error ("mask must be an immediate");
2841 selector
= INTVAL (op1
);
2842 if (selector
< 0 || selector
> 255)
2843 error ("the range of mask should be in 0 to 255");
2845 || GET_MODE (target
) != tmode
2846 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2847 target
= gen_reg_rtx (tmode
);
2848 pat
= GEN_FCN (icode
) (target
, op0
, op1
);
2854 case ARM_BUILTIN_WMADDS
:
2855 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmadds
, exp
, target
);
2856 case ARM_BUILTIN_WMADDSX
:
2857 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsx
, exp
, target
);
2858 case ARM_BUILTIN_WMADDSN
:
2859 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddsn
, exp
, target
);
2860 case ARM_BUILTIN_WMADDU
:
2861 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddu
, exp
, target
);
2862 case ARM_BUILTIN_WMADDUX
:
2863 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddux
, exp
, target
);
2864 case ARM_BUILTIN_WMADDUN
:
2865 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wmaddun
, exp
, target
);
2866 case ARM_BUILTIN_WSADBZ
:
2867 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadbz
, exp
, target
);
2868 case ARM_BUILTIN_WSADHZ
:
2869 return arm_expand_binop_builtin (CODE_FOR_iwmmxt_wsadhz
, exp
, target
);
2871 /* Several three-argument builtins. */
2872 case ARM_BUILTIN_WMACS
:
2873 case ARM_BUILTIN_WMACU
:
2874 case ARM_BUILTIN_TMIA
:
2875 case ARM_BUILTIN_TMIAPH
:
2876 case ARM_BUILTIN_TMIATT
:
2877 case ARM_BUILTIN_TMIATB
:
2878 case ARM_BUILTIN_TMIABT
:
2879 case ARM_BUILTIN_TMIABB
:
2880 case ARM_BUILTIN_WQMIABB
:
2881 case ARM_BUILTIN_WQMIABT
:
2882 case ARM_BUILTIN_WQMIATB
:
2883 case ARM_BUILTIN_WQMIATT
:
2884 case ARM_BUILTIN_WQMIABBN
:
2885 case ARM_BUILTIN_WQMIABTN
:
2886 case ARM_BUILTIN_WQMIATBN
:
2887 case ARM_BUILTIN_WQMIATTN
:
2888 case ARM_BUILTIN_WMIABB
:
2889 case ARM_BUILTIN_WMIABT
:
2890 case ARM_BUILTIN_WMIATB
:
2891 case ARM_BUILTIN_WMIATT
:
2892 case ARM_BUILTIN_WMIABBN
:
2893 case ARM_BUILTIN_WMIABTN
:
2894 case ARM_BUILTIN_WMIATBN
:
2895 case ARM_BUILTIN_WMIATTN
:
2896 case ARM_BUILTIN_WMIAWBB
:
2897 case ARM_BUILTIN_WMIAWBT
:
2898 case ARM_BUILTIN_WMIAWTB
:
2899 case ARM_BUILTIN_WMIAWTT
:
2900 case ARM_BUILTIN_WMIAWBBN
:
2901 case ARM_BUILTIN_WMIAWBTN
:
2902 case ARM_BUILTIN_WMIAWTBN
:
2903 case ARM_BUILTIN_WMIAWTTN
:
2904 case ARM_BUILTIN_WSADB
:
2905 case ARM_BUILTIN_WSADH
:
2906 icode
= (fcode
== ARM_BUILTIN_WMACS
? CODE_FOR_iwmmxt_wmacs
2907 : fcode
== ARM_BUILTIN_WMACU
? CODE_FOR_iwmmxt_wmacu
2908 : fcode
== ARM_BUILTIN_TMIA
? CODE_FOR_iwmmxt_tmia
2909 : fcode
== ARM_BUILTIN_TMIAPH
? CODE_FOR_iwmmxt_tmiaph
2910 : fcode
== ARM_BUILTIN_TMIABB
? CODE_FOR_iwmmxt_tmiabb
2911 : fcode
== ARM_BUILTIN_TMIABT
? CODE_FOR_iwmmxt_tmiabt
2912 : fcode
== ARM_BUILTIN_TMIATB
? CODE_FOR_iwmmxt_tmiatb
2913 : fcode
== ARM_BUILTIN_TMIATT
? CODE_FOR_iwmmxt_tmiatt
2914 : fcode
== ARM_BUILTIN_WQMIABB
? CODE_FOR_iwmmxt_wqmiabb
2915 : fcode
== ARM_BUILTIN_WQMIABT
? CODE_FOR_iwmmxt_wqmiabt
2916 : fcode
== ARM_BUILTIN_WQMIATB
? CODE_FOR_iwmmxt_wqmiatb
2917 : fcode
== ARM_BUILTIN_WQMIATT
? CODE_FOR_iwmmxt_wqmiatt
2918 : fcode
== ARM_BUILTIN_WQMIABBN
? CODE_FOR_iwmmxt_wqmiabbn
2919 : fcode
== ARM_BUILTIN_WQMIABTN
? CODE_FOR_iwmmxt_wqmiabtn
2920 : fcode
== ARM_BUILTIN_WQMIATBN
? CODE_FOR_iwmmxt_wqmiatbn
2921 : fcode
== ARM_BUILTIN_WQMIATTN
? CODE_FOR_iwmmxt_wqmiattn
2922 : fcode
== ARM_BUILTIN_WMIABB
? CODE_FOR_iwmmxt_wmiabb
2923 : fcode
== ARM_BUILTIN_WMIABT
? CODE_FOR_iwmmxt_wmiabt
2924 : fcode
== ARM_BUILTIN_WMIATB
? CODE_FOR_iwmmxt_wmiatb
2925 : fcode
== ARM_BUILTIN_WMIATT
? CODE_FOR_iwmmxt_wmiatt
2926 : fcode
== ARM_BUILTIN_WMIABBN
? CODE_FOR_iwmmxt_wmiabbn
2927 : fcode
== ARM_BUILTIN_WMIABTN
? CODE_FOR_iwmmxt_wmiabtn
2928 : fcode
== ARM_BUILTIN_WMIATBN
? CODE_FOR_iwmmxt_wmiatbn
2929 : fcode
== ARM_BUILTIN_WMIATTN
? CODE_FOR_iwmmxt_wmiattn
2930 : fcode
== ARM_BUILTIN_WMIAWBB
? CODE_FOR_iwmmxt_wmiawbb
2931 : fcode
== ARM_BUILTIN_WMIAWBT
? CODE_FOR_iwmmxt_wmiawbt
2932 : fcode
== ARM_BUILTIN_WMIAWTB
? CODE_FOR_iwmmxt_wmiawtb
2933 : fcode
== ARM_BUILTIN_WMIAWTT
? CODE_FOR_iwmmxt_wmiawtt
2934 : fcode
== ARM_BUILTIN_WMIAWBBN
? CODE_FOR_iwmmxt_wmiawbbn
2935 : fcode
== ARM_BUILTIN_WMIAWBTN
? CODE_FOR_iwmmxt_wmiawbtn
2936 : fcode
== ARM_BUILTIN_WMIAWTBN
? CODE_FOR_iwmmxt_wmiawtbn
2937 : fcode
== ARM_BUILTIN_WMIAWTTN
? CODE_FOR_iwmmxt_wmiawttn
2938 : fcode
== ARM_BUILTIN_WSADB
? CODE_FOR_iwmmxt_wsadb
2939 : CODE_FOR_iwmmxt_wsadh
);
2940 arg0
= CALL_EXPR_ARG (exp
, 0);
2941 arg1
= CALL_EXPR_ARG (exp
, 1);
2942 arg2
= CALL_EXPR_ARG (exp
, 2);
2943 op0
= expand_normal (arg0
);
2944 op1
= expand_normal (arg1
);
2945 op2
= expand_normal (arg2
);
2946 tmode
= insn_data
[icode
].operand
[0].mode
;
2947 mode0
= insn_data
[icode
].operand
[1].mode
;
2948 mode1
= insn_data
[icode
].operand
[2].mode
;
2949 mode2
= insn_data
[icode
].operand
[3].mode
;
2951 if (! (*insn_data
[icode
].operand
[1].predicate
) (op0
, mode0
))
2952 op0
= copy_to_mode_reg (mode0
, op0
);
2953 if (! (*insn_data
[icode
].operand
[2].predicate
) (op1
, mode1
))
2954 op1
= copy_to_mode_reg (mode1
, op1
);
2955 if (! (*insn_data
[icode
].operand
[3].predicate
) (op2
, mode2
))
2956 op2
= copy_to_mode_reg (mode2
, op2
);
2958 || GET_MODE (target
) != tmode
2959 || ! (*insn_data
[icode
].operand
[0].predicate
) (target
, tmode
))
2960 target
= gen_reg_rtx (tmode
);
2961 pat
= GEN_FCN (icode
) (target
, op0
, op1
, op2
);
2967 case ARM_BUILTIN_WZERO
:
2968 target
= gen_reg_rtx (DImode
);
2969 emit_insn (gen_iwmmxt_clrdi (target
));
2972 case ARM_BUILTIN_WSRLHI
:
2973 case ARM_BUILTIN_WSRLWI
:
2974 case ARM_BUILTIN_WSRLDI
:
2975 case ARM_BUILTIN_WSLLHI
:
2976 case ARM_BUILTIN_WSLLWI
:
2977 case ARM_BUILTIN_WSLLDI
:
2978 case ARM_BUILTIN_WSRAHI
:
2979 case ARM_BUILTIN_WSRAWI
:
2980 case ARM_BUILTIN_WSRADI
:
2981 case ARM_BUILTIN_WRORHI
:
2982 case ARM_BUILTIN_WRORWI
:
2983 case ARM_BUILTIN_WRORDI
:
2984 case ARM_BUILTIN_WSRLH
:
2985 case ARM_BUILTIN_WSRLW
:
2986 case ARM_BUILTIN_WSRLD
:
2987 case ARM_BUILTIN_WSLLH
:
2988 case ARM_BUILTIN_WSLLW
:
2989 case ARM_BUILTIN_WSLLD
:
2990 case ARM_BUILTIN_WSRAH
:
2991 case ARM_BUILTIN_WSRAW
:
2992 case ARM_BUILTIN_WSRAD
:
2993 case ARM_BUILTIN_WRORH
:
2994 case ARM_BUILTIN_WRORW
:
2995 case ARM_BUILTIN_WRORD
:
2996 icode
= (fcode
== ARM_BUILTIN_WSRLHI
? CODE_FOR_lshrv4hi3_iwmmxt
2997 : fcode
== ARM_BUILTIN_WSRLWI
? CODE_FOR_lshrv2si3_iwmmxt
2998 : fcode
== ARM_BUILTIN_WSRLDI
? CODE_FOR_lshrdi3_iwmmxt
2999 : fcode
== ARM_BUILTIN_WSLLHI
? CODE_FOR_ashlv4hi3_iwmmxt
3000 : fcode
== ARM_BUILTIN_WSLLWI
? CODE_FOR_ashlv2si3_iwmmxt
3001 : fcode
== ARM_BUILTIN_WSLLDI
? CODE_FOR_ashldi3_iwmmxt
3002 : fcode
== ARM_BUILTIN_WSRAHI
? CODE_FOR_ashrv4hi3_iwmmxt
3003 : fcode
== ARM_BUILTIN_WSRAWI
? CODE_FOR_ashrv2si3_iwmmxt
3004 : fcode
== ARM_BUILTIN_WSRADI
? CODE_FOR_ashrdi3_iwmmxt
3005 : fcode
== ARM_BUILTIN_WRORHI
? CODE_FOR_rorv4hi3
3006 : fcode
== ARM_BUILTIN_WRORWI
? CODE_FOR_rorv2si3
3007 : fcode
== ARM_BUILTIN_WRORDI
? CODE_FOR_rordi3
3008 : fcode
== ARM_BUILTIN_WSRLH
? CODE_FOR_lshrv4hi3_di
3009 : fcode
== ARM_BUILTIN_WSRLW
? CODE_FOR_lshrv2si3_di
3010 : fcode
== ARM_BUILTIN_WSRLD
? CODE_FOR_lshrdi3_di
3011 : fcode
== ARM_BUILTIN_WSLLH
? CODE_FOR_ashlv4hi3_di
3012 : fcode
== ARM_BUILTIN_WSLLW
? CODE_FOR_ashlv2si3_di
3013 : fcode
== ARM_BUILTIN_WSLLD
? CODE_FOR_ashldi3_di
3014 : fcode
== ARM_BUILTIN_WSRAH
? CODE_FOR_ashrv4hi3_di
3015 : fcode
== ARM_BUILTIN_WSRAW
? CODE_FOR_ashrv2si3_di
3016 : fcode
== ARM_BUILTIN_WSRAD
? CODE_FOR_ashrdi3_di
3017 : fcode
== ARM_BUILTIN_WRORH
? CODE_FOR_rorv4hi3_di
3018 : fcode
== ARM_BUILTIN_WRORW
? CODE_FOR_rorv2si3_di
3019 : fcode
== ARM_BUILTIN_WRORD
? CODE_FOR_rordi3_di
3020 : CODE_FOR_nothing
);
3021 arg1
= CALL_EXPR_ARG (exp
, 1);
3022 op1
= expand_normal (arg1
);
3023 if (GET_MODE (op1
) == VOIDmode
)
3026 if ((fcode
== ARM_BUILTIN_WRORHI
|| fcode
== ARM_BUILTIN_WRORWI
3027 || fcode
== ARM_BUILTIN_WRORH
|| fcode
== ARM_BUILTIN_WRORW
)
3028 && (imm
< 0 || imm
> 32))
3030 if (fcode
== ARM_BUILTIN_WRORHI
)
3031 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_rori_pi16 in code.");
3032 else if (fcode
== ARM_BUILTIN_WRORWI
)
3033 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_rori_pi32 in code.");
3034 else if (fcode
== ARM_BUILTIN_WRORH
)
3035 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_ror_pi16 in code.");
3037 error ("the range of count should be in 0 to 32. please check the intrinsic _mm_ror_pi32 in code.");
3039 else if ((fcode
== ARM_BUILTIN_WRORDI
|| fcode
== ARM_BUILTIN_WRORD
)
3040 && (imm
< 0 || imm
> 64))
3042 if (fcode
== ARM_BUILTIN_WRORDI
)
3043 error ("the range of count should be in 0 to 64. please check the intrinsic _mm_rori_si64 in code.");
3045 error ("the range of count should be in 0 to 64. please check the intrinsic _mm_ror_si64 in code.");
3049 if (fcode
== ARM_BUILTIN_WSRLHI
)
3050 error ("the count should be no less than 0. please check the intrinsic _mm_srli_pi16 in code.");
3051 else if (fcode
== ARM_BUILTIN_WSRLWI
)
3052 error ("the count should be no less than 0. please check the intrinsic _mm_srli_pi32 in code.");
3053 else if (fcode
== ARM_BUILTIN_WSRLDI
)
3054 error ("the count should be no less than 0. please check the intrinsic _mm_srli_si64 in code.");
3055 else if (fcode
== ARM_BUILTIN_WSLLHI
)
3056 error ("the count should be no less than 0. please check the intrinsic _mm_slli_pi16 in code.");
3057 else if (fcode
== ARM_BUILTIN_WSLLWI
)
3058 error ("the count should be no less than 0. please check the intrinsic _mm_slli_pi32 in code.");
3059 else if (fcode
== ARM_BUILTIN_WSLLDI
)
3060 error ("the count should be no less than 0. please check the intrinsic _mm_slli_si64 in code.");
3061 else if (fcode
== ARM_BUILTIN_WSRAHI
)
3062 error ("the count should be no less than 0. please check the intrinsic _mm_srai_pi16 in code.");
3063 else if (fcode
== ARM_BUILTIN_WSRAWI
)
3064 error ("the count should be no less than 0. please check the intrinsic _mm_srai_pi32 in code.");
3065 else if (fcode
== ARM_BUILTIN_WSRADI
)
3066 error ("the count should be no less than 0. please check the intrinsic _mm_srai_si64 in code.");
3067 else if (fcode
== ARM_BUILTIN_WSRLH
)
3068 error ("the count should be no less than 0. please check the intrinsic _mm_srl_pi16 in code.");
3069 else if (fcode
== ARM_BUILTIN_WSRLW
)
3070 error ("the count should be no less than 0. please check the intrinsic _mm_srl_pi32 in code.");
3071 else if (fcode
== ARM_BUILTIN_WSRLD
)
3072 error ("the count should be no less than 0. please check the intrinsic _mm_srl_si64 in code.");
3073 else if (fcode
== ARM_BUILTIN_WSLLH
)
3074 error ("the count should be no less than 0. please check the intrinsic _mm_sll_pi16 in code.");
3075 else if (fcode
== ARM_BUILTIN_WSLLW
)
3076 error ("the count should be no less than 0. please check the intrinsic _mm_sll_pi32 in code.");
3077 else if (fcode
== ARM_BUILTIN_WSLLD
)
3078 error ("the count should be no less than 0. please check the intrinsic _mm_sll_si64 in code.");
3079 else if (fcode
== ARM_BUILTIN_WSRAH
)
3080 error ("the count should be no less than 0. please check the intrinsic _mm_sra_pi16 in code.");
3081 else if (fcode
== ARM_BUILTIN_WSRAW
)
3082 error ("the count should be no less than 0. please check the intrinsic _mm_sra_pi32 in code.");
3084 error ("the count should be no less than 0. please check the intrinsic _mm_sra_si64 in code.");
3087 return arm_expand_binop_builtin (icode
, exp
, target
);
3093 for (i
= 0, d
= bdesc_2arg
; i
< ARRAY_SIZE (bdesc_2arg
); i
++, d
++)
3094 if (d
->code
== (enum arm_builtins
) fcode
)
3095 return arm_expand_binop_builtin (d
->icode
, exp
, target
);
3097 for (i
= 0, d
= bdesc_1arg
; i
< ARRAY_SIZE (bdesc_1arg
); i
++, d
++)
3098 if (d
->code
== (enum arm_builtins
) fcode
)
3099 return arm_expand_unop_builtin (d
->icode
, exp
, target
, 0);
3101 for (i
= 0, d
= bdesc_3arg
; i
< ARRAY_SIZE (bdesc_3arg
); i
++, d
++)
3102 if (d
->code
== (enum arm_builtins
) fcode
)
3103 return arm_expand_ternop_builtin (d
->icode
, exp
, target
);
3105 /* @@@ Should really do something sensible here. */
/* Return the decl of the NEON builtin that implements a vectorized form
   of combined function FN for vector types TYPE_OUT/TYPE_IN, or
   NULL_TREE when no suitable variant exists (see the comment above the
   ARM_CHECK_BUILTIN_MODE definition below).
   NOTE(review): the return type line and several `case' labels /
   `return NULL_TREE' paths are not visible in this view -- presumably
   this is the TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION hook with a
   switch over FN; confirm against the full file.  */
3110 arm_builtin_vectorized_function (unsigned int fn
, tree type_out
, tree type_in
)
3112 machine_mode in_mode
, out_mode
;
3114 bool out_unsigned_p
= TYPE_UNSIGNED (type_out
);
3116 /* Can't provide any vectorized builtins when we can't use NEON. */
/* Only vector-to-vector mappings are handled; bail out otherwise.  */
3120 if (TREE_CODE (type_out
) != VECTOR_TYPE
3121 || TREE_CODE (type_in
) != VECTOR_TYPE
)
/* Record element mode and lane count of both sides; these feed the
   ARM_CHECK_BUILTIN_MODE* macros below.  NOTE(review): the
   declarations of out_n/in_n are not visible in this view.  */
3124 out_mode
= TYPE_MODE (TREE_TYPE (type_out
));
3125 out_n
= TYPE_VECTOR_SUBPARTS (type_out
);
3126 in_mode
= TYPE_MODE (TREE_TYPE (type_in
));
3127 in_n
= TYPE_VECTOR_SUBPARTS (type_in
);
3129 /* ARM_CHECK_BUILTIN_MODE and ARM_FIND_VRINT_VARIANT are used to find the
3130 decl of the vectorized builtin for the appropriate vector mode.
3131 NULL_TREE is returned if no such builtin is available. */
3132 #undef ARM_CHECK_BUILTIN_MODE
/* NOTE(review): the first operand of this && chain (line 3134) is
   elided in this view; the visible conditions additionally require
   -funsafe-math-optimizations.  */
3133 #define ARM_CHECK_BUILTIN_MODE(C) \
3135 && flag_unsafe_math_optimizations \
3136 && ARM_CHECK_BUILTIN_MODE_1 (C))
3138 #undef ARM_CHECK_BUILTIN_MODE_1
/* SFmode -> SFmode with C lanes on both sides (vrint family).  */
3139 #define ARM_CHECK_BUILTIN_MODE_1(C) \
3140 (out_mode == SFmode && out_n == C \
3141 && in_mode == SFmode && in_n == C)
3143 #undef ARM_FIND_VRINT_VARIANT
/* Pick the v2sf or v4sf vrint builtin matching the vector width.
   NOTE(review): the NULL_TREE fallback arm of this conditional
   (line ~3149) is elided in this view.  */
3144 #define ARM_FIND_VRINT_VARIANT(N) \
3145 (ARM_CHECK_BUILTIN_MODE (2) \
3146 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sf, false) \
3147 : (ARM_CHECK_BUILTIN_MODE (4) \
3148 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sf, false) \
/* Rounding cases: floor/ceil/trunc/round map onto vrintm/p/z/a.
   NOTE(review): the `case' labels preceding each return are elided in
   this view.  */
3154 return ARM_FIND_VRINT_VARIANT (vrintm
);
3156 return ARM_FIND_VRINT_VARIANT (vrintp
);
3158 return ARM_FIND_VRINT_VARIANT (vrintz
);
3160 return ARM_FIND_VRINT_VARIANT (vrinta
);
3161 #undef ARM_CHECK_BUILTIN_MODE_1
/* Conversion cases use SFmode input and SImode output lanes.  */
3162 #define ARM_CHECK_BUILTIN_MODE_1(C) \
3163 (out_mode == SImode && out_n == C \
3164 && in_mode == SFmode && in_n == C)
/* Signed float->int conversion builtins (v2sfv2si / v4sfv4si).  */
3166 #define ARM_FIND_VCVT_VARIANT(N) \
3167 (ARM_CHECK_BUILTIN_MODE (2) \
3168 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v2sfv2si, false) \
3169 : (ARM_CHECK_BUILTIN_MODE (4) \
3170 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##v4sfv4si, false) \
/* Unsigned counterparts, selected when the result type is unsigned.  */
3173 #define ARM_FIND_VCVTU_VARIANT(N) \
3174 (ARM_CHECK_BUILTIN_MODE (2) \
3175 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv2sfv2si, false) \
3176 : (ARM_CHECK_BUILTIN_MODE (4) \
3177 ? arm_builtin_decl(ARM_BUILTIN_NEON_##N##uv4sfv4si, false) \
/* lround/lceil/lfloor-style cases: choose the signed or unsigned
   vcvta/vcvtp/vcvtm builtin from the signedness of TYPE_OUT.  */
3180 return (out_unsigned_p
3181 ? ARM_FIND_VCVTU_VARIANT (vcvta
)
3182 : ARM_FIND_VCVT_VARIANT (vcvta
));
3184 return (out_unsigned_p
3185 ? ARM_FIND_VCVTU_VARIANT (vcvtp
)
3186 : ARM_FIND_VCVT_VARIANT (vcvtp
));
3188 return (out_unsigned_p
3189 ? ARM_FIND_VCVTU_VARIANT (vcvtm
)
3190 : ARM_FIND_VCVT_VARIANT (vcvtm
));
3191 #undef ARM_CHECK_BUILTIN_MODE
/* Remaining cases need identical element mode N and lane count C on
   both sides (bswap, copysign).  */
3192 #define ARM_CHECK_BUILTIN_MODE(C, N) \
3193 (out_mode == N##mode && out_n == C \
3194 && in_mode == N##mode && in_n == C)
3195 case CFN_BUILT_IN_BSWAP16
:
3196 if (ARM_CHECK_BUILTIN_MODE (4, HI
))
3197 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4hi
, false);
3198 else if (ARM_CHECK_BUILTIN_MODE (8, HI
))
3199 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv8hi
, false);
3202 case CFN_BUILT_IN_BSWAP32
:
3203 if (ARM_CHECK_BUILTIN_MODE (2, SI
))
3204 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2si
, false);
3205 else if (ARM_CHECK_BUILTIN_MODE (4, SI
))
3206 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv4si
, false);
3209 case CFN_BUILT_IN_BSWAP64
:
3210 if (ARM_CHECK_BUILTIN_MODE (2, DI
))
3211 return arm_builtin_decl (ARM_BUILTIN_NEON_bswapv2di
, false);
/* copysignf on v2sf/v4sf.  NOTE(review): the `case' label for this
   group (line ~3214) is elided in this view.  */
3215 if (ARM_CHECK_BUILTIN_MODE (2, SF
))
3216 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv2sf
, false);
3217 else if (ARM_CHECK_BUILTIN_MODE (4, SF
))
3218 return arm_builtin_decl (ARM_BUILTIN_NEON_copysignfv4sf
, false);
/* Keep the helper macros strictly local to this function.  */
3227 #undef ARM_FIND_VCVT_VARIANT
3228 #undef ARM_FIND_VCVTU_VARIANT
3229 #undef ARM_CHECK_BUILTIN_MODE
3230 #undef ARM_FIND_VRINT_VARIANT
/* Build the three GENERIC trees used to implement an atomic
   floating-point compound assignment: *HOLD saves the FPSCR and masks
   off exception trapping, *CLEAR re-clears the exception flags, and
   *UPDATE restores the saved FPSCR and re-raises any exceptions that
   occurred, via __atomic_feraiseexcept.
   NOTE(review): the return type line is not visible in this view --
   presumably `void', as the TARGET_ATOMIC_ASSIGN_EXPAND_FENV hook;
   confirm against the full file.  */
3233 arm_atomic_assign_expand_fenv (tree
*hold
, tree
*clear
, tree
*update
)
/* FPSCR cumulative-exception flag bits (IXC..IOC), matching the
   <fenv.h> FE_* macros for ARM.  */
3235 const unsigned ARM_FE_INVALID
= 1;
3236 const unsigned ARM_FE_DIVBYZERO
= 2;
3237 const unsigned ARM_FE_OVERFLOW
= 4;
3238 const unsigned ARM_FE_UNDERFLOW
= 8;
3239 const unsigned ARM_FE_INEXACT
= 16;
/* NOTE(review): the remaining OR terms of this initializer (lines
   3241-3244) are elided in this view; presumably the union of all five
   flag bits above.  */
3240 const unsigned HOST_WIDE_INT ARM_FE_ALL_EXCEPT
= (ARM_FE_INVALID
/* Distance from the exception-status bits to the corresponding
   trap-enable bits in the FPSCR.  */
3245 const unsigned HOST_WIDE_INT ARM_FE_EXCEPT_SHIFT
= 8;
3246 tree fenv_var
, get_fpscr
, set_fpscr
, mask
, ld_fenv
, masked_fenv
;
3247 tree new_fenv_var
, reload_fenv
, restore_fnenv
;
3248 tree update_call
, atomic_feraiseexcept
, hold_fnclex
;
/* Without a hardware FPU there is no FPSCR to manage.
   NOTE(review): the early-return body of this `if' is elided in this
   view.  */
3250 if (!TARGET_HARD_FLOAT
)
3253 /* Generate the equivalent of :
3254 unsigned int fenv_var;
3255 fenv_var = __builtin_arm_get_fpscr ();
3257 unsigned int masked_fenv;
3258 masked_fenv = fenv_var & mask;
3260 __builtin_arm_set_fpscr (masked_fenv); */
3262 fenv_var
= create_tmp_var_raw (unsigned_type_node
);
3263 get_fpscr
= arm_builtin_decls
[ARM_BUILTIN_GET_FPSCR
];
3264 set_fpscr
= arm_builtin_decls
[ARM_BUILTIN_SET_FPSCR
];
/* Mask that clears both the exception-status bits and the matching
   trap-enable bits.  */
3265 mask
= build_int_cst (unsigned_type_node
,
3266 ~((ARM_FE_ALL_EXCEPT
<< ARM_FE_EXCEPT_SHIFT
)
3267 | ARM_FE_ALL_EXCEPT
));
3268 ld_fenv
= build2 (MODIFY_EXPR
, unsigned_type_node
,
3269 fenv_var
, build_call_expr (get_fpscr
, 0));
3270 masked_fenv
= build2 (BIT_AND_EXPR
, unsigned_type_node
, fenv_var
, mask
);
3271 hold_fnclex
= build_call_expr (set_fpscr
, 1, masked_fenv
);
/* *HOLD = (masked_fenv, load FPSCR into fenv_var, write masked value
   back); the leading masked_fenv forces gimplification to create the
   temporary before it is used.
   NOTE(review): the final operand line of this build2 (line ~3274,
   presumably hold_fnclex) is elided in this view.  */
3272 *hold
= build2 (COMPOUND_EXPR
, void_type_node
,
3273 build2 (COMPOUND_EXPR
, void_type_node
, masked_fenv
, ld_fenv
),
3276 /* Store the value of masked_fenv to clear the exceptions:
3277 __builtin_arm_set_fpscr (masked_fenv); */
3279 *clear
= build_call_expr (set_fpscr
, 1, masked_fenv
);
3281 /* Generate the equivalent of :
3282 unsigned int new_fenv_var;
3283 new_fenv_var = __builtin_arm_get_fpscr ();
3285 __builtin_arm_set_fpscr (fenv_var);
3287 __atomic_feraiseexcept (new_fenv_var); */
3289 new_fenv_var
= create_tmp_var_raw (unsigned_type_node
);
3290 reload_fenv
= build2 (MODIFY_EXPR
, unsigned_type_node
, new_fenv_var
,
3291 build_call_expr (get_fpscr
, 0));
3292 restore_fnenv
= build_call_expr (set_fpscr
, 1, fenv_var
);
3293 atomic_feraiseexcept
= builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT
);
/* __atomic_feraiseexcept takes an int, hence the fold_convert.  */
3294 update_call
= build_call_expr (atomic_feraiseexcept
, 1,
3295 fold_convert (integer_type_node
, new_fenv_var
));
/* *UPDATE = (read current FPSCR, restore the held value, re-raise any
   exceptions seen in between).  */
3296 *update
= build2 (COMPOUND_EXPR
, void_type_node
,
3297 build2 (COMPOUND_EXPR
, void_type_node
,
3298 reload_fenv
, restore_fnenv
), update_call
);
3301 #include "gt-arm-builtins.h"