]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.cc
Don't build readline/libreadline.a, when --with-system-readline is supplied
[thirdparty/gcc.git] / gcc / builtins.cc
CommitLineData
28f4ec01 1/* Expand builtin functions.
7adcbafe 2 Copyright (C) 1988-2022 Free Software Foundation, Inc.
28f4ec01 3
1322177d 4This file is part of GCC.
28f4ec01 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
28f4ec01 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
28f4ec01
BS
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
28f4ec01 19
25ab3b0a
RB
20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
e53b6e56 22 to match.pd or gimple-fold.cc instead. */
25ab3b0a 23
28f4ec01
BS
24#include "config.h"
25#include "system.h"
4977bab6 26#include "coretypes.h"
c7131fb2 27#include "backend.h"
957060b5
AM
28#include "target.h"
29#include "rtl.h"
c7131fb2 30#include "tree.h"
e73cf9a2 31#include "memmodel.h"
c7131fb2 32#include "gimple.h"
957060b5
AM
33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
f90aa46c 36#include "tree-vrp.h"
957060b5
AM
37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
957060b5
AM
40#include "emit-rtl.h"
41#include "recog.h"
957060b5 42#include "diagnostic-core.h"
40e23961 43#include "alias.h"
40e23961 44#include "fold-const.h"
5c1a2e63 45#include "fold-const-call.h"
b48d4e68 46#include "gimple-ssa-warn-access.h"
d8a2d370
DN
47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
ef29b12c 51#include "tree-ssa-strlen.h"
d49b6e1e 52#include "realmpfr.h"
60393bbc 53#include "cfgrtl.h"
28f4ec01 54#include "except.h"
36566b39
PK
55#include "dojump.h"
56#include "explow.h"
36566b39 57#include "stmt.h"
28f4ec01 58#include "expr.h"
e78d8e51 59#include "libfuncs.h"
28f4ec01
BS
60#include "output.h"
61#include "typeclass.h"
ab393bf1 62#include "langhooks.h"
079a182e 63#include "value-prof.h"
fa19795e 64#include "builtins.h"
314e6352
ML
65#include "stringpool.h"
66#include "attribs.h"
bdea98ca 67#include "asan.h"
686ee971 68#include "internal-fn.h"
b03ff92e 69#include "case-cfn-macros.h"
ba206889 70#include "gimple-iterator.h"
44a845ca 71#include "gimple-fold.h"
ee92e7ba 72#include "intl.h"
7365279f 73#include "file-prefix-map.h" /* remap_macro_filename() */
1f62d637
TV
74#include "gomp-constants.h"
75#include "omp-general.h"
464969eb 76#include "tree-dfa.h"
410675cb
JJ
77#include "gimple-ssa.h"
78#include "tree-ssa-live.h"
79#include "tree-outof-ssa.h"
4f8cfb42 80#include "attr-fnspec.h"
fd64f348 81#include "demangle.h"
45f4e2b0 82#include "gimple-range.h"
2a837de2 83#include "pointer-query.h"
81f5094d 84
fa19795e
RS
85struct target_builtins default_target_builtins;
86#if SWITCHABLE_TARGET
87struct target_builtins *this_target_builtins = &default_target_builtins;
88#endif
89
9df2c88c 90/* Define the names of the builtin function types and codes. */
5e351e96 91const char *const built_in_class_names[BUILT_IN_LAST]
9df2c88c
RK
92 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
93
c6a912da 94#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
7e7e470f 95const char * built_in_names[(int) END_BUILTINS] =
cb1072f4
KG
96{
97#include "builtins.def"
98};
9df2c88c 99
cbf5d0e7 100/* Setup an array of builtin_info_type, make sure each element decl is
3ff5f682 101 initialized to NULL_TREE. */
cbf5d0e7 102builtin_info_type builtin_info[(int)END_BUILTINS];
3ff5f682 103
4e7d7b3d
JJ
104/* Non-zero if __builtin_constant_p should be folded right away. */
105bool force_folding_builtin_constant_p;
106
4682ae04 107static int target_char_cast (tree, char *);
4682ae04
AJ
108static int apply_args_size (void);
109static int apply_result_size (void);
4682ae04 110static rtx result_vector (int, rtx);
4682ae04
AJ
111static void expand_builtin_prefetch (tree);
112static rtx expand_builtin_apply_args (void);
113static rtx expand_builtin_apply_args_1 (void);
114static rtx expand_builtin_apply (rtx, rtx, rtx);
115static void expand_builtin_return (rtx);
116static enum type_class type_to_class (tree);
117static rtx expand_builtin_classify_type (tree);
6c7cf1f0 118static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
1b1562a5 119static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
4359dc2a 120static rtx expand_builtin_interclass_mathfn (tree, rtx);
403e54f0 121static rtx expand_builtin_sincos (tree);
4343f5e2
RFF
122static rtx expand_builtin_fegetround (tree, rtx, machine_mode);
123static rtx expand_builtin_feclear_feraise_except (tree, rtx, machine_mode,
124 optab);
4359dc2a 125static rtx expand_builtin_cexpi (tree, rtx);
0982edd3 126static rtx expand_builtin_issignaling (tree, rtx);
1856c8dc
JH
127static rtx expand_builtin_int_roundingfn (tree, rtx);
128static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
8870e212 129static rtx expand_builtin_next_arg (void);
4682ae04
AJ
130static rtx expand_builtin_va_start (tree);
131static rtx expand_builtin_va_end (tree);
132static rtx expand_builtin_va_copy (tree);
d5803b98 133static rtx inline_expand_builtin_bytecmp (tree, rtx);
44e10129 134static rtx expand_builtin_strcmp (tree, rtx);
ef4bddc2 135static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
44e10129 136static rtx expand_builtin_memcpy (tree, rtx);
671a00ee 137static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
2ff5ffb6 138 rtx target, tree exp,
03a9b90a
AS
139 memop_ret retmode,
140 bool might_overlap);
e50d56a5 141static rtx expand_builtin_memmove (tree, rtx);
671a00ee 142static rtx expand_builtin_mempcpy (tree, rtx);
2ff5ffb6 143static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
44e10129 144static rtx expand_builtin_strcpy (tree, rtx);
e08341bb 145static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
ef4bddc2 146static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
44e10129 147static rtx expand_builtin_strncpy (tree, rtx);
ef4bddc2 148static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
4682ae04 149static rtx expand_builtin_bzero (tree);
ef4bddc2 150static rtx expand_builtin_strlen (tree, rtx, machine_mode);
781ff3d8 151static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
b7e52782 152static rtx expand_builtin_alloca (tree);
ef4bddc2 153static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
4682ae04 154static rtx expand_builtin_frame_address (tree, tree);
db3927fb 155static tree stabilize_va_list_loc (location_t, tree, int);
4682ae04 156static rtx expand_builtin_expect (tree, rtx);
1e9168b2 157static rtx expand_builtin_expect_with_probability (tree, rtx);
4682ae04
AJ
158static tree fold_builtin_constant_p (tree);
159static tree fold_builtin_classify_type (tree);
d14c547a 160static tree fold_builtin_strlen (location_t, tree, tree, tree);
db3927fb 161static tree fold_builtin_inf (location_t, tree, int);
db3927fb 162static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
0dba7960 163static bool validate_arg (const_tree, enum tree_code code);
4682ae04 164static rtx expand_builtin_fabs (tree, rtx, rtx);
ef79730c 165static rtx expand_builtin_signbit (tree, rtx);
db3927fb 166static tree fold_builtin_memcmp (location_t, tree, tree, tree);
db3927fb
AH
167static tree fold_builtin_isascii (location_t, tree);
168static tree fold_builtin_toascii (location_t, tree);
169static tree fold_builtin_isdigit (location_t, tree);
170static tree fold_builtin_fabs (location_t, tree, tree);
171static tree fold_builtin_abs (location_t, tree, tree);
172static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
a35da91f 173 enum tree_code);
903c723b 174static tree fold_builtin_varargs (location_t, tree, tree*, int);
db3927fb 175
b5338fb3
MS
176static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
177static tree fold_builtin_strspn (location_t, tree, tree, tree);
178static tree fold_builtin_strcspn (location_t, tree, tree, tree);
6de9cd9a 179
10a0d495 180static rtx expand_builtin_object_size (tree);
ef4bddc2 181static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
10a0d495
JJ
182 enum built_in_function);
183static void maybe_emit_chk_warning (tree, enum built_in_function);
184static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
79a89108 185static tree fold_builtin_object_size (tree, tree, enum built_in_function);
000ba23d 186
ad03a744 187unsigned HOST_WIDE_INT target_newline;
fef5a0d9 188unsigned HOST_WIDE_INT target_percent;
000ba23d
KG
189static unsigned HOST_WIDE_INT target_c;
190static unsigned HOST_WIDE_INT target_s;
edd7ae68 191char target_percent_c[3];
fef5a0d9 192char target_percent_s[3];
ad03a744 193char target_percent_s_newline[4];
ea91f957 194static tree do_mpfr_remquo (tree, tree, tree);
752b7d38 195static tree do_mpfr_lgamma_r (tree, tree, tree);
86951993 196static void expand_builtin_sync_synchronize (void);
10a0d495 197
d7f09764
DN
198/* Return true if NAME starts with __builtin_ or __sync_. */
199
0c1e7e42 200static bool
bbf7ce11 201is_builtin_name (const char *name)
48ae6c13 202{
6ba3079d
ML
203 return (startswith (name, "__builtin_")
204 || startswith (name, "__sync_")
205 || startswith (name, "__atomic_"));
48ae6c13 206}
6de9cd9a 207
bbf7ce11
RAE
208/* Return true if NODE should be considered for inline expansion regardless
209 of the optimization level. This means whenever a function is invoked with
210 its "internal" name, which normally contains the prefix "__builtin". */
211
4cfe7a6c 212bool
bbf7ce11
RAE
213called_as_built_in (tree node)
214{
215 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
216 we want the name used to call the function, not the name it
217 will have. */
218 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
219 return is_builtin_name (name);
220}
221
644ffefd
MJ
222/* Compute values M and N such that M divides (address of EXP - N) and such
223 that N < M. If these numbers can be determined, store M in alignp and N in
224 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
225 *alignp and any bit-offset to *bitposp.
73f6eabc
RS
226
227 Note that the address (and thus the alignment) computed here is based
228 on the address to which a symbol resolves, whereas DECL_ALIGN is based
229 on the address at which an object is actually located. These two
230 addresses are not always the same. For example, on ARM targets,
231 the address &foo of a Thumb function foo() has the lowest bit set,
b0f4a35f 232 whereas foo() itself starts on an even address.
df96b059 233
b0f4a35f
RG
234 If ADDR_P is true we are taking the address of the memory reference EXP
235 and thus cannot rely on the access taking place. */
236
c5288df7 237bool
b0f4a35f
RG
238get_object_alignment_2 (tree exp, unsigned int *alignp,
239 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
df96b059 240{
7df9b6f1 241 poly_int64 bitsize, bitpos;
e80c2726 242 tree offset;
ef4bddc2 243 machine_mode mode;
ee45a32d 244 int unsignedp, reversep, volatilep;
eae76e53 245 unsigned int align = BITS_PER_UNIT;
644ffefd 246 bool known_alignment = false;
df96b059 247
e80c2726
RG
248 /* Get the innermost object and the constant (bitpos) and possibly
249 variable (offset) offset of the access. */
ee45a32d 250 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
25b75a48 251 &unsignedp, &reversep, &volatilep);
e80c2726
RG
252
253 /* Extract alignment information from the innermost object and
254 possibly adjust bitpos and offset. */
b0f4a35f 255 if (TREE_CODE (exp) == FUNCTION_DECL)
73f6eabc 256 {
b0f4a35f
RG
257 /* Function addresses can encode extra information besides their
258 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
259 allows the low bit to be used as a virtual bit, we know
260 that the address itself must be at least 2-byte aligned. */
261 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
262 align = 2 * BITS_PER_UNIT;
73f6eabc 263 }
b0f4a35f
RG
264 else if (TREE_CODE (exp) == LABEL_DECL)
265 ;
266 else if (TREE_CODE (exp) == CONST_DECL)
e80c2726 267 {
b0f4a35f
RG
268 /* The alignment of a CONST_DECL is determined by its initializer. */
269 exp = DECL_INITIAL (exp);
e80c2726 270 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 271 if (CONSTANT_CLASS_P (exp))
58e17cf8 272 align = targetm.constant_alignment (exp, align);
6b00e42d 273
b0f4a35f 274 known_alignment = true;
e80c2726 275 }
b0f4a35f 276 else if (DECL_P (exp))
644ffefd 277 {
b0f4a35f 278 align = DECL_ALIGN (exp);
644ffefd 279 known_alignment = true;
644ffefd 280 }
b0f4a35f
RG
281 else if (TREE_CODE (exp) == INDIRECT_REF
282 || TREE_CODE (exp) == MEM_REF
283 || TREE_CODE (exp) == TARGET_MEM_REF)
e80c2726
RG
284 {
285 tree addr = TREE_OPERAND (exp, 0);
644ffefd
MJ
286 unsigned ptr_align;
287 unsigned HOST_WIDE_INT ptr_bitpos;
4ceae7e9 288 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
644ffefd 289
4ceae7e9 290 /* If the address is explicitely aligned, handle that. */
e80c2726
RG
291 if (TREE_CODE (addr) == BIT_AND_EXPR
292 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
293 {
4ceae7e9
RB
294 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
295 ptr_bitmask *= BITS_PER_UNIT;
146ec50f 296 align = least_bit_hwi (ptr_bitmask);
e80c2726
RG
297 addr = TREE_OPERAND (addr, 0);
298 }
644ffefd 299
b0f4a35f
RG
300 known_alignment
301 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
b0f4a35f
RG
302 align = MAX (ptr_align, align);
303
4ceae7e9
RB
304 /* Re-apply explicit alignment to the bitpos. */
305 ptr_bitpos &= ptr_bitmask;
306
3c82efd9
RG
307 /* The alignment of the pointer operand in a TARGET_MEM_REF
308 has to take the variable offset parts into account. */
b0f4a35f 309 if (TREE_CODE (exp) == TARGET_MEM_REF)
1be38ccb 310 {
b0f4a35f
RG
311 if (TMR_INDEX (exp))
312 {
313 unsigned HOST_WIDE_INT step = 1;
314 if (TMR_STEP (exp))
315 step = TREE_INT_CST_LOW (TMR_STEP (exp));
146ec50f 316 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
b0f4a35f
RG
317 }
318 if (TMR_INDEX2 (exp))
319 align = BITS_PER_UNIT;
320 known_alignment = false;
1be38ccb 321 }
644ffefd 322
b0f4a35f
RG
323 /* When EXP is an actual memory reference then we can use
324 TYPE_ALIGN of a pointer indirection to derive alignment.
325 Do so only if get_pointer_alignment_1 did not reveal absolute
3c82efd9
RG
326 alignment knowledge and if using that alignment would
327 improve the situation. */
a4cf4b64 328 unsigned int talign;
3c82efd9 329 if (!addr_p && !known_alignment
a4cf4b64
RB
330 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
331 && talign > align)
332 align = talign;
3c82efd9
RG
333 else
334 {
335 /* Else adjust bitpos accordingly. */
336 bitpos += ptr_bitpos;
337 if (TREE_CODE (exp) == MEM_REF
338 || TREE_CODE (exp) == TARGET_MEM_REF)
aca52e6f 339 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
3c82efd9 340 }
e80c2726 341 }
b0f4a35f 342 else if (TREE_CODE (exp) == STRING_CST)
1be38ccb 343 {
b0f4a35f
RG
344 /* STRING_CST are the only constant objects we allow to be not
345 wrapped inside a CONST_DECL. */
346 align = TYPE_ALIGN (TREE_TYPE (exp));
b0f4a35f 347 if (CONSTANT_CLASS_P (exp))
58e17cf8 348 align = targetm.constant_alignment (exp, align);
6b00e42d 349
b0f4a35f 350 known_alignment = true;
e80c2726 351 }
e80c2726
RG
352
353 /* If there is a non-constant offset part extract the maximum
354 alignment that can prevail. */
eae76e53 355 if (offset)
e80c2726 356 {
e75fde1a 357 unsigned int trailing_zeros = tree_ctz (offset);
eae76e53 358 if (trailing_zeros < HOST_BITS_PER_INT)
e80c2726 359 {
eae76e53
JJ
360 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
361 if (inner)
362 align = MIN (align, inner);
e80c2726 363 }
e80c2726
RG
364 }
365
7df9b6f1
RS
366 /* Account for the alignment of runtime coefficients, so that the constant
367 bitpos is guaranteed to be accurate. */
368 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
369 if (alt_align != 0 && alt_align < align)
370 {
371 align = alt_align;
372 known_alignment = false;
373 }
374
b0f4a35f 375 *alignp = align;
7df9b6f1 376 *bitposp = bitpos.coeffs[0] & (align - 1);
644ffefd 377 return known_alignment;
daade206
RG
378}
379
b0f4a35f
RG
380/* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
382 store M in alignp and N in *BITPOSP and return true. Otherwise return false
383 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
384
385bool
386get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
388{
2e6ad1ba
RB
389 /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
390 with it. */
391 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
392 exp = TREE_OPERAND (exp, 0);
b0f4a35f
RG
393 return get_object_alignment_2 (exp, alignp, bitposp, false);
394}
395
0eb77834 396/* Return the alignment in bits of EXP, an object. */
daade206
RG
397
398unsigned int
0eb77834 399get_object_alignment (tree exp)
daade206
RG
400{
401 unsigned HOST_WIDE_INT bitpos = 0;
402 unsigned int align;
403
644ffefd 404 get_object_alignment_1 (exp, &align, &bitpos);
daade206 405
e80c2726
RG
406 /* align and bitpos now specify known low bits of the pointer.
407 ptr & (align - 1) == bitpos. */
408
409 if (bitpos != 0)
146ec50f 410 align = least_bit_hwi (bitpos);
0eb77834 411 return align;
df96b059
JJ
412}
413
644ffefd
MJ
414/* For a pointer valued expression EXP compute values M and N such that M
415 divides (EXP - N) and such that N < M. If these numbers can be determined,
b0f4a35f
RG
416 store M in alignp and N in *BITPOSP and return true. Return false if
417 the results are just a conservative approximation.
28f4ec01 418
644ffefd 419 If EXP is not a pointer, false is returned too. */
28f4ec01 420
644ffefd
MJ
421bool
422get_pointer_alignment_1 (tree exp, unsigned int *alignp,
423 unsigned HOST_WIDE_INT *bitposp)
28f4ec01 424{
1be38ccb 425 STRIP_NOPS (exp);
6026b73e 426
1be38ccb 427 if (TREE_CODE (exp) == ADDR_EXPR)
b0f4a35f
RG
428 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
429 alignp, bitposp, true);
5fa79de8
RB
430 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
431 {
432 unsigned int align;
433 unsigned HOST_WIDE_INT bitpos;
434 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
435 &align, &bitpos);
436 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
437 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
438 else
439 {
440 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
441 if (trailing_zeros < HOST_BITS_PER_INT)
442 {
443 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
444 if (inner)
445 align = MIN (align, inner);
446 }
447 }
448 *alignp = align;
449 *bitposp = bitpos & (align - 1);
450 return res;
451 }
1be38ccb
RG
452 else if (TREE_CODE (exp) == SSA_NAME
453 && POINTER_TYPE_P (TREE_TYPE (exp)))
28f4ec01 454 {
644ffefd 455 unsigned int ptr_align, ptr_misalign;
1be38ccb 456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
644ffefd
MJ
457
458 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
459 {
460 *bitposp = ptr_misalign * BITS_PER_UNIT;
461 *alignp = ptr_align * BITS_PER_UNIT;
5505978a
RB
462 /* Make sure to return a sensible alignment when the multiplication
463 by BITS_PER_UNIT overflowed. */
464 if (*alignp == 0)
465 *alignp = 1u << (HOST_BITS_PER_INT - 1);
b0f4a35f 466 /* We cannot really tell whether this result is an approximation. */
5f9a167b 467 return false;
644ffefd
MJ
468 }
469 else
87c0fb4b
RG
470 {
471 *bitposp = 0;
644ffefd
MJ
472 *alignp = BITS_PER_UNIT;
473 return false;
87c0fb4b 474 }
28f4ec01 475 }
44fabee4
RG
476 else if (TREE_CODE (exp) == INTEGER_CST)
477 {
478 *alignp = BIGGEST_ALIGNMENT;
479 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
480 & (BIGGEST_ALIGNMENT - 1));
481 return true;
482 }
1be38ccb 483
87c0fb4b 484 *bitposp = 0;
644ffefd
MJ
485 *alignp = BITS_PER_UNIT;
486 return false;
28f4ec01
BS
487}
488
87c0fb4b
RG
489/* Return the alignment in bits of EXP, a pointer valued expression.
490 The alignment returned is, by default, the alignment of the thing that
491 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
492
493 Otherwise, look at the expression to see if we can do better, i.e., if the
494 expression is actually pointing at an object whose alignment is tighter. */
495
496unsigned int
497get_pointer_alignment (tree exp)
498{
499 unsigned HOST_WIDE_INT bitpos = 0;
500 unsigned int align;
644ffefd
MJ
501
502 get_pointer_alignment_1 (exp, &align, &bitpos);
87c0fb4b
RG
503
504 /* align and bitpos now specify known low bits of the pointer.
505 ptr & (align - 1) == bitpos. */
506
507 if (bitpos != 0)
146ec50f 508 align = least_bit_hwi (bitpos);
87c0fb4b
RG
509
510 return align;
511}
512
bfb9bd47 513/* Return the number of leading non-zero elements in the sequence
1eb4547b
MS
514 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
515 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
516
bfb9bd47 517unsigned
1eb4547b
MS
518string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
519{
520 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
521
522 unsigned n;
523
524 if (eltsize == 1)
525 {
526 /* Optimize the common case of plain char. */
527 for (n = 0; n < maxelts; n++)
528 {
529 const char *elt = (const char*) ptr + n;
530 if (!*elt)
531 break;
532 }
533 }
534 else
535 {
536 for (n = 0; n < maxelts; n++)
537 {
538 const char *elt = (const char*) ptr + n * eltsize;
539 if (!memcmp (elt, "\0\0\0\0", eltsize))
540 break;
541 }
542 }
543 return n;
544}
545
546/* Compute the length of a null-terminated character string or wide
547 character string handling character sizes of 1, 2, and 4 bytes.
548 TREE_STRING_LENGTH is not the right way because it evaluates to
549 the size of the character array in bytes (as opposed to characters)
550 and because it can contain a zero byte in the middle.
28f4ec01 551
f1ba665b 552 ONLY_VALUE should be nonzero if the result is not going to be emitted
88373ed0 553 into the instruction stream and zero if it is going to be expanded.
f1ba665b 554 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
ae808627 555 is returned, otherwise NULL, since
14b7950f 556 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
ae808627
JJ
557 evaluate the side-effects.
558
21e8fb22
RB
559 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
560 accesses. Note that this implies the result is not going to be emitted
561 into the instruction stream.
562
7d583f42 563 Additional information about the string accessed may be recorded
14b7950f 564 in DATA. For example, if ARG references an unterminated string,
7d583f42
JL
565 then the declaration will be stored in the DECL field. If the
566 length of the unterminated string can be determined, it'll be
567 stored in the LEN field. Note this length could well be different
568 than what a C strlen call would return.
6ab24ea8 569
4148b00d
BE
570 ELTSIZE is 1 for normal single byte character strings, and 2 or
571 4 for wide characer strings. ELTSIZE is by default 1.
fed3cef0 572
4148b00d 573 The value returned is of type `ssizetype'. */
28f4ec01 574
6de9cd9a 575tree
14b7950f 576c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
28f4ec01 577{
7d583f42
JL
578 /* If we were not passed a DATA pointer, then get one to a local
579 structure. That avoids having to check DATA for NULL before
580 each time we want to use it. */
3f46ef1f 581 c_strlen_data local_strlen_data = { };
7d583f42
JL
582 if (!data)
583 data = &local_strlen_data;
584
1ebf0641 585 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
14b7950f
MS
586
587 tree src = STRIP_NOPS (arg);
ae808627
JJ
588 if (TREE_CODE (src) == COND_EXPR
589 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
590 {
591 tree len1, len2;
592
7d583f42
JL
593 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
594 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
33521f7d 595 if (tree_int_cst_equal (len1, len2))
ae808627
JJ
596 return len1;
597 }
598
599 if (TREE_CODE (src) == COMPOUND_EXPR
600 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
7d583f42 601 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
ae808627 602
1eb4547b 603 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
59d49708 604
1eb4547b
MS
605 /* Offset from the beginning of the string in bytes. */
606 tree byteoff;
4148b00d 607 tree memsize;
6ab24ea8
MS
608 tree decl;
609 src = string_constant (src, &byteoff, &memsize, &decl);
28f4ec01 610 if (src == 0)
5039610b 611 return NULL_TREE;
fed3cef0 612
1eb4547b 613 /* Determine the size of the string element. */
4148b00d
BE
614 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
615 return NULL_TREE;
1eb4547b 616
ca32b29e 617 /* Set MAXELTS to ARRAY_SIZE (SRC) - 1, the maximum possible
35b4d3a6 618 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
bfb9bd47
MS
619 in case the latter is less than the size of the array, such as when
620 SRC refers to a short string literal used to initialize a large array.
621 In that case, the elements of the array after the terminating NUL are
622 all NUL. */
623 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
d01b568a 624 strelts = strelts / eltsize;
bfb9bd47 625
4148b00d
BE
626 if (!tree_fits_uhwi_p (memsize))
627 return NULL_TREE;
628
d01b568a 629 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
1eb4547b
MS
630
631 /* PTR can point to the byte representation of any string type, including
632 char* and wchar_t*. */
633 const char *ptr = TREE_STRING_POINTER (src);
fed3cef0 634
1eb4547b 635 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
28f4ec01 636 {
4148b00d
BE
637 /* The code below works only for single byte character types. */
638 if (eltsize != 1)
639 return NULL_TREE;
640
bfb9bd47
MS
641 /* If the string has an internal NUL character followed by any
642 non-NUL characters (e.g., "foo\0bar"), we can't compute
643 the offset to the following NUL if we don't know where to
28f4ec01 644 start searching for it. */
bfb9bd47 645 unsigned len = string_length (ptr, eltsize, strelts);
fed3cef0 646
7d583f42
JL
647 /* Return when an embedded null character is found or none at all.
648 In the latter case, set the DECL/LEN field in the DATA structure
649 so that callers may examine them. */
6ab24ea8 650 if (len + 1 < strelts)
4148b00d 651 return NULL_TREE;
6ab24ea8
MS
652 else if (len >= maxelts)
653 {
7d583f42 654 data->decl = decl;
6c4aa5f6 655 data->off = byteoff;
b71bbbe2 656 data->minlen = ssize_int (len);
6ab24ea8
MS
657 return NULL_TREE;
658 }
c42d0aa0 659
d01b568a
BE
660 /* For empty strings the result should be zero. */
661 if (len == 0)
662 return ssize_int (0);
663
28f4ec01 664 /* We don't know the starting offset, but we do know that the string
bfb9bd47
MS
665 has no internal zero bytes. If the offset falls within the bounds
666 of the string subtract the offset from the length of the string,
667 and return that. Otherwise the length is zero. Take care to
668 use SAVE_EXPR in case the OFFSET has side-effects. */
e8bf3d5e
BE
669 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
670 : byteoff;
671 offsave = fold_convert_loc (loc, sizetype, offsave);
bfb9bd47 672 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
e8bf3d5e
BE
673 size_int (len));
674 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
675 offsave);
676 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
bfb9bd47
MS
677 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
678 build_zero_cst (ssizetype));
28f4ec01
BS
679 }
680
1eb4547b
MS
681 /* Offset from the beginning of the string in elements. */
682 HOST_WIDE_INT eltoff;
683
28f4ec01 684 /* We have a known offset into the string. Start searching there for
5197bd50 685 a null character if we can represent it as a single HOST_WIDE_INT. */
1eb4547b
MS
686 if (byteoff == 0)
687 eltoff = 0;
1ebf0641 688 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
1eb4547b 689 eltoff = -1;
28f4ec01 690 else
1ebf0641 691 eltoff = tree_to_uhwi (byteoff) / eltsize;
fed3cef0 692
b2ed71b6
BE
693 /* If the offset is known to be out of bounds, warn, and call strlen at
694 runtime. */
d01b568a 695 if (eltoff < 0 || eltoff >= maxelts)
28f4ec01 696 {
1db01ff9 697 /* Suppress multiple warnings for propagated constant strings. */
3b57ff81 698 if (only_value != 2
e9e2bad7 699 && !warning_suppressed_p (arg, OPT_Warray_bounds)
1db01ff9
JJ
700 && warning_at (loc, OPT_Warray_bounds,
701 "offset %qwi outside bounds of constant string",
702 eltoff))
14b7950f
MS
703 {
704 if (decl)
705 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
e9e2bad7 706 suppress_warning (arg, OPT_Warray_bounds);
14b7950f 707 }
5039610b 708 return NULL_TREE;
28f4ec01 709 }
fed3cef0 710
4148b00d
BE
711 /* If eltoff is larger than strelts but less than maxelts the
712 string length is zero, since the excess memory will be zero. */
713 if (eltoff > strelts)
714 return ssize_int (0);
715
28f4ec01
BS
716 /* Use strlen to search for the first zero byte. Since any strings
717 constructed with build_string will have nulls appended, we win even
718 if we get handed something like (char[4])"abcd".
719
1eb4547b 720 Since ELTOFF is our starting index into the string, no further
28f4ec01 721 calculation is needed. */
1eb4547b 722 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
4148b00d 723 strelts - eltoff);
1eb4547b 724
d01b568a 725 /* Don't know what to return if there was no zero termination.
7d583f42
JL
726 Ideally this would turn into a gcc_checking_assert over time.
727 Set DECL/LEN so callers can examine them. */
d01b568a 728 if (len >= maxelts - eltoff)
6ab24ea8 729 {
7d583f42 730 data->decl = decl;
6c4aa5f6 731 data->off = byteoff;
b71bbbe2 732 data->minlen = ssize_int (len);
6ab24ea8
MS
733 return NULL_TREE;
734 }
1ebf0641 735
1eb4547b 736 return ssize_int (len);
28f4ec01
BS
737}
738
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  /* Scratch buffer holding the value in host-word-sized chunks,
     least-significant chunk first.  */
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of host words needed to hold MODE's precision, rounded up.  */
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  /* CH doubles as the "still inside the string" flag: once it becomes 0
     (the terminating NUL) and NULL_TERMINATED_P holds, all remaining
     bytes are read as zero.  Start at 1 so the first byte is read.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Map source byte index I to target bit offset J, permuting for
	 word and byte endianness of the target.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
780
ab937357 781/* Cast a target constant CST to target CHAR and if that value fits into
206048bd 782 host char type, return zero and put that value into variable pointed to by
ab937357
JJ
783 P. */
784
785static int
4682ae04 786target_char_cast (tree cst, char *p)
ab937357
JJ
787{
788 unsigned HOST_WIDE_INT val, hostval;
789
de77ab75 790 if (TREE_CODE (cst) != INTEGER_CST
ab937357
JJ
791 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
792 return 1;
793
807e902e 794 /* Do not care if it fits or not right here. */
de77ab75 795 val = TREE_INT_CST_LOW (cst);
807e902e 796
ab937357 797 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
fecfbfa4 798 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ab937357
JJ
799
800 hostval = val;
801 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
fecfbfa4 802 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ab937357
JJ
803
804 if (val != hostval)
805 return 1;
806
807 *p = hostval;
808 return 0;
809}
810
6de9cd9a
DN
811/* Similar to save_expr, but assumes that arbitrary code is not executed
812 in between the multiple evaluations. In particular, we assume that a
813 non-addressable local variable will not be modified. */
814
815static tree
816builtin_save_expr (tree exp)
817{
5cbf5c20
RG
818 if (TREE_CODE (exp) == SSA_NAME
819 || (TREE_ADDRESSABLE (exp) == 0
820 && (TREE_CODE (exp) == PARM_DECL
8813a647 821 || (VAR_P (exp) && !TREE_STATIC (exp)))))
6de9cd9a
DN
822 return exp;
823
824 return save_expr (exp);
825}
826
28f4ec01
BS
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  FNDECL_CODE is
   either BUILT_IN_FRAME_ADDRESS or BUILT_IN_RETURN_ADDRESS.  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      /* Load each link of the chain into a register so the next
	 dereference has a simple address.  */
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one pointer-size word past the
     frame address.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
894
/* Alias set used for setjmp buffer.  Lazily allocated; -1 means "not
   yet created" (see expand_builtin_setjmp_setup).  */
static alias_set_type setjmp_alias_set = -1;
3bdf5ad1 897
250d07b6 898/* Construct the leading half of a __builtin_setjmp call. Control will
4f6c2131
EB
899 return to RECEIVER_LABEL. This is also called directly by the SJLJ
900 exception handling code. */
28f4ec01 901
250d07b6 902void
4682ae04 903expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
28f4ec01 904{
ef4bddc2 905 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
28f4ec01 906 rtx stack_save;
3bdf5ad1 907 rtx mem;
28f4ec01 908
3bdf5ad1
RK
909 if (setjmp_alias_set == -1)
910 setjmp_alias_set = new_alias_set ();
911
5ae6cd0d 912 buf_addr = convert_memory_address (Pmode, buf_addr);
28f4ec01 913
7d505b82 914 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
28f4ec01 915
250d07b6
RH
916 /* We store the frame pointer and the address of receiver_label in
917 the buffer and use the rest of it for the stack save area, which
918 is machine-dependent. */
28f4ec01 919
3bdf5ad1 920 mem = gen_rtx_MEM (Pmode, buf_addr);
ba4828e0 921 set_mem_alias_set (mem, setjmp_alias_set);
25403c41 922 emit_move_insn (mem, hard_frame_pointer_rtx);
3bdf5ad1 923
0a81f074
RS
924 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
925 GET_MODE_SIZE (Pmode))),
ba4828e0 926 set_mem_alias_set (mem, setjmp_alias_set);
3bdf5ad1
RK
927
928 emit_move_insn (validize_mem (mem),
250d07b6 929 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
28f4ec01
BS
930
931 stack_save = gen_rtx_MEM (sa_mode,
0a81f074 932 plus_constant (Pmode, buf_addr,
28f4ec01 933 2 * GET_MODE_SIZE (Pmode)));
ba4828e0 934 set_mem_alias_set (stack_save, setjmp_alias_set);
9eac0f2a 935 emit_stack_save (SAVE_NONLOCAL, &stack_save);
28f4ec01
BS
936
937 /* If there is further processing to do, do it. */
95a3fb9d
RS
938 if (targetm.have_builtin_setjmp_setup ())
939 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
28f4ec01 940
ecaebb9e 941 /* We have a nonlocal label. */
e3b5732b 942 cfun->has_nonlocal_label = 1;
250d07b6 943}
28f4ec01 944
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  /* Prefer the target's dedicated receiver pattern; fall back to its
     nonlocal-goto receiver, or emit nothing at all.  */
  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
28f4ec01 1000
28f4ec01
BS
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      /* Buffer layout (see expand_builtin_setjmp_setup): word 0 is the
	 saved frame pointer, word 1 the receiver label, the rest the
	 stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.cc; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  lab = copy_to_reg (lab);

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  /* Ensure the frame pointer move is not optimized.  */
	  emit_insn (gen_blockage ());
	  emit_clobber (hard_frame_pointer_rtx);
	  emit_clobber (frame_pointer_rtx);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1093
862d0b35
DN
1094static inline bool
1095more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1096{
1097 return (iter->i < iter->n);
1098}
1099
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.

   Returns true when the actual arguments match the specifier list (and,
   for POINTER_TYPE positions covered by attribute nonnull, are not
   literal null pointers), false otherwise.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      /* tree_code is promoted to int when passed through `...'.  */
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
1169
6de9cd9a
DN
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  Returns const0_rtx on success, or
   NULL_RTX if the argument list does not match (pointer, pointer).  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 holds the frame pointer, the following
     words hold the saved stack pointer.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      r_label = copy_to_reg (r_label);

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* Ensure the frame pointer move is not optimized.  */
      emit_insn (gen_blockage ());
      emit_clobber (hard_frame_pointer_rtx);
      emit_clobber (frame_pointer_rtx);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1257
2b92e7f5
RK
/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  /* The stack save area starts two pointer-sized words into the buffer
     (after the saved frame pointer and receiver label).  */
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}
1277
a9ccbb60
JJ
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  Invalid values are diagnosed
     but degraded to zero rather than aborting expansion.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      class expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1355
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp, base;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the (possibly unresolved) original expression for the
     address; EXP itself is only used to derive memory attributes.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (base = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						      0))))
    {
      unsigned int align = get_pointer_alignment (TREE_OPERAND (exp, 0));
      exp = build_fold_addr_expr (base);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
      /* Since we stripped parts make sure the offset is unknown and the
	 alignment is computed from the original address.  */
      clear_mem_offset (mem);
      set_mem_align (mem, align);
    }
  /* Alias set 0: the access may alias anything.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
1415\f
/* Built-in functions to perform an untyped call and return.  */

/* Shorthands for the per-target arrays recording, for each hard
   register, the mode in which it is saved/restored by the
   __builtin_apply_args / __builtin_apply machinery (VOIDmode when the
   register does not participate).  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
28f4ec01 1422
28f4ec01
BS
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  /* Cached result; computed once and reused (-1 means "not yet").  */
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round the running size up to this register's alignment
	       before adding its slot.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
1464
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  /* Cached result; computed once and reused (-1 means "not yet").  */
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round the running size up to this register's alignment
	       before adding its slot.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1503
28f4ec01
BS
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per result
   register, mirroring the layout computed by apply_result_size.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep the offsets in lock-step with apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
28f4ec01
BS
1532
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Returns a register
   holding the address of the save block.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The offsets here must mirror apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    emit_move_insn (adjust_address (registers, Pmode, size),
		    copy_to_reg (struct_incoming_value));

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1590
1591/* __builtin_apply_args returns block of memory allocated on
1592 the stack into which is stored the arg pointer, structure
1593 value address, static chain, and all the registers that might
1594 possibly be used in performing a function call. The code is
1595 moved to the start of the function so the incoming values are
1596 saved. */
5197bd50 1597
28f4ec01 1598static rtx
4682ae04 1599expand_builtin_apply_args (void)
28f4ec01
BS
1600{
1601 /* Don't do __builtin_apply_args more than once in a function.
1602 Save the result of the first call and reuse it. */
1603 if (apply_args_value != 0)
1604 return apply_args_value;
1605 {
1606 /* When this function is called, it means that registers must be
1607 saved on entry to this function. So we migrate the
1608 call to the first insn of this function. */
1609 rtx temp;
28f4ec01
BS
1610
1611 start_sequence ();
1612 temp = expand_builtin_apply_args_1 ();
e67d1102 1613 rtx_insn *seq = get_insns ();
28f4ec01
BS
1614 end_sequence ();
1615
1616 apply_args_value = temp;
1617
2f937369
DM
1618 /* Put the insns after the NOTE that starts the function.
1619 If this is inside a start_sequence, make the outer-level insn
28f4ec01 1620 chain current, so the code is placed at the start of the
1f21b6f4
JJ
1621 function. If internal_arg_pointer is a non-virtual pseudo,
1622 it needs to be placed after the function that initializes
1623 that pseudo. */
28f4ec01 1624 push_topmost_sequence ();
1f21b6f4
JJ
1625 if (REG_P (crtl->args.internal_arg_pointer)
1626 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1627 emit_insn_before (seq, parm_birth_insn);
1628 else
1629 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
28f4ec01
BS
1630 pop_topmost_sequence ();
1631 return temp;
1632 }
1633}
1634
1635/* Perform an untyped call and save the state required to perform an
1636 untyped return of whatever value was returned by the given function. */
1637
1638static rtx
4682ae04 1639expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
28f4ec01
BS
1640{
1641 int size, align, regno;
b660eccf 1642 fixed_size_mode mode;
58f4cf2a
DM
1643 rtx incoming_args, result, reg, dest, src;
1644 rtx_call_insn *call_insn;
28f4ec01
BS
1645 rtx old_stack_level = 0;
1646 rtx call_fusage = 0;
92f6864c 1647 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
28f4ec01 1648
5ae6cd0d 1649 arguments = convert_memory_address (Pmode, arguments);
ce2d32cd 1650
28f4ec01
BS
1651 /* Create a block where the return registers can be saved. */
1652 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1653
28f4ec01
BS
1654 /* Fetch the arg pointer from the ARGUMENTS block. */
1655 incoming_args = gen_reg_rtx (Pmode);
ce2d32cd 1656 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
581edfa3
TS
1657 if (!STACK_GROWS_DOWNWARD)
1658 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1659 incoming_args, 0, OPTAB_LIB_WIDEN);
28f4ec01 1660
9d53e585
JM
1661 /* Push a new argument block and copy the arguments. Do not allow
1662 the (potential) memcpy call below to interfere with our stack
1663 manipulations. */
28f4ec01 1664 do_pending_stack_adjust ();
9d53e585 1665 NO_DEFER_POP;
28f4ec01 1666
f9da5064 1667 /* Save the stack with nonlocal if available. */
4476e1a0 1668 if (targetm.have_save_stack_nonlocal ())
9eac0f2a 1669 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
28f4ec01 1670 else
9eac0f2a 1671 emit_stack_save (SAVE_BLOCK, &old_stack_level);
28f4ec01 1672
316d0b19 1673 /* Allocate a block of memory onto the stack and copy the memory
d3c12306
EB
1674 arguments to the outgoing arguments address. We can pass TRUE
1675 as the 4th argument because we just saved the stack pointer
1676 and will restore it right after the call. */
9e878cf1 1677 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2e3f842f
L
1678
1679 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1680 may have already set current_function_calls_alloca to true.
1681 current_function_calls_alloca won't be set if argsize is zero,
1682 so we have to guarantee need_drap is true here. */
1683 if (SUPPORTS_STACK_ALIGNMENT)
1684 crtl->need_drap = true;
1685
316d0b19 1686 dest = virtual_outgoing_args_rtx;
581edfa3
TS
1687 if (!STACK_GROWS_DOWNWARD)
1688 {
1689 if (CONST_INT_P (argsize))
1690 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1691 else
1692 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1693 }
8ac61af7
RK
1694 dest = gen_rtx_MEM (BLKmode, dest);
1695 set_mem_align (dest, PARM_BOUNDARY);
1696 src = gen_rtx_MEM (BLKmode, incoming_args);
1697 set_mem_align (src, PARM_BOUNDARY);
44bb111a 1698 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
28f4ec01
BS
1699
1700 /* Refer to the argument block. */
1701 apply_args_size ();
1702 arguments = gen_rtx_MEM (BLKmode, arguments);
8ac61af7 1703 set_mem_align (arguments, PARM_BOUNDARY);
28f4ec01
BS
1704
1705 /* Walk past the arg-pointer and structure value address. */
1706 size = GET_MODE_SIZE (Pmode);
61f71b34 1707 if (struct_value)
28f4ec01
BS
1708 size += GET_MODE_SIZE (Pmode);
1709
1710 /* Restore each of the registers previously saved. Make USE insns
1711 for each of these registers for use in making the call. */
1712 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1713 if ((mode = apply_args_mode[regno]) != VOIDmode)
1714 {
1715 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1716 if (size % align != 0)
1717 size = CEIL (size, align) * align;
1718 reg = gen_rtx_REG (mode, regno);
f4ef873c 1719 emit_move_insn (reg, adjust_address (arguments, mode, size));
28f4ec01
BS
1720 use_reg (&call_fusage, reg);
1721 size += GET_MODE_SIZE (mode);
1722 }
1723
1724 /* Restore the structure value address unless this is passed as an
1725 "invisible" first argument. */
1726 size = GET_MODE_SIZE (Pmode);
61f71b34 1727 if (struct_value)
28f4ec01
BS
1728 {
1729 rtx value = gen_reg_rtx (Pmode);
f4ef873c 1730 emit_move_insn (value, adjust_address (arguments, Pmode, size));
61f71b34 1731 emit_move_insn (struct_value, value);
f8cfc6aa 1732 if (REG_P (struct_value))
61f71b34 1733 use_reg (&call_fusage, struct_value);
28f4ec01
BS
1734 }
1735
1736 /* All arguments and registers used for the call are set up by now! */
531ca746 1737 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
28f4ec01
BS
1738
1739 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1740 and we don't want to load it into a register as an optimization,
1741 because prepare_call_address already did it if it should be done. */
1742 if (GET_CODE (function) != SYMBOL_REF)
1743 function = memory_address (FUNCTION_MODE, function);
1744
1745 /* Generate the actual call instruction and save the return value. */
43c7dca8
RS
1746 if (targetm.have_untyped_call ())
1747 {
1748 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
49e65199
RS
1749 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1750 result_vector (1, result));
1751 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1752 if (CALL_P (insn))
1753 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1754 emit_insn (seq);
43c7dca8 1755 }
58d745ec 1756 else if (targetm.have_call_value ())
28f4ec01
BS
1757 {
1758 rtx valreg = 0;
1759
1760 /* Locate the unique return register. It is not possible to
1761 express a call that sets more than one return register using
1762 call_value; use untyped_call for that. In fact, untyped_call
1763 only needs to save the return registers in the given block. */
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1766 {
58d745ec 1767 gcc_assert (!valreg); /* have_untyped_call required. */
5906d013 1768
28f4ec01
BS
1769 valreg = gen_rtx_REG (mode, regno);
1770 }
1771
58d745ec
RS
1772 emit_insn (targetm.gen_call_value (valreg,
1773 gen_rtx_MEM (FUNCTION_MODE, function),
1774 const0_rtx, NULL_RTX, const0_rtx));
28f4ec01 1775
f4ef873c 1776 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
28f4ec01
BS
1777 }
1778 else
298e6adc 1779 gcc_unreachable ();
28f4ec01 1780
ee960939
OH
1781 /* Find the CALL insn we just emitted, and attach the register usage
1782 information. */
1783 call_insn = last_call_insn ();
1784 add_function_usage_to (call_insn, call_fusage);
28f4ec01
BS
1785
1786 /* Restore the stack. */
4476e1a0 1787 if (targetm.have_save_stack_nonlocal ())
9eac0f2a 1788 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
28f4ec01 1789 else
9eac0f2a 1790 emit_stack_restore (SAVE_BLOCK, old_stack_level);
c3284718 1791 fixup_args_size_notes (call_insn, get_last_insn (), 0);
28f4ec01 1792
9d53e585
JM
1793 OK_DEFER_POP;
1794
28f4ec01 1795 /* Return the address of the result block. */
5ae6cd0d
MM
1796 result = copy_addr_to_reg (XEXP (result, 0));
1797 return convert_memory_address (ptr_mode, result);
28f4ec01
BS
1798}
1799
1800/* Perform an untyped return. */
1801
1802static void
4682ae04 1803expand_builtin_return (rtx result)
28f4ec01
BS
1804{
1805 int size, align, regno;
b660eccf 1806 fixed_size_mode mode;
28f4ec01 1807 rtx reg;
fee3e72c 1808 rtx_insn *call_fusage = 0;
28f4ec01 1809
5ae6cd0d 1810 result = convert_memory_address (Pmode, result);
ce2d32cd 1811
28f4ec01
BS
1812 apply_result_size ();
1813 result = gen_rtx_MEM (BLKmode, result);
1814
43c7dca8 1815 if (targetm.have_untyped_return ())
28f4ec01 1816 {
43c7dca8
RS
1817 rtx vector = result_vector (0, result);
1818 emit_jump_insn (targetm.gen_untyped_return (result, vector));
28f4ec01
BS
1819 emit_barrier ();
1820 return;
1821 }
28f4ec01
BS
1822
1823 /* Restore the return value and note that each value is used. */
1824 size = 0;
1825 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1826 if ((mode = apply_result_mode[regno]) != VOIDmode)
1827 {
1828 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1829 if (size % align != 0)
1830 size = CEIL (size, align) * align;
1831 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
f4ef873c 1832 emit_move_insn (reg, adjust_address (result, mode, size));
28f4ec01
BS
1833
1834 push_to_sequence (call_fusage);
c41c1387 1835 emit_use (reg);
28f4ec01
BS
1836 call_fusage = get_insns ();
1837 end_sequence ();
1838 size += GET_MODE_SIZE (mode);
1839 }
1840
1841 /* Put the USE insns before the return. */
2f937369 1842 emit_insn (call_fusage);
28f4ec01
BS
1843
1844 /* Return whatever values was restored by jumping directly to the end
1845 of the function. */
6e3077c6 1846 expand_naked_return ();
28f4ec01
BS
1847}
1848
ad82abb8 1849/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
5197bd50 1850
ad82abb8 1851static enum type_class
4682ae04 1852type_to_class (tree type)
ad82abb8
ZW
1853{
1854 switch (TREE_CODE (type))
1855 {
1856 case VOID_TYPE: return void_type_class;
1857 case INTEGER_TYPE: return integer_type_class;
ad82abb8
ZW
1858 case ENUMERAL_TYPE: return enumeral_type_class;
1859 case BOOLEAN_TYPE: return boolean_type_class;
1860 case POINTER_TYPE: return pointer_type_class;
1861 case REFERENCE_TYPE: return reference_type_class;
1862 case OFFSET_TYPE: return offset_type_class;
1863 case REAL_TYPE: return real_type_class;
1864 case COMPLEX_TYPE: return complex_type_class;
1865 case FUNCTION_TYPE: return function_type_class;
1866 case METHOD_TYPE: return method_type_class;
1867 case RECORD_TYPE: return record_type_class;
1868 case UNION_TYPE:
1869 case QUAL_UNION_TYPE: return union_type_class;
1870 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1871 ? string_type_class : array_type_class);
ad82abb8 1872 case LANG_TYPE: return lang_type_class;
1e2d8575 1873 case OPAQUE_TYPE: return opaque_type_class;
ad82abb8
ZW
1874 default: return no_type_class;
1875 }
1876}
8d51ecf8 1877
5039610b 1878/* Expand a call EXP to __builtin_classify_type. */
5197bd50 1879
28f4ec01 1880static rtx
5039610b 1881expand_builtin_classify_type (tree exp)
28f4ec01 1882{
5039610b
SL
1883 if (call_expr_nargs (exp))
1884 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
28f4ec01
BS
1885 return GEN_INT (no_type_class);
1886}
1887
ee5fd23a
MM
1888/* This helper macro, meant to be used in mathfn_built_in below, determines
1889 which among a set of builtin math functions is appropriate for a given type
1890 mode. The `F' (float) and `L' (long double) are automatically generated
1891 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1892 types, there are additional types that are considered with 'F32', 'F64',
1893 'F128', etc. suffixes. */
b03ff92e
RS
1894#define CASE_MATHFN(MATHFN) \
1895 CASE_CFN_##MATHFN: \
1896 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1897 fcodel = BUILT_IN_##MATHFN##L ; break;
ee5fd23a
MM
1898/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1899 types. */
1900#define CASE_MATHFN_FLOATN(MATHFN) \
1901 CASE_CFN_##MATHFN: \
1902 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1903 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1904 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1905 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1906 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1907 break;
bf460eec 1908/* Similar to above, but appends _R after any F/L suffix. */
b03ff92e
RS
1909#define CASE_MATHFN_REENT(MATHFN) \
1910 case CFN_BUILT_IN_##MATHFN##_R: \
1911 case CFN_BUILT_IN_##MATHFN##F_R: \
1912 case CFN_BUILT_IN_##MATHFN##L_R: \
1913 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1914 fcodel = BUILT_IN_##MATHFN##L_R ; break;
daa027cc 1915
5c1a2e63
RS
1916/* Return a function equivalent to FN but operating on floating-point
1917 values of type TYPE, or END_BUILTINS if no such function exists.
b03ff92e
RS
1918 This is purely an operation on function codes; it does not guarantee
1919 that the target actually has an implementation of the function. */
05f41289 1920
5c1a2e63 1921static built_in_function
b03ff92e 1922mathfn_built_in_2 (tree type, combined_fn fn)
272f51a3 1923{
ee5fd23a 1924 tree mtype;
5c1a2e63 1925 built_in_function fcode, fcodef, fcodel;
ee5fd23a
MM
1926 built_in_function fcodef16 = END_BUILTINS;
1927 built_in_function fcodef32 = END_BUILTINS;
1928 built_in_function fcodef64 = END_BUILTINS;
1929 built_in_function fcodef128 = END_BUILTINS;
1930 built_in_function fcodef32x = END_BUILTINS;
1931 built_in_function fcodef64x = END_BUILTINS;
1932 built_in_function fcodef128x = END_BUILTINS;
daa027cc
KG
1933
1934 switch (fn)
1935 {
a500588a
AO
1936#define SEQ_OF_CASE_MATHFN \
1937 CASE_MATHFN (ACOS) \
1938 CASE_MATHFN (ACOSH) \
1939 CASE_MATHFN (ASIN) \
1940 CASE_MATHFN (ASINH) \
1941 CASE_MATHFN (ATAN) \
1942 CASE_MATHFN (ATAN2) \
1943 CASE_MATHFN (ATANH) \
1944 CASE_MATHFN (CBRT) \
1945 CASE_MATHFN_FLOATN (CEIL) \
1946 CASE_MATHFN (CEXPI) \
1947 CASE_MATHFN_FLOATN (COPYSIGN) \
1948 CASE_MATHFN (COS) \
1949 CASE_MATHFN (COSH) \
1950 CASE_MATHFN (DREM) \
1951 CASE_MATHFN (ERF) \
1952 CASE_MATHFN (ERFC) \
1953 CASE_MATHFN (EXP) \
1954 CASE_MATHFN (EXP10) \
1955 CASE_MATHFN (EXP2) \
1956 CASE_MATHFN (EXPM1) \
1957 CASE_MATHFN (FABS) \
1958 CASE_MATHFN (FDIM) \
1959 CASE_MATHFN_FLOATN (FLOOR) \
1960 CASE_MATHFN_FLOATN (FMA) \
1961 CASE_MATHFN_FLOATN (FMAX) \
1962 CASE_MATHFN_FLOATN (FMIN) \
1963 CASE_MATHFN (FMOD) \
1964 CASE_MATHFN (FREXP) \
1965 CASE_MATHFN (GAMMA) \
1966 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
1967 CASE_MATHFN (HUGE_VAL) \
1968 CASE_MATHFN (HYPOT) \
1969 CASE_MATHFN (ILOGB) \
1970 CASE_MATHFN (ICEIL) \
1971 CASE_MATHFN (IFLOOR) \
1972 CASE_MATHFN (INF) \
1973 CASE_MATHFN (IRINT) \
1974 CASE_MATHFN (IROUND) \
1975 CASE_MATHFN (ISINF) \
1976 CASE_MATHFN (J0) \
1977 CASE_MATHFN (J1) \
1978 CASE_MATHFN (JN) \
1979 CASE_MATHFN (LCEIL) \
1980 CASE_MATHFN (LDEXP) \
1981 CASE_MATHFN (LFLOOR) \
1982 CASE_MATHFN (LGAMMA) \
1983 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
1984 CASE_MATHFN (LLCEIL) \
1985 CASE_MATHFN (LLFLOOR) \
1986 CASE_MATHFN (LLRINT) \
1987 CASE_MATHFN (LLROUND) \
1988 CASE_MATHFN (LOG) \
1989 CASE_MATHFN (LOG10) \
1990 CASE_MATHFN (LOG1P) \
1991 CASE_MATHFN (LOG2) \
1992 CASE_MATHFN (LOGB) \
1993 CASE_MATHFN (LRINT) \
1994 CASE_MATHFN (LROUND) \
1995 CASE_MATHFN (MODF) \
1996 CASE_MATHFN (NAN) \
1997 CASE_MATHFN (NANS) \
1998 CASE_MATHFN_FLOATN (NEARBYINT) \
1999 CASE_MATHFN (NEXTAFTER) \
2000 CASE_MATHFN (NEXTTOWARD) \
2001 CASE_MATHFN (POW) \
2002 CASE_MATHFN (POWI) \
2003 CASE_MATHFN (POW10) \
2004 CASE_MATHFN (REMAINDER) \
2005 CASE_MATHFN (REMQUO) \
2006 CASE_MATHFN_FLOATN (RINT) \
2007 CASE_MATHFN_FLOATN (ROUND) \
2008 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2009 CASE_MATHFN (SCALB) \
2010 CASE_MATHFN (SCALBLN) \
2011 CASE_MATHFN (SCALBN) \
2012 CASE_MATHFN (SIGNBIT) \
2013 CASE_MATHFN (SIGNIFICAND) \
2014 CASE_MATHFN (SIN) \
2015 CASE_MATHFN (SINCOS) \
2016 CASE_MATHFN (SINH) \
2017 CASE_MATHFN_FLOATN (SQRT) \
2018 CASE_MATHFN (TAN) \
2019 CASE_MATHFN (TANH) \
2020 CASE_MATHFN (TGAMMA) \
2021 CASE_MATHFN_FLOATN (TRUNC) \
2022 CASE_MATHFN (Y0) \
2023 CASE_MATHFN (Y1) \
b03ff92e 2024 CASE_MATHFN (YN)
daa027cc 2025
a500588a
AO
2026 SEQ_OF_CASE_MATHFN
2027
b03ff92e
RS
2028 default:
2029 return END_BUILTINS;
2030 }
daa027cc 2031
ee5fd23a
MM
2032 mtype = TYPE_MAIN_VARIANT (type);
2033 if (mtype == double_type_node)
5c1a2e63 2034 return fcode;
ee5fd23a 2035 else if (mtype == float_type_node)
5c1a2e63 2036 return fcodef;
ee5fd23a 2037 else if (mtype == long_double_type_node)
5c1a2e63 2038 return fcodel;
ee5fd23a
MM
2039 else if (mtype == float16_type_node)
2040 return fcodef16;
2041 else if (mtype == float32_type_node)
2042 return fcodef32;
2043 else if (mtype == float64_type_node)
2044 return fcodef64;
2045 else if (mtype == float128_type_node)
2046 return fcodef128;
2047 else if (mtype == float32x_type_node)
2048 return fcodef32x;
2049 else if (mtype == float64x_type_node)
2050 return fcodef64x;
2051 else if (mtype == float128x_type_node)
2052 return fcodef128x;
daa027cc 2053 else
5c1a2e63
RS
2054 return END_BUILTINS;
2055}
2056
a500588a
AO
2057#undef CASE_MATHFN
2058#undef CASE_MATHFN_FLOATN
2059#undef CASE_MATHFN_REENT
2060
5c1a2e63
RS
2061/* Return mathematic function equivalent to FN but operating directly on TYPE,
2062 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2063 otherwise use the explicit declaration. If we can't do the conversion,
2064 return null. */
2065
2066static tree
b03ff92e 2067mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
5c1a2e63
RS
2068{
2069 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2070 if (fcode2 == END_BUILTINS)
5039610b 2071 return NULL_TREE;
e79983f4
MM
2072
2073 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2074 return NULL_TREE;
2075
2076 return builtin_decl_explicit (fcode2);
272f51a3
JH
2077}
2078
b03ff92e 2079/* Like mathfn_built_in_1, but always use the implicit array. */
05f41289
KG
2080
2081tree
b03ff92e 2082mathfn_built_in (tree type, combined_fn fn)
05f41289
KG
2083{
2084 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2085}
2086
b03ff92e
RS
2087/* Like mathfn_built_in_1, but take a built_in_function and
2088 always use the implicit array. */
2089
2090tree
2091mathfn_built_in (tree type, enum built_in_function fn)
2092{
2093 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2094}
2095
a500588a
AO
2096/* Return the type associated with a built in function, i.e., the one
2097 to be passed to mathfn_built_in to get the type-specific
2098 function. */
2099
2100tree
2101mathfn_built_in_type (combined_fn fn)
2102{
2103#define CASE_MATHFN(MATHFN) \
455c3d2e 2104 case CFN_BUILT_IN_##MATHFN: \
a500588a 2105 return double_type_node; \
455c3d2e 2106 case CFN_BUILT_IN_##MATHFN##F: \
a500588a 2107 return float_type_node; \
455c3d2e 2108 case CFN_BUILT_IN_##MATHFN##L: \
a500588a
AO
2109 return long_double_type_node;
2110
2111#define CASE_MATHFN_FLOATN(MATHFN) \
2112 CASE_MATHFN(MATHFN) \
455c3d2e 2113 case CFN_BUILT_IN_##MATHFN##F16: \
a500588a 2114 return float16_type_node; \
455c3d2e 2115 case CFN_BUILT_IN_##MATHFN##F32: \
a500588a 2116 return float32_type_node; \
455c3d2e 2117 case CFN_BUILT_IN_##MATHFN##F64: \
a500588a 2118 return float64_type_node; \
455c3d2e 2119 case CFN_BUILT_IN_##MATHFN##F128: \
a500588a 2120 return float128_type_node; \
455c3d2e 2121 case CFN_BUILT_IN_##MATHFN##F32X: \
a500588a 2122 return float32x_type_node; \
455c3d2e 2123 case CFN_BUILT_IN_##MATHFN##F64X: \
a500588a 2124 return float64x_type_node; \
455c3d2e 2125 case CFN_BUILT_IN_##MATHFN##F128X: \
a500588a
AO
2126 return float128x_type_node;
2127
2128/* Similar to above, but appends _R after any F/L suffix. */
2129#define CASE_MATHFN_REENT(MATHFN) \
455c3d2e 2130 case CFN_BUILT_IN_##MATHFN##_R: \
a500588a 2131 return double_type_node; \
455c3d2e 2132 case CFN_BUILT_IN_##MATHFN##F_R: \
a500588a 2133 return float_type_node; \
455c3d2e 2134 case CFN_BUILT_IN_##MATHFN##L_R: \
a500588a
AO
2135 return long_double_type_node;
2136
2137 switch (fn)
2138 {
2139 SEQ_OF_CASE_MATHFN
2140
2141 default:
2142 return NULL_TREE;
2143 }
2144
2145#undef CASE_MATHFN
2146#undef CASE_MATHFN_FLOATN
2147#undef CASE_MATHFN_REENT
2148#undef SEQ_OF_CASE_MATHFN
2149}
2150
30213ae9
RS
2151/* Check whether there is an internal function associated with function FN
2152 and return type RETURN_TYPE. Return the function if so, otherwise return
2153 IFN_LAST.
686ee971 2154
30213ae9
RS
2155 Note that this function only tests whether the function is defined in
2156 internals.def, not whether it is actually available on the target. */
2157
2158static internal_fn
2159associated_internal_fn (built_in_function fn, tree return_type)
686ee971 2160{
30213ae9 2161 switch (fn)
686ee971
RS
2162 {
2163#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2164 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
ee5fd23a
MM
2165#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2166 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2167 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
4959a752
RS
2168#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2169 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
686ee971
RS
2170#include "internal-fn.def"
2171
2172 CASE_FLT_FN (BUILT_IN_POW10):
2173 return IFN_EXP10;
2174
2175 CASE_FLT_FN (BUILT_IN_DREM):
2176 return IFN_REMAINDER;
2177
2178 CASE_FLT_FN (BUILT_IN_SCALBN):
2179 CASE_FLT_FN (BUILT_IN_SCALBLN):
2180 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2181 return IFN_LDEXP;
2182 return IFN_LAST;
2183
2184 default:
2185 return IFN_LAST;
2186 }
2187}
2188
30213ae9
RS
2189/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2190 return its code, otherwise return IFN_LAST. Note that this function
2191 only tests whether the function is defined in internals.def, not whether
2192 it is actually available on the target. */
2193
2194internal_fn
2195associated_internal_fn (tree fndecl)
2196{
2197 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2198 return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2199 TREE_TYPE (TREE_TYPE (fndecl)));
2200}
2201
2202/* Check whether there is an internal function associated with function CFN
2203 and return type RETURN_TYPE. Return the function if so, otherwise return
2204 IFN_LAST.
2205
2206 Note that this function only tests whether the function is defined in
2207 internals.def, not whether it is actually available on the target. */
2208
2209internal_fn
2210associated_internal_fn (combined_fn cfn, tree return_type)
2211{
2212 if (internal_fn_p (cfn))
2213 return as_internal_fn (cfn);
2214 return associated_internal_fn (as_builtin_fn (cfn), return_type);
2215}
2216
686ee971
RS
2217/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2218 on the current target by a call to an internal function, return the
2219 code of that internal function, otherwise return IFN_LAST. The caller
2220 is responsible for ensuring that any side-effects of the built-in
2221 call are dealt with correctly. E.g. if CALL sets errno, the caller
2222 must decide that the errno result isn't needed or make it available
2223 in some other way. */
2224
2225internal_fn
2226replacement_internal_fn (gcall *call)
2227{
2228 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2229 {
2230 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2231 if (ifn != IFN_LAST)
2232 {
2233 tree_pair types = direct_internal_fn_types (ifn, call);
d95ab70a
RS
2234 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2235 if (direct_internal_fn_supported_p (ifn, types, opt_type))
686ee971
RS
2236 return ifn;
2237 }
2238 }
2239 return IFN_LAST;
2240}
2241
1b1562a5
MM
2242/* Expand a call to the builtin trinary math functions (fma).
2243 Return NULL_RTX if a normal call should be emitted rather than expanding the
2244 function in-line. EXP is the expression that is a call to the builtin
2245 function; if convenient, the result should be placed in TARGET.
2246 SUBTARGET may be used as the target for computing one of EXP's
2247 operands. */
2248
2249static rtx
2250expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2251{
2252 optab builtin_optab;
58f4cf2a
DM
2253 rtx op0, op1, op2, result;
2254 rtx_insn *insns;
1b1562a5
MM
2255 tree fndecl = get_callee_fndecl (exp);
2256 tree arg0, arg1, arg2;
ef4bddc2 2257 machine_mode mode;
1b1562a5
MM
2258
2259 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2260 return NULL_RTX;
2261
2262 arg0 = CALL_EXPR_ARG (exp, 0);
2263 arg1 = CALL_EXPR_ARG (exp, 1);
2264 arg2 = CALL_EXPR_ARG (exp, 2);
2265
2266 switch (DECL_FUNCTION_CODE (fndecl))
2267 {
2268 CASE_FLT_FN (BUILT_IN_FMA):
ee5fd23a 2269 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
1b1562a5
MM
2270 builtin_optab = fma_optab; break;
2271 default:
2272 gcc_unreachable ();
2273 }
2274
2275 /* Make a suitable register to place result in. */
2276 mode = TYPE_MODE (TREE_TYPE (exp));
2277
2278 /* Before working hard, check whether the instruction is available. */
2279 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2280 return NULL_RTX;
2281
04b80dbb 2282 result = gen_reg_rtx (mode);
1b1562a5
MM
2283
2284 /* Always stabilize the argument list. */
2285 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2286 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2287 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2288
2289 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2290 op1 = expand_normal (arg1);
2291 op2 = expand_normal (arg2);
2292
2293 start_sequence ();
2294
04b80dbb
RS
2295 /* Compute into RESULT.
2296 Set RESULT to wherever the result comes back. */
2297 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2298 result, 0);
1b1562a5
MM
2299
2300 /* If we were unable to expand via the builtin, stop the sequence
2301 (without outputting the insns) and call to the library function
2302 with the stabilized argument list. */
04b80dbb 2303 if (result == 0)
1b1562a5
MM
2304 {
2305 end_sequence ();
2306 return expand_call (exp, target, target == const0_rtx);
2307 }
2308
2309 /* Output the entire sequence. */
2310 insns = get_insns ();
2311 end_sequence ();
2312 emit_insn (insns);
2313
04b80dbb 2314 return result;
1b1562a5
MM
2315}
2316
6c7cf1f0 2317/* Expand a call to the builtin sin and cos math functions.
5039610b 2318 Return NULL_RTX if a normal call should be emitted rather than expanding the
6c7cf1f0
UB
2319 function in-line. EXP is the expression that is a call to the builtin
2320 function; if convenient, the result should be placed in TARGET.
2321 SUBTARGET may be used as the target for computing one of EXP's
2322 operands. */
2323
2324static rtx
2325expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2326{
2327 optab builtin_optab;
58f4cf2a
DM
2328 rtx op0;
2329 rtx_insn *insns;
6c7cf1f0 2330 tree fndecl = get_callee_fndecl (exp);
ef4bddc2 2331 machine_mode mode;
5799f732 2332 tree arg;
6c7cf1f0 2333
5039610b
SL
2334 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2335 return NULL_RTX;
6c7cf1f0 2336
5039610b 2337 arg = CALL_EXPR_ARG (exp, 0);
6c7cf1f0
UB
2338
2339 switch (DECL_FUNCTION_CODE (fndecl))
2340 {
ea6a6627
VR
2341 CASE_FLT_FN (BUILT_IN_SIN):
2342 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
2343 builtin_optab = sincos_optab; break;
2344 default:
298e6adc 2345 gcc_unreachable ();
6c7cf1f0
UB
2346 }
2347
2348 /* Make a suitable register to place result in. */
2349 mode = TYPE_MODE (TREE_TYPE (exp));
2350
6c7cf1f0 2351 /* Check if sincos insn is available, otherwise fallback
9cf737f8 2352 to sin or cos insn. */
947131ba 2353 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6c7cf1f0
UB
2354 switch (DECL_FUNCTION_CODE (fndecl))
2355 {
ea6a6627 2356 CASE_FLT_FN (BUILT_IN_SIN):
6c7cf1f0 2357 builtin_optab = sin_optab; break;
ea6a6627 2358 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
2359 builtin_optab = cos_optab; break;
2360 default:
298e6adc 2361 gcc_unreachable ();
6c7cf1f0 2362 }
6c7cf1f0
UB
2363
2364 /* Before working hard, check whether the instruction is available. */
947131ba 2365 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6c7cf1f0 2366 {
04b80dbb 2367 rtx result = gen_reg_rtx (mode);
6c7cf1f0
UB
2368
2369 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2370 need to expand the argument again. This way, we will not perform
2371 side-effects more the once. */
5799f732 2372 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6c7cf1f0 2373
49452c07 2374 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6c7cf1f0 2375
6c7cf1f0
UB
2376 start_sequence ();
2377
04b80dbb
RS
2378 /* Compute into RESULT.
2379 Set RESULT to wherever the result comes back. */
6c7cf1f0
UB
2380 if (builtin_optab == sincos_optab)
2381 {
04b80dbb 2382 int ok;
5906d013 2383
6c7cf1f0
UB
2384 switch (DECL_FUNCTION_CODE (fndecl))
2385 {
ea6a6627 2386 CASE_FLT_FN (BUILT_IN_SIN):
04b80dbb 2387 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6c7cf1f0 2388 break;
ea6a6627 2389 CASE_FLT_FN (BUILT_IN_COS):
04b80dbb 2390 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6c7cf1f0
UB
2391 break;
2392 default:
298e6adc 2393 gcc_unreachable ();
6c7cf1f0 2394 }
04b80dbb 2395 gcc_assert (ok);
6c7cf1f0
UB
2396 }
2397 else
04b80dbb 2398 result = expand_unop (mode, builtin_optab, op0, result, 0);
6c7cf1f0 2399
04b80dbb 2400 if (result != 0)
6c7cf1f0 2401 {
6c7cf1f0
UB
2402 /* Output the entire sequence. */
2403 insns = get_insns ();
2404 end_sequence ();
2405 emit_insn (insns);
04b80dbb 2406 return result;
6c7cf1f0
UB
2407 }
2408
2409 /* If we were unable to expand via the builtin, stop the sequence
2410 (without outputting the insns) and call to the library function
2411 with the stabilized argument list. */
2412 end_sequence ();
2413 }
2414
04b80dbb 2415 return expand_call (exp, target, target == const0_rtx);
6c7cf1f0
UB
2416}
2417
44e10129
MM
2418/* Given an interclass math builtin decl FNDECL and it's argument ARG
2419 return an RTL instruction code that implements the functionality.
2420 If that isn't possible or available return CODE_FOR_nothing. */
eaee4464 2421
44e10129
MM
2422static enum insn_code
2423interclass_mathfn_icode (tree arg, tree fndecl)
eaee4464 2424{
44e10129 2425 bool errno_set = false;
2225b9f2 2426 optab builtin_optab = unknown_optab;
ef4bddc2 2427 machine_mode mode;
eaee4464
UB
2428
2429 switch (DECL_FUNCTION_CODE (fndecl))
2430 {
2431 CASE_FLT_FN (BUILT_IN_ILOGB):
903c723b
TC
2432 errno_set = true; builtin_optab = ilogb_optab; break;
2433 CASE_FLT_FN (BUILT_IN_ISINF):
2434 builtin_optab = isinf_optab; break;
2435 case BUILT_IN_ISNORMAL:
2436 case BUILT_IN_ISFINITE:
2437 CASE_FLT_FN (BUILT_IN_FINITE):
2438 case BUILT_IN_FINITED32:
2439 case BUILT_IN_FINITED64:
2440 case BUILT_IN_FINITED128:
2441 case BUILT_IN_ISINFD32:
2442 case BUILT_IN_ISINFD64:
2443 case BUILT_IN_ISINFD128:
2444 /* These builtins have no optabs (yet). */
0c8d3c2b 2445 break;
eaee4464
UB
2446 default:
2447 gcc_unreachable ();
2448 }
2449
2450 /* There's no easy way to detect the case we need to set EDOM. */
2451 if (flag_errno_math && errno_set)
44e10129 2452 return CODE_FOR_nothing;
eaee4464
UB
2453
2454 /* Optab mode depends on the mode of the input argument. */
2455 mode = TYPE_MODE (TREE_TYPE (arg));
2456
0c8d3c2b 2457 if (builtin_optab)
947131ba 2458 return optab_handler (builtin_optab, mode);
44e10129
MM
2459 return CODE_FOR_nothing;
2460}
2461
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      class expand_operand ops[1];
      /* Remember the current insn so the partial expansion below can be
	 rolled back if emitting the insn fails.  */
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* The insn could not be emitted: discard the insns generated since
	 LAST and restore the un-wrapped argument so the normal call path
	 sees the original expression.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2512
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build *sinp and *cosp as MEM_REFs through a ref-all pointer type
     (build_pointer_type_for_mode with can_alias_all = true), so the
     stores are valid regardless of the original pointer types.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2566
4343f5e2
RFF
2567/* Expand call EXP to the fegetround builtin (from C99 fenv.h), returning the
2568 result and setting it in TARGET. Otherwise return NULL_RTX on failure. */
2569static rtx
2570expand_builtin_fegetround (tree exp, rtx target, machine_mode target_mode)
2571{
2572 if (!validate_arglist (exp, VOID_TYPE))
2573 return NULL_RTX;
2574
2575 insn_code icode = direct_optab_handler (fegetround_optab, SImode);
2576 if (icode == CODE_FOR_nothing)
2577 return NULL_RTX;
2578
2579 if (target == 0
2580 || GET_MODE (target) != target_mode
2581 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2582 target = gen_reg_rtx (target_mode);
2583
2584 rtx pat = GEN_FCN (icode) (target);
2585 if (!pat)
2586 return NULL_RTX;
2587 emit_insn (pat);
2588
2589 return target;
2590}
2591
2592/* Expand call EXP to either feclearexcept or feraiseexcept builtins (from C99
2593 fenv.h), returning the result and setting it in TARGET. Otherwise return
2594 NULL_RTX on failure. */
2595static rtx
2596expand_builtin_feclear_feraise_except (tree exp, rtx target,
2597 machine_mode target_mode, optab op_optab)
2598{
2599 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
2600 return NULL_RTX;
2601 rtx op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
2602
2603 insn_code icode = direct_optab_handler (op_optab, SImode);
2604 if (icode == CODE_FOR_nothing)
2605 return NULL_RTX;
2606
8bcf835e
RFF
2607 if (!(*insn_data[icode].operand[1].predicate) (op0, GET_MODE (op0)))
2608 return NULL_RTX;
2609
4343f5e2
RFF
2610 if (target == 0
2611 || GET_MODE (target) != target_mode
2612 || !(*insn_data[icode].operand[0].predicate) (target, target_mode))
2613 target = gen_reg_rtx (target_mode);
2614
2615 rtx pat = GEN_FCN (icode) (target, op0);
2616 if (!pat)
2617 return NULL_RTX;
2618 emit_insn (pat);
2619
2620 return target;
2621}
2622
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.

   Three strategies are tried in order:
     1. the target's sincos optab,
     2. a libcall to sincos{,f,l} when libc provides it,
     3. a libcall to cexp{,f,l} on a complex argument with zero real part.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos, type))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Pick the sincos variant matching the cexpi variant's type.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Stack temporaries receive the sin and cos results; pass their
	 addresses (as trees) to the sincos call.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) becomes cexp (0 + x*i).  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: a complex whose real part is the
     cos result (op2) and whose imaginary part is the sin result (op1).  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2731
44e10129
MM
2732/* Conveniently construct a function call expression. FNDECL names the
2733 function to be called, N is the number of arguments, and the "..."
2734 parameters are the argument expressions. Unlike build_call_exr
2735 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2736
2737static tree
2738build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2739{
2740 va_list ap;
2741 tree fntype = TREE_TYPE (fndecl);
2742 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2743
2744 va_start (ap, n);
2745 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2746 va_end (ap);
2747 SET_EXPR_LOCATION (fn, loc);
2748 return fn;
2749}
44e10129 2750
/* Expand the __builtin_issignaling builtin.  This needs to handle
   all floating point formats that do support NaNs (for those that
   don't it just sets target to 0).  */

static rtx
expand_builtin_issignaling (tree exp, rtx target)
{
  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg = CALL_EXPR_ARG (exp, 0);
  scalar_float_mode fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  const struct real_format *fmt = REAL_MODE_FORMAT (fmode);

  /* Expand the argument yielding a RTX expression. */
  rtx temp = expand_normal (arg);

  /* If mode doesn't support NaN, always return 0.
     Don't use !HONOR_SNANS (fmode) here, so there is some possibility of
     __builtin_issignaling working without -fsignaling-nans.  Especially
     when -fno-signaling-nans is the default.
     On the other side, MODE_HAS_NANS (fmode) is unnecessary, with
     -ffinite-math-only even __builtin_isnan or __builtin_fpclassify
     fold to 0 or non-NaN/Inf classification.  */
  if (!HONOR_NANS (fmode))
    {
      emit_move_insn (target, const0_rtx);
      return target;
    }

  /* Check if the back end provides an insn that handles issignaling for the
     argument's mode. */
  enum insn_code icode = optab_handler (issignaling_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
	return this_target;
      /* Emitting failed; discard the partial expansion and fall through
	 to the generic bit-twiddling paths below.  */
      delete_insns_since (last);
    }

  if (DECIMAL_FLOAT_MODE_P (fmode))
    {
      scalar_int_mode imode;
      rtx hi;
      switch (fmt->ieee_bits)
	{
	case 32:
	case 64:
	  imode = int_mode_for_mode (fmode).require ();
	  temp = gen_lowpart (imode, temp);
	  break;
	case 128:
	  imode = int_mode_for_size (64, 1).require ();
	  hi = NULL_RTX;
	  /* For decimal128, TImode support isn't always there and even when
	     it is, working on the DImode high part is usually better.  */
	  if (!MEM_P (temp))
	    {
	      if (rtx t = simplify_gen_subreg (imode, temp, fmode,
					       subreg_highpart_offset (imode,
								       fmode)))
		hi = t;
	      else
		{
		  /* Taking the high subreg directly failed; retry via a
		     full-width integer lowpart view.  */
		  scalar_int_mode imode2;
		  if (int_mode_for_mode (fmode).exists (&imode2))
		    {
		      rtx temp2 = gen_lowpart (imode2, temp);
		      poly_uint64 off = subreg_highpart_offset (imode, imode2);
		      if (rtx t = simplify_gen_subreg (imode, temp2,
						       imode2, off))
			hi = t;
		    }
		}
	      if (!hi)
		{
		  /* Last resort: spill to a stack temporary so the high
		     part can be read through memory.  */
		  rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
		  emit_move_insn (mem, temp);
		  temp = mem;
		}
	    }
	  if (!hi)
	    {
	      poly_int64 offset
		= subreg_highpart_offset (imode, GET_MODE (temp));
	      hi = adjust_address (temp, imode, offset);
	    }
	  temp = hi;
	  break;
	default:
	  gcc_unreachable ();
	}
      /* In all of decimal{32,64,128}, there is MSB sign bit and sNaN
	 have 6 bits below it all set.  */
      rtx val
	= GEN_INT (HOST_WIDE_INT_C (0x3f) << (GET_MODE_BITSIZE (imode) - 7));
      temp = expand_binop (imode, and_optab, temp, val,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      /* sNaN iff all six bits below the sign bit are set:
	 (temp & val) == val.  */
      temp = emit_store_flag_force (target, EQ, temp, val, imode, 1, 1);
      return temp;
    }

  /* Only PDP11 has these defined differently but doesn't support NaNs.  */
  gcc_assert (FLOAT_WORDS_BIG_ENDIAN == WORDS_BIG_ENDIAN);
  gcc_assert (fmt->signbit_ro > 0 && fmt->b == 2);
  gcc_assert (MODE_COMPOSITE_P (fmode)
	      || (fmt->pnan == fmt->p
		  && fmt->signbit_ro == fmt->signbit_rw));

  /* Dispatch on the format's precision to pick the right bit pattern
     test for a signaling NaN.  */
  switch (fmt->p)
    {
    case 106:	/* IBM double double  */
      /* For IBM double double, recurse on the most significant double.  */
      gcc_assert (MODE_COMPOSITE_P (fmode));
      temp = convert_modes (DFmode, fmode, temp, 0);
      fmode = DFmode;
      fmt = REAL_MODE_FORMAT (DFmode);
      /* FALLTHRU */
    case 8:	/* bfloat */
    case 11:	/* IEEE half */
    case 24:	/* IEEE single */
    case 53:	/* IEEE double or Intel extended with rounding to double */
      if (fmt->p == 53 && fmt->signbit_ro == 79)
	goto extended;
      {
	scalar_int_mode imode = int_mode_for_mode (fmode).require ();
	temp = gen_lowpart (imode, temp);
	rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2))
			   & ~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
	if (fmt->qnan_msb_set)
	  {
	    rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << fmt->signbit_ro));
	    rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2));
	    /* For non-MIPS/PA IEEE single/double/half or bfloat, expand to:
	       ((temp ^ bit) & mask) > val.  */
	    temp = expand_binop (imode, xor_optab, temp, bit,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, and_optab, temp, mask,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, GTU, temp, val, imode,
					  1, 1);
	  }
	else
	  {
	    /* For MIPS/PA IEEE single/double, expand to:
	       (temp & val) == val.  */
	    temp = expand_binop (imode, and_optab, temp, val,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, EQ, temp, val, imode,
					  1, 1);
	  }
      }
      break;
    case 113:	/* IEEE quad */
      {
	rtx hi = NULL_RTX, lo = NULL_RTX;
	scalar_int_mode imode = int_mode_for_size (64, 1).require ();
	/* For IEEE quad, TImode support isn't always there and even when
	   it is, working on DImode parts is usually better.  */
	if (!MEM_P (temp))
	  {
	    hi = simplify_gen_subreg (imode, temp, fmode,
				      subreg_highpart_offset (imode, fmode));
	    lo = simplify_gen_subreg (imode, temp, fmode,
				      subreg_lowpart_offset (imode, fmode));
	    if (!hi || !lo)
	      {
		/* Retry via a full-width integer lowpart view.  */
		scalar_int_mode imode2;
		if (int_mode_for_mode (fmode).exists (&imode2))
		  {
		    rtx temp2 = gen_lowpart (imode2, temp);
		    hi = simplify_gen_subreg (imode, temp2, imode2,
					      subreg_highpart_offset (imode,
								      imode2));
		    lo = simplify_gen_subreg (imode, temp2, imode2,
					      subreg_lowpart_offset (imode,
								     imode2));
		  }
	      }
	    if (!hi || !lo)
	      {
		/* Last resort: spill to a stack temporary so both halves
		   can be read through memory.  */
		rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
		emit_move_insn (mem, temp);
		temp = mem;
	      }
	  }
	if (!hi || !lo)
	  {
	    poly_int64 offset
	      = subreg_highpart_offset (imode, GET_MODE (temp));
	    hi = adjust_address (temp, imode, offset);
	    offset = subreg_lowpart_offset (imode, GET_MODE (temp));
	    lo = adjust_address (temp, imode, offset);
	  }
	rtx val = GEN_INT ((HOST_WIDE_INT_M1U << (fmt->p - 2 - 64))
			   & ~(HOST_WIDE_INT_M1U << (fmt->signbit_ro - 64)));
	if (fmt->qnan_msb_set)
	  {
	    rtx mask = GEN_INT (~(HOST_WIDE_INT_M1U << (fmt->signbit_ro
							- 64)));
	    rtx bit = GEN_INT (HOST_WIDE_INT_1U << (fmt->p - 2 - 64));
	    /* For non-MIPS/PA IEEE quad, expand to:
	       (((hi ^ bit) | ((lo | -lo) >> 63)) & mask) > val.  */
	    rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
	    lo = expand_binop (imode, ior_optab, lo, nlo,
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    lo = expand_shift (RSHIFT_EXPR, imode, lo, 63, NULL_RTX, 1);
	    temp = expand_binop (imode, xor_optab, hi, bit,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, ior_optab, temp, lo,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = expand_binop (imode, and_optab, temp, mask,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, GTU, temp, val, imode,
					  1, 1);
	  }
	else
	  {
	    /* For MIPS/PA IEEE quad, expand to:
	       (hi & val) == val.  */
	    temp = expand_binop (imode, and_optab, hi, val,
				 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	    temp = emit_store_flag_force (target, EQ, temp, val, imode,
					  1, 1);
	  }
      }
      break;
    case 64:	/* Intel or Motorola extended */
    extended:
      {
	rtx ex, hi, lo;
	scalar_int_mode imode = int_mode_for_size (32, 1).require ();
	scalar_int_mode iemode = int_mode_for_size (16, 1).require ();
	/* The pieces are accessed through memory below, so force the
	   value into a stack temporary if it isn't a MEM already.  */
	if (!MEM_P (temp))
	  {
	    rtx mem = assign_stack_temp (fmode, GET_MODE_SIZE (fmode));
	    emit_move_insn (mem, temp);
	    temp = mem;
	  }
	if (fmt->signbit_ro == 95)
	  {
	    /* Motorola, always big endian, with 16-bit gap in between
	       16-bit sign+exponent and 64-bit mantissa.  */
	    ex = adjust_address (temp, iemode, 0);
	    hi = adjust_address (temp, imode, 4);
	    lo = adjust_address (temp, imode, 8);
	  }
	else if (!WORDS_BIG_ENDIAN)
	  {
	    /* Intel little endian, 64-bit mantissa followed by 16-bit
	       sign+exponent and then either 16 or 48 bits of gap.  */
	    ex = adjust_address (temp, iemode, 8);
	    hi = adjust_address (temp, imode, 4);
	    lo = adjust_address (temp, imode, 0);
	  }
	else
	  {
	    /* Big endian Itanium.  */
	    ex = adjust_address (temp, iemode, 0);
	    hi = adjust_address (temp, imode, 2);
	    lo = adjust_address (temp, imode, 6);
	  }
	rtx val = GEN_INT (HOST_WIDE_INT_M1U << 30);
	gcc_assert (fmt->qnan_msb_set);
	rtx mask = GEN_INT (0x7fff);
	rtx bit = GEN_INT (HOST_WIDE_INT_1U << 30);
	/* For Intel/Motorola extended format, expand to:
	   (ex & mask) == mask && ((hi ^ bit) | ((lo | -lo) >> 31)) > val.  */
	rtx nlo = expand_unop (imode, neg_optab, lo, NULL_RTX, 0);
	lo = expand_binop (imode, ior_optab, lo, nlo,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
	lo = expand_shift (RSHIFT_EXPR, imode, lo, 31, NULL_RTX, 1);
	temp = expand_binop (imode, xor_optab, hi, bit,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
	temp = expand_binop (imode, ior_optab, temp, lo,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
	temp = emit_store_flag_force (target, GTU, temp, val, imode, 1, 1);
	ex = expand_binop (iemode, and_optab, ex, mask,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
	ex = emit_store_flag_force (gen_reg_rtx (GET_MODE (temp)), EQ,
				    ex, mask, iemode, 1, 1);
	temp = expand_binop (GET_MODE (temp), and_optab, temp, ex,
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      }
      break;
    default:
      gcc_unreachable ();
    }

  return temp;
}
3044
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the libm function name matching the builtin's type.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
3182
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Only expand inline when errno doesn't matter; there's no easy way
     to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.
	 As scalar float conversions with same mode are useless in GIMPLE,
	 we can end up e.g. with _Float32 argument passed to float builtin,
	 try to get the type from the builtin prototype first.  */
      tree fallback_fndecl = NULL_TREE;
      if (tree argtypes = TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_VALUE (argtypes),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl == NULL_TREE)
	fallback_fndecl
	  = mathfn_built_in_1 (TREE_TYPE (arg),
			       as_combined_fn (fallback_fn), 0);
      if (fallback_fndecl)
	{
	  exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				       fallback_fndecl, 1, arg);

	  target = expand_call (exp, NULL_RTX, target == const0_rtx);
	  target = maybe_emit_group_store (target, TREE_TYPE (exp));
	  /* The fallback returns long; convert to the builtin's result
	     mode.  */
	  return convert_to_mode (mode, target, 0);
	}
    }

  return expand_call (exp, target, target == const0_rtx);
}
3296
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  /* powi takes a floating-point base and an integer exponent.  */
  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Expand both operands and widen/convert them to the modes the
     libcall expects before emitting the call.  */
  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  /* LCT_CONST: the libgcc __powi* routines have no side effects and
     depend only on their arguments.  */
  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
3338
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);

  /* If the length can be computed at compile-time, return it.  */
  if (tree len = c_strlen (src, 0))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  tree len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      /* Evaluate SRC purely for its side effects; its value is unused.  */
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  Walk the
     modes starting at TARGET_MODE looking for one the target's strlen
     insn supports.  */
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  rtx src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  rtx_insn *before_strlen = get_last_insn ();

  class expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Splice the source-address computation in ahead of the strlen insn
     recorded above.  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
3438
/* Expand call EXP to the strnlen built-in, returning the result
   and setting it in TARGET.  Otherwise return NULL_RTX on failure.  */

static rtx
expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree src = CALL_EXPR_ARG (exp, 0);
  tree bound = CALL_EXPR_ARG (exp, 1);

  if (!bound)
    return NULL_RTX;

  location_t loc = UNKNOWN_LOCATION;
  if (EXPR_HAS_LOCATION (exp))
    loc = EXPR_LOCATION (exp);

  /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
     so these conversions aren't necessary.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (src, 0, &lendata, 1);
  if (len)
    len = fold_convert_loc (loc, TREE_TYPE (bound), len);

  if (TREE_CODE (bound) == INTEGER_CST)
    {
      /* Constant bound: fold to min (strlen (src), bound) when the
	 string length is known, otherwise punt to a library call.  */
      if (!len)
	return NULL_RTX;

      len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  if (TREE_CODE (bound) != SSA_NAME)
    return NULL_RTX;

  /* Non-constant bound: consult the global range info for BOUND.  */
  wide_int min, max;
  value_range r;
  get_global_range_query ()->range_of_expr (r, bound);
  if (r.kind () != VR_RANGE)
    return NULL_RTX;
  min = r.lower_bound ();
  max = r.upper_bound ();

  if (!len || TREE_CODE (len) != INTEGER_CST)
    {
      bool exact;
      lendata.decl = unterminated_array (src, &len, &exact);
      if (!lendata.decl)
	return NULL_RTX;
    }

  /* LENDATA.DECL set means SRC refers to an unterminated array; don't
     expand inline in that case.  */
  if (lendata.decl)
    return NULL_RTX;

  /* If the bound is provably greater than the string length the result
     is the string length itself.  */
  if (wi::gtu_p (min, wi::to_wide (len)))
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
}
3502
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from bytes at DATA + OFFSET and return it reinterpreted as
   a target constant.  */

static rtx
builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			 fixed_size_mode mode)
{
  /* The REPresentation pointed to by DATA need not be a nul-terminated
     string but the caller guarantees it's large enough for MODE.  */
  const char *rep = (const char *) data;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for memcpy expansion.  */
  return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
		    /*nul_terminated=*/false);
}
3520
/* LEN specify length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  /* A compile-time constant length pins all three outputs exactly.  */
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_kind range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine using global SSA range information when available.  */
      if (TREE_CODE (len) == SSA_NAME)
	{
	  value_range r;
	  get_global_range_query ()->range_of_expr (r, len);
	  range_type = r.kind ();
	  if (range_type != VR_UNDEFINED)
	    {
	      min = wi::to_wide (r.min ());
	      max = wi::to_wide (r.max ());
	    }
	}
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3590
2a837de2
MS
3591/* Expand a call EXP to the memcpy builtin.
3592 Return NULL_RTX if we failed, the caller should emit a normal call,
3593 otherwise try to get the result in TARGET, if convenient (and in
3594 mode MODE if that's convenient). */
eafe8ee7 3595
2a837de2
MS
3596static rtx
3597expand_builtin_memcpy (tree exp, rtx target)
3598{
3599 if (!validate_arglist (exp,
3600 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3601 return NULL_RTX;
a2c2cee9 3602
2a837de2
MS
3603 tree dest = CALL_EXPR_ARG (exp, 0);
3604 tree src = CALL_EXPR_ARG (exp, 1);
3605 tree len = CALL_EXPR_ARG (exp, 2);
a2c2cee9 3606
2a837de2
MS
3607 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3608 /*retmode=*/ RETURN_BEGIN, false);
3609}
a1108556 3610
2a837de2
MS
3611/* Check a call EXP to the memmove built-in for validity.
3612 Return NULL_RTX on both success and failure. */
a1108556 3613
2a837de2
MS
3614static rtx
3615expand_builtin_memmove (tree exp, rtx target)
3616{
3617 if (!validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3619 return NULL_RTX;
a1108556 3620
2a837de2
MS
3621 tree dest = CALL_EXPR_ARG (exp, 0);
3622 tree src = CALL_EXPR_ARG (exp, 1);
3623 tree len = CALL_EXPR_ARG (exp, 2);
eafe8ee7 3624
2a837de2
MS
3625 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3626 /*retmode=*/ RETURN_BEGIN, true);
a2c2cee9
MS
3627}
3628
2a837de2
MS
3629/* Expand a call EXP to the mempcpy builtin.
3630 Return NULL_RTX if we failed; the caller should emit a normal call,
3631 otherwise try to get the result in TARGET, if convenient (and in
3632 mode MODE if that's convenient). */
d14c547a 3633
2a837de2
MS
3634static rtx
3635expand_builtin_mempcpy (tree exp, rtx target)
d14c547a 3636{
2a837de2
MS
3637 if (!validate_arglist (exp,
3638 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3639 return NULL_RTX;
d14c547a 3640
2a837de2
MS
3641 tree dest = CALL_EXPR_ARG (exp, 0);
3642 tree src = CALL_EXPR_ARG (exp, 1);
3643 tree len = CALL_EXPR_ARG (exp, 2);
ee92e7ba 3644
2a837de2
MS
3645 /* Policy does not generally allow using compute_objsize (which
3646 is used internally by check_memop_size) to change code generation
3647 or drive optimization decisions.
ee92e7ba 3648
2a837de2
MS
3649 In this instance it is safe because the code we generate has
3650 the same semantics regardless of the return value of
3651 check_memop_sizes. Exactly the same amount of data is copied
3652 and the return value is exactly the same in both cases.
b825a228 3653
2a837de2
MS
3654 Furthermore, check_memop_size always uses mode 0 for the call to
3655 compute_objsize, so the imprecise nature of compute_objsize is
3656 avoided. */
a2c2cee9 3657
2a837de2
MS
3658 /* Avoid expanding mempcpy into memcpy when the call is determined
3659 to overflow the buffer. This also prevents the same overflow
3660 from being diagnosed again when expanding memcpy. */
ee92e7ba 3661
2a837de2
MS
3662 return expand_builtin_mempcpy_args (dest, src, len,
3663 target, exp, /*retmode=*/ RETURN_END);
3664}
d9c5a8b9 3665
/* Helper function to do the actual work for expand of memory copy family
   functions (memcpy, mempcpy, stpcpy).  Expanding should assign LEN bytes
   of memory from SRC to DEST and assign to TARGET if convenient.  Return
   value is based on RETMODE argument.  */

static rtx
expand_builtin_memory_copy_args (tree dest, tree src, tree len,
				 rtx target, tree exp, memop_ret retmode,
				 bool might_overlap)
{
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  bool is_move_done;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to guess alignment and size.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);

  /* Try to get the byte representation of the constant SRC points to,
     with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (src, &nbytes);

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant.
     This works in the overlap (memmove) case as well because
     store_by_pieces just generates a series of stores of constants
     from the representation returned by getbyterep().  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, rep),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, rep),
				  dest_align, false, retmode);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  enum block_op_methods method = BLOCK_OP_NORMAL;
  if (CALL_EXPR_TAILCALL (exp)
      && (retmode == RETURN_BEGIN || target == const0_rtx))
    method = BLOCK_OP_TAILCALL;
  /* Prefer emitting a mempcpy libcall directly when the target's libc
     provides a fast mempcpy and the caller wants the end pointer.  */
  bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
			   && retmode == RETURN_END
			   && !might_overlap
			   && target != const0_rtx);
  if (use_mempcpy_call)
    method = BLOCK_OP_NO_LIBCALL_RET;
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size,
				     use_mempcpy_call, &is_move_done,
				     might_overlap);

  /* Bail out when a mempcpy call would be expanded as libcall and when
     we have a target that provides a fast implementation
     of mempcpy routine.  */
  if (!is_move_done)
    return NULL_RTX;

  /* pc_rtx marks the case where the result came back from a libcall
     return value; nothing further to adjust here.  */
  if (dest_addr == pc_rtx)
    return NULL_RTX;

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
      /* stpcpy pointer to last byte.  */
      if (retmode == RETURN_END_MINUS_ONE)
	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
    }

  return dest_addr;
}
3781
2a837de2
MS
3782static rtx
3783expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3784 rtx target, tree orig_exp, memop_ret retmode)
d14c547a 3785{
2a837de2
MS
3786 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3787 retmode, false);
d14c547a
MS
3788}
3789
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.
   Return value is based on RETMODE argument.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
{
  class expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Only usable when the target defines a movstr pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (retmode == RETURN_BEGIN)
    {
      /* Caller wants the start of the destination; capture it before
	 the insn consumes/advances the address.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0],
			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (retmode != RETURN_BEGIN && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (retmode == RETURN_END)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
268209f3 3835
2a837de2
MS
3836/* Expand expression EXP, which is a call to the strcpy builtin. Return
3837 NULL_RTX if we failed the caller should emit a normal call, otherwise
3838 try to get the result in TARGET, if convenient (and in mode MODE if that's
3839 convenient). */
baad4c48 3840
2a837de2
MS
3841static rtx
3842expand_builtin_strcpy (tree exp, rtx target)
3843{
3844 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3845 return NULL_RTX;
baad4c48 3846
2a837de2
MS
3847 tree dest = CALL_EXPR_ARG (exp, 0);
3848 tree src = CALL_EXPR_ARG (exp, 1);
baad4c48 3849
81d6cdd3 3850 return expand_builtin_strcpy_args (exp, dest, src, target);
baad4c48
MS
3851}
3852
2a837de2
MS
3853/* Helper function to do the actual work for expand_builtin_strcpy. The
3854 arguments to the builtin_strcpy call DEST and SRC are broken out
3855 so that this can also be called without constructing an actual CALL_EXPR.
3856 The other arguments and return value are the same as for
3857 expand_builtin_strcpy. */
ef29b12c 3858
2a837de2 3859static rtx
81d6cdd3 3860expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
ef29b12c 3861{
2a837de2
MS
3862 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3863}
83685efd 3864
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      c_strlen_data lendata = { };
      if (!c_getstr (src)
	  || !(len = c_strlen (src, 0, &lendata, 1)))
	return expand_movstr (dst, src, target,
			      /*retmode=*/ RETURN_END_MINUS_ONE);

      /* Known-length source: copy length+1 bytes (including the NUL)
	 and return a pointer to the copied NUL.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, exp,
					 /*retmode=*/ RETURN_END_MINUS_ONE);

      if (ret)
	return ret;

      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      /* Fall back to strcpy and compute the return value as
		 DST + LEN ourselves.  */
	      ret = expand_builtin_strcpy_args (exp, dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: the target's movstr pattern, if any.  */
      return expand_movstr (dst, src, target,
			    /*retmode=*/ RETURN_END_MINUS_ONE);
    }
}
3945
2a837de2
MS
3946/* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3947 arguments while being careful to avoid duplicate warnings (which could
3948 be issued if the expander were to expand the call, resulting in it
3949 being emitted in expand_call(). */
eafe8ee7 3950
2a837de2
MS
3951static rtx
3952expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
eafe8ee7 3953{
2a837de2 3954 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
eafe8ee7 3955 {
2a837de2
MS
3956 /* The call has been successfully expanded. Check for nonstring
3957 arguments and issue warnings as appropriate. */
3958 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3959 return ret;
eafe8ee7 3960 }
eafe8ee7 3961
2a837de2 3962 return NULL_RTX;
eafe8ee7
MS
3963}
3964
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  */

rtx
builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
			  fixed_size_mode mode)
{
  const char *str = (const char *) data;

  /* Past the end of the source string strncpy pads with NULs, so any
     chunk wholly beyond the terminator is all-zero.  */
  if ((unsigned HOST_WIDE_INT) offset > strlen (str))
    return const0_rtx;

  /* The by-pieces infrastructure does not try to pick a vector mode
     for strncpy expansion.  */
  return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
}
3982
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  Returns true on
   success (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxread = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  c_strlen_data lendata = { };
  get_range_strlen (src, &lendata, /* eltsize = */ 1);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  access_data data (nullptr, exp, access_read_write, maxread, true);
  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lendata.minlen
		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
				size_one_node)
		 : NULL_TREE);

  /* The strncat function copies at most MAXREAD bytes and always appends
     the terminating nul so the specified upper bound should never be equal
     to (or greater than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxread))
    {
      location_t loc = EXPR_LOCATION (exp);
      warning_at (loc, OPT_Wstringop_overflow_,
		  "%qD specified bound %E equals destination size",
		  get_callee_fndecl (exp), maxread);

      return false;
    }

  /* With no known source length, or a bound smaller than the source,
     check against the bound instead.  */
  if (!srclen
      || (maxread && tree_fits_uhwi_p (maxread)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxread, srclen)))
    srclen = maxread;

  /* The number of bytes to write is LEN but check_access will also
     check SRCLEN if LEN's value isn't known.  */
  return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
		       objsize, data.mode, &data);
}
ef29b12c 4042
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The number of bytes to write (not the maximum).  */
  tree len = CALL_EXPR_ARG (exp, 2);

  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* We must be passed a constant len and src parameter.  */
  if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
    return NULL_RTX;

  /* SLEN becomes strlen (src) + 1, i.e. the size including the NUL.  */
  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We're required to pad with trailing zeros if the requested
     len is greater than strlen(s2)+1.  In that case try to
     use store_by_pieces, if it fails, punt.  */
  if (tree_int_cst_lt (slen, len))
    {
      unsigned int dest_align = get_pointer_alignment (dest);
      const char *p = c_getstr (src);
      rtx dest_mem;

      if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	  || !can_store_by_pieces (tree_to_uhwi (len),
				   builtin_strncpy_read_str,
				   CONST_CAST (char *, p),
				   dest_align, false))
	return NULL_RTX;

      /* builtin_strncpy_read_str supplies the zero padding past the
	 end of the source string.  */
      dest_mem = get_memory_rtx (dest, len);
      store_by_pieces (dest_mem, tree_to_uhwi (len),
		       builtin_strncpy_read_str,
		       CONST_CAST (char *, p), dest_align, false,
		       RETURN_BEGIN);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  return NULL_RTX;
}
4096
/* Return the RTL of a register in MODE generated from PREV in the
   previous iteration.  Returns nullptr when no usable previous value
   exists.  */

static rtx
gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
{
  rtx target = nullptr;
  if (prev != nullptr && prev->data != nullptr)
    {
      /* Use the previous data in the same mode.  */
      if (prev->mode == mode)
	return prev->data;

      fixed_size_mode prev_mode = prev->mode;

      /* Don't use the previous data to write QImode if it is in a
	 vector mode.  */
      if (VECTOR_MODE_P (prev_mode) && mode == QImode)
	return target;

      rtx prev_rtx = prev->data;

      if (REG_P (prev_rtx)
	  && HARD_REGISTER_P (prev_rtx)
	  && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
	{
	  /* This case occurs when PREV_MODE is a vector and when
	     MODE is too small to store using vector operations.
	     After register allocation, the code will need to move the
	     lowpart of the vector register into a non-vector register.

	     Also, the target has chosen to use a hard register
	     instead of going with the default choice of using a
	     pseudo register.  We should respect that choice and try to
	     avoid creating a pseudo register with the same mode as the
	     current hard register.

	     In principle, we could just use a lowpart MODE subreg of
	     the vector register.  However, the vector register mode might
	     be too wide for non-vector registers, and we already know
	     that the non-vector mode is too small for vector registers.
	     It's therefore likely that we'd need to spill to memory in
	     the vector mode and reload the non-vector value from there.

	     Try to avoid that by reducing the vector register to the
	     smallest size that it can hold.  This should increase the
	     chances that non-vector registers can hold both the inner
	     and outer modes of the subreg that we generate later.  */
	  machine_mode m;
	  fixed_size_mode candidate;
	  FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
	    if (is_a<fixed_size_mode> (m, &candidate))
	      {
		if (GET_MODE_SIZE (candidate)
		    >= GET_MODE_SIZE (prev_mode))
		  break;
		if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
		    && lowpart_subreg_regno (REGNO (prev_rtx),
					     prev_mode, candidate) >= 0)
		  {
		    target = lowpart_subreg (candidate, prev_rtx,
					     prev_mode);
		    prev_rtx = target;
		    prev_mode = candidate;
		    break;
		  }
	      }
	  /* No suitable smaller mode: copy the hard register into a
	     pseudo so the lowpart subreg below is always valid.  */
	  if (target == nullptr)
	    prev_rtx = copy_to_reg (prev_rtx);
	}

      target = lowpart_subreg (mode, prev_rtx, prev_mode);
    }
  return target;
}
4172
4173/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4174 bytes from constant string DATA + OFFSET and return it as target
4175 constant. If PREV isn't nullptr, it has the RTL info from the
4176 previous iteration. */
a2c2cee9 4177
e5e164ef
L
4178rtx
4179builtin_memset_read_str (void *data, void *prev,
4180 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4181 fixed_size_mode mode)
4182{
2a837de2 4183 const char *c = (const char *) data;
e5e164ef 4184 unsigned int size = GET_MODE_SIZE (mode);
eafe8ee7 4185
e5e164ef
L
4186 rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
4187 mode);
4188 if (target != nullptr)
4189 return target;
4190 rtx src = gen_int_mode (*c, QImode);
d02c41dd 4191
e5e164ef
L
4192 if (VECTOR_MODE_P (mode))
4193 {
4194 gcc_assert (GET_MODE_INNER (mode) == QImode);
4195
4196 rtx const_vec = gen_const_vec_duplicate (mode, src);
4197 if (prev == NULL)
4198 /* Return CONST_VECTOR when called by a query function. */
4199 return const_vec;
4200
4201 /* Use the move expander with CONST_VECTOR. */
4202 target = targetm.gen_memset_scratch_rtx (mode);
4203 emit_move_insn (target, const_vec);
4204 return target;
4205 }
4206
4207 char *p = XALLOCAVEC (char, size);
4208
4209 memset (p, *c, size);
4210
4211 /* Vector modes should be handled above. */
4212 return c_readstr (p, as_a <scalar_int_mode> (mode));
2a837de2 4213}
268209f3 4214
2a837de2
MS
4215/* Callback routine for store_by_pieces. Return the RTL of a register
4216 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4217 char value given in the RTL register data. For example, if mode is
4218 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
4219 nullptr, it has the RTL info from the previous iteration. */
83685efd 4220
2a837de2 4221static rtx
e5e164ef 4222builtin_memset_gen_str (void *data, void *prev,
2a837de2 4223 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
e5e164ef 4224 fixed_size_mode mode)
2a837de2
MS
4225{
4226 rtx target, coeff;
4227 size_t size;
4228 char *p;
baad4c48 4229
2a837de2
MS
4230 size = GET_MODE_SIZE (mode);
4231 if (size == 1)
4232 return (rtx) data;
268209f3 4233
e5e164ef
L
4234 target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
4235 if (target != nullptr)
4236 return target;
4237
4238 if (VECTOR_MODE_P (mode))
4239 {
4240 gcc_assert (GET_MODE_INNER (mode) == QImode);
4241
4242 /* vec_duplicate_optab is a precondition to pick a vector mode for
4243 the memset expander. */
4244 insn_code icode = optab_handler (vec_duplicate_optab, mode);
4245
4246 target = targetm.gen_memset_scratch_rtx (mode);
4247 class expand_operand ops[2];
4248 create_output_operand (&ops[0], target, mode);
4249 create_input_operand (&ops[1], (rtx) data, QImode);
4250 expand_insn (icode, 2, ops);
4251 if (!rtx_equal_p (target, ops[0].value))
4252 emit_move_insn (target, ops[0].value);
4253
4254 return target;
4255 }
4256
2a837de2
MS
4257 p = XALLOCAVEC (char, size);
4258 memset (p, 1, size);
e5e164ef
L
4259 /* Vector modes should be handled above. */
4260 coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
eafe8ee7 4261
2a837de2
MS
4262 target = convert_to_mode (mode, (rtx) data, 1);
4263 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4264 return force_reg (mode, target);
4265}
eafe8ee7 4266
2a837de2
MS
4267/* Expand expression EXP, which is a call to the memset builtin. Return
4268 NULL_RTX if we failed the caller should emit a normal call, otherwise
4269 try to get the result in TARGET, if convenient (and in mode MODE if that's
4270 convenient). */
a1108556 4271
a25e0b5e 4272rtx
2a837de2
MS
4273expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4274{
4275 if (!validate_arglist (exp,
4276 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4277 return NULL_RTX;
b631bdb3 4278
2a837de2
MS
4279 tree dest = CALL_EXPR_ARG (exp, 0);
4280 tree val = CALL_EXPR_ARG (exp, 1);
4281 tree len = CALL_EXPR_ARG (exp, 2);
b631bdb3 4282
2a837de2 4283 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
a2c2cee9
MS
4284}
4285
/* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
   Return TRUE if successful, FALSE otherwise.  TO is assumed to be
   aligned at an ALIGN-bits boundary.  LEN must be a multiple of
   1<<CTZ_LEN between MIN_LEN and MAX_LEN.

   The strategy is to issue one store_by_pieces for each power of two,
   from most to least significant, guarded by a test on whether there
   are at least that many bytes left to copy in LEN.

   ??? Should we skip some powers of two in favor of loops?  Maybe start
   at the max of TO/LEN/word alignment, at least when optimizing for
   size, instead of ensuring O(log len) dynamic compares?  */

bool
try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
			      unsigned HOST_WIDE_INT min_len,
			      unsigned HOST_WIDE_INT max_len,
			      rtx val, char valc, unsigned int align)
{
  int max_bits = floor_log2 (max_len);
  int min_bits = floor_log2 (min_len);
  int sctz_len = ctz_len;

  gcc_checking_assert (sctz_len >= 0);

  /* When a dynamic VAL is supplied, VALC is unused for the actual
     stores; set it to a nonzero dummy for the queries below.  */
  if (val)
    valc = 1;

  /* Bits more significant than TST_BITS are part of the shared prefix
     in the binary representation of both min_len and max_len.  Since
     they're identical, we don't need to test them in the loop.  */
  int tst_bits = (max_bits != min_bits ? max_bits
		  : floor_log2 (max_len ^ min_len));

  /* Check whether it's profitable to start by storing a fixed BLKSIZE
     bytes, to lower max_bits.  In the unlikely case of a constant LEN
     (implied by identical MAX_LEN and MIN_LEN), we want to issue a
     single store_by_pieces, but otherwise, select the minimum multiple
     of the ALIGN (in bytes) and of the MCD of the possible LENs, that
     brings MAX_LEN below TST_BITS, if that's lower than min_len.  */
  unsigned HOST_WIDE_INT blksize;
  if (max_len > min_len)
    {
      unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
					  align / BITS_PER_UNIT);
      blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
      blksize &= ~(alrng - 1);
    }
  else if (max_len == min_len)
    blksize = max_len;
  else
    /* Huh, max_len < min_len?  Punt.  See pr100843.c.  */
    return false;
  if (min_len >= blksize)
    {
      /* The fixed block is covered by every possible LEN; account for
	 it up front and recompute the loop bounds for the remainder.  */
      min_len -= blksize;
      min_bits = floor_log2 (min_len);
      max_len -= blksize;
      max_bits = floor_log2 (max_len);

      tst_bits = (max_bits != min_bits ? max_bits
		 : floor_log2 (max_len ^ min_len));
    }
  else
    blksize = 0;

  /* Check that we can use store by pieces for the maximum store count
     we may issue (initial fixed-size block, plus conditional
     power-of-two-sized from max_bits to ctz_len.  */
  unsigned HOST_WIDE_INT xlenest = blksize;
  if (max_bits >= 0)
    xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
		- (HOST_WIDE_INT_1U << ctz_len));
  if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
			    &valc, align, true))
    return false;

  /* Select the value-generation callback: builtin_memset_gen_str for a
     dynamic VAL register, builtin_memset_read_str for a constant byte.  */
  by_pieces_constfn constfun;
  void *constfundata;
  if (val)
    {
      constfun = builtin_memset_gen_str;
      constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
				      val);
    }
  else
    {
      constfun = builtin_memset_read_str;
      constfundata = &valc;
    }

  /* PTR tracks the current store address and REM the remaining byte
     count, both kept in registers so they can be updated as stores are
     issued.  */
  rtx ptr = copy_addr_to_reg (XEXP (to, 0));
  rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
  to = replace_equiv_address (to, ptr);
  set_mem_align (to, align);

  if (blksize)
    {
      /* Issue the unconditional fixed-size block first.  */
      to = store_by_pieces (to, blksize,
			    constfun, constfundata,
			    align, true,
			    max_len != 0 ? RETURN_END : RETURN_BEGIN);
      if (max_len == 0)
	return true;

      /* Adjust PTR, TO and REM.  Since TO's address is likely
	 PTR+offset, we have to replace it.  */
      emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
      to = replace_equiv_address (to, ptr);
      rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
      emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
    }

  /* Iterate over power-of-two block sizes from the maximum length to
     the least significant bit possibly set in the length.  */
  for (int i = max_bits; i >= sctz_len; i--)
    {
      rtx_code_label *label = NULL;
      blksize = HOST_WIDE_INT_1U << i;

      /* If we're past the bits shared between min_ and max_len, expand
	 a test on the dynamic length, comparing it with the
	 BLKSIZE.  */
      if (i <= tst_bits)
	{
	  label = gen_label_rtx ();
	  emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
				   ptr_mode, 1, label,
				   profile_probability::even ());
	}
      /* If we are at a bit that is in the prefix shared by min_ and
	 max_len, skip this BLKSIZE if the bit is clear.  */
      else if ((max_len & blksize) == 0)
	continue;

      /* Issue a store of BLKSIZE bytes.  */
      to = store_by_pieces (to, blksize,
			    constfun, constfundata,
			    align, true,
			    i != sctz_len ? RETURN_END : RETURN_BEGIN);

      /* Adjust REM and PTR, unless this is the last iteration.  */
      if (i != sctz_len)
	{
	  emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
	  to = replace_equiv_address (to, ptr);
	  rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
	  emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
	}

      if (label)
	{
	  emit_label (label);

	  /* Given conditional stores, the offset can no longer be
	     known, so clear it.  */
	  clear_mem_offset (to);
	}
    }

  return true;
}
4448
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  Falls back to an explicit library call (to
   memset or bzero, depending on ORIG_EXP's callee) when no inline
   expansion strategy applies.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Pick up profile-feedback hints about the block's alignment and
     size, when available.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Non-constant fill value (or one that doesn't fit a target char):
     expand VAL into a register and replicate it at runtime.  */
  if (TREE_CODE (val) != INTEGER_CST
      || target_char_cast (val, &c))
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
	 the coefficients by pieces (in the required modes).
	 We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, RETURN_BEGIN);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size)
	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
						 tree_ctz (len),
						 min_size, max_size,
						 val_rtx, 0,
						 dest_align))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant nonzero fill byte C.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true,
			 RETURN_BEGIN);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size)
	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
						 tree_ctz (len),
						 min_size, max_size,
						 NULL_RTX, c,
						 dest_align))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size, tree_ctz (len));

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Inline expansion failed: emit an explicit call matching the
     original built-in (memset or bzero).  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4598
4599/* Expand expression EXP, which is a call to the bzero builtin. Return
4600 NULL_RTX if we failed the caller should emit a normal call. */
e3e9f108
JJ
4601
4602static rtx
2a837de2 4603expand_builtin_bzero (tree exp)
e3e9f108 4604{
2a837de2 4605 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5039610b 4606 return NULL_RTX;
ee92e7ba
MS
4607
4608 tree dest = CALL_EXPR_ARG (exp, 0);
2a837de2 4609 tree size = CALL_EXPR_ARG (exp, 1);
ee92e7ba 4610
2a837de2
MS
4611 /* New argument list transforming bzero(ptr x, int y) to
4612 memset(ptr x, int 0, size_t y). This is done this way
4613 so that if it isn't expanded inline, we fallback to
4614 calling bzero instead of memset. */
af3fa359 4615
2a837de2 4616 location_t loc = EXPR_LOCATION (exp);
af3fa359 4617
2a837de2
MS
4618 return expand_builtin_memset_args (dest, integer_zero_node,
4619 fold_convert_loc (loc,
4620 size_type_node, size),
4621 const0_rtx, VOIDmode, exp);
4622}
ee92e7ba 4623
2a837de2
MS
4624/* Try to expand cmpstr operation ICODE with the given operands.
4625 Return the result rtx on success, otherwise return null. */
4626
4627static rtx
4628expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4629 HOST_WIDE_INT align)
4630{
4631 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4632
4633 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4634 target = NULL_RTX;
4635
4636 class expand_operand ops[4];
4637 create_output_operand (&ops[0], target, insn_mode);
4638 create_fixed_operand (&ops[1], arg1_rtx);
4639 create_fixed_operand (&ops[2], arg2_rtx);
4640 create_integer_operand (&ops[3], align);
4641 if (maybe_expand_insn (icode, 4, ops))
4642 return ops[0].value;
4643 return NULL_RTX;
edcf72f3
IE
4644}
4645
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_bytecmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* Try to get the byte representation of the constant ARG2 (or, only
     when the function's result is used for equality to zero, ARG1)
     points to, with its byte size in NBYTES.  */
  unsigned HOST_WIDE_INT nbytes;
  const char *rep = getbyterep (arg2, &nbytes);
  if (result_eq && rep == NULL)
    {
      /* For equality to zero the arguments are interchangeable.  */
      rep = getbyterep (arg1, &nbytes);
      if (rep != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If the function's constant bound LEN_RTX is less than or equal
     to the byte size of the representation of the constant argument,
     and if block move would be done by pieces, we can avoid loading
     the bytes from memory and only store the computed constant result.  */
  if (rep
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, rep));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4741
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* Without either a cmpstr or cmpstrn pattern, there is nothing to
     expand to; punt to the library.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      /* Add one for the terminating NUL that cmpstrn must compare.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  copy_warning (fn, exp);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
4854
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  location_t loc = EXPR_LOCATION (exp);
  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_bytecmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* Add one for the terminating NUL that cmpstrn must compare.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  copy_warning (call, exp);
  gcc_assert (TREE_CODE (call) == CALL_EXPR);
  CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
  return expand_call (call, target, target == const0_rtx);
}
4962
2a837de2
MS
4963/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4964 if that's convenient. */
e50d56a5 4965
2a837de2
MS
4966rtx
4967expand_builtin_saveregs (void)
e50d56a5 4968{
2a837de2
MS
4969 rtx val;
4970 rtx_insn *seq;
e50d56a5 4971
2a837de2
MS
4972 /* Don't do __builtin_saveregs more than once in a function.
4973 Save the result of the first call and reuse it. */
4974 if (saveregs_value != 0)
4975 return saveregs_value;
e50d56a5 4976
2a837de2
MS
4977 /* When this function is called, it means that registers must be
4978 saved on entry to this function. So we migrate the call to the
4979 first insn of this function. */
e50d56a5 4980
2a837de2 4981 start_sequence ();
57814e5e 4982
2a837de2
MS
4983 /* Do whatever the machine needs done in this case. */
4984 val = targetm.calls.expand_builtin_saveregs ();
57814e5e 4985
2a837de2
MS
4986 seq = get_insns ();
4987 end_sequence ();
57814e5e 4988
2a837de2
MS
4989 saveregs_value = val;
4990
4991 /* Put the insns after the NOTE that starts the function. If this
4992 is inside a start_sequence, make the outer-level insn chain current, so
4993 the code is placed at the start of the function. */
4994 push_topmost_sequence ();
4995 emit_insn_after (seq, entry_of_function ());
4996 pop_topmost_sequence ();
4997
4998 return val;
57814e5e
JJ
4999}
5000
2a837de2 5001/* Expand a call to __builtin_next_arg. */
ee92e7ba 5002
2a837de2
MS
5003static rtx
5004expand_builtin_next_arg (void)
ee92e7ba 5005{
2a837de2
MS
5006 /* Checking arguments is already done in fold_builtin_next_arg
5007 that must be called before this function. */
5008 return expand_binop (ptr_mode, add_optab,
5009 crtl->args.internal_arg_pointer,
5010 crtl->args.arg_offset_rtx,
5011 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5012}
ee92e7ba 5013
/* Make it easier for the backends by protecting the VALIST argument
   from multiple evaluations.  LOC is the location to use for any trees
   built here.  If NEEDS_LVALUE is nonzero the caller needs to be able
   to assign through the result; otherwise a plain rvalue suffices.
   Returns a tree that can safely be evaluated more than once.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      /* Array-type va_list: the array itself is already an lvalue, so
	 only side effects need protecting.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  /* Take the address so the value can be stabilized and then
	     re-dereferenced below.  */
	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Rebuild an lvalue of type VATYPE by dereferencing the
	 stabilized address.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
5063
/* The "standard" definition of va_list is void*.  This is the default
   for targets that do not provide their own TARGET_BUILD_BUILTIN_VA_LIST
   hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
b5338fb3 5071
/* The "standard" abi va_list is va_list_type_node.  Default for the
   TARGET_FN_ABI_VA_LIST hook; FNDECL is unused here because the
   standard ABI does not vary the type per function.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
ee92e7ba 5079
2a837de2 5080/* The "standard" type of va_list is va_list_type_node. */
ee92e7ba 5081
2a837de2
MS
5082tree
5083std_canonical_va_list_type (tree type)
5084{
5085 tree wtype, htype;
ee92e7ba 5086
2a837de2
MS
5087 wtype = va_list_type_node;
5088 htype = type;
ee92e7ba 5089
2a837de2 5090 if (TREE_CODE (wtype) == ARRAY_TYPE)
ee92e7ba 5091 {
2a837de2
MS
5092 /* If va_list is an array type, the argument may have decayed
5093 to a pointer type, e.g. by being passed to another function.
5094 In that case, unwrap both types so that we can compare the
5095 underlying records. */
5096 if (TREE_CODE (htype) == ARRAY_TYPE
5097 || POINTER_TYPE_P (htype))
5098 {
5099 wtype = TREE_TYPE (wtype);
5100 htype = TREE_TYPE (htype);
5101 }
ee92e7ba 5102 }
2a837de2
MS
5103 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5104 return va_list_type_node;
ee92e7ba 5105
2a837de2
MS
5106 return NULL_TREE;
5107}
ee92e7ba 5108
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue; NEXTARG is the rtx for
   the address of the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}
5118
/* Expand EXP, a call to __builtin_va_start.  The first argument is the
   va_list to initialize; the second is the last named parameter (only
   checked, not otherwise used).  Always returns const0_rtx since
   va_start yields no value.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg diagnoses misuse of the second argument;
     if it reported an error, expand to nothing.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target expand va_start itself if it has a hook for it,
     otherwise fall back to the standard pointer assignment.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
b5338fb3 5147
2a837de2 5148/* Expand EXP, a call to __builtin_va_end. */
b5338fb3 5149
2a837de2
MS
5150static rtx
5151expand_builtin_va_end (tree exp)
5152{
5153 tree valist = CALL_EXPR_ARG (exp, 0);
b5338fb3 5154
2a837de2
MS
5155 /* Evaluate for side effects, if needed. I hate macros that don't
5156 do that. */
5157 if (TREE_SIDE_EFFECTS (valist))
5158 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
b5338fb3 5159
2a837de2 5160 return const0_rtx;
da9e9f08
KG
5161}
5162
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  The first argument is the
   destination, the second the source.  Always returns const0_rtx.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* The destination must be assignable (lvalue); the source need
     only be readable.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array-type va_list: copy the underlying storage as a block.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
5214
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   call expression.  Returns the frame pointer or saved return address
   of the requested frame, or const0_rtx after diagnosing bad input.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* For __builtin_return_address, make sure the value lives in a
	 register (or is constant) before handing it to the caller.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
5265
/* Expand EXP, a call to the alloca builtin (plain, with-align, or
   with-align-and-max variants).  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  /* Each variant takes a different number of integer arguments:
     size; size + align; or size + align + max size.  */
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  -1 means no limit was supplied.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  /* Dynamic allocations for variables are recorded during gimplification.  */
  if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
    record_dynamic_alloc (exp);

  return result;
}
da9e6e63 5315
/* Emit a call to __asan_allocas_unpoison call in EXP.  Add to second argument
   of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value.  See motivation for this in comment to
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* Compute the dynamic stack offset in Pmode, then fold it into the
     bottom address (done in ptr_mode, hence the conversion).  */
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
da9e6e63 5339
/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   TARGET_MODE is the integer mode of the bswap variant being expanded.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  /* SUBTARGET is only usable if its mode already matches.  */
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}
da9e6e63 5369
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   OP_OPTAB selects the unary operation (e.g. clz, ctz, popcount).  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.
     clrsb may not be supported directly, so it alone may fall back
     to a libcall-free expansion (hence the unsignedp distinction).  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
5398
2a837de2
MS
5399/* Expand a call to __builtin_expect. We just return our argument
5400 as the builtin_expect semantic should've been already executed by
5401 tree branch prediction pass. */
880864cf 5402
5039610b 5403static rtx
2a837de2 5404expand_builtin_expect (tree exp, rtx target)
5039610b 5405{
2a837de2 5406 tree arg;
079a182e 5407
2a837de2
MS
5408 if (call_expr_nargs (exp) < 2)
5409 return const0_rtx;
5410 arg = CALL_EXPR_ARG (exp, 0);
c2bd38e8 5411
2a837de2
MS
5412 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5413 /* When guessing was done, the hints should be already stripped away. */
5414 gcc_assert (!flag_guess_branch_prob
5415 || optimize == 0 || seen_error ());
5416 return target;
5417}
726a989a 5418
2a837de2
MS
5419/* Expand a call to __builtin_expect_with_probability. We just return our
5420 argument as the builtin_expect semantic should've been already executed by
5421 tree branch prediction pass. */
880864cf 5422
2a837de2
MS
5423static rtx
5424expand_builtin_expect_with_probability (tree exp, rtx target)
5425{
5426 tree arg;
1a887f86 5427
2a837de2
MS
5428 if (call_expr_nargs (exp) < 3)
5429 return const0_rtx;
5430 arg = CALL_EXPR_ARG (exp, 0);
1a887f86 5431
2a837de2
MS
5432 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5433 /* When guessing was done, the hints should be already stripped away. */
5434 gcc_assert (!flag_guess_branch_prob
5435 || optimize == 0 || seen_error ());
5436 return target;
5437}
1a887f86 5438
28f4ec01 5439
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  The remaining (alignment/misalignment) arguments
   must be side-effect free by this point, which we assert.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
28f4ec01 5456
/* Expand __builtin_trap: emit the target's trap instruction if it has
   one, otherwise fall back to calling abort ().  Control does not
   continue past the trap, so a barrier is emitted afterwards.  */

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
5478
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  /* Use gimple_build_builtin_unreachable or builtin_decl_unreachable
     to avoid this.  */
  gcc_checking_assert (!sanitize_flags_p (SANITIZE_UNREACHABLE));
  emit_barrier ();
}
5492
/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Stabilize the argument in the CALL_EXPR itself so that a fallback
     re-expansion sees the saved form too.  */
  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}
5515
2a837de2
MS
5516/* Expand EXP, a call to copysign, copysignf, or copysignl.
5517 Return NULL is a normal call should be emitted rather than expanding the
5518 function inline. If convenient, the result should be placed in TARGET.
5519 SUBTARGET may be used as the target for computing the operand. */
5197bd50 5520
28f4ec01 5521static rtx
2a837de2 5522expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
28f4ec01 5523{
2a837de2
MS
5524 rtx op0, op1;
5525 tree arg;
d9c5a8b9 5526
2a837de2 5527 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
b99d7d97
QZ
5528 return NULL_RTX;
5529
2a837de2
MS
5530 arg = CALL_EXPR_ARG (exp, 0);
5531 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
d9c5a8b9 5532
2a837de2
MS
5533 arg = CALL_EXPR_ARG (exp, 1);
5534 op1 = expand_normal (arg);
358b8f01 5535
2a837de2
MS
5536 return expand_copysign (op0, op1, target);
5537}
28f4ec01 5538
/* Emit a call to __builtin___clear_cache for the range [BEGIN, END).
   This is the default TARGET_CALLS_EMIT_CALL_BUILTIN___CLEAR_CACHE
   implementation: a plain library call to the builtin's asm name.  */

void
default_emit_call_builtin___clear_cache (rtx begin, rtx end)
{
  rtx callee = gen_rtx_SYMBOL_REF (Pmode,
				   BUILTIN_ASM_NAME_PTR
				   (BUILT_IN_CLEAR_CACHE));

  emit_library_call (callee,
		     LCT_NORMAL, VOIDmode,
		     convert_memory_address (ptr_mode, begin), ptr_mode,
		     convert_memory_address (ptr_mode, end), ptr_mode);
}
5553
5554/* Emit a call to __builtin___clear_cache, unless the target specifies
5555 it as do-nothing. This function can be used by trampoline
5556 finalizers to duplicate the effects of expanding a call to the
5557 clear_cache builtin. */
5558
5559void
5560maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5561{
4e5bc4e4
KC
5562 gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5563 || CONST_INT_P (begin))
5564 && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5565 || CONST_INT_P (end)));
6cbaec9e 5566
2a837de2
MS
5567 if (targetm.have_clear_cache ())
5568 {
5569 /* We have a "clear_cache" insn, and it will handle everything. */
5570 class expand_operand ops[2];
36b85e43 5571
2a837de2
MS
5572 create_address_operand (&ops[0], begin);
5573 create_address_operand (&ops[1], end);
5574
5575 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5576 return;
5577 }
5578 else
d0d7f887 5579 {
2a837de2
MS
5580#ifndef CLEAR_INSN_CACHE
5581 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5582 does nothing. There is no need to call it. Do nothing. */
5583 return;
5584#endif /* CLEAR_INSN_CACHE */
d0d7f887 5585 }
36b85e43 5586
2a837de2
MS
5587 targetm.calls.emit_call_builtin___clear_cache (begin, end);
5588}
36b85e43 5589
/* Expand a call to __builtin___clear_cache.  EXP is the call; both
   arguments must be pointers delimiting the range to flush.  */

static void
expand_builtin___clear_cache (tree exp)
{
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return;
    }

  begin = CALL_EXPR_ARG (exp, 0);
  begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

  end = CALL_EXPR_ARG (exp, 1);
  end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

  maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
}
5615
/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.
   Returns TRAMP unchanged when stack alignment already guarantees it,
   otherwise returns a register holding the rounded-up address.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary: (tramp + align-1) & -align.  */
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
			      temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}
c22cacf3 5640
/* Expand a call to __builtin_init_trampoline (ONSTACK true) or
   __builtin_init_heap_trampoline (ONSTACK false).  EXP carries the
   trampoline address, the nested function's address, and the static
   chain value, in that order.  Returns const0_rtx.  */

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
28f4ec01 5698
da9e9f08 5699static rtx
2a837de2 5700expand_builtin_adjust_trampoline (tree exp)
da9e9f08 5701{
2a837de2 5702 rtx tramp;
b5338fb3 5703
2a837de2 5704 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
b5338fb3
MS
5705 return NULL_RTX;
5706
2a837de2
MS
5707 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5708 tramp = round_trampoline_addr (tramp);
5709 if (targetm.calls.trampoline_adjust_address)
5710 tramp = targetm.calls.trampoline_adjust_address (tramp);
d14c547a 5711
2a837de2
MS
5712 return tramp;
5713}
d14c547a 5714
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  EXP carries the descriptor
   address, the function address, and the static chain value.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;
  set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor: static chain first,
     then the code entry one pointer further along.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
16155777 5748
2a837de2 5749/* Expand a call to the builtin descriptor adjustment routine. */
16155777 5750
2a837de2
MS
5751static rtx
5752expand_builtin_adjust_descriptor (tree exp)
5753{
5754 rtx tramp;
16155777 5755
2a837de2
MS
5756 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5757 return NULL_RTX;
16155777 5758
2a837de2 5759 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
16155777 5760
2a837de2
MS
5761 /* Unalign the descriptor to allow runtime identification. */
5762 tramp = plus_constant (ptr_mode, tramp,
5763 targetm.calls.custom_function_descriptors);
16155777 5764
2a837de2 5765 return force_operand (tramp, NULL_RTX);
d118937d
KG
5766}
5767
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      rtx this_target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, this_target, temp, UNKNOWN))
	return this_target;
      /* The insn failed to expand; undo anything it emitted and fall
	 back to bit extraction below.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The whole value fits in one word: view it as an integer of the
	 same size.  */
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implement with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
5874
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  When profiling, the call is redirected to
   the corresponding __gcov_* wrapper so coverage data stays sane
   across fork/exec.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build a declaration for the wrapper with the same type as the
     original function, then rewrite the call to target it.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}
d3147f64 5939
d3707adb 5940
2a837de2
MS
5941\f
5942/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5943 the pointer in these functions is void*, the tree optimizers may remove
5944 casts. The mode computed in expand_builtin isn't reliable either, due
5945 to __sync_bool_compare_and_swap.
d3707adb 5946
2a837de2
MS
5947 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5948 group of builtins. This gives us log2 of the mode size. */
5949
5950static inline machine_mode
5951get_builtin_sync_mode (int fcode_diff)
5952{
5953 /* The size is not negotiable, so ask not to get BLKmode in return
5954 if the target indicates that a smaller size would be better. */
5955 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
d3707adb
RH
5956}
5957
2a837de2
MS
5958/* Expand the memory expression LOC and return the appropriate memory operand
5959 for the builtin_sync operations. */
3bdf5ad1 5960
d3707adb 5961static rtx
2a837de2 5962get_builtin_sync_mem (tree loc, machine_mode mode)
d3707adb 5963{
2a837de2
MS
5964 rtx addr, mem;
5965 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5966 ? TREE_TYPE (TREE_TYPE (loc))
5967 : TREE_TYPE (loc));
5968 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
daf68dd7 5969
2a837de2
MS
5970 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5971 addr = convert_memory_address (addr_mode, addr);
d3707adb 5972
2a837de2
MS
5973 /* Note that we explicitly do not want any alias information for this
5974 memory, so that we kill all other live memories. Otherwise we don't
5975 satisfy the full barrier semantics of the intrinsic. */
5976 mem = gen_rtx_MEM (mode, addr);
d3707adb 5977
2a837de2 5978 set_mem_addr_space (mem, addr_space);
3bdf5ad1 5979
2a837de2 5980 mem = validize_mem (mem);
d3707adb 5981
2a837de2
MS
5982 /* The alignment needs to be at least according to that of the mode. */
5983 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5984 get_pointer_alignment (loc)));
5985 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5986 MEM_VOLATILE_P (mem) = 1;
d3707adb 5987
2a837de2
MS
5988 return mem;
5989}
d3707adb 5990
2a837de2
MS
/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.

   Returns EXP expanded as an rtx converted to MODE.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  if (TREE_CODE (exp) == SSA_NAME
      && TYPE_MODE (TREE_TYPE (exp)) != mode)
    {
      /* Undo argument promotion if possible, as combine might not
	 be able to do it later due to MEM_VOLATILE_P uses in the
	 patterns.  */
      gimple *g = get_gimple_for_ssa_name (exp);
      if (g && gimple_assign_cast_p (g))
	{
	  tree rhs = gimple_assign_rhs1 (g);
	  tree_code code = gimple_assign_rhs_code (g);
	  /* Only strip a widening integral conversion whose source is
	     already in the desired mode; anything else must go through
	     the normal expand-and-convert path below.  */
	  if (CONVERT_EXPR_CODE_P (code)
	      && TYPE_MODE (TREE_TYPE (rhs)) == mode
	      && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
	      && (TYPE_PRECISION (TREE_TYPE (exp))
		  > TYPE_PRECISION (TREE_TYPE (rhs))))
	    exp = rhs;
	}
    }

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    /* CONST_INTs carry no mode; fall back to the mode of the tree type.  */
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}
6032
2a837de2
MS
6033
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* The semantics of __sync_fetch_and_nand/__sync_nand_and_fetch changed
     in GCC 4.4; emit a once-per-kind note when -Wsync-nand is active.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      /* Warn only once per translation unit for each of the two
	 builtin families.  */
      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* The legacy __sync builtins are full barriers, hence SYNC_SEQ_CST.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
6096
2a837de2
MS
6097/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6098 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6099 true if this is the boolean form. TARGET is a place for us to store the
6100 results; this is NOT optional if IS_BOOL is true. */
d5457140 6101
28f4ec01 6102static rtx
2a837de2
MS
6103expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6104 bool is_bool, rtx target)
28f4ec01 6105{
2a837de2
MS
6106 rtx old_val, new_val, mem;
6107 rtx *pbool, *poval;
13e49da9 6108
2a837de2
MS
6109 /* Expand the operands. */
6110 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6111 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6112 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
28f4ec01 6113
2a837de2
MS
6114 pbool = poval = NULL;
6115 if (target != const0_rtx)
8bd9f164 6116 {
2a837de2
MS
6117 if (is_bool)
6118 pbool = &target;
6119 else
6120 poval = &target;
8bd9f164 6121 }
2a837de2
MS
6122 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6123 false, MEMMODEL_SYNC_SEQ_CST,
6124 MEMMODEL_SYNC_SEQ_CST))
6125 return NULL_RTX;
8bd9f164 6126
2a837de2
MS
6127 return target;
6128}
28f4ec01 6129
2a837de2
MS
6130/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6131 general form is actually an atomic exchange, and some targets only
6132 support a reduced form with the second argument being a constant 1.
6133 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6134 the results. */
9e878cf1 6135
2a837de2
MS
6136static rtx
6137expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6138 rtx target)
6139{
6140 rtx val, mem;
d5457140 6141
2a837de2
MS
6142 /* Expand the operands. */
6143 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6144 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
3cf3da88 6145
2a837de2 6146 return expand_sync_lock_test_and_set (target, mem, val);
28f4ec01
BS
6147}
6148
2a837de2 6149/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
e3174bdf 6150
2a837de2
MS
6151static void
6152expand_builtin_sync_lock_release (machine_mode mode, tree exp)
e3174bdf 6153{
2a837de2
MS
6154 rtx mem;
6155
6156 /* Expand the operands. */
6157 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6158
6159 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
e3174bdf
MO
6160}
6161
2a837de2
MS
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  Invalid or
   out-of-range values are conservatively mapped to MEMMODEL_SEQ_CST.  */

static enum memmodel
get_memmodel (tree exp)
{
  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  rtx op = expand_normal (exp);

  unsigned HOST_WIDE_INT val = INTVAL (op);
  /* Let the target validate (and possibly canonicalize) the value;
     otherwise reject anything with bits outside MEMMODEL_MASK.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    return MEMMODEL_SEQ_CST;

  /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    return MEMMODEL_SEQ_CST;

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
6192
2a837de2
MS
6193/* Expand the __atomic_exchange intrinsic:
6194 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6195 EXP is the CALL_EXPR.
6196 TARGET is an optional place for us to store the results. */
d5457140 6197
28f4ec01 6198static rtx
2a837de2 6199expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
28f4ec01 6200{
2a837de2
MS
6201 rtx val, mem;
6202 enum memmodel model;
5039610b 6203
2a837de2
MS
6204 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6205
6206 if (!flag_inline_atomics)
5039610b 6207 return NULL_RTX;
28f4ec01 6208
2a837de2
MS
6209 /* Expand the operands. */
6210 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6211 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5906d013 6212
2a837de2 6213 return expand_atomic_exchange (target, mem, val, model);
28f4ec01 6214}
994a57cd 6215
2a837de2
MS
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  tree weak;
  bool is_weak;

  memmodel success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  memmodel failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  /* The failure ordering may not be stronger than the success ordering;
     repair invalid combinations conservatively rather than erroring.  */
  if (failure > success)
    success = MEMMODEL_SEQ_CST;

  /* RELEASE and ACQ_REL are not valid failure orderings.  */
  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }


  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer; dereference it into a MEM in MODE.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
6286
2a837de2
MS
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* The library function takes the expected value by pointer, so spill
     it to a stack temporary and pass that temporary's address.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The IFN's lhs is a complex pair (success flag, old value);
	 reassemble it from the call's boolean result and the stack
	 temporary the library call updated.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true, true);
      write_complex_part (target, x, false, false);
    }
}
6336
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 packs the access size in its low byte (the weak flag is
     bit 8, extracted below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();

  memmodel success = get_memmodel (gimple_call_arg (call, 4));
  memmodel failure = get_memmodel (gimple_call_arg (call, 5));

  /* The failure ordering may not be stronger than the success ordering.  */
  if (failure > success)
    success = MEMMODEL_SEQ_CST;

  /* RELEASE and ACQ_REL are not valid failure orderings.  */
  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  rtx mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  rtx expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  rtx desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  /* Bit 8 of argument 3 encodes the weak/strong flavor.  */
  bool is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  rtx boolret = NULL;
  rtx oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      /* No inline sequence available; fall back to the library call.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  tree lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* Store the (success flag, old value) pair into the complex lhs.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true, true);
      write_complex_part (target, oldval, false, false);
    }
}
6392
2a837de2
MS
6393/* Expand the __atomic_load intrinsic:
6394 TYPE __atomic_load (TYPE *object, enum memmodel)
6395 EXP is the CALL_EXPR.
6396 TARGET is an optional place for us to store the results. */
6de9cd9a
DN
6397
6398static rtx
2a837de2 6399expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6de9cd9a 6400{
5a431b60 6401 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
2a837de2 6402 if (is_mm_release (model) || is_mm_acq_rel (model))
5a431b60 6403 model = MEMMODEL_SEQ_CST;
6de9cd9a 6404
2a837de2
MS
6405 if (!flag_inline_atomics)
6406 return NULL_RTX;
6de9cd9a 6407
2a837de2 6408 /* Expand the operand. */
5a431b60 6409 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6de9cd9a 6410
2a837de2 6411 return expand_atomic_load (target, mem, model);
6de9cd9a
DN
6412}
6413
6de9cd9a 6414
2a837de2
MS
6415/* Expand the __atomic_store intrinsic:
6416 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6417 EXP is the CALL_EXPR.
6418 TARGET is an optional place for us to store the results. */
6de9cd9a 6419
2a837de2
MS
6420static rtx
6421expand_builtin_atomic_store (machine_mode mode, tree exp)
6422{
5a431b60 6423 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 2));
2a837de2
MS
6424 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6425 || is_mm_release (model)))
5a431b60 6426 model = MEMMODEL_SEQ_CST;
8ffadef9 6427
2a837de2 6428 if (!flag_inline_atomics)
6de9cd9a
DN
6429 return NULL_RTX;
6430
2a837de2 6431 /* Expand the operands. */
5a431b60
MS
6432 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6433 rtx val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6de9cd9a 6434
2a837de2 6435 return expand_atomic_store (mem, val, model, false);
6de9cd9a
DN
6436}
6437
2a837de2
MS
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  Temporarily redirect the
     callee in the CALL_EXPR's address operand; restored below.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* If we will emit code after the call, the call cannot be a tail call.
     If it is emitted as a tail call, a barrier is emitted after it, and
     then all trailing code is removed.  */
  if (!ignore)
    CALL_EXPR_TAILCALL (exp) = 0;

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  /* NOT means NAND here: recompute ~(old & val) from the
	     fetched value.  */
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6512
2a837de2
MS
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* With 5 arguments the call carries an explicit memory model
     (and a fallback library function as the last argument).  */
  if (gimple_call_num_args (call) == 5)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Pick the RTL operation and optab matching the IFN variant.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  if (lhs == NULL_TREE)
    {
      /* The result is unused: a plain atomic fetch-op on the mask
	 (1 << bit, complemented for AND/reset) suffices.  */
      rtx val2 = expand_simple_binop (mode, ASHIFT, const1_rtx,
				      val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val2 = expand_simple_unop (mode, NOT, val2, NULL_RTX, true);
      if (expand_atomic_fetch_op (const0_rtx, mem, val2, code, model, false))
	return;
    }

  rtx target;
  if (lhs)
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  else
    target = gen_reg_rtx (mode);
  /* First try the dedicated bit-test-and-* insn pattern.  */
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Otherwise fall back to a generic atomic fetch-op on the mask and
     extract the bit from the fetched value afterwards.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (!result)
    {
      /* Last resort: call the library function supplied as the final
	 call argument.  */
      bool is_atomic = gimple_call_num_args (call) == 5;
      tree tcall = gimple_call_arg (call, 3 + is_atomic);
      tree fndecl = gimple_call_addr_fndecl (tcall);
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree exp = build_call_nary (type, tcall, 2 + is_atomic, ptr,
				  make_tree (type, val),
				  is_atomic
				  ? gimple_call_arg (call, 3)
				  : integer_zero_node);
      result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
			       mode, !lhs);
    }
  if (!lhs)
    return;
  if (integer_onep (flag))
    {
      /* Caller wants the bit normalized to 0/1.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    /* Caller wants the bit in place, i.e. old value masked.  */
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
e4fbead1 6616
6362627b
JJ
/* Expand IFN_ATOMIC_*_FETCH_CMP_0 internal function.  */

void
expand_ifn_atomic_op_fetch_cmp_0 (gcall *call)
{
  tree cmp = gimple_call_arg (call, 0);
  tree ptr = gimple_call_arg (call, 1);
  tree arg = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (cmp));
  optab optab;
  rtx_code code;
  class expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* With 5 arguments the call carries an explicit memory model
     (and a fallback library function as the last argument).  */
  if (gimple_call_num_args (call) == 5)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx op = expand_expr_force_mode (arg, mode);

  /* Pick the RTL operation and optab matching the IFN variant.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_ADD_FETCH_CMP_0:
      code = PLUS;
      optab = atomic_add_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_SUB_FETCH_CMP_0:
      code = MINUS;
      optab = atomic_sub_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_AND_FETCH_CMP_0:
      code = AND;
      optab = atomic_and_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_OR_FETCH_CMP_0:
      code = IOR;
      optab = atomic_or_fetch_cmp_0_optab;
      break;
    case IFN_ATOMIC_XOR_FETCH_CMP_0:
      code = XOR;
      optab = atomic_xor_fetch_cmp_0_optab;
      break;
    default:
      gcc_unreachable ();
    }

  /* Decode the encoded comparison against zero from argument 0.  */
  enum rtx_code comp = UNKNOWN;
  switch (tree_to_uhwi (cmp))
    {
    case ATOMIC_OP_FETCH_CMP_0_EQ: comp = EQ; break;
    case ATOMIC_OP_FETCH_CMP_0_NE: comp = NE; break;
    case ATOMIC_OP_FETCH_CMP_0_GT: comp = GT; break;
    case ATOMIC_OP_FETCH_CMP_0_GE: comp = GE; break;
    case ATOMIC_OP_FETCH_CMP_0_LT: comp = LT; break;
    case ATOMIC_OP_FETCH_CMP_0_LE: comp = LE; break;
    default: gcc_unreachable ();
    }

  rtx target;
  if (lhs == NULL_TREE)
    target = gen_reg_rtx (TYPE_MODE (boolean_type_node));
  else
    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  /* First try the dedicated op-fetch-and-compare insn pattern.  */
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, TYPE_MODE (boolean_type_node));
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], op, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], comp);
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Otherwise perform a generic atomic op-and-fetch and do the
     comparison on the resulting value.  */
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, op,
				       code, model, true);
  if (!result)
    {
      /* Last resort: call the library function supplied as the final
	 call argument.  */
      bool is_atomic = gimple_call_num_args (call) == 5;
      tree tcall = gimple_call_arg (call, 3 + is_atomic);
      tree fndecl = gimple_call_addr_fndecl (tcall);
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree exp = build_call_nary (type, tcall,
				  2 + is_atomic, ptr, arg,
				  is_atomic
				  ? gimple_call_arg (call, 3)
				  : integer_zero_node);
      result = expand_builtin (exp, gen_reg_rtx (mode), NULL_RTX,
			       mode, !lhs);
    }

  if (lhs)
    {
      result = emit_store_flag_force (target, comp, result, const0_rtx, mode,
				      0, 1);
      if (result != target)
	emit_move_insn (target, result);
    }
}
6718
2a837de2
MS
6719/* Expand an atomic clear operation.
6720 void _atomic_clear (BOOL *obj, enum memmodel)
6721 EXP is the call expression. */
210e1852 6722
2a837de2
MS
6723static rtx
6724expand_builtin_atomic_clear (tree exp)
6725{
5a431b60
MS
6726 machine_mode mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6727 rtx mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6728 memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 1));
ef79730c 6729
2a837de2 6730 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5a431b60 6731 model = MEMMODEL_SEQ_CST;
e4fbead1 6732
2a837de2
MS
6733 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6734 Failing that, a store is issued by __atomic_store. The only way this can
6735 fail is if the bool type is larger than a word size. Unlikely, but
6736 handle it anyway for completeness. Assume a single threaded model since
6737 there is no atomic support in this case, and no barriers are required. */
5a431b60 6738 rtx ret = expand_atomic_store (mem, const0_rtx, model, true);
2a837de2
MS
6739 if (!ret)
6740 emit_move_insn (mem, const0_rtx);
6741 return const0_rtx;
ef79730c 6742}
d1c38823 6743
2a837de2
MS
6744/* Expand an atomic test_and_set operation.
6745 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6746 EXP is the call expression. */
d1c38823
ZD
6747
6748static rtx
2a837de2 6749expand_builtin_atomic_test_and_set (tree exp, rtx target)
d1c38823 6750{
2a837de2
MS
6751 rtx mem;
6752 enum memmodel model;
6753 machine_mode mode;
b5338fb3 6754
2a837de2
MS
6755 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6756 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6757 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
b5338fb3 6758
2a837de2
MS
6759 return expand_atomic_test_and_set (target, mem, model);
6760}
d1c38823 6761
d1c38823 6762
2a837de2
MS
6763/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6764 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
d1c38823 6765
2a837de2
MS
6766static tree
6767fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6768{
6769 int size;
6770 machine_mode mode;
6771 unsigned int mode_align, type_align;
d1c38823 6772
2a837de2
MS
6773 if (TREE_CODE (arg0) != INTEGER_CST)
6774 return NULL_TREE;
d1c38823 6775
2a837de2
MS
6776 /* We need a corresponding integer mode for the access to be lock-free. */
6777 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6778 if (!int_mode_for_size (size, 0).exists (&mode))
6779 return boolean_false_node;
d1c38823 6780
2a837de2 6781 mode_align = GET_MODE_ALIGNMENT (mode);
d1c38823 6782
2a837de2
MS
6783 if (TREE_CODE (arg1) == INTEGER_CST)
6784 {
6785 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
d1c38823 6786
2a837de2
MS
6787 /* Either this argument is null, or it's a fake pointer encoding
6788 the alignment of the object. */
6789 val = least_bit_hwi (val);
6790 val *= BITS_PER_UNIT;
d1c38823 6791
2a837de2
MS
6792 if (val == 0 || mode_align < val)
6793 type_align = mode_align;
6794 else
6795 type_align = val;
d1c38823 6796 }
2a837de2
MS
6797 else
6798 {
6799 tree ttype = TREE_TYPE (arg1);
d1c38823 6800
2a837de2
MS
6801 /* This function is usually invoked and folded immediately by the front
6802 end before anything else has a chance to look at it. The pointer
6803 parameter at this point is usually cast to a void *, so check for that
6804 and look past the cast. */
6805 if (CONVERT_EXPR_P (arg1)
6806 && POINTER_TYPE_P (ttype)
6807 && VOID_TYPE_P (TREE_TYPE (ttype))
6808 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6809 arg1 = TREE_OPERAND (arg1, 0);
b8698a0f 6810
2a837de2
MS
6811 ttype = TREE_TYPE (arg1);
6812 gcc_assert (POINTER_TYPE_P (ttype));
48ae6c13 6813
2a837de2
MS
6814 /* Get the underlying type of the object. */
6815 ttype = TREE_TYPE (ttype);
6816 type_align = TYPE_ALIGN (ttype);
6817 }
02ee605c 6818
2a837de2
MS
6819 /* If the object has smaller alignment, the lock free routines cannot
6820 be used. */
6821 if (type_align < mode_align)
6822 return boolean_false_node;
02ee605c 6823
2a837de2
MS
6824 /* Check if a compare_and_swap pattern exists for the mode which represents
6825 the required size. The pattern is not allowed to fail, so the existence
6826 of the pattern indicates support is present. Also require that an
6827 atomic load exists for the required size. */
6828 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6829 return boolean_true_node;
6830 else
6831 return boolean_false_node;
02ee605c
RH
6832}
6833
2a837de2
MS
6834/* Return true if the parameters to call EXP represent an object which will
6835 always generate lock free instructions. The first argument represents the
6836 size of the object, and the second parameter is a pointer to the object
6837 itself. If NULL is passed for the object, then the result is based on
6838 typical alignment for an object of the specified size. Otherwise return
6839 false. */
1387fef3
AS
6840
6841static rtx
2a837de2 6842expand_builtin_atomic_always_lock_free (tree exp)
1387fef3 6843{
2a837de2
MS
6844 tree size;
6845 tree arg0 = CALL_EXPR_ARG (exp, 0);
6846 tree arg1 = CALL_EXPR_ARG (exp, 1);
1387fef3 6847
2a837de2
MS
6848 if (TREE_CODE (arg0) != INTEGER_CST)
6849 {
6850 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6851 return const0_rtx;
6852 }
b6895597 6853
2a837de2
MS
6854 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6855 if (size == boolean_true_node)
6856 return const1_rtx;
6857 return const0_rtx;
6858}
b6895597 6859
2a837de2
MS
6860/* Return a one or zero if it can be determined that object ARG1 of size ARG
6861 is lock free on this architecture. */
1387fef3 6862
2a837de2
MS
6863static tree
6864fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6865{
6866 if (!flag_inline_atomics)
6867 return NULL_TREE;
6868
6869 /* If it isn't always lock free, don't generate a result. */
6870 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6871 return boolean_true_node;
1387fef3 6872
2a837de2 6873 return NULL_TREE;
1387fef3
AS
6874}
6875
2a837de2
MS
6876/* Return true if the parameters to call EXP represent an object which will
6877 always generate lock free instructions. The first argument represents the
6878 size of the object, and the second parameter is a pointer to the object
6879 itself. If NULL is passed for the object, then the result is based on
6880 typical alignment for an object of the specified size. Otherwise return
6881 NULL*/
86951993
AM
6882
6883static rtx
2a837de2 6884expand_builtin_atomic_is_lock_free (tree exp)
86951993 6885{
2a837de2
MS
6886 tree size;
6887 tree arg0 = CALL_EXPR_ARG (exp, 0);
6888 tree arg1 = CALL_EXPR_ARG (exp, 1);
86951993 6889
2a837de2 6890 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
410675cb 6891 {
2a837de2
MS
6892 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6893 return NULL_RTX;
410675cb
JJ
6894 }
6895
2a837de2
MS
6896 if (!flag_inline_atomics)
6897 return NULL_RTX;
86951993 6898
2a837de2
MS
6899 /* If the value is known at compile time, return the RTX for it. */
6900 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6901 if (size == boolean_true_node)
6902 return const1_rtx;
6903
6904 return NULL_RTX;
86951993
AM
6905}
6906
2a837de2
MS
6907/* Expand the __atomic_thread_fence intrinsic:
6908 void __atomic_thread_fence (enum memmodel)
6909 EXP is the CALL_EXPR. */
86951993 6910
2a837de2
MS
6911static void
6912expand_builtin_atomic_thread_fence (tree exp)
6913{
6914 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6915 expand_mem_thread_fence (model);
6916}
48ae6c13 6917
2a837de2
MS
6918/* Expand the __atomic_signal_fence intrinsic:
6919 void __atomic_signal_fence (enum memmodel)
6920 EXP is the CALL_EXPR. */
6921
6922static void
6923expand_builtin_atomic_signal_fence (tree exp)
48ae6c13 6924{
2a837de2
MS
6925 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6926 expand_mem_signal_fence (model);
6927}
48ae6c13 6928
2a837de2 6929/* Expand the __sync_synchronize intrinsic. */
23462d4d 6930
2a837de2
MS
6931static void
6932expand_builtin_sync_synchronize (void)
6933{
6934 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6935}
23462d4d 6936
2a837de2
MS
6937static rtx
6938expand_builtin_thread_pointer (tree exp, rtx target)
6939{
6940 enum insn_code icode;
6941 if (!validate_arglist (exp, VOID_TYPE))
6942 return const0_rtx;
6943 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6944 if (icode != CODE_FOR_nothing)
6945 {
6946 class expand_operand op;
6947 /* If the target is not sutitable then create a new target. */
6948 if (target == NULL_RTX
6949 || !REG_P (target)
6950 || GET_MODE (target) != Pmode)
6951 target = gen_reg_rtx (Pmode);
6952 create_output_operand (&op, target, Pmode);
6953 expand_insn (icode, 1, &op);
6954 return target;
6955 }
6956 error ("%<__builtin_thread_pointer%> is not supported on this target");
6957 return const0_rtx;
6958}
23462d4d 6959
2a837de2
MS
6960static void
6961expand_builtin_set_thread_pointer (tree exp)
6962{
6963 enum insn_code icode;
6964 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6965 return;
6966 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6967 if (icode != CODE_FOR_nothing)
6968 {
6969 class expand_operand op;
6970 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6971 Pmode, EXPAND_NORMAL);
6972 create_input_operand (&op, val, Pmode);
6973 expand_insn (icode, 1, &op);
6974 return;
6975 }
6976 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6977}
23462d4d 6978
2a837de2
MS
6979\f
6980/* Emit code to restore the current value of stack. */
23462d4d 6981
2a837de2
MS
6982static void
6983expand_stack_restore (tree var)
6984{
6985 rtx_insn *prev;
6986 rtx sa = expand_normal (var);
23462d4d 6987
2a837de2 6988 sa = convert_memory_address (Pmode, sa);
23462d4d 6989
2a837de2
MS
6990 prev = get_last_insn ();
6991 emit_stack_restore (SAVE_BLOCK, sa);
48ae6c13 6992
2a837de2
MS
6993 record_new_stack_level ();
6994
6995 fixup_args_size_notes (prev, get_last_insn (), 0);
48ae6c13
RH
6996}
6997
2a837de2 6998/* Emit code to save the current value of stack. */
48ae6c13
RH
6999
7000static rtx
2a837de2 7001expand_stack_save (void)
48ae6c13 7002{
2a837de2 7003 rtx ret = NULL_RTX;
48ae6c13 7004
2a837de2
MS
7005 emit_stack_save (SAVE_BLOCK, &ret);
7006 return ret;
7007}
48ae6c13 7008
2a837de2 7009/* Emit code to get the openacc gang, worker or vector id or size. */
48ae6c13
RH
7010
7011static rtx
2a837de2 7012expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
48ae6c13 7013{
2a837de2
MS
7014 const char *name;
7015 rtx fallback_retval;
7016 rtx_insn *(*gen_fn) (rtx, rtx);
7017 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7018 {
7019 case BUILT_IN_GOACC_PARLEVEL_ID:
7020 name = "__builtin_goacc_parlevel_id";
7021 fallback_retval = const0_rtx;
7022 gen_fn = targetm.gen_oacc_dim_pos;
7023 break;
7024 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7025 name = "__builtin_goacc_parlevel_size";
7026 fallback_retval = const1_rtx;
7027 gen_fn = targetm.gen_oacc_dim_size;
7028 break;
7029 default:
7030 gcc_unreachable ();
7031 }
48ae6c13 7032
2a837de2
MS
7033 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7034 {
7035 error ("%qs only supported in OpenACC code", name);
7036 return const0_rtx;
7037 }
86951993 7038
2a837de2
MS
7039 tree arg = CALL_EXPR_ARG (exp, 0);
7040 if (TREE_CODE (arg) != INTEGER_CST)
7041 {
7042 error ("non-constant argument 0 to %qs", name);
7043 return const0_rtx;
7044 }
86951993 7045
2a837de2
MS
7046 int dim = TREE_INT_CST_LOW (arg);
7047 switch (dim)
7048 {
7049 case GOMP_DIM_GANG:
7050 case GOMP_DIM_WORKER:
7051 case GOMP_DIM_VECTOR:
7052 break;
7053 default:
7054 error ("illegal argument 0 to %qs", name);
7055 return const0_rtx;
7056 }
86951993 7057
2a837de2
MS
7058 if (ignore)
7059 return target;
86951993 7060
2a837de2
MS
7061 if (target == NULL_RTX)
7062 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
86951993 7063
2a837de2
MS
7064 if (!targetm.have_oacc_dim_size ())
7065 {
7066 emit_move_insn (target, fallback_retval);
7067 return target;
7068 }
7069
7070 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7071 emit_insn (gen_fn (reg, GEN_INT (dim)));
7072 if (reg != target)
7073 emit_move_insn (target, reg);
7074
7075 return target;
86951993
AM
7076}
7077
2a837de2
MS
7078/* Expand a string compare operation using a sequence of char comparison
7079 to get rid of the calling overhead, with result going to TARGET if
7080 that's convenient.
86951993 7081
2a837de2
MS
7082 VAR_STR is the variable string source;
7083 CONST_STR is the constant string source;
7084 LENGTH is the number of chars to compare;
7085 CONST_STR_N indicates which source string is the constant string;
7086 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7087
7088 to: (assume const_str_n is 2, i.e., arg2 is a constant string)
86951993 7089
2a837de2
MS
7090 target = (int) (unsigned char) var_str[0]
7091 - (int) (unsigned char) const_str[0];
7092 if (target != 0)
7093 goto ne_label;
7094 ...
7095 target = (int) (unsigned char) var_str[length - 2]
7096 - (int) (unsigned char) const_str[length - 2];
7097 if (target != 0)
7098 goto ne_label;
7099 target = (int) (unsigned char) var_str[length - 1]
7100 - (int) (unsigned char) const_str[length - 1];
7101 ne_label:
7102 */
86951993 7103
2a837de2
MS
7104static rtx
7105inline_string_cmp (rtx target, tree var_str, const char *const_str,
7106 unsigned HOST_WIDE_INT length,
7107 int const_str_n, machine_mode mode)
7108{
7109 HOST_WIDE_INT offset = 0;
7110 rtx var_rtx_array
7111 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7112 rtx var_rtx = NULL_RTX;
7113 rtx const_rtx = NULL_RTX;
7114 rtx result = target ? target : gen_reg_rtx (mode);
7115 rtx_code_label *ne_label = gen_label_rtx ();
7116 tree unit_type_node = unsigned_char_type_node;
7117 scalar_int_mode unit_mode
7118 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
5dcfdccd 7119
2a837de2 7120 start_sequence ();
5dcfdccd 7121
2a837de2 7122 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
86951993 7123 {
2a837de2
MS
7124 var_rtx
7125 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7126 const_rtx = c_readstr (const_str + offset, unit_mode);
7127 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7128 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7129
7130 op0 = convert_modes (mode, unit_mode, op0, 1);
7131 op1 = convert_modes (mode, unit_mode, op1, 1);
7132 result = expand_simple_binop (mode, MINUS, op0, op1,
7133 result, 1, OPTAB_WIDEN);
7134 if (i < length - 1)
7135 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7136 mode, true, ne_label);
7137 offset += GET_MODE_SIZE (unit_mode);
86951993 7138 }
5dcfdccd 7139
2a837de2
MS
7140 emit_label (ne_label);
7141 rtx_insn *insns = get_insns ();
7142 end_sequence ();
7143 emit_insn (insns);
8673b671 7144
2a837de2 7145 return result;
86951993
AM
7146}
7147
2a837de2
MS
7148/* Inline expansion of a call to str(n)cmp and memcmp, with result going
7149 to TARGET if that's convenient.
7150 If the call is not been inlined, return NULL_RTX. */
86951993
AM
7151
7152static rtx
2a837de2 7153inline_expand_builtin_bytecmp (tree exp, rtx target)
86951993 7154{
2a837de2
MS
7155 tree fndecl = get_callee_fndecl (exp);
7156 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7157 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
86951993 7158
2a837de2
MS
7159 /* Do NOT apply this inlining expansion when optimizing for size or
7160 optimization level below 2. */
7161 if (optimize < 2 || optimize_insn_for_size_p ())
86951993
AM
7162 return NULL_RTX;
7163
2a837de2
MS
7164 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7165 || fcode == BUILT_IN_STRNCMP
7166 || fcode == BUILT_IN_MEMCMP);
86951993 7167
2a837de2
MS
7168 /* On a target where the type of the call (int) has same or narrower presicion
7169 than unsigned char, give up the inlining expansion. */
7170 if (TYPE_PRECISION (unsigned_char_type_node)
7171 >= TYPE_PRECISION (TREE_TYPE (exp)))
7172 return NULL_RTX;
86951993 7173
2a837de2
MS
7174 tree arg1 = CALL_EXPR_ARG (exp, 0);
7175 tree arg2 = CALL_EXPR_ARG (exp, 1);
7176 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
86951993 7177
2a837de2
MS
7178 unsigned HOST_WIDE_INT len1 = 0;
7179 unsigned HOST_WIDE_INT len2 = 0;
7180 unsigned HOST_WIDE_INT len3 = 0;
86951993 7181
2a837de2
MS
7182 /* Get the object representation of the initializers of ARG1 and ARG2
7183 as strings, provided they refer to constant objects, with their byte
7184 sizes in LEN1 and LEN2, respectively. */
7185 const char *bytes1 = getbyterep (arg1, &len1);
7186 const char *bytes2 = getbyterep (arg2, &len2);
86951993 7187
2a837de2
MS
7188 /* Fail if neither argument refers to an initialized constant. */
7189 if (!bytes1 && !bytes2)
7190 return NULL_RTX;
7191
7192 if (is_ncmp)
77df5327 7193 {
2a837de2
MS
7194 /* Fail if the memcmp/strncmp bound is not a constant. */
7195 if (!tree_fits_uhwi_p (len3_tree))
7196 return NULL_RTX;
86951993 7197
2a837de2 7198 len3 = tree_to_uhwi (len3_tree);
86951993 7199
2a837de2
MS
7200 if (fcode == BUILT_IN_MEMCMP)
7201 {
7202 /* Fail if the memcmp bound is greater than the size of either
7203 of the two constant objects. */
7204 if ((bytes1 && len1 < len3)
7205 || (bytes2 && len2 < len3))
7206 return NULL_RTX;
7207 }
7208 }
86951993 7209
2a837de2
MS
7210 if (fcode != BUILT_IN_MEMCMP)
7211 {
7212 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7213 and LEN2 to the length of the nul-terminated string stored
7214 in each. */
7215 if (bytes1 != NULL)
7216 len1 = strnlen (bytes1, len1) + 1;
7217 if (bytes2 != NULL)
7218 len2 = strnlen (bytes2, len2) + 1;
7219 }
86951993 7220
2a837de2
MS
7221 /* See inline_string_cmp. */
7222 int const_str_n;
7223 if (!len1)
7224 const_str_n = 2;
7225 else if (!len2)
7226 const_str_n = 1;
7227 else if (len2 > len1)
7228 const_str_n = 1;
7229 else
7230 const_str_n = 2;
672ce939 7231
2a837de2
MS
7232 /* For strncmp only, compute the new bound as the smallest of
7233 the lengths of the two strings (plus 1) and the bound provided
7234 to the function. */
7235 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7236 if (is_ncmp && len3 < bound)
7237 bound = len3;
2fdc29e8 7238
2a837de2
MS
7239 /* If the bound of the comparison is larger than the threshold,
7240 do nothing. */
7241 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
86951993
AM
7242 return NULL_RTX;
7243
2a837de2 7244 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
849a76a5 7245
2a837de2
MS
7246 /* Now, start inline expansion the call. */
7247 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7248 (const_str_n == 1) ? bytes1 : bytes2, bound,
7249 const_str_n, mode);
849a76a5
JJ
7250}
7251
2a837de2
MS
7252/* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7253 represents the size of the first argument to that call, or VOIDmode
7254 if the argument is a pointer. IGNORE will be true if the result
7255 isn't used. */
7256static rtx
7257expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7258 bool ignore)
849a76a5 7259{
2a837de2
MS
7260 rtx val, failsafe;
7261 unsigned nargs = call_expr_nargs (exp);
849a76a5 7262
2a837de2 7263 tree arg0 = CALL_EXPR_ARG (exp, 0);
849a76a5 7264
2a837de2 7265 if (mode == VOIDmode)
849a76a5 7266 {
2a837de2
MS
7267 mode = TYPE_MODE (TREE_TYPE (arg0));
7268 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
849a76a5
JJ
7269 }
7270
2a837de2 7271 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
849a76a5 7272
2a837de2
MS
7273 /* An optional second argument can be used as a failsafe value on
7274 some machines. If it isn't present, then the failsafe value is
7275 assumed to be 0. */
7276 if (nargs > 1)
849a76a5 7277 {
2a837de2
MS
7278 tree arg1 = CALL_EXPR_ARG (exp, 1);
7279 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
849a76a5 7280 }
2a837de2
MS
7281 else
7282 failsafe = const0_rtx;
849a76a5 7283
2a837de2
MS
7284 /* If the result isn't used, the behavior is undefined. It would be
7285 nice to emit a warning here, but path splitting means this might
7286 happen with legitimate code. So simply drop the builtin
7287 expansion in that case; we've handled any side-effects above. */
7288 if (ignore)
7289 return const0_rtx;
849a76a5 7290
2a837de2
MS
7291 /* If we don't have a suitable target, create one to hold the result. */
7292 if (target == NULL || GET_MODE (target) != mode)
7293 target = gen_reg_rtx (mode);
849a76a5 7294
2a837de2
MS
7295 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7296 val = convert_modes (mode, VOIDmode, val, false);
849a76a5 7297
2a837de2 7298 return targetm.speculation_safe_value (mode, target, val, failsafe);
849a76a5
JJ
7299}
7300
2a837de2
MS
7301/* Expand an expression EXP that calls a built-in function,
7302 with result going to TARGET if that's convenient
7303 (and in mode MODE if that's convenient).
7304 SUBTARGET may be used as the target for computing one of EXP's operands.
7305 IGNORE is nonzero if the value is to be ignored. */
86951993 7306
2a837de2
MS
7307rtx
7308expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7309 int ignore)
86951993 7310{
2a837de2
MS
7311 tree fndecl = get_callee_fndecl (exp);
7312 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7313 int flags;
86951993 7314
2a837de2
MS
7315 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7316 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
86951993 7317
2a837de2
MS
7318 /* When ASan is enabled, we don't want to expand some memory/string
7319 builtins and rely on libsanitizer's hooks. This allows us to avoid
7320 redundant checks and be sure, that possible overflow will be detected
7321 by ASan. */
86951993 7322
2a837de2
MS
7323 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7324 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7325 return expand_call (exp, target, ignore);
86951993 7326
2a837de2
MS
7327 /* When not optimizing, generate calls to library functions for a certain
7328 set of builtins. */
7329 if (!optimize
7330 && !called_as_built_in (fndecl)
7331 && fcode != BUILT_IN_FORK
7332 && fcode != BUILT_IN_EXECL
7333 && fcode != BUILT_IN_EXECV
7334 && fcode != BUILT_IN_EXECLP
7335 && fcode != BUILT_IN_EXECLE
7336 && fcode != BUILT_IN_EXECVP
7337 && fcode != BUILT_IN_EXECVE
7338 && fcode != BUILT_IN_CLEAR_CACHE
7339 && !ALLOCA_FUNCTION_CODE_P (fcode)
7340 && fcode != BUILT_IN_FREE)
7341 return expand_call (exp, target, ignore);
86951993 7342
2a837de2
MS
7343 /* The built-in function expanders test for target == const0_rtx
7344 to determine whether the function's result will be ignored. */
7345 if (ignore)
7346 target = const0_rtx;
86951993 7347
2a837de2
MS
7348 /* If the result of a pure or const built-in function is ignored, and
7349 none of its arguments are volatile, we can avoid expanding the
7350 built-in call and just evaluate the arguments for side-effects. */
7351 if (target == const0_rtx
7352 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7353 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7354 {
7355 bool volatilep = false;
7356 tree arg;
7357 call_expr_arg_iterator iter;
86951993 7358
2a837de2
MS
7359 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7360 if (TREE_THIS_VOLATILE (arg))
7361 {
7362 volatilep = true;
7363 break;
7364 }
86951993 7365
2a837de2
MS
7366 if (! volatilep)
7367 {
7368 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7369 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7370 return const0_rtx;
7371 }
86951993
AM
7372 }
7373
2a837de2
MS
7374 switch (fcode)
7375 {
7376 CASE_FLT_FN (BUILT_IN_FABS):
7377 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7378 case BUILT_IN_FABSD32:
7379 case BUILT_IN_FABSD64:
7380 case BUILT_IN_FABSD128:
7381 target = expand_builtin_fabs (exp, target, subtarget);
7382 if (target)
7383 return target;
7384 break;
86951993 7385
2a837de2
MS
7386 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7387 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7388 target = expand_builtin_copysign (exp, target, subtarget);
7389 if (target)
7390 return target;
7391 break;
86951993 7392
2a837de2
MS
7393 /* Just do a normal library call if we were unable to fold
7394 the values. */
7395 CASE_FLT_FN (BUILT_IN_CABS):
7396 break;
86951993 7397
2a837de2
MS
7398 CASE_FLT_FN (BUILT_IN_FMA):
7399 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7400 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7401 if (target)
7402 return target;
7403 break;
86951993 7404
2a837de2
MS
7405 CASE_FLT_FN (BUILT_IN_ILOGB):
7406 if (! flag_unsafe_math_optimizations)
7407 break;
7408 gcc_fallthrough ();
7409 CASE_FLT_FN (BUILT_IN_ISINF):
7410 CASE_FLT_FN (BUILT_IN_FINITE):
7411 case BUILT_IN_ISFINITE:
7412 case BUILT_IN_ISNORMAL:
7413 target = expand_builtin_interclass_mathfn (exp, target);
7414 if (target)
7415 return target;
7416 break;
86951993 7417
0982edd3
JJ
7418 case BUILT_IN_ISSIGNALING:
7419 target = expand_builtin_issignaling (exp, target);
7420 if (target)
7421 return target;
7422 break;
7423
2a837de2
MS
7424 CASE_FLT_FN (BUILT_IN_ICEIL):
7425 CASE_FLT_FN (BUILT_IN_LCEIL):
7426 CASE_FLT_FN (BUILT_IN_LLCEIL):
7427 CASE_FLT_FN (BUILT_IN_LFLOOR):
7428 CASE_FLT_FN (BUILT_IN_IFLOOR):
7429 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7430 target = expand_builtin_int_roundingfn (exp, target);
7431 if (target)
7432 return target;
7433 break;
86951993 7434
2a837de2
MS
7435 CASE_FLT_FN (BUILT_IN_IRINT):
7436 CASE_FLT_FN (BUILT_IN_LRINT):
7437 CASE_FLT_FN (BUILT_IN_LLRINT):
7438 CASE_FLT_FN (BUILT_IN_IROUND):
7439 CASE_FLT_FN (BUILT_IN_LROUND):
7440 CASE_FLT_FN (BUILT_IN_LLROUND):
7441 target = expand_builtin_int_roundingfn_2 (exp, target);
7442 if (target)
7443 return target;
7444 break;
86951993 7445
2a837de2
MS
7446 CASE_FLT_FN (BUILT_IN_POWI):
7447 target = expand_builtin_powi (exp, target);
7448 if (target)
7449 return target;
7450 break;
86951993 7451
2a837de2
MS
7452 CASE_FLT_FN (BUILT_IN_CEXPI):
7453 target = expand_builtin_cexpi (exp, target);
7454 gcc_assert (target);
7455 return target;
86951993 7456
2a837de2
MS
7457 CASE_FLT_FN (BUILT_IN_SIN):
7458 CASE_FLT_FN (BUILT_IN_COS):
7459 if (! flag_unsafe_math_optimizations)
7460 break;
7461 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7462 if (target)
7463 return target;
7464 break;
86951993 7465
2a837de2
MS
7466 CASE_FLT_FN (BUILT_IN_SINCOS):
7467 if (! flag_unsafe_math_optimizations)
7468 break;
7469 target = expand_builtin_sincos (exp);
7470 if (target)
7471 return target;
7472 break;
86951993 7473
4343f5e2
RFF
7474 case BUILT_IN_FEGETROUND:
7475 target = expand_builtin_fegetround (exp, target, target_mode);
7476 if (target)
7477 return target;
7478 break;
7479
7480 case BUILT_IN_FECLEAREXCEPT:
7481 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7482 feclearexcept_optab);
7483 if (target)
7484 return target;
7485 break;
7486
7487 case BUILT_IN_FERAISEEXCEPT:
7488 target = expand_builtin_feclear_feraise_except (exp, target, target_mode,
7489 feraiseexcept_optab);
7490 if (target)
7491 return target;
7492 break;
7493
2a837de2
MS
7494 case BUILT_IN_APPLY_ARGS:
7495 return expand_builtin_apply_args ();
08c273bb 7496
2a837de2
MS
7497 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7498 FUNCTION with a copy of the parameters described by
7499 ARGUMENTS, and ARGSIZE. It returns a block of memory
7500 allocated on the stack into which is stored all the registers
7501 that might possibly be used for returning the result of a
7502 function. ARGUMENTS is the value returned by
7503 __builtin_apply_args. ARGSIZE is the number of bytes of
7504 arguments that must be copied. ??? How should this value be
7505 computed? We'll also need a safe worst case value for varargs
7506 functions. */
7507 case BUILT_IN_APPLY:
7508 if (!validate_arglist (exp, POINTER_TYPE,
7509 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7510 && !validate_arglist (exp, REFERENCE_TYPE,
7511 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7512 return const0_rtx;
7513 else
7514 {
7515 rtx ops[3];
86951993 7516
2a837de2
MS
7517 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7518 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7519 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
86951993 7520
2a837de2 7521 return expand_builtin_apply (ops[0], ops[1], ops[2]);
154b68db 7522 }
adedd5c1 7523
2a837de2
MS
7524 /* __builtin_return (RESULT) causes the function to return the
7525 value described by RESULT. RESULT is address of the block of
7526 memory returned by __builtin_apply. */
7527 case BUILT_IN_RETURN:
7528 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7529 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7530 return const0_rtx;
adedd5c1 7531
2a837de2
MS
7532 case BUILT_IN_SAVEREGS:
7533 return expand_builtin_saveregs ();
adedd5c1 7534
2a837de2
MS
7535 case BUILT_IN_VA_ARG_PACK:
7536 /* All valid uses of __builtin_va_arg_pack () are removed during
7537 inlining. */
7538 error ("invalid use of %<__builtin_va_arg_pack ()%>");
7539 return const0_rtx;
adedd5c1 7540
2a837de2
MS
7541 case BUILT_IN_VA_ARG_PACK_LEN:
7542 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7543 inlining. */
7544 error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7545 return const0_rtx;
adedd5c1 7546
2a837de2
MS
7547 /* Return the address of the first anonymous stack arg. */
7548 case BUILT_IN_NEXT_ARG:
7549 if (fold_builtin_next_arg (exp, false))
7550 return const0_rtx;
7551 return expand_builtin_next_arg ();
adedd5c1 7552
2a837de2
MS
7553 case BUILT_IN_CLEAR_CACHE:
7554 expand_builtin___clear_cache (exp);
7555 return const0_rtx;
adedd5c1 7556
2a837de2
MS
7557 case BUILT_IN_CLASSIFY_TYPE:
7558 return expand_builtin_classify_type (exp);
adedd5c1 7559
2a837de2
MS
7560 case BUILT_IN_CONSTANT_P:
7561 return const0_rtx;
d660c35e 7562
2a837de2
MS
7563 case BUILT_IN_FRAME_ADDRESS:
7564 case BUILT_IN_RETURN_ADDRESS:
7565 return expand_builtin_frame_address (fndecl, exp);
d660c35e 7566
2a837de2
MS
7567 /* Returns the address of the area where the structure is returned.
7568 0 otherwise. */
7569 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7570 if (call_expr_nargs (exp) != 0
7571 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7572 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7573 return const0_rtx;
7574 else
7575 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
d660c35e 7576
2a837de2
MS
7577 CASE_BUILT_IN_ALLOCA:
7578 target = expand_builtin_alloca (exp);
7579 if (target)
7580 return target;
7581 break;
d660c35e 7582
2a837de2
MS
7583 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7584 return expand_asan_emit_allocas_unpoison (exp);
d660c35e 7585
2a837de2
MS
7586 case BUILT_IN_STACK_SAVE:
7587 return expand_stack_save ();
d660c35e 7588
2a837de2
MS
7589 case BUILT_IN_STACK_RESTORE:
7590 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7591 return const0_rtx;
d660c35e 7592
2a837de2
MS
7593 case BUILT_IN_BSWAP16:
7594 case BUILT_IN_BSWAP32:
7595 case BUILT_IN_BSWAP64:
7596 case BUILT_IN_BSWAP128:
7597 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7598 if (target)
7599 return target;
7600 break;
d660c35e 7601
2a837de2
MS
7602 CASE_INT_FN (BUILT_IN_FFS):
7603 target = expand_builtin_unop (target_mode, exp, target,
7604 subtarget, ffs_optab);
7605 if (target)
7606 return target;
7607 break;
d660c35e 7608
2a837de2
MS
7609 CASE_INT_FN (BUILT_IN_CLZ):
7610 target = expand_builtin_unop (target_mode, exp, target,
7611 subtarget, clz_optab);
7612 if (target)
7613 return target;
7614 break;
d660c35e 7615
2a837de2
MS
7616 CASE_INT_FN (BUILT_IN_CTZ):
7617 target = expand_builtin_unop (target_mode, exp, target,
7618 subtarget, ctz_optab);
7619 if (target)
7620 return target;
7621 break;
86951993 7622
2a837de2
MS
7623 CASE_INT_FN (BUILT_IN_CLRSB):
7624 target = expand_builtin_unop (target_mode, exp, target,
7625 subtarget, clrsb_optab);
7626 if (target)
7627 return target;
7628 break;
86951993 7629
2a837de2
MS
7630 CASE_INT_FN (BUILT_IN_POPCOUNT):
7631 target = expand_builtin_unop (target_mode, exp, target,
7632 subtarget, popcount_optab);
7633 if (target)
7634 return target;
7635 break;
48ae6c13 7636
2a837de2
MS
7637 CASE_INT_FN (BUILT_IN_PARITY):
7638 target = expand_builtin_unop (target_mode, exp, target,
7639 subtarget, parity_optab);
7640 if (target)
7641 return target;
7642 break;
f4b31647 7643
2a837de2
MS
7644 case BUILT_IN_STRLEN:
7645 target = expand_builtin_strlen (exp, target, target_mode);
7646 if (target)
7647 return target;
7648 break;
86951993 7649
2a837de2
MS
7650 case BUILT_IN_STRNLEN:
7651 target = expand_builtin_strnlen (exp, target, target_mode);
7652 if (target)
7653 return target;
7654 break;
310055e7 7655
2a837de2
MS
7656 case BUILT_IN_STRCPY:
7657 target = expand_builtin_strcpy (exp, target);
7658 if (target)
7659 return target;
7660 break;
86951993 7661
2a837de2
MS
7662 case BUILT_IN_STRNCPY:
7663 target = expand_builtin_strncpy (exp, target);
7664 if (target)
7665 return target;
7666 break;
86951993 7667
2a837de2
MS
7668 case BUILT_IN_STPCPY:
7669 target = expand_builtin_stpcpy (exp, target, mode);
7670 if (target)
7671 return target;
7672 break;
86951993 7673
2a837de2
MS
7674 case BUILT_IN_MEMCPY:
7675 target = expand_builtin_memcpy (exp, target);
7676 if (target)
7677 return target;
7678 break;
86951993 7679
2a837de2
MS
7680 case BUILT_IN_MEMMOVE:
7681 target = expand_builtin_memmove (exp, target);
7682 if (target)
7683 return target;
7684 break;
86951993 7685
2a837de2
MS
7686 case BUILT_IN_MEMPCPY:
7687 target = expand_builtin_mempcpy (exp, target);
7688 if (target)
7689 return target;
7690 break;
86951993 7691
2a837de2
MS
7692 case BUILT_IN_MEMSET:
7693 target = expand_builtin_memset (exp, target, mode);
7694 if (target)
7695 return target;
7696 break;
86951993 7697
2a837de2
MS
7698 case BUILT_IN_BZERO:
7699 target = expand_builtin_bzero (exp);
7700 if (target)
7701 return target;
7702 break;
86951993 7703
2a837de2
MS
7704 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7705 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7706 when changing it to a strcmp call. */
7707 case BUILT_IN_STRCMP_EQ:
7708 target = expand_builtin_memcmp (exp, target, true);
7709 if (target)
7710 return target;
86951993 7711
2a837de2
MS
7712 /* Change this call back to a BUILT_IN_STRCMP. */
7713 TREE_OPERAND (exp, 1)
7714 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
86951993 7715
2a837de2
MS
7716 /* Delete the last parameter. */
7717 unsigned int i;
7718 vec<tree, va_gc> *arg_vec;
7719 vec_alloc (arg_vec, 2);
7720 for (i = 0; i < 2; i++)
7721 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7722 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7723 /* FALLTHROUGH */
86951993 7724
2a837de2
MS
7725 case BUILT_IN_STRCMP:
7726 target = expand_builtin_strcmp (exp, target);
7727 if (target)
7728 return target;
7729 break;
86951993 7730
2a837de2
MS
7731 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7732 back to a BUILT_IN_STRNCMP. */
7733 case BUILT_IN_STRNCMP_EQ:
7734 target = expand_builtin_memcmp (exp, target, true);
7735 if (target)
7736 return target;
86951993 7737
2a837de2
MS
7738 /* Change it back to a BUILT_IN_STRNCMP. */
7739 TREE_OPERAND (exp, 1)
7740 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7741 /* FALLTHROUGH */
48ae6c13 7742
2a837de2
MS
7743 case BUILT_IN_STRNCMP:
7744 target = expand_builtin_strncmp (exp, target, mode);
7745 if (target)
7746 return target;
7747 break;
48ae6c13 7748
2a837de2
MS
7749 case BUILT_IN_BCMP:
7750 case BUILT_IN_MEMCMP:
7751 case BUILT_IN_MEMCMP_EQ:
7752 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7753 if (target)
7754 return target;
7755 if (fcode == BUILT_IN_MEMCMP_EQ)
7756 {
7757 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7758 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7759 }
7760 break;
48ae6c13 7761
2a837de2
MS
7762 case BUILT_IN_SETJMP:
7763 /* This should have been lowered to the builtins below. */
7764 gcc_unreachable ();
862d0b35 7765
2a837de2
MS
7766 case BUILT_IN_SETJMP_SETUP:
7767 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7768 and the receiver label. */
7769 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7770 {
7771 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7772 VOIDmode, EXPAND_NORMAL);
7773 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7774 rtx_insn *label_r = label_rtx (label);
862d0b35 7775
2a837de2 7776 expand_builtin_setjmp_setup (buf_addr, label_r);
2a837de2
MS
7777 return const0_rtx;
7778 }
7779 break;
862d0b35 7780
2a837de2
MS
7781 case BUILT_IN_SETJMP_RECEIVER:
7782 /* __builtin_setjmp_receiver is passed the receiver label. */
7783 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7784 {
7785 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7786 rtx_insn *label_r = label_rtx (label);
d33606c3 7787
2a837de2 7788 expand_builtin_setjmp_receiver (label_r);
daa36cfc
AM
7789 nonlocal_goto_handler_labels
7790 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7791 nonlocal_goto_handler_labels);
7792 /* ??? Do not let expand_label treat us as such since we would
7793 not want to be both on the list of non-local labels and on
7794 the list of forced labels. */
7795 FORCED_LABEL (label) = 0;
2a837de2
MS
7796 return const0_rtx;
7797 }
7798 break;
d33606c3 7799
2a837de2
MS
7800 /* __builtin_longjmp is passed a pointer to an array of five words.
7801 It's similar to the C library longjmp function but works with
7802 __builtin_setjmp above. */
7803 case BUILT_IN_LONGJMP:
7804 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7805 {
7806 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7807 VOIDmode, EXPAND_NORMAL);
7808 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
862d0b35 7809
2a837de2
MS
7810 if (value != const1_rtx)
7811 {
7812 error ("%<__builtin_longjmp%> second argument must be 1");
7813 return const0_rtx;
7814 }
862d0b35 7815
2a837de2
MS
7816 expand_builtin_longjmp (buf_addr, value);
7817 return const0_rtx;
7818 }
7819 break;
862d0b35 7820
2a837de2
MS
7821 case BUILT_IN_NONLOCAL_GOTO:
7822 target = expand_builtin_nonlocal_goto (exp);
7823 if (target)
7824 return target;
7825 break;
862d0b35 7826
2a837de2
MS
7827 /* This updates the setjmp buffer that is its argument with the value
7828 of the current stack pointer. */
7829 case BUILT_IN_UPDATE_SETJMP_BUF:
7830 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7831 {
7832 rtx buf_addr
7833 = expand_normal (CALL_EXPR_ARG (exp, 0));
1f62d637 7834
2a837de2
MS
7835 expand_builtin_update_setjmp_buf (buf_addr);
7836 return const0_rtx;
7837 }
1f62d637 7838 break;
1f62d637 7839
2a837de2
MS
7840 case BUILT_IN_TRAP:
7841 expand_builtin_trap ();
1f62d637 7842 return const0_rtx;
1f62d637 7843
2a837de2
MS
7844 case BUILT_IN_UNREACHABLE:
7845 expand_builtin_unreachable ();
1f62d637 7846 return const0_rtx;
1f62d637 7847
2a837de2
MS
7848 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7849 case BUILT_IN_SIGNBITD32:
7850 case BUILT_IN_SIGNBITD64:
7851 case BUILT_IN_SIGNBITD128:
7852 target = expand_builtin_signbit (exp, target);
7853 if (target)
7854 return target;
1f62d637 7855 break;
1f62d637 7856
2a837de2
MS
7857 /* Various hooks for the DWARF 2 __throw routine. */
7858 case BUILT_IN_UNWIND_INIT:
7859 expand_builtin_unwind_init ();
7860 return const0_rtx;
7861 case BUILT_IN_DWARF_CFA:
7862 return virtual_cfa_rtx;
7863#ifdef DWARF2_UNWIND_INFO
7864 case BUILT_IN_DWARF_SP_COLUMN:
7865 return expand_builtin_dwarf_sp_column ();
7866 case BUILT_IN_INIT_DWARF_REG_SIZES:
7867 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7868 return const0_rtx;
7869#endif
7870 case BUILT_IN_FROB_RETURN_ADDR:
7871 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7872 case BUILT_IN_EXTRACT_RETURN_ADDR:
7873 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7874 case BUILT_IN_EH_RETURN:
7875 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7876 CALL_EXPR_ARG (exp, 1));
7877 return const0_rtx;
7878 case BUILT_IN_EH_RETURN_DATA_REGNO:
7879 return expand_builtin_eh_return_data_regno (exp);
7880 case BUILT_IN_EXTEND_POINTER:
7881 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7882 case BUILT_IN_EH_POINTER:
7883 return expand_builtin_eh_pointer (exp);
7884 case BUILT_IN_EH_FILTER:
7885 return expand_builtin_eh_filter (exp);
7886 case BUILT_IN_EH_COPY_VALUES:
7887 return expand_builtin_eh_copy_values (exp);
39bc9f83 7888
2a837de2
MS
7889 case BUILT_IN_VA_START:
7890 return expand_builtin_va_start (exp);
7891 case BUILT_IN_VA_END:
7892 return expand_builtin_va_end (exp);
7893 case BUILT_IN_VA_COPY:
7894 return expand_builtin_va_copy (exp);
7895 case BUILT_IN_EXPECT:
7896 return expand_builtin_expect (exp, target);
7897 case BUILT_IN_EXPECT_WITH_PROBABILITY:
7898 return expand_builtin_expect_with_probability (exp, target);
7899 case BUILT_IN_ASSUME_ALIGNED:
7900 return expand_builtin_assume_aligned (exp, target);
7901 case BUILT_IN_PREFETCH:
7902 expand_builtin_prefetch (exp);
7903 return const0_rtx;
1f62d637 7904
2a837de2
MS
7905 case BUILT_IN_INIT_TRAMPOLINE:
7906 return expand_builtin_init_trampoline (exp, true);
7907 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7908 return expand_builtin_init_trampoline (exp, false);
7909 case BUILT_IN_ADJUST_TRAMPOLINE:
7910 return expand_builtin_adjust_trampoline (exp);
1f62d637 7911
2a837de2
MS
7912 case BUILT_IN_INIT_DESCRIPTOR:
7913 return expand_builtin_init_descriptor (exp);
7914 case BUILT_IN_ADJUST_DESCRIPTOR:
7915 return expand_builtin_adjust_descriptor (exp);
41dbbb37 7916
2a837de2
MS
7917 case BUILT_IN_FORK:
7918 case BUILT_IN_EXECL:
7919 case BUILT_IN_EXECV:
7920 case BUILT_IN_EXECLP:
7921 case BUILT_IN_EXECLE:
7922 case BUILT_IN_EXECVP:
7923 case BUILT_IN_EXECVE:
7924 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7925 if (target)
7926 return target;
7927 break;
b2272b13 7928
2a837de2
MS
7929 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7930 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7931 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7932 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7933 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7934 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7935 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7936 if (target)
7937 return target;
7938 break;
b2272b13 7939
2a837de2
MS
7940 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7941 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7942 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7943 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7944 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7945 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7946 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7947 if (target)
7948 return target;
7949 break;
b2272b13 7950
2a837de2
MS
7951 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7952 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7953 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7954 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7955 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7956 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7957 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7958 if (target)
7959 return target;
7960 break;
10a0e2a9 7961
2a837de2
MS
7962 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7963 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7964 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7965 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7966 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7967 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7968 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7969 if (target)
7970 return target;
7971 break;
b2272b13 7972
2a837de2
MS
7973 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7974 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7975 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7976 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7977 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7978 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7979 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7980 if (target)
7981 return target;
7982 break;
b2272b13 7983
2a837de2
MS
7984 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7985 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7986 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7987 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7988 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7989 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7990 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7991 if (target)
7992 return target;
7993 break;
b2272b13 7994
2a837de2
MS
7995 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7996 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7997 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7998 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7999 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8000 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8001 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8002 if (target)
8003 return target;
8004 break;
d5803b98 8005
2a837de2
MS
8006 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8007 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8008 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8009 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8010 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8011 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8012 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8013 if (target)
8014 return target;
8015 break;
b2272b13 8016
2a837de2
MS
8017 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8018 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8019 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8020 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8021 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8022 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8023 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8024 if (target)
8025 return target;
8026 break;
3d592d2d 8027
2a837de2
MS
8028 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8029 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8030 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8031 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8032 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8033 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8034 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8035 if (target)
8036 return target;
8037 break;
b2272b13 8038
2a837de2
MS
8039 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8040 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8041 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8042 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8043 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8044 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8045 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8046 if (target)
8047 return target;
8048 break;
523a59ff 8049
2a837de2
MS
8050 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8051 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8052 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8053 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8054 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8055 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8056 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8057 if (target)
8058 return target;
8059 break;
b2272b13 8060
2a837de2
MS
8061 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8062 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8063 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8064 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8065 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8066 if (mode == VOIDmode)
8067 mode = TYPE_MODE (boolean_type_node);
8068 if (!target || !register_operand (target, mode))
8069 target = gen_reg_rtx (mode);
b2272b13 8070
2a837de2
MS
8071 mode = get_builtin_sync_mode
8072 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8073 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8074 if (target)
8075 return target;
8076 break;
10a0e2a9 8077
2a837de2
MS
8078 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8079 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8080 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8081 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8082 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8083 mode = get_builtin_sync_mode
8084 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8085 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8086 if (target)
8087 return target;
8088 break;
b2272b13 8089
2a837de2
MS
8090 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8091 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8092 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8093 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8094 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8095 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8096 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8097 if (target)
8098 return target;
8099 break;
bdea98ca 8100
2a837de2
MS
8101 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8102 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8103 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8104 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8105 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8106 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8107 expand_builtin_sync_lock_release (mode, exp);
8108 return const0_rtx;
bdea98ca 8109
2a837de2
MS
8110 case BUILT_IN_SYNC_SYNCHRONIZE:
8111 expand_builtin_sync_synchronize ();
8112 return const0_rtx;
28f4ec01 8113
2a837de2
MS
8114 case BUILT_IN_ATOMIC_EXCHANGE_1:
8115 case BUILT_IN_ATOMIC_EXCHANGE_2:
8116 case BUILT_IN_ATOMIC_EXCHANGE_4:
8117 case BUILT_IN_ATOMIC_EXCHANGE_8:
8118 case BUILT_IN_ATOMIC_EXCHANGE_16:
8119 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8120 target = expand_builtin_atomic_exchange (mode, exp, target);
8121 if (target)
8122 return target;
8123 break;
0a45ec5c 8124
2a837de2
MS
8125 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8126 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8127 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8128 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8129 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8130 {
8131 unsigned int nargs, z;
8132 vec<tree, va_gc> *vec;
0a45ec5c 8133
2a837de2
MS
8134 mode =
8135 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8136 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8137 if (target)
8138 return target;
0a45ec5c 8139
2a837de2
MS
8140 /* If this is turned into an external library call, the weak parameter
8141 must be dropped to match the expected parameter list. */
8142 nargs = call_expr_nargs (exp);
8143 vec_alloc (vec, nargs - 1);
8144 for (z = 0; z < 3; z++)
8145 vec->quick_push (CALL_EXPR_ARG (exp, z));
8146 /* Skip the boolean weak parameter. */
8147 for (z = 4; z < 6; z++)
8148 vec->quick_push (CALL_EXPR_ARG (exp, z));
8149 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8150 break;
8151 }
0a45ec5c 8152
2a837de2
MS
8153 case BUILT_IN_ATOMIC_LOAD_1:
8154 case BUILT_IN_ATOMIC_LOAD_2:
8155 case BUILT_IN_ATOMIC_LOAD_4:
8156 case BUILT_IN_ATOMIC_LOAD_8:
8157 case BUILT_IN_ATOMIC_LOAD_16:
8158 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8159 target = expand_builtin_atomic_load (mode, exp, target);
075ec276 8160 if (target)
c22cacf3 8161 return target;
075ec276
RS
8162 break;
8163
2a837de2
MS
8164 case BUILT_IN_ATOMIC_STORE_1:
8165 case BUILT_IN_ATOMIC_STORE_2:
8166 case BUILT_IN_ATOMIC_STORE_4:
8167 case BUILT_IN_ATOMIC_STORE_8:
8168 case BUILT_IN_ATOMIC_STORE_16:
8169 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8170 target = expand_builtin_atomic_store (mode, exp);
046625fa 8171 if (target)
2a837de2 8172 return const0_rtx;
046625fa
RH
8173 break;
8174
2a837de2
MS
8175 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8176 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8177 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8178 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8179 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8180 {
8181 enum built_in_function lib;
8182 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8183 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8184 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8185 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8186 ignore, lib);
8187 if (target)
8188 return target;
eaee4464 8189 break;
2a837de2
MS
8190 }
8191 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8192 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8193 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8194 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8195 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8196 {
8197 enum built_in_function lib;
8198 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8199 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8200 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8201 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8202 ignore, lib);
8203 if (target)
8204 return target;
8205 break;
8206 }
8207 case BUILT_IN_ATOMIC_AND_FETCH_1:
8208 case BUILT_IN_ATOMIC_AND_FETCH_2:
8209 case BUILT_IN_ATOMIC_AND_FETCH_4:
8210 case BUILT_IN_ATOMIC_AND_FETCH_8:
8211 case BUILT_IN_ATOMIC_AND_FETCH_16:
8212 {
8213 enum built_in_function lib;
8214 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8215 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8216 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8217 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8218 ignore, lib);
8219 if (target)
8220 return target;
8221 break;
8222 }
8223 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8224 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8225 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8226 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8227 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8228 {
8229 enum built_in_function lib;
8230 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8231 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8232 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8233 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8234 ignore, lib);
8235 if (target)
8236 return target;
8237 break;
8238 }
8239 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8240 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8241 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8242 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8243 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8244 {
8245 enum built_in_function lib;
8246 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8247 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8248 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8249 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8250 ignore, lib);
8251 if (target)
8252 return target;
8253 break;
8254 }
8255 case BUILT_IN_ATOMIC_OR_FETCH_1:
8256 case BUILT_IN_ATOMIC_OR_FETCH_2:
8257 case BUILT_IN_ATOMIC_OR_FETCH_4:
8258 case BUILT_IN_ATOMIC_OR_FETCH_8:
8259 case BUILT_IN_ATOMIC_OR_FETCH_16:
8260 {
8261 enum built_in_function lib;
8262 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8263 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8264 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8265 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8266 ignore, lib);
8267 if (target)
8268 return target;
8269 break;
8270 }
8271 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8272 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8273 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8274 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8275 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8276 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8277 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8278 ignore, BUILT_IN_NONE);
eaee4464
UB
8279 if (target)
8280 return target;
8281 break;
2a837de2
MS
8282
8283 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8284 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8285 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8286 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8287 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8288 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8289 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8290 ignore, BUILT_IN_NONE);
d8b42d06
UB
8291 if (target)
8292 return target;
8293 break;
8294
2a837de2
MS
8295 case BUILT_IN_ATOMIC_FETCH_AND_1:
8296 case BUILT_IN_ATOMIC_FETCH_AND_2:
8297 case BUILT_IN_ATOMIC_FETCH_AND_4:
8298 case BUILT_IN_ATOMIC_FETCH_AND_8:
8299 case BUILT_IN_ATOMIC_FETCH_AND_16:
8300 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8301 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8302 ignore, BUILT_IN_NONE);
0bfa1541
RG
8303 if (target)
8304 return target;
8305 break;
2a837de2
MS
8306
8307 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8308 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8309 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8310 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8311 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8312 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8313 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8314 ignore, BUILT_IN_NONE);
17684d46
RG
8315 if (target)
8316 return target;
8317 break;
2a837de2
MS
8318
8319 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8320 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8321 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8322 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8323 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8324 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8325 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8326 ignore, BUILT_IN_NONE);
6c7cf1f0
UB
8327 if (target)
8328 return target;
8329 break;
2a837de2
MS
8330
8331 case BUILT_IN_ATOMIC_FETCH_OR_1:
8332 case BUILT_IN_ATOMIC_FETCH_OR_2:
8333 case BUILT_IN_ATOMIC_FETCH_OR_4:
8334 case BUILT_IN_ATOMIC_FETCH_OR_8:
8335 case BUILT_IN_ATOMIC_FETCH_OR_16:
8336 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8337 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8338 ignore, BUILT_IN_NONE);
403e54f0
RG
8339 if (target)
8340 return target;
8341 break;
8342
2a837de2
MS
8343 case BUILT_IN_ATOMIC_TEST_AND_SET:
8344 return expand_builtin_atomic_test_and_set (exp, target);
28f4ec01 8345
2a837de2
MS
8346 case BUILT_IN_ATOMIC_CLEAR:
8347 return expand_builtin_atomic_clear (exp);
8348
8349 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8350 return expand_builtin_atomic_always_lock_free (exp);
28f4ec01 8351
2a837de2
MS
8352 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8353 target = expand_builtin_atomic_is_lock_free (exp);
8354 if (target)
8355 return target;
8356 break;
28f4ec01 8357
2a837de2
MS
8358 case BUILT_IN_ATOMIC_THREAD_FENCE:
8359 expand_builtin_atomic_thread_fence (exp);
28f4ec01
BS
8360 return const0_rtx;
8361
2a837de2
MS
8362 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8363 expand_builtin_atomic_signal_fence (exp);
6ef5231b
JJ
8364 return const0_rtx;
8365
2a837de2 8366 case BUILT_IN_OBJECT_SIZE:
79a89108 8367 case BUILT_IN_DYNAMIC_OBJECT_SIZE:
2a837de2 8368 return expand_builtin_object_size (exp);
28f4ec01 8369
2a837de2
MS
8370 case BUILT_IN_MEMCPY_CHK:
8371 case BUILT_IN_MEMPCPY_CHK:
8372 case BUILT_IN_MEMMOVE_CHK:
8373 case BUILT_IN_MEMSET_CHK:
8374 target = expand_builtin_memory_chk (exp, target, mode, fcode);
28f4ec01
BS
8375 if (target)
8376 return target;
8377 break;
8378
2a837de2
MS
8379 case BUILT_IN_STRCPY_CHK:
8380 case BUILT_IN_STPCPY_CHK:
8381 case BUILT_IN_STRNCPY_CHK:
8382 case BUILT_IN_STPNCPY_CHK:
8383 case BUILT_IN_STRCAT_CHK:
8384 case BUILT_IN_STRNCAT_CHK:
8385 case BUILT_IN_SNPRINTF_CHK:
8386 case BUILT_IN_VSNPRINTF_CHK:
8387 maybe_emit_chk_warning (exp, fcode);
e3a709be
KG
8388 break;
8389
2a837de2
MS
8390 case BUILT_IN_SPRINTF_CHK:
8391 case BUILT_IN_VSPRINTF_CHK:
8392 maybe_emit_sprintf_chk_warning (exp, fcode);
8393 break;
b8698a0f 8394
2a837de2
MS
8395 case BUILT_IN_THREAD_POINTER:
8396 return expand_builtin_thread_pointer (exp, target);
8397
8398 case BUILT_IN_SET_THREAD_POINTER:
8399 expand_builtin_set_thread_pointer (exp);
8400 return const0_rtx;
8401
8402 case BUILT_IN_ACC_ON_DEVICE:
8403 /* Do library call, if we failed to expand the builtin when
8404 folding. */
8405 break;
8406
8407 case BUILT_IN_GOACC_PARLEVEL_ID:
8408 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8409 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8410
8411 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8412 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8413
8414 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8415 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8416 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8417 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8418 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8419 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8420 return expand_speculation_safe_value (mode, exp, target, ignore);
8421
8422 default: /* just do library call, if unknown builtin */
8423 break;
3d577eaf 8424 }
b8698a0f 8425
2a837de2
MS
8426 /* The switch statement above can drop through to cause the function
8427 to be called normally. */
8428 return expand_call (exp, target, ignore);
3d577eaf
KG
8429}
8430
2a837de2
MS
8431/* Determine whether a tree node represents a call to a built-in
8432 function. If the tree T is a call to a built-in function with
8433 the right number of arguments of the appropriate types, return
8434 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8435 Otherwise the return value is END_BUILTINS. */
903c723b 8436
2a837de2
MS
8437enum built_in_function
8438builtin_mathfn_code (const_tree t)
903c723b 8439{
2a837de2
MS
8440 const_tree fndecl, arg, parmlist;
8441 const_tree argtype, parmtype;
8442 const_call_expr_arg_iterator iter;
903c723b 8443
2a837de2
MS
8444 if (TREE_CODE (t) != CALL_EXPR)
8445 return END_BUILTINS;
903c723b 8446
2a837de2
MS
8447 fndecl = get_callee_fndecl (t);
8448 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8449 return END_BUILTINS;
02cf2861 8450
2a837de2
MS
8451 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8452 init_const_call_expr_arg_iterator (t, &iter);
8453 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
903c723b 8454 {
2a837de2
MS
8455 /* If a function doesn't take a variable number of arguments,
8456 the last element in the list will have type `void'. */
8457 parmtype = TREE_VALUE (parmlist);
8458 if (VOID_TYPE_P (parmtype))
8459 {
8460 if (more_const_call_expr_args_p (&iter))
8461 return END_BUILTINS;
8462 return DECL_FUNCTION_CODE (fndecl);
8463 }
44e10129 8464
2a837de2
MS
8465 if (! more_const_call_expr_args_p (&iter))
8466 return END_BUILTINS;
903c723b 8467
2a837de2
MS
8468 arg = next_const_call_expr_arg (&iter);
8469 argtype = TREE_TYPE (arg);
903c723b 8470
2a837de2
MS
8471 if (SCALAR_FLOAT_TYPE_P (parmtype))
8472 {
8473 if (! SCALAR_FLOAT_TYPE_P (argtype))
8474 return END_BUILTINS;
8475 }
8476 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8477 {
8478 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8479 return END_BUILTINS;
8480 }
8481 else if (POINTER_TYPE_P (parmtype))
8482 {
8483 if (! POINTER_TYPE_P (argtype))
8484 return END_BUILTINS;
8485 }
8486 else if (INTEGRAL_TYPE_P (parmtype))
8487 {
8488 if (! INTEGRAL_TYPE_P (argtype))
8489 return END_BUILTINS;
8490 }
8491 else
8492 return END_BUILTINS;
8493 }
903c723b 8494
2a837de2
MS
8495 /* Variable-length argument list. */
8496 return DECL_FUNCTION_CODE (fndecl);
8497}
903c723b 8498
2a837de2
MS
8499/* Fold a call to __builtin_constant_p, if we know its argument ARG will
8500 evaluate to a constant. */
903c723b 8501
2a837de2
MS
8502static tree
8503fold_builtin_constant_p (tree arg)
8504{
8505 /* We return 1 for a numeric type that's known to be a constant
8506 value at compile-time or for an aggregate type that's a
8507 literal constant. */
8508 STRIP_NOPS (arg);
8509
8510 /* If we know this is a constant, emit the constant of one. */
8511 if (CONSTANT_CLASS_P (arg)
8512 || (TREE_CODE (arg) == CONSTRUCTOR
8513 && TREE_CONSTANT (arg)))
8514 return integer_one_node;
8515 if (TREE_CODE (arg) == ADDR_EXPR)
8516 {
8517 tree op = TREE_OPERAND (arg, 0);
8518 if (TREE_CODE (op) == STRING_CST
8519 || (TREE_CODE (op) == ARRAY_REF
8520 && integer_zerop (TREE_OPERAND (op, 1))
8521 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8522 return integer_one_node;
903c723b
TC
8523 }
8524
2a837de2
MS
8525 /* If this expression has side effects, show we don't know it to be a
8526 constant. Likewise if it's a pointer or aggregate type since in
8527 those case we only want literals, since those are only optimized
8528 when generating RTL, not later.
8529 And finally, if we are compiling an initializer, not code, we
8530 need to return a definite result now; there's not going to be any
8531 more optimization done. */
8532 if (TREE_SIDE_EFFECTS (arg)
8533 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8534 || POINTER_TYPE_P (TREE_TYPE (arg))
8535 || cfun == 0
8536 || folding_initializer
8537 || force_folding_builtin_constant_p)
8538 return integer_zero_node;
8539
903c723b
TC
8540 return NULL_TREE;
8541}
8542
2a837de2
MS
8543/* Create builtin_expect or builtin_expect_with_probability
8544 with PRED and EXPECTED as its arguments and return it as a truthvalue.
8545 Fortran FE can also produce builtin_expect with PREDICTOR as third argument.
8546 builtin_expect_with_probability instead uses third argument as PROBABILITY
8547 value. */
64a9295a
PB
8548
8549static tree
2a837de2
MS
8550build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8551 tree predictor, tree probability)
64a9295a 8552{
2a837de2 8553 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
903c723b 8554
2a837de2
MS
8555 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8556 : BUILT_IN_EXPECT_WITH_PROBABILITY);
8557 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8558 ret_type = TREE_TYPE (TREE_TYPE (fn));
8559 pred_type = TREE_VALUE (arg_types);
8560 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
64a9295a 8561
2a837de2
MS
8562 pred = fold_convert_loc (loc, pred_type, pred);
8563 expected = fold_convert_loc (loc, expected_type, expected);
903c723b 8564
2a837de2
MS
8565 if (probability)
8566 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8567 else
8568 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8569 predictor);
05f41289 8570
2a837de2
MS
8571 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8572 build_int_cst (ret_type, 0));
8573}
05f41289 8574
2a837de2
MS
8575/* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
8576 NULL_TREE if no simplification is possible. */
05f41289 8577
2a837de2
MS
8578tree
8579fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8580 tree arg3)
8581{
8582 tree inner, fndecl, inner_arg0;
8583 enum tree_code code;
b8698a0f 8584
2a837de2
MS
8585 /* Distribute the expected value over short-circuiting operators.
8586 See through the cast from truthvalue_type_node to long. */
8587 inner_arg0 = arg0;
8588 while (CONVERT_EXPR_P (inner_arg0)
8589 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8590 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8591 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
05f41289 8592
2a837de2
MS
8593 /* If this is a builtin_expect within a builtin_expect keep the
8594 inner one. See through a comparison against a constant. It
8595 might have been added to create a thruthvalue. */
8596 inner = inner_arg0;
05f41289 8597
2a837de2
MS
8598 if (COMPARISON_CLASS_P (inner)
8599 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8600 inner = TREE_OPERAND (inner, 0);
903c723b 8601
2a837de2
MS
8602 if (TREE_CODE (inner) == CALL_EXPR
8603 && (fndecl = get_callee_fndecl (inner))
8604 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8605 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8606 return arg0;
903c723b 8607
2a837de2
MS
8608 inner = inner_arg0;
8609 code = TREE_CODE (inner);
8610 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8611 {
8612 tree op0 = TREE_OPERAND (inner, 0);
8613 tree op1 = TREE_OPERAND (inner, 1);
8614 arg1 = save_expr (arg1);
903c723b 8615
2a837de2
MS
8616 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8617 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8618 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8619
8620 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8621 }
8622
8623 /* If the argument isn't invariant then there's nothing else we can do. */
8624 if (!TREE_CONSTANT (inner_arg0))
8625 return NULL_TREE;
8626
8627 /* If we expect that a comparison against the argument will fold to
8628 a constant return the constant. In practice, this means a true
8629 constant or the address of a non-weak symbol. */
8630 inner = inner_arg0;
8631 STRIP_NOPS (inner);
8632 if (TREE_CODE (inner) == ADDR_EXPR)
8633 {
8634 do
8635 {
8636 inner = TREE_OPERAND (inner, 0);
8637 }
8638 while (TREE_CODE (inner) == COMPONENT_REF
8639 || TREE_CODE (inner) == ARRAY_REF);
8640 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8641 return NULL_TREE;
64a9295a 8642 }
2a837de2
MS
8643
8644 /* Otherwise, ARG0 already has the proper type for the return value. */
8645 return arg0;
64a9295a
PB
8646}
8647
2a837de2 8648/* Fold a call to __builtin_classify_type with argument ARG. */
903c723b
TC
8649
8650static tree
2a837de2 8651fold_builtin_classify_type (tree arg)
903c723b 8652{
2a837de2
MS
8653 if (arg == 0)
8654 return build_int_cst (integer_type_node, no_type_class);
903c723b 8655
2a837de2
MS
8656 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8657}
8658
8659/* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8660 ARG. */
8661
8662static tree
8663fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8664{
8665 if (!validate_arg (arg, POINTER_TYPE))
903c723b 8666 return NULL_TREE;
2a837de2
MS
8667 else
8668 {
8669 c_strlen_data lendata = { };
8670 tree len = c_strlen (arg, 0, &lendata);
903c723b 8671
2a837de2
MS
8672 if (len)
8673 return fold_convert_loc (loc, type, len);
8674
81d6cdd3
MS
8675 /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8676 also early enough to detect invalid reads in multimensional
8677 arrays and struct members. */
2a837de2 8678 if (!lendata.decl)
81d6cdd3 8679 c_strlen (arg, 1, &lendata);
2a837de2
MS
8680
8681 if (lendata.decl)
8682 {
8683 if (EXPR_HAS_LOCATION (arg))
8684 loc = EXPR_LOCATION (arg);
8685 else if (loc == UNKNOWN_LOCATION)
8686 loc = input_location;
8687 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8688 }
8689
8690 return NULL_TREE;
8691 }
8692}
8693
8694/* Fold a call to __builtin_inf or __builtin_huge_val. */
8695
8696static tree
8697fold_builtin_inf (location_t loc, tree type, int warn)
8698{
2a837de2
MS
8699 /* __builtin_inff is intended to be usable to define INFINITY on all
8700 targets. If an infinity is not available, INFINITY expands "to a
8701 positive constant of type float that overflows at translation
8702 time", footnote "In this case, using INFINITY will violate the
8703 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8704 Thus we pedwarn to ensure this constraint violation is
8705 diagnosed. */
8706 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8707 pedwarn (loc, 0, "target format does not support infinity");
903c723b 8708
bb9d4344 8709 return build_real (type, dconstinf);
2a837de2 8710}
903c723b 8711
2a837de2
MS
8712/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8713 NULL_TREE if no simplification can be made. */
903c723b 8714
2a837de2
MS
8715static tree
8716fold_builtin_sincos (location_t loc,
8717 tree arg0, tree arg1, tree arg2)
8718{
8719 tree type;
8720 tree fndecl, call = NULL_TREE;
903c723b 8721
2a837de2
MS
8722 if (!validate_arg (arg0, REAL_TYPE)
8723 || !validate_arg (arg1, POINTER_TYPE)
8724 || !validate_arg (arg2, POINTER_TYPE))
8725 return NULL_TREE;
8726
8727 type = TREE_TYPE (arg0);
8728
8729 /* Calculate the result when the argument is a constant. */
8730 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8731 if (fn == END_BUILTINS)
8732 return NULL_TREE;
8733
8734 /* Canonicalize sincos to cexpi. */
8735 if (TREE_CODE (arg0) == REAL_CST)
903c723b 8736 {
2a837de2
MS
8737 tree complex_type = build_complex_type (type);
8738 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
903c723b 8739 }
2a837de2 8740 if (!call)
903c723b 8741 {
2a837de2
MS
8742 if (!targetm.libc_has_function (function_c99_math_complex, type)
8743 || !builtin_decl_implicit_p (fn))
8744 return NULL_TREE;
8745 fndecl = builtin_decl_explicit (fn);
8746 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8747 call = builtin_save_expr (call);
903c723b
TC
8748 }
8749
2a837de2
MS
8750 tree ptype = build_pointer_type (type);
8751 arg1 = fold_convert (ptype, arg1);
8752 arg2 = fold_convert (ptype, arg2);
8753 return build2 (COMPOUND_EXPR, void_type_node,
8754 build2 (MODIFY_EXPR, void_type_node,
8755 build_fold_indirect_ref_loc (loc, arg1),
8756 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8757 build2 (MODIFY_EXPR, void_type_node,
8758 build_fold_indirect_ref_loc (loc, arg2),
8759 fold_build1_loc (loc, REALPART_EXPR, type, call)));
903c723b
TC
8760}
8761
2a837de2
MS
8762/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8763 Return NULL_TREE if no simplification can be made. */
08039bd8
RS
8764
8765static tree
2a837de2 8766fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
08039bd8 8767{
2a837de2
MS
8768 if (!validate_arg (arg1, POINTER_TYPE)
8769 || !validate_arg (arg2, POINTER_TYPE)
8770 || !validate_arg (len, INTEGER_TYPE))
8771 return NULL_TREE;
c22cacf3 8772
2a837de2
MS
8773 /* If the LEN parameter is zero, return zero. */
8774 if (integer_zerop (len))
8775 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8776 arg1, arg2);
c22cacf3 8777
2a837de2
MS
8778 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8779 if (operand_equal_p (arg1, arg2, 0))
8780 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
14f661f1 8781
2a837de2
MS
8782 /* If len parameter is one, return an expression corresponding to
8783 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
8784 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
14f661f1 8785 {
2a837de2
MS
8786 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8787 tree cst_uchar_ptr_node
8788 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8789
8790 tree ind1
8791 = fold_convert_loc (loc, integer_type_node,
8792 build1 (INDIRECT_REF, cst_uchar_node,
8793 fold_convert_loc (loc,
8794 cst_uchar_ptr_node,
8795 arg1)));
8796 tree ind2
8797 = fold_convert_loc (loc, integer_type_node,
8798 build1 (INDIRECT_REF, cst_uchar_node,
8799 fold_convert_loc (loc,
8800 cst_uchar_ptr_node,
8801 arg2)));
8802 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
14f661f1 8803 }
08039bd8 8804
2a837de2 8805 return NULL_TREE;
08039bd8
RS
8806}
8807
2a837de2 8808/* Fold a call to builtin isascii with argument ARG. */
1304953e
JJ
8809
8810static tree
2a837de2 8811fold_builtin_isascii (location_t loc, tree arg)
1304953e 8812{
2a837de2
MS
8813 if (!validate_arg (arg, INTEGER_TYPE))
8814 return NULL_TREE;
8815 else
1304953e 8816 {
2a837de2
MS
8817 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8818 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8819 build_int_cst (integer_type_node,
8820 ~ (unsigned HOST_WIDE_INT) 0x7f));
8821 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8822 arg, integer_zero_node);
1304953e 8823 }
2a837de2 8824}
44a845ca 8825
2a837de2 8826/* Fold a call to builtin toascii with argument ARG. */
44a845ca 8827
2a837de2
MS
8828static tree
8829fold_builtin_toascii (location_t loc, tree arg)
8830{
8831 if (!validate_arg (arg, INTEGER_TYPE))
8832 return NULL_TREE;
44a845ca 8833
2a837de2
MS
8834 /* Transform toascii(c) -> (c & 0x7f). */
8835 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8836 build_int_cst (integer_type_node, 0x7f));
8837}
8838
8839/* Fold a call to builtin isdigit with argument ARG. */
8840
8841static tree
8842fold_builtin_isdigit (location_t loc, tree arg)
8843{
8844 if (!validate_arg (arg, INTEGER_TYPE))
8845 return NULL_TREE;
43574e4f
JJ
8846 else
8847 {
2a837de2
MS
8848 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8849 /* According to the C standard, isdigit is unaffected by locale.
8850 However, it definitely is affected by the target character set. */
8851 unsigned HOST_WIDE_INT target_digit0
8852 = lang_hooks.to_target_charset ('0');
44a845ca 8853
2a837de2
MS
8854 if (target_digit0 == 0)
8855 return NULL_TREE;
44a845ca 8856
2a837de2
MS
8857 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8858 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8859 build_int_cst (unsigned_type_node, target_digit0));
8860 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8861 build_int_cst (unsigned_type_node, 9));
8862 }
1304953e
JJ
8863}
8864
2a837de2 8865/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
b25aad5f 8866
2a837de2
MS
8867static tree
8868fold_builtin_fabs (location_t loc, tree arg, tree type)
b25aad5f 8869{
2a837de2
MS
8870 if (!validate_arg (arg, REAL_TYPE))
8871 return NULL_TREE;
b25aad5f 8872
2a837de2
MS
8873 arg = fold_convert_loc (loc, type, arg);
8874 return fold_build1_loc (loc, ABS_EXPR, type, arg);
b25aad5f
MS
8875}
8876
2a837de2 8877/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
b25aad5f 8878
2a837de2
MS
8879static tree
8880fold_builtin_abs (location_t loc, tree arg, tree type)
b25aad5f 8881{
2a837de2
MS
8882 if (!validate_arg (arg, INTEGER_TYPE))
8883 return NULL_TREE;
b25aad5f 8884
2a837de2
MS
8885 arg = fold_convert_loc (loc, type, arg);
8886 return fold_build1_loc (loc, ABS_EXPR, type, arg);
b25aad5f
MS
8887}
8888
2a837de2 8889/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
b25aad5f 8890
2a837de2
MS
8891static tree
8892fold_builtin_carg (location_t loc, tree arg, tree type)
b25aad5f 8893{
2a837de2
MS
8894 if (validate_arg (arg, COMPLEX_TYPE)
8895 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8896 {
8897 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8898
8899 if (atan2_fn)
8900 {
8901 tree new_arg = builtin_save_expr (arg);
8902 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8903 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8904 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8905 }
8906 }
8907
8908 return NULL_TREE;
b25aad5f
MS
8909}
8910
2a837de2 8911/* Fold a call to builtin frexp, we can assume the base is 2. */
b0b3afb2 8912
6de9cd9a 8913static tree
2a837de2 8914fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
b0b3afb2 8915{
2a837de2
MS
8916 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8917 return NULL_TREE;
b25aad5f 8918
2a837de2 8919 STRIP_NOPS (arg0);
b25aad5f 8920
2a837de2
MS
8921 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8922 return NULL_TREE;
b25aad5f 8923
2a837de2 8924 arg1 = build_fold_indirect_ref_loc (loc, arg1);
d3147f64 8925
2a837de2
MS
8926 /* Proceed if a valid pointer type was passed in. */
8927 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8928 {
8929 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
40f6e591 8930 tree frac, exp, res;
d3147f64 8931
2a837de2
MS
8932 switch (value->cl)
8933 {
8934 case rvc_zero:
8935 /* For +-0, return (*exp = 0, +-0). */
8936 exp = integer_zero_node;
8937 frac = arg0;
8938 break;
8939 case rvc_nan:
8940 case rvc_inf:
8941 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8942 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8943 case rvc_normal:
8944 {
8945 /* Since the frexp function always expects base 2, and in
8946 GCC normalized significands are already in the range
8947 [0.5, 1.0), we have exactly what frexp wants. */
8948 REAL_VALUE_TYPE frac_rvt = *value;
8949 SET_REAL_EXP (&frac_rvt, 0);
8950 frac = build_real (rettype, frac_rvt);
8951 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8952 }
8953 break;
8954 default:
8955 gcc_unreachable ();
8956 }
d3147f64 8957
2a837de2
MS
8958 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8959 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8960 TREE_SIDE_EFFECTS (arg1) = 1;
40f6e591
RS
8961 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8962 suppress_warning (res, OPT_Wunused_value);
8963 return res;
5039610b 8964 }
2a837de2 8965
5039610b
SL
8966 return NULL_TREE;
8967}
d3147f64 8968
2a837de2 8969/* Fold a call to builtin modf. */
d3147f64 8970
5039610b 8971static tree
2a837de2 8972fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
5039610b 8973{
2a837de2
MS
8974 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8975 return NULL_TREE;
5c1a2e63 8976
2a837de2
MS
8977 STRIP_NOPS (arg0);
8978
8979 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
5c1a2e63
RS
8980 return NULL_TREE;
8981
2a837de2 8982 arg1 = build_fold_indirect_ref_loc (loc, arg1);
5c1a2e63 8983
2a837de2
MS
8984 /* Proceed if a valid pointer type was passed in. */
8985 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
5039610b 8986 {
2a837de2
MS
8987 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8988 REAL_VALUE_TYPE trunc, frac;
40f6e591 8989 tree res;
d3147f64 8990
2a837de2
MS
8991 switch (value->cl)
8992 {
8993 case rvc_nan:
8994 case rvc_zero:
8995 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8996 trunc = frac = *value;
8997 break;
8998 case rvc_inf:
8999 /* For +-Inf, return (*arg1 = arg0, +-0). */
9000 frac = dconst0;
9001 frac.sign = value->sign;
9002 trunc = *value;
9003 break;
9004 case rvc_normal:
9005 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9006 real_trunc (&trunc, VOIDmode, value);
9007 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9008 /* If the original number was negative and already
9009 integral, then the fractional part is -0.0. */
9010 if (value->sign && frac.cl == rvc_zero)
9011 frac.sign = value->sign;
9012 break;
d3147f64 9013 }
b0b3afb2 9014
2a837de2
MS
9015 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9016 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9017 build_real (rettype, trunc));
9018 TREE_SIDE_EFFECTS (arg1) = 1;
40f6e591
RS
9019 res = fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9020 build_real (rettype, frac));
9021 suppress_warning (res, OPT_Wunused_value);
9022 return res;
2a837de2 9023 }
b0b3afb2 9024
2a837de2
MS
9025 return NULL_TREE;
9026}
9655d83b 9027
2a837de2
MS
9028/* Given a location LOC, an interclass builtin function decl FNDECL
9029 and its single argument ARG, return an folded expression computing
9030 the same, or NULL_TREE if we either couldn't or didn't want to fold
9031 (the latter happen if there's an RTL instruction available). */
07bae5ad 9032
2a837de2
MS
9033static tree
9034fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9035{
9036 machine_mode mode;
aa6c7c3a 9037
2a837de2
MS
9038 if (!validate_arg (arg, REAL_TYPE))
9039 return NULL_TREE;
aa6c7c3a 9040
2a837de2
MS
9041 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9042 return NULL_TREE;
aa6c7c3a 9043
2a837de2 9044 mode = TYPE_MODE (TREE_TYPE (arg));
43272bf5 9045
2a837de2 9046 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
b8698a0f 9047
2a837de2
MS
9048 /* If there is no optab, try generic code. */
9049 switch (DECL_FUNCTION_CODE (fndecl))
9050 {
9051 tree result;
b8698a0f 9052
2a837de2
MS
9053 CASE_FLT_FN (BUILT_IN_ISINF):
9054 {
9055 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9056 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9057 tree type = TREE_TYPE (arg);
9058 REAL_VALUE_TYPE r;
9059 char buf[128];
b8698a0f 9060
2a837de2
MS
9061 if (is_ibm_extended)
9062 {
9063 /* NaN and Inf are encoded in the high-order double value
9064 only. The low-order value is not significant. */
9065 type = double_type_node;
9066 mode = DFmode;
9067 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9068 }
9069 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9070 real_from_string (&r, buf);
9071 result = build_call_expr (isgr_fn, 2,
9072 fold_build1_loc (loc, ABS_EXPR, type, arg),
9073 build_real (type, r));
9074 return result;
9075 }
903c723b 9076 CASE_FLT_FN (BUILT_IN_FINITE):
903c723b
TC
9077 case BUILT_IN_ISFINITE:
9078 {
2a837de2
MS
9079 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9080 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9081 tree type = TREE_TYPE (arg);
9082 REAL_VALUE_TYPE r;
9083 char buf[128];
903c723b 9084
2a837de2
MS
9085 if (is_ibm_extended)
9086 {
9087 /* NaN and Inf are encoded in the high-order double value
9088 only. The low-order value is not significant. */
9089 type = double_type_node;
9090 mode = DFmode;
9091 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9092 }
9093 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9094 real_from_string (&r, buf);
9095 result = build_call_expr (isle_fn, 2,
9096 fold_build1_loc (loc, ABS_EXPR, type, arg),
9097 build_real (type, r));
9098 /*result = fold_build2_loc (loc, UNGT_EXPR,
9099 TREE_TYPE (TREE_TYPE (fndecl)),
9100 fold_build1_loc (loc, ABS_EXPR, type, arg),
9101 build_real (type, r));
9102 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9103 TREE_TYPE (TREE_TYPE (fndecl)),
9104 result);*/
9105 return result;
903c723b 9106 }
903c723b 9107 case BUILT_IN_ISNORMAL:
2a837de2
MS
9108 {
9109 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9110 islessequal(fabs(x),DBL_MAX). */
9111 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9112 tree type = TREE_TYPE (arg);
9113 tree orig_arg, max_exp, min_exp;
9114 machine_mode orig_mode = mode;
9115 REAL_VALUE_TYPE rmax, rmin;
9116 char buf[128];
903c723b 9117
2a837de2
MS
9118 orig_arg = arg = builtin_save_expr (arg);
9119 if (is_ibm_extended)
9120 {
9121 /* Use double to test the normal range of IBM extended
9122 precision. Emin for IBM extended precision is
9123 different to emin for IEEE double, being 53 higher
9124 since the low double exponent is at least 53 lower
9125 than the high double exponent. */
9126 type = double_type_node;
9127 mode = DFmode;
9128 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9129 }
9130 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
b8698a0f 9131
2a837de2
MS
9132 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9133 real_from_string (&rmax, buf);
9134 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9135 real_from_string (&rmin, buf);
9136 max_exp = build_real (type, rmax);
9137 min_exp = build_real (type, rmin);
07bae5ad 9138
2a837de2
MS
9139 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9140 if (is_ibm_extended)
9141 {
9142 /* Testing the high end of the range is done just using
9143 the high double, using the same test as isfinite().
9144 For the subnormal end of the range we first test the
9145 high double, then if its magnitude is equal to the
9146 limit of 0x1p-969, we test whether the low double is
9147 non-zero and opposite sign to the high double. */
9148 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9149 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9150 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9151 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9152 arg, min_exp);
9153 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9154 complex_double_type_node, orig_arg);
9155 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9156 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9157 tree zero = build_real (type, dconst0);
9158 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9159 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9160 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9161 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9162 fold_build3 (COND_EXPR,
9163 integer_type_node,
9164 hilt, logt, lolt));
9165 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9166 eq_min, ok_lo);
9167 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9168 gt_min, eq_min);
9169 }
9170 else
9171 {
9172 tree const isge_fn
9173 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9174 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9175 }
9176 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9177 max_exp, min_exp);
9178 return result;
9179 }
5c1a2e63 9180 default:
4835c978 9181 break;
5c1a2e63 9182 }
4977bab6 9183
5c1a2e63 9184 return NULL_TREE;
5c1a2e63 9185}
b53fed56 9186
2a837de2
MS
9187/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9188 ARG is the argument for the call. */
5039610b
SL
9189
9190static tree
2a837de2 9191fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
5039610b
SL
9192{
9193 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5039610b 9194
2a837de2 9195 if (!validate_arg (arg, REAL_TYPE))
5c1a2e63 9196 return NULL_TREE;
ea91f957 9197
2a837de2 9198 switch (builtin_index)
5c1a2e63 9199 {
2a837de2
MS
9200 case BUILT_IN_ISINF:
9201 if (tree_expr_infinite_p (arg))
9202 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9203 if (!tree_expr_maybe_infinite_p (arg))
9204 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9205 return NULL_TREE;
5039610b 9206
2a837de2
MS
9207 case BUILT_IN_ISINF_SIGN:
9208 {
9209 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9210 /* In a boolean context, GCC will fold the inner COND_EXPR to
9211 1. So e.g. "if (isinf_sign(x))" would be folded to just
9212 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9213 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9214 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9215 tree tmp = NULL_TREE;
7a2a25ab 9216
2a837de2 9217 arg = builtin_save_expr (arg);
3d577eaf 9218
2a837de2
MS
9219 if (signbit_fn && isinf_fn)
9220 {
9221 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9222 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9223
9224 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9225 signbit_call, integer_zero_node);
9226 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9227 isinf_call, integer_zero_node);
9228
9229 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9230 integer_minus_one_node, integer_one_node);
9231 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9232 isinf_call, tmp,
9233 integer_zero_node);
9234 }
9235
9236 return tmp;
9237 }
9238
9239 case BUILT_IN_ISFINITE:
9240 if (tree_expr_finite_p (arg))
9241 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9242 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
9243 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9244 return NULL_TREE;
9245
9246 case BUILT_IN_ISNAN:
9247 if (tree_expr_nan_p (arg))
9248 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9249 if (!tree_expr_maybe_nan_p (arg))
9250 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9251
9252 {
9253 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9254 if (is_ibm_extended)
9255 {
9256 /* NaN and Inf are encoded in the high-order double value
9257 only. The low-order value is not significant. */
9258 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9259 }
9260 }
9261 arg = builtin_save_expr (arg);
9262 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9263
0982edd3
JJ
9264 case BUILT_IN_ISSIGNALING:
9265 /* Folding to true for REAL_CST is done in fold_const_call_ss.
9266 Don't use tree_expr_signaling_nan_p (arg) -> integer_one_node
9267 and !tree_expr_maybe_signaling_nan_p (arg) -> integer_zero_node
9268 here, so there is some possibility of __builtin_issignaling working
9269 without -fsignaling-nans. Especially when -fno-signaling-nans is
9270 the default. */
9271 if (!tree_expr_maybe_nan_p (arg))
9272 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9273 return NULL_TREE;
9274
2a837de2
MS
9275 default:
9276 gcc_unreachable ();
9277 }
9278}
5039610b 9279
2a837de2
MS
9280/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9281 This builtin will generate code to return the appropriate floating
9282 point classification depending on the value of the floating point
9283 number passed in. The possible return values must be supplied as
9284 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9285 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9286 one floating point argument which is "type generic". */
5039610b 9287
2a837de2
MS
9288static tree
9289fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9290{
9291 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9292 arg, type, res, tmp;
9293 machine_mode mode;
9294 REAL_VALUE_TYPE r;
9295 char buf[128];
5039610b 9296
2a837de2
MS
9297 /* Verify the required arguments in the original call. */
9298 if (nargs != 6
9299 || !validate_arg (args[0], INTEGER_TYPE)
9300 || !validate_arg (args[1], INTEGER_TYPE)
9301 || !validate_arg (args[2], INTEGER_TYPE)
9302 || !validate_arg (args[3], INTEGER_TYPE)
9303 || !validate_arg (args[4], INTEGER_TYPE)
9304 || !validate_arg (args[5], REAL_TYPE))
9305 return NULL_TREE;
5039610b 9306
2a837de2
MS
9307 fp_nan = args[0];
9308 fp_infinite = args[1];
9309 fp_normal = args[2];
9310 fp_subnormal = args[3];
9311 fp_zero = args[4];
9312 arg = args[5];
9313 type = TREE_TYPE (arg);
9314 mode = TYPE_MODE (type);
9315 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
08039bd8 9316
2a837de2
MS
9317 /* fpclassify(x) ->
9318 isnan(x) ? FP_NAN :
9319 (fabs(x) == Inf ? FP_INFINITE :
9320 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9321 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
a32e70c3 9322
2a837de2
MS
9323 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9324 build_real (type, dconst0));
9325 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9326 tmp, fp_zero, fp_subnormal);
10a0d495 9327
2a837de2
MS
9328 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9329 real_from_string (&r, buf);
9330 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9331 arg, build_real (type, r));
9332 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
86951993 9333
2a837de2
MS
9334 if (tree_expr_maybe_infinite_p (arg))
9335 {
2a837de2 9336 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
bb9d4344 9337 build_real (type, dconstinf));
2a837de2
MS
9338 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9339 fp_infinite, res);
9340 }
86951993 9341
2a837de2
MS
9342 if (tree_expr_maybe_nan_p (arg))
9343 {
9344 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9345 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
5039610b 9346 }
2a837de2
MS
9347
9348 return res;
5039610b
SL
9349}
9350
2a837de2
MS
9351/* Fold a call to an unordered comparison function such as
9352 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9353 being called and ARG0 and ARG1 are the arguments for the call.
9354 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9355 the opposite of the desired result. UNORDERED_CODE is used
9356 for modes that can hold NaNs and ORDERED_CODE is used for
9357 the rest. */
5039610b
SL
9358
9359static tree
2a837de2
MS
9360fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9361 enum tree_code unordered_code,
9362 enum tree_code ordered_code)
5039610b
SL
9363{
9364 tree type = TREE_TYPE (TREE_TYPE (fndecl));
2a837de2
MS
9365 enum tree_code code;
9366 tree type0, type1;
9367 enum tree_code code0, code1;
9368 tree cmp_type = NULL_TREE;
5c1a2e63 9369
2a837de2
MS
9370 type0 = TREE_TYPE (arg0);
9371 type1 = TREE_TYPE (arg1);
5c1a2e63 9372
2a837de2
MS
9373 code0 = TREE_CODE (type0);
9374 code1 = TREE_CODE (type1);
5c1a2e63 9375
2a837de2
MS
9376 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9377 /* Choose the wider of two real types. */
9378 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9379 ? type0 : type1;
9380 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9381 cmp_type = type0;
9382 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9383 cmp_type = type1;
5039610b 9384
2a837de2
MS
9385 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9386 arg1 = fold_convert_loc (loc, cmp_type, arg1);
5039610b 9387
2a837de2
MS
9388 if (unordered_code == UNORDERED_EXPR)
9389 {
9390 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
9391 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
9392 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
9393 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9394 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9395 }
ea91f957 9396
2a837de2
MS
9397 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
9398 ? unordered_code : ordered_code;
9399 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9400 fold_build2_loc (loc, code, type, arg0, arg1));
9401}
5039610b 9402
/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
   arithmetics if it can never overflow, or into internal functions that
   return both result of arithmetics and overflowed boolean flag in
   a complex integer result, or some other check for overflow.
   Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
   checking part of that.  */

static tree
fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
			     tree arg0, tree arg1, tree arg2)
{
  enum internal_fn ifn = IFN_LAST;
  /* The code of the expression corresponding to the built-in.  */
  enum tree_code opcode = ERROR_MARK;
  bool ovf_only = false;

  /* Map the built-in to its tree code and internal function.  The
     _OVERFLOW_P variants only want the overflow flag, not the result.  */
  switch (fcode)
    {
    case BUILT_IN_ADD_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
      opcode = PLUS_EXPR;
      ifn = IFN_ADD_OVERFLOW;
      break;
    case BUILT_IN_SUB_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
      opcode = MINUS_EXPR;
      ifn = IFN_SUB_OVERFLOW;
      break;
    case BUILT_IN_MUL_OVERFLOW_P:
      ovf_only = true;
      /* FALLTHRU */
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      opcode = MULT_EXPR;
      ifn = IFN_MUL_OVERFLOW;
      break;
    default:
      gcc_unreachable ();
    }

  /* For the "generic" overloads, the first two arguments can have different
     types and the last argument determines the target type to use to check
     for overflow.  The arguments of the other overloads all have the same
     type.  */
  tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));

  /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
     arguments are constant, attempt to fold the built-in call into a constant
     expression indicating whether or not it detected an overflow.  */
  if (ovf_only
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_CODE (arg1) == INTEGER_CST)
    /* Perform the computation in the target type and check for overflow.  */
    return omit_one_operand_loc (loc, boolean_type_node,
				 arith_overflowed_p (opcode, type, arg0, arg1)
				 ? boolean_true_node : boolean_false_node,
				 arg2);

  tree intres, ovfres;
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* Both operands constant: fold the arithmetic at compile time and
	 compute the overflow flag separately via arith_overflowed_p.  */
      intres = fold_binary_loc (loc, opcode, type,
				fold_convert_loc (loc, type, arg0),
				fold_convert_loc (loc, type, arg1));
      if (TREE_OVERFLOW (intres))
	intres = drop_tree_overflow (intres);
      ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
		? boolean_true_node : boolean_false_node);
    }
  else
    {
      /* Build a call to the internal function, whose complex result
	 carries the arithmetic value (real part) and the overflow flag
	 (imaginary part).  save_expr keeps the call evaluated once.  */
      tree ctype = build_complex_type (type);
      tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
						arg0, arg1);
      tree tgt = save_expr (call);
      intres = build1_loc (loc, REALPART_EXPR, type, tgt);
      ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
      ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
    }

  if (ovf_only)
    return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);

  /* Store the arithmetic result through the pointer ARG2 and yield the
     overflow flag as the value of the whole expression.  */
  tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
  tree store
    = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
  return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
}
b0b3afb2 9512
2a837de2 9513/* Fold a call to __builtin_FILE to a constant string. */
b8698a0f 9514
2a837de2
MS
9515static inline tree
9516fold_builtin_FILE (location_t loc)
5039610b 9517{
2a837de2 9518 if (const char *fname = LOCATION_FILE (loc))
5039610b 9519 {
2a837de2
MS
9520 /* The documentation says this builtin is equivalent to the preprocessor
9521 __FILE__ macro so it appears appropriate to use the same file prefix
9522 mappings. */
9523 fname = remap_macro_filename (fname);
9524 return build_string_literal (strlen (fname) + 1, fname);
5039610b 9525 }
2a837de2
MS
9526
9527 return build_string_literal (1, "");
5039610b
SL
9528}
9529
2a837de2 9530/* Fold a call to __builtin_FUNCTION to a constant string. */
5039610b 9531
2a837de2
MS
9532static inline tree
9533fold_builtin_FUNCTION ()
5039610b 9534{
2a837de2 9535 const char *name = "";
5039610b 9536
2a837de2
MS
9537 if (current_function_decl)
9538 name = lang_hooks.decl_printable_name (current_function_decl, 0);
3bf5906b 9539
2a837de2 9540 return build_string_literal (strlen (name) + 1, name);
862d0b35 9541}
/* Fold a call to __builtin_LINE to an integer constant.  */

static inline tree
fold_builtin_LINE (location_t loc, tree type)
{
  /* Fold to the line number of LOC as an INTEGER_CST of TYPE.  */
  return build_int_cst (type, LOCATION_LINE (loc));
}
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    /* __builtin_inf and the decimal variants fold to an infinity
       constant; the third argument distinguishes inf from huge_val.  */
    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (NULL_TREE);

    case BUILT_IN_UNREACHABLE:
      /* Rewrite any explicit calls to __builtin_unreachable.  */
      if (sanitize_flags_p (SANITIZE_UNREACHABLE))
	return build_builtin_unreachable (loc);
      break;

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, expr, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* The complex-valued builtins below only fold when the argument is
       a complex type whose element type is REAL_TYPE.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_ISSIGNALING:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISSIGNALING);

    case BUILT_IN_FREE:
      /* free (NULL) is a no-op; elide the call entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
/* Folds a call EXPR (which may be null) to built-in function FNDECL
   with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
   if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, expr, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);

    /* The isgreater/isless family folds to the corresponding unordered
       or ordered comparison depending on whether a NaN is possible.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
    case BUILT_IN_DYNAMIC_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1, fcode);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
   and ARG2.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_3 (location_t loc, tree fndecl,
		tree arg0, tree arg1, tree arg2)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK
      || TREE_CODE (arg2) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type,
				  arg0, arg1, arg2))
    return ret;

  switch (fcode)
    {

    CASE_FLT_FN (BUILT_IN_SINCOS):
      return fold_builtin_sincos (loc, arg0, arg1, arg2);

    CASE_FLT_FN (BUILT_IN_REMQUO):
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, REAL_TYPE)
	  && validate_arg (arg2, POINTER_TYPE))
	return do_mpfr_remquo (arg0, arg1, arg2);
      break;

    case BUILT_IN_MEMCMP:
      return fold_builtin_memcmp (loc, arg0, arg1, arg2);

    case BUILT_IN_EXPECT:
      return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);

    case BUILT_IN_EXPECT_WITH_PROBABILITY:
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);

    /* All overflow-checking builtins funnel into one folder.  */
    case BUILT_IN_ADD_OVERFLOW:
    case BUILT_IN_SUB_OVERFLOW:
    case BUILT_IN_MUL_OVERFLOW:
    case BUILT_IN_ADD_OVERFLOW_P:
    case BUILT_IN_SUB_OVERFLOW_P:
    case BUILT_IN_MUL_OVERFLOW_P:
    case BUILT_IN_SADD_OVERFLOW:
    case BUILT_IN_SADDL_OVERFLOW:
    case BUILT_IN_SADDLL_OVERFLOW:
    case BUILT_IN_SSUB_OVERFLOW:
    case BUILT_IN_SSUBL_OVERFLOW:
    case BUILT_IN_SSUBLL_OVERFLOW:
    case BUILT_IN_SMUL_OVERFLOW:
    case BUILT_IN_SMULL_OVERFLOW:
    case BUILT_IN_SMULLL_OVERFLOW:
    case BUILT_IN_UADD_OVERFLOW:
    case BUILT_IN_UADDL_OVERFLOW:
    case BUILT_IN_UADDLL_OVERFLOW:
    case BUILT_IN_USUB_OVERFLOW:
    case BUILT_IN_USUBL_OVERFLOW:
    case BUILT_IN_USUBLL_OVERFLOW:
    case BUILT_IN_UMUL_OVERFLOW:
    case BUILT_IN_UMULL_OVERFLOW:
    case BUILT_IN_UMULLL_OVERFLOW:
      return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);

    default:
      break;
    }
  return NULL_TREE;
}
6de9cd9a 9883
2a837de2
MS
9884/* Folds a call EXPR (which may be null) to built-in function FNDECL.
9885 ARGS is an array of NARGS arguments. IGNORE is true if the result
9886 of the function call is ignored. This function returns NULL_TREE
9887 if no simplification was possible. */
6de9cd9a 9888
2a837de2
MS
9889static tree
9890fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9891 int nargs, bool)
9892{
9893 tree ret = NULL_TREE;
6de9cd9a 9894
2a837de2
MS
9895 switch (nargs)
9896 {
9897 case 0:
9898 ret = fold_builtin_0 (loc, fndecl);
9899 break;
9900 case 1:
9901 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9902 break;
9903 case 2:
9904 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9905 break;
9906 case 3:
9907 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9908 break;
9909 default:
9910 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9911 break;
9912 }
9913 if (ret)
9914 {
9915 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9916 SET_EXPR_LOCATION (ret, loc);
9917 return ret;
6de9cd9a 9918 }
b5338fb3 9919 return NULL_TREE;
6de9cd9a
DN
9920}
9921
2a837de2
MS
9922/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9923 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9924 of arguments in ARGS to be omitted. OLDNARGS is the number of
9925 elements in ARGS. */
726a989a 9926
2a837de2
MS
9927static tree
9928rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9929 int skip, tree fndecl, int n, va_list newargs)
6de9cd9a 9930{
2a837de2
MS
9931 int nargs = oldnargs - skip + n;
9932 tree *buffer;
6de9cd9a 9933
2a837de2 9934 if (n > 0)
2efcfa4e 9935 {
2a837de2 9936 int i, j;
5039610b 9937
2a837de2
MS
9938 buffer = XALLOCAVEC (tree, nargs);
9939 for (i = 0; i < n; i++)
9940 buffer[i] = va_arg (newargs, tree);
9941 for (j = skip; j < oldnargs; j++, i++)
9942 buffer[i] = args[j];
8870e212 9943 }
5039610b 9944 else
2a837de2 9945 buffer = args + skip;
5039610b 9946
2a837de2
MS
9947 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9948}
9949
9950/* Return true if FNDECL shouldn't be folded right now.
9951 If a built-in function has an inline attribute always_inline
9952 wrapper, defer folding it after always_inline functions have
9953 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9954 might not be performed. */
9955
9956bool
9957avoid_folding_inline_builtin (tree fndecl)
9958{
9959 return (DECL_DECLARED_INLINE_P (fndecl)
9960 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9961 && cfun
9962 && !cfun->always_inline_functions_inlined
9963 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9964}
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl && fndecl_built_in_p (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}

      /* Defer folding of always_inline fortify wrappers; see the
	 function comment of avoid_folding_inline_builtin.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      /* Machine-dependent builtins are folded by the target hook.  */
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls through the address of a FUNCTION_DECL can fold.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && fndecl_built_in_p (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
	    return NULL_TREE;
	}
      /* Defer folding of always_inline fortify wrappers as well.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
10a0d495 10045
2a837de2
MS
10046/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10047 along with N new arguments specified as the "..." parameters. SKIP
10048 is the number of arguments in EXP to be omitted. This function is used
10049 to do varargs-to-varargs transformations. */
10a0d495 10050
2a837de2
MS
10051static tree
10052rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10053{
10054 va_list ap;
10055 tree t;
10a0d495 10056
2a837de2
MS
10057 va_start (ap, n);
10058 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10059 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10060 va_end (ap);
10061
10062 return t;
10a0d495
JJ
10063}
10064
2a837de2
MS
10065/* Validate a single argument ARG against a tree code CODE representing
10066 a type. Return true when argument is valid. */
10a0d495 10067
2a837de2
MS
10068static bool
10069validate_arg (const_tree arg, enum tree_code code)
10a0d495 10070{
2a837de2
MS
10071 if (!arg)
10072 return false;
10073 else if (code == POINTER_TYPE)
10074 return POINTER_TYPE_P (TREE_TYPE (arg));
10075 else if (code == INTEGER_TYPE)
10076 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10077 return code == TREE_CODE (TREE_TYPE (arg));
10078}
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.cc to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* Each vararg is a tree_code passed as int; 0 and VOID_TYPE are
	 sentinels, everything else names an expected argument type.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
/* Default target-specific builtin expander that does nothing.
   Returning NULL_RTX signals that no target expansion happened.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
10a0d495 10146
2a837de2
MS
10147/* Returns true is EXP represents data that would potentially reside
10148 in a readonly section. */
10a0d495 10149
2a837de2
MS
10150bool
10151readonly_data_expr (tree exp)
10152{
10153 STRIP_NOPS (exp);
10a0d495 10154
2a837de2
MS
10155 if (TREE_CODE (exp) != ADDR_EXPR)
10156 return false;
10a0d495 10157
2a837de2
MS
10158 exp = get_base_address (TREE_OPERAND (exp, 0));
10159 if (!exp)
10160 return false;
10161
10162 /* Make sure we call decl_readonly_section only for trees it
10163 can handle (since it returns true for everything it doesn't
10164 understand). */
10165 if (TREE_CODE (exp) == STRING_CST
10166 || TREE_CODE (exp) == CONSTRUCTOR
10167 || (VAR_P (exp) && TREE_STATIC (exp)))
10168 return decl_readonly_section (exp, 0);
10169 else
10170 return false;
10a0d495
JJ
10171}
10172
2a837de2
MS
10173/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10174 to the call, and TYPE is its return type.
10a0d495 10175
2a837de2
MS
10176 Return NULL_TREE if no simplification was possible, otherwise return the
10177 simplified form of the call as a tree.
10a0d495 10178
2a837de2
MS
10179 The simplified form may be a constant or other expression which
10180 computes the same value, but in a more efficient manner (including
10181 calls to other builtin functions).
ee92e7ba 10182
2a837de2
MS
10183 The call may contain arguments which need to be evaluated, but
10184 which are not useful to determine the result of the call. In
10185 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10186 COMPOUND_EXPR will be an argument which must be evaluated.
10187 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10188 COMPOUND_EXPR in the chain will contain the tree for the simplified
10189 form of the builtin function call. */
ee92e7ba 10190
2a837de2
MS
10191static tree
10192fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
10193{
10194 if (!validate_arg (s1, POINTER_TYPE)
10195 || !validate_arg (s2, POINTER_TYPE))
10196 return NULL_TREE;
ee92e7ba 10197
2a837de2
MS
10198 tree fn;
10199 const char *p1, *p2;
ee92e7ba 10200
2a837de2
MS
10201 p2 = c_getstr (s2);
10202 if (p2 == NULL)
10203 return NULL_TREE;
10a0d495 10204
2a837de2
MS
10205 p1 = c_getstr (s1);
10206 if (p1 != NULL)
10a0d495 10207 {
2a837de2
MS
10208 const char *r = strpbrk (p1, p2);
10209 tree tem;
10210
10211 if (r == NULL)
10212 return build_int_cst (TREE_TYPE (s1), 0);
10213
10214 /* Return an offset into the constant string argument. */
10215 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10216 return fold_convert_loc (loc, type, tem);
10a0d495 10217 }
10a0d495 10218
2a837de2
MS
10219 if (p2[0] == '\0')
10220 /* strpbrk(x, "") == NULL.
10221 Evaluate and ignore s1 in case it had side-effects. */
10222 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10a0d495 10223
2a837de2
MS
10224 if (p2[1] != '\0')
10225 return NULL_TREE; /* Really call strpbrk. */
10a0d495 10226
2a837de2
MS
10227 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10228 if (!fn)
10229 return NULL_TREE;
10a0d495 10230
2a837de2
MS
10231 /* New argument list transforming strpbrk(s1, s2) to
10232 strchr(s1, s2[0]). */
10233 return build_call_expr_loc (loc, fn, 2, s1,
10234 build_int_cst (integer_type_node, p2[0]));
10235}
b8698a0f 10236
2a837de2
MS
10237/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10238 to the call.
10a0d495 10239
2a837de2
MS
10240 Return NULL_TREE if no simplification was possible, otherwise return the
10241 simplified form of the call as a tree.
10a0d495 10242
2a837de2
MS
10243 The simplified form may be a constant or other expression which
10244 computes the same value, but in a more efficient manner (including
10245 calls to other builtin functions).
10a0d495 10246
2a837de2
MS
10247 The call may contain arguments which need to be evaluated, but
10248 which are not useful to determine the result of the call. In
10249 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10250 COMPOUND_EXPR will be an argument which must be evaluated.
10251 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10252 COMPOUND_EXPR in the chain will contain the tree for the simplified
10253 form of the builtin function call. */
000ba23d 10254
2a837de2
MS
10255static tree
10256fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10257{
10258 if (!validate_arg (s1, POINTER_TYPE)
10259 || !validate_arg (s2, POINTER_TYPE))
10260 return NULL_TREE;
10a0d495 10261
2a837de2
MS
10262 if (!check_nul_terminated_array (expr, s1)
10263 || !check_nul_terminated_array (expr, s2))
10264 return NULL_TREE;
10a0d495 10265
2a837de2
MS
10266 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10267
10268 /* If either argument is "", return NULL_TREE. */
10269 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10270 /* Evaluate and ignore both arguments in case either one has
10271 side-effects. */
10272 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10273 s1, s2);
10274 return NULL_TREE;
10275}
10a0d495 10276
2a837de2
MS
10277/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10278 to the call.
cc8bea0a 10279
2a837de2
MS
10280 Return NULL_TREE if no simplification was possible, otherwise return the
10281 simplified form of the call as a tree.
10a0d495 10282
2a837de2
MS
10283 The simplified form may be a constant or other expression which
10284 computes the same value, but in a more efficient manner (including
10285 calls to other builtin functions).
f9555f40 10286
2a837de2
MS
10287 The call may contain arguments which need to be evaluated, but
10288 which are not useful to determine the result of the call. In
10289 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10290 COMPOUND_EXPR will be an argument which must be evaluated.
10291 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10292 COMPOUND_EXPR in the chain will contain the tree for the simplified
10293 form of the builtin function call. */
10294
10295static tree
10296fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
dce6c58d 10297{
2a837de2
MS
10298 if (!validate_arg (s1, POINTER_TYPE)
10299 || !validate_arg (s2, POINTER_TYPE))
10300 return NULL_TREE;
dce6c58d 10301
2a837de2
MS
10302 if (!check_nul_terminated_array (expr, s1)
10303 || !check_nul_terminated_array (expr, s2))
10304 return NULL_TREE;
dce6c58d 10305
2a837de2
MS
10306 /* If the first argument is "", return NULL_TREE. */
10307 const char *p1 = c_getstr (s1);
10308 if (p1 && *p1 == '\0')
dce6c58d 10309 {
2a837de2
MS
10310 /* Evaluate and ignore argument s2 in case it has
10311 side-effects. */
10312 return omit_one_operand_loc (loc, size_type_node,
10313 size_zero_node, s2);
dce6c58d
MS
10314 }
10315
2a837de2
MS
10316 /* If the second argument is "", return __builtin_strlen(s1). */
10317 const char *p2 = c_getstr (s2);
10318 if (p2 && *p2 == '\0')
dce6c58d 10319 {
2a837de2 10320 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
dce6c58d 10321
2a837de2
MS
10322 /* If the replacement _DECL isn't initialized, don't do the
10323 transformation. */
10324 if (!fn)
10325 return NULL_TREE;
dce6c58d 10326
2a837de2
MS
10327 return build_call_expr_loc (loc, fn, 1, s1);
10328 }
10329 return NULL_TREE;
dce6c58d
MS
10330}
10331
2a837de2
MS
10332/* Fold the next_arg or va_start call EXP. Returns true if there was an error
10333 produced. False otherwise. This is done so that we don't output the error
10334 or warning twice or three times. */
dce6c58d 10335
2a837de2
MS
10336bool
10337fold_builtin_next_arg (tree exp, bool va_start_p)
dce6c58d 10338{
2a837de2
MS
10339 tree fntype = TREE_TYPE (current_function_decl);
10340 int nargs = call_expr_nargs (exp);
10341 tree arg;
10342 /* There is good chance the current input_location points inside the
10343 definition of the va_start macro (perhaps on the token for
10344 builtin) in a system header, so warnings will not be emitted.
10345 Use the location in real source code. */
10346 location_t current_location =
10347 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10348 NULL);
c7e276b8 10349
2a837de2 10350 if (!stdarg_p (fntype))
d9f1466f 10351 {
2a837de2
MS
10352 error ("%<va_start%> used in function with fixed arguments");
10353 return true;
d9f1466f 10354 }
dce6c58d 10355
2a837de2 10356 if (va_start_p)
dce6c58d 10357 {
2a837de2 10358 if (va_start_p && (nargs != 2))
dce6c58d 10359 {
2a837de2
MS
10360 error ("wrong number of arguments to function %<va_start%>");
10361 return true;
dce6c58d 10362 }
2a837de2 10363 arg = CALL_EXPR_ARG (exp, 1);
dce6c58d 10364 }
2a837de2
MS
10365 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10366 when we checked the arguments and if needed issued a warning. */
10367 else
dce6c58d 10368 {
2a837de2
MS
10369 if (nargs == 0)
10370 {
10371 /* Evidently an out of date version of <stdarg.h>; can't validate
10372 va_start's second argument, but can still work as intended. */
10373 warning_at (current_location,
10374 OPT_Wvarargs,
10375 "%<__builtin_next_arg%> called without an argument");
10376 return true;
10377 }
10378 else if (nargs > 1)
10379 {
10380 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10381 return true;
10382 }
10383 arg = CALL_EXPR_ARG (exp, 0);
dce6c58d
MS
10384 }
10385
2a837de2
MS
10386 if (TREE_CODE (arg) == SSA_NAME
10387 && SSA_NAME_VAR (arg))
10388 arg = SSA_NAME_VAR (arg);
fe7f75cf 10389
2a837de2
MS
10390 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10391 or __builtin_next_arg (0) the first time we see it, after checking
10392 the arguments and if needed issuing a warning. */
10393 if (!integer_zerop (arg))
fd64f348 10394 {
2a837de2 10395 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
fe7f75cf 10396
2a837de2
MS
10397 /* Strip off all nops for the sake of the comparison. This
10398 is not quite the same as STRIP_NOPS. It does more.
10399 We must also strip off INDIRECT_EXPR for C++ reference
10400 parameters. */
10401 while (CONVERT_EXPR_P (arg)
10402 || TREE_CODE (arg) == INDIRECT_REF)
10403 arg = TREE_OPERAND (arg, 0);
10404 if (arg != last_parm)
10405 {
10406 /* FIXME: Sometimes with the tree optimizers we can get the
10407 not the last argument even though the user used the last
10408 argument. We just warn and set the arg to be the last
10409 argument so that we will get wrong-code because of
10410 it. */
10411 warning_at (current_location,
10412 OPT_Wvarargs,
10413 "second parameter of %<va_start%> not last named argument");
10414 }
fe7f75cf 10415
2a837de2
MS
10416 /* Undefined by C99 7.15.1.4p4 (va_start):
10417 "If the parameter parmN is declared with the register storage
10418 class, with a function or array type, or with a type that is
10419 not compatible with the type that results after application of
10420 the default argument promotions, the behavior is undefined."
10421 */
10422 else if (DECL_REGISTER (arg))
10423 {
10424 warning_at (current_location,
10425 OPT_Wvarargs,
10426 "undefined behavior when second parameter of "
10427 "%<va_start%> is declared with %<register%> storage");
10428 }
fe7f75cf 10429
2a837de2
MS
10430 /* We want to verify the second parameter just once before the tree
10431 optimizers are run and then avoid keeping it in the tree,
10432 as otherwise we could warn even for correct code like:
10433 void foo (int i, ...)
10434 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10435 if (va_start_p)
10436 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10437 else
10438 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10439 }
10440 return false;
10441}
fe7f75cf 10442
fe7f75cf 10443
2a837de2 10444/* Expand a call EXP to __builtin_object_size. */
fe7f75cf 10445
2a837de2
MS
10446static rtx
10447expand_builtin_object_size (tree exp)
10448{
10449 tree ost;
10450 int object_size_type;
10451 tree fndecl = get_callee_fndecl (exp);
fe7f75cf 10452
2a837de2
MS
10453 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10454 {
10455 error ("first argument of %qD must be a pointer, second integer constant",
10456 fndecl);
10457 expand_builtin_trap ();
10458 return const0_rtx;
fd64f348 10459 }
fe7f75cf 10460
2a837de2
MS
10461 ost = CALL_EXPR_ARG (exp, 1);
10462 STRIP_NOPS (ost);
fe7f75cf 10463
2a837de2
MS
10464 if (TREE_CODE (ost) != INTEGER_CST
10465 || tree_int_cst_sgn (ost) < 0
10466 || compare_tree_int (ost, 3) > 0)
10467 {
10468 error ("last argument of %qD is not integer constant between 0 and 3",
10469 fndecl);
10470 expand_builtin_trap ();
10471 return const0_rtx;
10472 }
fe7f75cf 10473
2a837de2 10474 object_size_type = tree_to_shwi (ost);
fe7f75cf 10475
2a837de2 10476 return object_size_type < 2 ? constm1_rtx : const0_rtx;
dce6c58d
MS
10477}
10478
2a837de2
MS
10479/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10480 FCODE is the BUILT_IN_* to use.
10481 Return NULL_RTX if we failed; the caller should emit a normal call,
10482 otherwise try to get the result in TARGET, if convenient (and in
10483 mode MODE if that's convenient). */
fd64f348 10484
2a837de2
MS
10485static rtx
10486expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10487 enum built_in_function fcode)
fd64f348 10488{
2a837de2
MS
10489 if (!validate_arglist (exp,
10490 POINTER_TYPE,
10491 fcode == BUILT_IN_MEMSET_CHK
10492 ? INTEGER_TYPE : POINTER_TYPE,
10493 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10494 return NULL_RTX;
fd64f348 10495
2a837de2
MS
10496 tree dest = CALL_EXPR_ARG (exp, 0);
10497 tree src = CALL_EXPR_ARG (exp, 1);
10498 tree len = CALL_EXPR_ARG (exp, 2);
10499 tree size = CALL_EXPR_ARG (exp, 3);
fd64f348 10500
2a837de2
MS
10501 /* FIXME: Set access mode to write only for memset et al. */
10502 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10503 /*srcstr=*/NULL_TREE, size, access_read_write);
dce6c58d 10504
2a837de2
MS
10505 if (!tree_fits_uhwi_p (size))
10506 return NULL_RTX;
fe7f75cf 10507
2a837de2 10508 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
dce6c58d 10509 {
2a837de2
MS
10510 /* Avoid transforming the checking call to an ordinary one when
10511 an overflow has been detected or when the call couldn't be
10512 validated because the size is not constant. */
10513 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10514 return NULL_RTX;
dce6c58d 10515
2a837de2
MS
10516 tree fn = NULL_TREE;
10517 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10518 mem{cpy,pcpy,move,set} is available. */
10519 switch (fcode)
dce6c58d 10520 {
2a837de2
MS
10521 case BUILT_IN_MEMCPY_CHK:
10522 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10523 break;
10524 case BUILT_IN_MEMPCPY_CHK:
10525 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10526 break;
10527 case BUILT_IN_MEMMOVE_CHK:
10528 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10529 break;
10530 case BUILT_IN_MEMSET_CHK:
10531 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
dce6c58d 10532 break;
dce6c58d
MS
10533 default:
10534 break;
10535 }
fe7f75cf 10536
2a837de2
MS
10537 if (! fn)
10538 return NULL_RTX;
fe7f75cf 10539
2a837de2
MS
10540 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10541 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10542 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10543 return expand_expr (fn, target, mode, EXPAND_NORMAL);
fe7f75cf 10544 }
2a837de2
MS
10545 else if (fcode == BUILT_IN_MEMSET_CHK)
10546 return NULL_RTX;
10547 else
dce6c58d 10548 {
2a837de2 10549 unsigned int dest_align = get_pointer_alignment (dest);
dce6c58d 10550
2a837de2
MS
10551 /* If DEST is not a pointer type, call the normal function. */
10552 if (dest_align == 0)
10553 return NULL_RTX;
fe7f75cf 10554
2a837de2
MS
10555 /* If SRC and DEST are the same (and not volatile), do nothing. */
10556 if (operand_equal_p (src, dest, 0))
fe7f75cf 10557 {
2a837de2 10558 tree expr;
fe7f75cf 10559
2a837de2 10560 if (fcode != BUILT_IN_MEMPCPY_CHK)
fe7f75cf 10561 {
2a837de2
MS
10562 /* Evaluate and ignore LEN in case it has side-effects. */
10563 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10564 return expand_expr (dest, target, mode, EXPAND_NORMAL);
fe7f75cf
MS
10565 }
10566
2a837de2
MS
10567 expr = fold_build_pointer_plus (dest, len);
10568 return expand_expr (expr, target, mode, EXPAND_NORMAL);
fe7f75cf
MS
10569 }
10570
2a837de2
MS
10571 /* __memmove_chk special case. */
10572 if (fcode == BUILT_IN_MEMMOVE_CHK)
10573 {
10574 unsigned int src_align = get_pointer_alignment (src);
fe7f75cf 10575
2a837de2
MS
10576 if (src_align == 0)
10577 return NULL_RTX;
fe7f75cf 10578
2a837de2
MS
10579 /* If src is categorized for a readonly section we can use
10580 normal __memcpy_chk. */
10581 if (readonly_data_expr (src))
10582 {
10583 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10584 if (!fn)
10585 return NULL_RTX;
10586 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10587 dest, src, len, size);
10588 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10589 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10590 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10591 }
10592 }
10593 return NULL_RTX;
fe7f75cf 10594 }
dce6c58d
MS
10595}
10596
2a837de2 10597/* Emit warning if a buffer overflow is detected at compile time. */
dce6c58d 10598
2a837de2
MS
10599static void
10600maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
dce6c58d 10601{
2a837de2
MS
10602 /* The source string. */
10603 tree srcstr = NULL_TREE;
10604 /* The size of the destination object returned by __builtin_object_size. */
10605 tree objsize = NULL_TREE;
10606 /* The string that is being concatenated with (as in __strcat_chk)
10607 or null if it isn't. */
10608 tree catstr = NULL_TREE;
10609 /* The maximum length of the source sequence in a bounded operation
10610 (such as __strncat_chk) or null if the operation isn't bounded
10611 (such as __strcat_chk). */
10612 tree maxread = NULL_TREE;
10613 /* The exact size of the access (such as in __strncpy_chk). */
10614 tree size = NULL_TREE;
10615 /* The access by the function that's checked. Except for snprintf
10616 both writing and reading is checked. */
10617 access_mode mode = access_read_write;
dce6c58d 10618
2a837de2
MS
10619 switch (fcode)
10620 {
10621 case BUILT_IN_STRCPY_CHK:
10622 case BUILT_IN_STPCPY_CHK:
10623 srcstr = CALL_EXPR_ARG (exp, 1);
10624 objsize = CALL_EXPR_ARG (exp, 2);
10625 break;
dce6c58d 10626
2a837de2
MS
10627 case BUILT_IN_STRCAT_CHK:
10628 /* For __strcat_chk the warning will be emitted only if overflowing
10629 by at least strlen (dest) + 1 bytes. */
10630 catstr = CALL_EXPR_ARG (exp, 0);
10631 srcstr = CALL_EXPR_ARG (exp, 1);
10632 objsize = CALL_EXPR_ARG (exp, 2);
10633 break;
fe7f75cf 10634
2a837de2
MS
10635 case BUILT_IN_STRNCAT_CHK:
10636 catstr = CALL_EXPR_ARG (exp, 0);
10637 srcstr = CALL_EXPR_ARG (exp, 1);
10638 maxread = CALL_EXPR_ARG (exp, 2);
10639 objsize = CALL_EXPR_ARG (exp, 3);
10640 break;
fdd8560c 10641
2a837de2
MS
10642 case BUILT_IN_STRNCPY_CHK:
10643 case BUILT_IN_STPNCPY_CHK:
10644 srcstr = CALL_EXPR_ARG (exp, 1);
10645 size = CALL_EXPR_ARG (exp, 2);
10646 objsize = CALL_EXPR_ARG (exp, 3);
10647 break;
fe7f75cf 10648
2a837de2
MS
10649 case BUILT_IN_SNPRINTF_CHK:
10650 case BUILT_IN_VSNPRINTF_CHK:
10651 maxread = CALL_EXPR_ARG (exp, 1);
10652 objsize = CALL_EXPR_ARG (exp, 3);
10653 /* The only checked access the write to the destination. */
10654 mode = access_write_only;
10655 break;
10656 default:
10657 gcc_unreachable ();
dce6c58d
MS
10658 }
10659
2a837de2 10660 if (catstr && maxread)
dce6c58d 10661 {
2a837de2
MS
10662 /* Check __strncat_chk. There is no way to determine the length
10663 of the string to which the source string is being appended so
10664 just warn when the length of the source string is not known. */
10665 check_strncat_sizes (exp, objsize);
10666 return;
dce6c58d
MS
10667 }
10668
2a837de2 10669 check_access (exp, size, maxread, srcstr, objsize, mode);
dce6c58d
MS
10670}
10671
2a837de2
MS
10672/* Emit warning if a buffer overflow is detected at compile time
10673 in __sprintf_chk/__vsprintf_chk calls. */
dce6c58d 10674
2a837de2
MS
10675static void
10676maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
f9555f40 10677{
2a837de2
MS
10678 tree size, len, fmt;
10679 const char *fmt_str;
10680 int nargs = call_expr_nargs (exp);
9616781d 10681
2a837de2 10682 /* Verify the required arguments in the original call. */
f9555f40 10683
2a837de2 10684 if (nargs < 4)
f9555f40 10685 return;
2a837de2
MS
10686 size = CALL_EXPR_ARG (exp, 2);
10687 fmt = CALL_EXPR_ARG (exp, 3);
f9555f40 10688
2a837de2 10689 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
f9555f40
JJ
10690 return;
10691
2a837de2
MS
10692 /* Check whether the format is a literal string constant. */
10693 fmt_str = c_getstr (fmt);
10694 if (fmt_str == NULL)
dce6c58d
MS
10695 return;
10696
2a837de2
MS
10697 if (!init_target_chars ())
10698 return;
dce6c58d 10699
2a837de2
MS
10700 /* If the format doesn't contain % args or %%, we know its size. */
10701 if (strchr (fmt_str, target_percent) == 0)
10702 len = build_int_cstu (size_type_node, strlen (fmt_str));
10703 /* If the format is "%s" and first ... argument is a string literal,
10704 we know it too. */
10705 else if (fcode == BUILT_IN_SPRINTF_CHK
10706 && strcmp (fmt_str, target_percent_s) == 0)
dce6c58d 10707 {
2a837de2 10708 tree arg;
dce6c58d 10709
2a837de2
MS
10710 if (nargs < 5)
10711 return;
10712 arg = CALL_EXPR_ARG (exp, 4);
10713 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10714 return;
10715
10716 len = c_strlen (arg, 1);
10717 if (!len || ! tree_fits_uhwi_p (len))
dce6c58d
MS
10718 return;
10719 }
2a837de2
MS
10720 else
10721 return;
10722
10723 /* Add one for the terminating nul. */
10724 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10725
10726 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10727 access_write_only);
f9555f40
JJ
10728}
10729
5039610b
SL
10730/* Fold a call to __builtin_object_size with arguments PTR and OST,
10731 if possible. */
10a0d495 10732
9b2b7279 10733static tree
79a89108 10734fold_builtin_object_size (tree ptr, tree ost, enum built_in_function fcode)
10a0d495 10735{
422f9eb7 10736 tree bytes;
10a0d495
JJ
10737 int object_size_type;
10738
5039610b
SL
10739 if (!validate_arg (ptr, POINTER_TYPE)
10740 || !validate_arg (ost, INTEGER_TYPE))
10741 return NULL_TREE;
10a0d495 10742
10a0d495
JJ
10743 STRIP_NOPS (ost);
10744
10745 if (TREE_CODE (ost) != INTEGER_CST
10746 || tree_int_cst_sgn (ost) < 0
10747 || compare_tree_int (ost, 3) > 0)
5039610b 10748 return NULL_TREE;
10a0d495 10749
9439e9a1 10750 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
10751
10752 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10753 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10754 and (size_t) 0 for types 2 and 3. */
10755 if (TREE_SIDE_EFFECTS (ptr))
2ac7cbb5 10756 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10a0d495 10757
79a89108
SP
10758 if (fcode == BUILT_IN_DYNAMIC_OBJECT_SIZE)
10759 object_size_type |= OST_DYNAMIC;
10760
10a0d495 10761 if (TREE_CODE (ptr) == ADDR_EXPR)
88e06841 10762 {
05a64756 10763 compute_builtin_object_size (ptr, object_size_type, &bytes);
404c787e
SP
10764 if ((object_size_type & OST_DYNAMIC)
10765 || int_fits_type_p (bytes, size_type_node))
422f9eb7 10766 return fold_convert (size_type_node, bytes);
88e06841 10767 }
10a0d495
JJ
10768 else if (TREE_CODE (ptr) == SSA_NAME)
10769 {
10a0d495
JJ
10770 /* If object size is not known yet, delay folding until
10771 later. Maybe subsequent passes will help determining
10772 it. */
05a64756 10773 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
404c787e
SP
10774 && ((object_size_type & OST_DYNAMIC)
10775 || int_fits_type_p (bytes, size_type_node)))
422f9eb7 10776 return fold_convert (size_type_node, bytes);
10a0d495
JJ
10777 }
10778
88e06841 10779 return NULL_TREE;
10a0d495
JJ
10780}
10781
903c723b
TC
10782/* Builtins with folding operations that operate on "..." arguments
10783 need special handling; we need to store the arguments in a convenient
10784 data structure before attempting any folding. Fortunately there are
10785 only a few builtins that fall into this category. FNDECL is the
10786 function, EXP is the CALL_EXPR for the call. */
10787
10788static tree
10789fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10790{
10791 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10792 tree ret = NULL_TREE;
10793
10794 switch (fcode)
10795 {
10796 case BUILT_IN_FPCLASSIFY:
10797 ret = fold_builtin_fpclassify (loc, args, nargs);
10798 break;
10799
10800 default:
10801 break;
10802 }
10803 if (ret)
10804 {
10805 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10806 SET_EXPR_LOCATION (ret, loc);
e9e2bad7 10807 suppress_warning (ret);
903c723b
TC
10808 return ret;
10809 }
10810 return NULL_TREE;
10811}
10812
000ba23d
KG
10813/* Initialize format string characters in the target charset. */
10814
fef5a0d9 10815bool
000ba23d
KG
10816init_target_chars (void)
10817{
10818 static bool init;
10819 if (!init)
10820 {
10821 target_newline = lang_hooks.to_target_charset ('\n');
10822 target_percent = lang_hooks.to_target_charset ('%');
10823 target_c = lang_hooks.to_target_charset ('c');
10824 target_s = lang_hooks.to_target_charset ('s');
10825 if (target_newline == 0 || target_percent == 0 || target_c == 0
10826 || target_s == 0)
10827 return false;
10828
10829 target_percent_c[0] = target_percent;
10830 target_percent_c[1] = target_c;
10831 target_percent_c[2] = '\0';
10832
10833 target_percent_s[0] = target_percent;
10834 target_percent_s[1] = target_s;
10835 target_percent_s[2] = '\0';
10836
10837 target_percent_s_newline[0] = target_percent;
10838 target_percent_s_newline[1] = target_s;
10839 target_percent_s_newline[2] = target_newline;
10840 target_percent_s_newline[3] = '\0';
c22cacf3 10841
000ba23d
KG
10842 init = true;
10843 }
10844 return true;
10845}
1f3f1f68 10846
4413d881
KG
10847/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10848 and no overflow/underflow occurred. INEXACT is true if M was not
2f8e468b 10849 exactly calculated. TYPE is the tree type for the result. This
4413d881
KG
10850 function assumes that you cleared the MPFR flags and then
10851 calculated M to see if anything subsequently set a flag prior to
10852 entering this function. Return NULL_TREE if any checks fail. */
10853
10854static tree
62e5bf5d 10855do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
4413d881
KG
10856{
10857 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10858 overflow/underflow occurred. If -frounding-math, proceed iff the
10859 result of calling FUNC was exact. */
62e5bf5d 10860 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
4413d881
KG
10861 && (!flag_rounding_math || !inexact))
10862 {
10863 REAL_VALUE_TYPE rr;
10864
90ca6847 10865 real_from_mpfr (&rr, m, type, MPFR_RNDN);
4413d881
KG
10866 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10867 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10868 but the mpft_t is not, then we underflowed in the
10869 conversion. */
4c8c70e0 10870 if (real_isfinite (&rr)
4413d881
KG
10871 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10872 {
10873 REAL_VALUE_TYPE rmode;
10874
10875 real_convert (&rmode, TYPE_MODE (type), &rr);
10876 /* Proceed iff the specified mode can hold the value. */
10877 if (real_identical (&rmode, &rr))
10878 return build_real (type, rmode);
10879 }
10880 }
10881 return NULL_TREE;
10882}
10883
c128599a
KG
10884/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10885 number and no overflow/underflow occurred. INEXACT is true if M
10886 was not exactly calculated. TYPE is the tree type for the result.
10887 This function assumes that you cleared the MPFR flags and then
10888 calculated M to see if anything subsequently set a flag prior to
ca75b926
KG
10889 entering this function. Return NULL_TREE if any checks fail, if
10890 FORCE_CONVERT is true, then bypass the checks. */
c128599a
KG
10891
10892static tree
ca75b926 10893do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
c128599a
KG
10894{
10895 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10896 overflow/underflow occurred. If -frounding-math, proceed iff the
10897 result of calling FUNC was exact. */
ca75b926
KG
10898 if (force_convert
10899 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10900 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10901 && (!flag_rounding_math || !inexact)))
c128599a
KG
10902 {
10903 REAL_VALUE_TYPE re, im;
10904
90ca6847
TB
10905 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10906 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
c128599a
KG
10907 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10908 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10909 but the mpft_t is not, then we underflowed in the
10910 conversion. */
ca75b926
KG
10911 if (force_convert
10912 || (real_isfinite (&re) && real_isfinite (&im)
10913 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10914 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
c128599a
KG
10915 {
10916 REAL_VALUE_TYPE re_mode, im_mode;
10917
10918 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10919 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10920 /* Proceed iff the specified mode can hold the value. */
ca75b926
KG
10921 if (force_convert
10922 || (real_identical (&re_mode, &re)
10923 && real_identical (&im_mode, &im)))
c128599a
KG
10924 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10925 build_real (TREE_TYPE (type), im_mode));
10926 }
10927 }
10928 return NULL_TREE;
10929}
c128599a 10930
ea91f957
KG
10931/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10932 the pointer *(ARG_QUO) and return the result. The type is taken
10933 from the type of ARG0 and is used for setting the precision of the
10934 calculation and results. */
10935
10936static tree
10937do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10938{
10939 tree const type = TREE_TYPE (arg0);
10940 tree result = NULL_TREE;
b8698a0f 10941
ea91f957
KG
10942 STRIP_NOPS (arg0);
10943 STRIP_NOPS (arg1);
b8698a0f 10944
ea91f957
KG
10945 /* To proceed, MPFR must exactly represent the target floating point
10946 format, which only happens when the target base equals two. */
10947 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10948 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10949 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10950 {
10951 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10952 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10953
4c8c70e0 10954 if (real_isfinite (ra0) && real_isfinite (ra1))
ea91f957 10955 {
3e479de3
UW
10956 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10957 const int prec = fmt->p;
90ca6847 10958 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
ea91f957
KG
10959 tree result_rem;
10960 long integer_quo;
10961 mpfr_t m0, m1;
10962
10963 mpfr_inits2 (prec, m0, m1, NULL);
90ca6847
TB
10964 mpfr_from_real (m0, ra0, MPFR_RNDN);
10965 mpfr_from_real (m1, ra1, MPFR_RNDN);
ea91f957 10966 mpfr_clear_flags ();
3e479de3 10967 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
ea91f957
KG
10968 /* Remquo is independent of the rounding mode, so pass
10969 inexact=0 to do_mpfr_ckconv(). */
10970 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10971 mpfr_clears (m0, m1, NULL);
10972 if (result_rem)
10973 {
10974 /* MPFR calculates quo in the host's long so it may
10975 return more bits in quo than the target int can hold
10976 if sizeof(host long) > sizeof(target int). This can
10977 happen even for native compilers in LP64 mode. In
10978 these cases, modulo the quo value with the largest
10979 number that the target int can hold while leaving one
10980 bit for the sign. */
10981 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10982 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10983
10984 /* Dereference the quo pointer argument. */
10985 arg_quo = build_fold_indirect_ref (arg_quo);
10986 /* Proceed iff a valid pointer type was passed in. */
10987 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10988 {
10989 /* Set the value. */
45a2c477
RG
10990 tree result_quo
10991 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10992 build_int_cst (TREE_TYPE (arg_quo),
10993 integer_quo));
ea91f957
KG
10994 TREE_SIDE_EFFECTS (result_quo) = 1;
10995 /* Combine the quo assignment with the rem. */
40f6e591
RS
10996 result = fold_build2 (COMPOUND_EXPR, type,
10997 result_quo, result_rem);
10998 suppress_warning (result, OPT_Wunused_value);
10999 result = non_lvalue (result);
ea91f957
KG
11000 }
11001 }
11002 }
11003 }
11004 return result;
11005}
752b7d38
KG
11006
11007/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11008 resulting value as a tree with type TYPE. The mpfr precision is
11009 set to the precision of TYPE. We assume that this mpfr function
11010 returns zero if the result could be calculated exactly within the
11011 requested precision. In addition, the integer pointer represented
11012 by ARG_SG will be dereferenced and set to the appropriate signgam
11013 (-1,1) value. */
11014
11015static tree
11016do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11017{
11018 tree result = NULL_TREE;
11019
11020 STRIP_NOPS (arg);
b8698a0f 11021
752b7d38
KG
11022 /* To proceed, MPFR must exactly represent the target floating point
11023 format, which only happens when the target base equals two. Also
11024 verify ARG is a constant and that ARG_SG is an int pointer. */
11025 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11026 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11027 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11028 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11029 {
11030 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11031
11032 /* In addition to NaN and Inf, the argument cannot be zero or a
11033 negative integer. */
4c8c70e0 11034 if (real_isfinite (ra)
752b7d38 11035 && ra->cl != rvc_zero
c3284718 11036 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
752b7d38 11037 {
3e479de3
UW
11038 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11039 const int prec = fmt->p;
90ca6847 11040 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
752b7d38
KG
11041 int inexact, sg;
11042 mpfr_t m;
11043 tree result_lg;
11044
11045 mpfr_init2 (m, prec);
90ca6847 11046 mpfr_from_real (m, ra, MPFR_RNDN);
752b7d38 11047 mpfr_clear_flags ();
3e479de3 11048 inexact = mpfr_lgamma (m, &sg, m, rnd);
752b7d38
KG
11049 result_lg = do_mpfr_ckconv (m, type, inexact);
11050 mpfr_clear (m);
11051 if (result_lg)
11052 {
11053 tree result_sg;
11054
11055 /* Dereference the arg_sg pointer argument. */
11056 arg_sg = build_fold_indirect_ref (arg_sg);
11057 /* Assign the signgam value into *arg_sg. */
11058 result_sg = fold_build2 (MODIFY_EXPR,
11059 TREE_TYPE (arg_sg), arg_sg,
45a2c477 11060 build_int_cst (TREE_TYPE (arg_sg), sg));
752b7d38
KG
11061 TREE_SIDE_EFFECTS (result_sg) = 1;
11062 /* Combine the signgam assignment with the lgamma result. */
11063 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11064 result_sg, result_lg));
11065 }
11066 }
11067 }
11068
11069 return result;
11070}
726a989a 11071
a41d064d
KG
11072/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11073 mpc function FUNC on it and return the resulting value as a tree
11074 with type TYPE. The mpfr precision is set to the precision of
11075 TYPE. We assume that function FUNC returns zero if the result
ca75b926
KG
11076 could be calculated exactly within the requested precision. If
11077 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11078 in the arguments and/or results. */
a41d064d 11079
2f440f6a 11080tree
ca75b926 11081do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
a41d064d
KG
11082 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11083{
11084 tree result = NULL_TREE;
b8698a0f 11085
a41d064d
KG
11086 STRIP_NOPS (arg0);
11087 STRIP_NOPS (arg1);
11088
11089 /* To proceed, MPFR must exactly represent the target floating point
11090 format, which only happens when the target base equals two. */
11091 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11092 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11093 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11094 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11095 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11096 {
11097 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11098 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11099 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11100 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11101
ca75b926
KG
11102 if (do_nonfinite
11103 || (real_isfinite (re0) && real_isfinite (im0)
11104 && real_isfinite (re1) && real_isfinite (im1)))
a41d064d
KG
11105 {
11106 const struct real_format *const fmt =
11107 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11108 const int prec = fmt->p;
90ca6847
TB
11109 const mpfr_rnd_t rnd = fmt->round_towards_zero
11110 ? MPFR_RNDZ : MPFR_RNDN;
a41d064d
KG
11111 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11112 int inexact;
11113 mpc_t m0, m1;
b8698a0f 11114
a41d064d
KG
11115 mpc_init2 (m0, prec);
11116 mpc_init2 (m1, prec);
c3284718
RS
11117 mpfr_from_real (mpc_realref (m0), re0, rnd);
11118 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11119 mpfr_from_real (mpc_realref (m1), re1, rnd);
11120 mpfr_from_real (mpc_imagref (m1), im1, rnd);
a41d064d
KG
11121 mpfr_clear_flags ();
11122 inexact = func (m0, m0, m1, crnd);
ca75b926 11123 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
a41d064d
KG
11124 mpc_clear (m0);
11125 mpc_clear (m1);
11126 }
11127 }
11128
11129 return result;
11130}
c128599a 11131
726a989a
RB
11132/* A wrapper function for builtin folding that prevents warnings for
11133 "statement without effect" and the like, caused by removing the
11134 call node earlier than the warning is generated. */
11135
11136tree
538dd0b7 11137fold_call_stmt (gcall *stmt, bool ignore)
726a989a
RB
11138{
11139 tree ret = NULL_TREE;
11140 tree fndecl = gimple_call_fndecl (stmt);
db3927fb 11141 location_t loc = gimple_location (stmt);
3d78e008 11142 if (fndecl && fndecl_built_in_p (fndecl)
726a989a
RB
11143 && !gimple_call_va_arg_pack_p (stmt))
11144 {
11145 int nargs = gimple_call_num_args (stmt);
8897c9ce
NF
11146 tree *args = (nargs > 0
11147 ? gimple_call_arg_ptr (stmt, 0)
11148 : &error_mark_node);
726a989a 11149
0889e9bc
JJ
11150 if (avoid_folding_inline_builtin (fndecl))
11151 return NULL_TREE;
726a989a
RB
11152 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11153 {
8897c9ce 11154 return targetm.fold_builtin (fndecl, nargs, args, ignore);
726a989a
RB
11155 }
11156 else
11157 {
b5338fb3 11158 ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
726a989a
RB
11159 if (ret)
11160 {
11161 /* Propagate location information from original call to
11162 expansion of builtin. Otherwise things like
11163 maybe_emit_chk_warning, that operate on the expansion
11164 of a builtin, will use the wrong location information. */
11165 if (gimple_has_location (stmt))
11166 {
11167 tree realret = ret;
11168 if (TREE_CODE (ret) == NOP_EXPR)
11169 realret = TREE_OPERAND (ret, 0);
11170 if (CAN_HAVE_LOCATION_P (realret)
11171 && !EXPR_HAS_LOCATION (realret))
db3927fb 11172 SET_EXPR_LOCATION (realret, loc);
726a989a
RB
11173 return realret;
11174 }
11175 return ret;
11176 }
11177 }
11178 }
11179 return NULL_TREE;
11180}
d7f09764 11181
e79983f4 11182/* Look up the function in builtin_decl that corresponds to DECL
d7f09764
DN
11183 and set ASMSPEC as its user assembler name. DECL must be a
11184 function decl that declares a builtin. */
11185
11186void
11187set_builtin_user_assembler_name (tree decl, const char *asmspec)
11188{
3d78e008 11189 gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
d7f09764
DN
11190 && asmspec != 0);
11191
ee516de9 11192 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
ce835863 11193 set_user_assembler_name (builtin, asmspec);
ee516de9
EB
11194
11195 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
11196 && INT_TYPE_SIZE < BITS_PER_WORD)
d7f09764 11197 {
fffbab82 11198 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
ee516de9 11199 set_user_assembler_libfunc ("ffs", asmspec);
fffbab82 11200 set_optab_libfunc (ffs_optab, mode, "ffs");
d7f09764
DN
11201 }
11202}
bec922f0
SL
11203
11204/* Return true if DECL is a builtin that expands to a constant or similarly
11205 simple code. */
11206bool
11207is_simple_builtin (tree decl)
11208{
3d78e008 11209 if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
bec922f0
SL
11210 switch (DECL_FUNCTION_CODE (decl))
11211 {
11212 /* Builtins that expand to constants. */
11213 case BUILT_IN_CONSTANT_P:
11214 case BUILT_IN_EXPECT:
11215 case BUILT_IN_OBJECT_SIZE:
11216 case BUILT_IN_UNREACHABLE:
11217 /* Simple register moves or loads from stack. */
45d439ac 11218 case BUILT_IN_ASSUME_ALIGNED:
bec922f0
SL
11219 case BUILT_IN_RETURN_ADDRESS:
11220 case BUILT_IN_EXTRACT_RETURN_ADDR:
11221 case BUILT_IN_FROB_RETURN_ADDR:
11222 case BUILT_IN_RETURN:
11223 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11224 case BUILT_IN_FRAME_ADDRESS:
11225 case BUILT_IN_VA_END:
11226 case BUILT_IN_STACK_SAVE:
11227 case BUILT_IN_STACK_RESTORE:
33a7a632 11228 case BUILT_IN_DWARF_CFA:
bec922f0
SL
11229 /* Exception state returns or moves registers around. */
11230 case BUILT_IN_EH_FILTER:
11231 case BUILT_IN_EH_POINTER:
11232 case BUILT_IN_EH_COPY_VALUES:
11233 return true;
11234
11235 default:
11236 return false;
11237 }
11238
11239 return false;
11240}
11241
11242/* Return true if DECL is a builtin that is not expensive, i.e., they are
11243 most probably expanded inline into reasonably simple code. This is a
11244 superset of is_simple_builtin. */
11245bool
11246is_inexpensive_builtin (tree decl)
11247{
11248 if (!decl)
11249 return false;
11250 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11251 return true;
11252 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11253 switch (DECL_FUNCTION_CODE (decl))
11254 {
11255 case BUILT_IN_ABS:
9e878cf1 11256 CASE_BUILT_IN_ALLOCA:
ac868f29 11257 case BUILT_IN_BSWAP16:
bec922f0
SL
11258 case BUILT_IN_BSWAP32:
11259 case BUILT_IN_BSWAP64:
fe7ebef7 11260 case BUILT_IN_BSWAP128:
bec922f0
SL
11261 case BUILT_IN_CLZ:
11262 case BUILT_IN_CLZIMAX:
11263 case BUILT_IN_CLZL:
11264 case BUILT_IN_CLZLL:
11265 case BUILT_IN_CTZ:
11266 case BUILT_IN_CTZIMAX:
11267 case BUILT_IN_CTZL:
11268 case BUILT_IN_CTZLL:
11269 case BUILT_IN_FFS:
11270 case BUILT_IN_FFSIMAX:
11271 case BUILT_IN_FFSL:
11272 case BUILT_IN_FFSLL:
11273 case BUILT_IN_IMAXABS:
11274 case BUILT_IN_FINITE:
11275 case BUILT_IN_FINITEF:
11276 case BUILT_IN_FINITEL:
11277 case BUILT_IN_FINITED32:
11278 case BUILT_IN_FINITED64:
11279 case BUILT_IN_FINITED128:
11280 case BUILT_IN_FPCLASSIFY:
11281 case BUILT_IN_ISFINITE:
11282 case BUILT_IN_ISINF_SIGN:
11283 case BUILT_IN_ISINF:
11284 case BUILT_IN_ISINFF:
11285 case BUILT_IN_ISINFL:
11286 case BUILT_IN_ISINFD32:
11287 case BUILT_IN_ISINFD64:
11288 case BUILT_IN_ISINFD128:
11289 case BUILT_IN_ISNAN:
11290 case BUILT_IN_ISNANF:
11291 case BUILT_IN_ISNANL:
11292 case BUILT_IN_ISNAND32:
11293 case BUILT_IN_ISNAND64:
11294 case BUILT_IN_ISNAND128:
11295 case BUILT_IN_ISNORMAL:
11296 case BUILT_IN_ISGREATER:
11297 case BUILT_IN_ISGREATEREQUAL:
11298 case BUILT_IN_ISLESS:
11299 case BUILT_IN_ISLESSEQUAL:
11300 case BUILT_IN_ISLESSGREATER:
11301 case BUILT_IN_ISUNORDERED:
11302 case BUILT_IN_VA_ARG_PACK:
11303 case BUILT_IN_VA_ARG_PACK_LEN:
11304 case BUILT_IN_VA_COPY:
11305 case BUILT_IN_TRAP:
11306 case BUILT_IN_SAVEREGS:
11307 case BUILT_IN_POPCOUNTL:
11308 case BUILT_IN_POPCOUNTLL:
11309 case BUILT_IN_POPCOUNTIMAX:
11310 case BUILT_IN_POPCOUNT:
11311 case BUILT_IN_PARITYL:
11312 case BUILT_IN_PARITYLL:
11313 case BUILT_IN_PARITYIMAX:
11314 case BUILT_IN_PARITY:
11315 case BUILT_IN_LABS:
11316 case BUILT_IN_LLABS:
11317 case BUILT_IN_PREFETCH:
41dbbb37 11318 case BUILT_IN_ACC_ON_DEVICE:
bec922f0
SL
11319 return true;
11320
11321 default:
11322 return is_simple_builtin (decl);
11323 }
11324
11325 return false;
11326}
488c6247
ML
11327
11328/* Return true if T is a constant and the value cast to a target char
11329 can be represented by a host char.
11330 Store the casted char constant in *P if so. */
11331
11332bool
11333target_char_cst_p (tree t, char *p)
11334{
11335 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
11336 return false;
11337
11338 *p = (char)tree_to_uhwi (t);
11339 return true;
11340}
5747e0c0
XHL
11341
11342/* Return true if the builtin DECL is implemented in a standard library.
f418bc3c
EB
11343 Otherwise return false which doesn't guarantee it is not (thus the list
11344 of handled builtins below may be incomplete). */
5747e0c0
XHL
11345
11346bool
11347builtin_with_linkage_p (tree decl)
11348{
11349 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11350 switch (DECL_FUNCTION_CODE (decl))
11351 {
11352 CASE_FLT_FN (BUILT_IN_ACOS):
11353 CASE_FLT_FN (BUILT_IN_ACOSH):
11354 CASE_FLT_FN (BUILT_IN_ASIN):
11355 CASE_FLT_FN (BUILT_IN_ASINH):
11356 CASE_FLT_FN (BUILT_IN_ATAN):
11357 CASE_FLT_FN (BUILT_IN_ATANH):
11358 CASE_FLT_FN (BUILT_IN_ATAN2):
11359 CASE_FLT_FN (BUILT_IN_CBRT):
11360 CASE_FLT_FN (BUILT_IN_CEIL):
11361 CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
11362 CASE_FLT_FN (BUILT_IN_COPYSIGN):
11363 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
11364 CASE_FLT_FN (BUILT_IN_COS):
11365 CASE_FLT_FN (BUILT_IN_COSH):
11366 CASE_FLT_FN (BUILT_IN_ERF):
11367 CASE_FLT_FN (BUILT_IN_ERFC):
11368 CASE_FLT_FN (BUILT_IN_EXP):
11369 CASE_FLT_FN (BUILT_IN_EXP2):
11370 CASE_FLT_FN (BUILT_IN_EXPM1):
11371 CASE_FLT_FN (BUILT_IN_FABS):
11372 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
11373 CASE_FLT_FN (BUILT_IN_FDIM):
11374 CASE_FLT_FN (BUILT_IN_FLOOR):
11375 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
11376 CASE_FLT_FN (BUILT_IN_FMA):
11377 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
11378 CASE_FLT_FN (BUILT_IN_FMAX):
11379 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
11380 CASE_FLT_FN (BUILT_IN_FMIN):
11381 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
11382 CASE_FLT_FN (BUILT_IN_FMOD):
11383 CASE_FLT_FN (BUILT_IN_FREXP):
11384 CASE_FLT_FN (BUILT_IN_HYPOT):
11385 CASE_FLT_FN (BUILT_IN_ILOGB):
11386 CASE_FLT_FN (BUILT_IN_LDEXP):
11387 CASE_FLT_FN (BUILT_IN_LGAMMA):
11388 CASE_FLT_FN (BUILT_IN_LLRINT):
11389 CASE_FLT_FN (BUILT_IN_LLROUND):
11390 CASE_FLT_FN (BUILT_IN_LOG):
11391 CASE_FLT_FN (BUILT_IN_LOG10):
11392 CASE_FLT_FN (BUILT_IN_LOG1P):
11393 CASE_FLT_FN (BUILT_IN_LOG2):
11394 CASE_FLT_FN (BUILT_IN_LOGB):
11395 CASE_FLT_FN (BUILT_IN_LRINT):
11396 CASE_FLT_FN (BUILT_IN_LROUND):
11397 CASE_FLT_FN (BUILT_IN_MODF):
11398 CASE_FLT_FN (BUILT_IN_NAN):
11399 CASE_FLT_FN (BUILT_IN_NEARBYINT):
11400 CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
11401 CASE_FLT_FN (BUILT_IN_NEXTAFTER):
11402 CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
11403 CASE_FLT_FN (BUILT_IN_POW):
11404 CASE_FLT_FN (BUILT_IN_REMAINDER):
11405 CASE_FLT_FN (BUILT_IN_REMQUO):
11406 CASE_FLT_FN (BUILT_IN_RINT):
11407 CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
11408 CASE_FLT_FN (BUILT_IN_ROUND):
11409 CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
11410 CASE_FLT_FN (BUILT_IN_SCALBLN):
11411 CASE_FLT_FN (BUILT_IN_SCALBN):
11412 CASE_FLT_FN (BUILT_IN_SIN):
11413 CASE_FLT_FN (BUILT_IN_SINH):
11414 CASE_FLT_FN (BUILT_IN_SINCOS):
11415 CASE_FLT_FN (BUILT_IN_SQRT):
11416 CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
11417 CASE_FLT_FN (BUILT_IN_TAN):
11418 CASE_FLT_FN (BUILT_IN_TANH):
11419 CASE_FLT_FN (BUILT_IN_TGAMMA):
11420 CASE_FLT_FN (BUILT_IN_TRUNC):
11421 CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
11422 return true;
f418bc3c
EB
11423
11424 case BUILT_IN_STPCPY:
11425 case BUILT_IN_STPNCPY:
11426 /* stpcpy is both referenced in libiberty's pex-win32.c and provided
11427 by libiberty's stpcpy.c for MinGW targets so we need to return true
11428 in order to be able to build libiberty in LTO mode for them. */
11429 return true;
11430
5747e0c0
XHL
11431 default:
11432 break;
11433 }
11434 return false;
11435}
de05c19d
MS
11436
11437/* Return true if OFFRNG is bounded to a subrange of offset values
11438 valid for the largest possible object. */
11439
11440bool
11441access_ref::offset_bounded () const
11442{
11443 tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
11444 tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
11445 return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
11446}
4f8cfb42
JH
11447
11448/* If CALLEE has known side effects, fill in INFO and return true.
e53b6e56 11449 See tree-ssa-structalias.cc:find_func_aliases
4f8cfb42
JH
11450 for the list of builtins we might need to handle here. */
11451
11452attr_fnspec
11453builtin_fnspec (tree callee)
11454{
11455 built_in_function code = DECL_FUNCTION_CODE (callee);
11456
11457 switch (code)
11458 {
11459 /* All the following functions read memory pointed to by
11460 their second argument and write memory pointed to by first
11461 argument.
11462 strcat/strncat additionally reads memory pointed to by the first
11463 argument. */
11464 case BUILT_IN_STRCAT:
11465 case BUILT_IN_STRCAT_CHK:
071a31a5 11466 return "1cW 1 ";
4f8cfb42
JH
11467 case BUILT_IN_STRNCAT:
11468 case BUILT_IN_STRNCAT_CHK:
071a31a5 11469 return "1cW 13";
4f8cfb42
JH
11470 case BUILT_IN_STRCPY:
11471 case BUILT_IN_STRCPY_CHK:
071a31a5 11472 return "1cO 1 ";
4f8cfb42
JH
11473 case BUILT_IN_STPCPY:
11474 case BUILT_IN_STPCPY_CHK:
071a31a5 11475 return ".cO 1 ";
4f8cfb42
JH
11476 case BUILT_IN_STRNCPY:
11477 case BUILT_IN_MEMCPY:
11478 case BUILT_IN_MEMMOVE:
11479 case BUILT_IN_TM_MEMCPY:
11480 case BUILT_IN_TM_MEMMOVE:
11481 case BUILT_IN_STRNCPY_CHK:
11482 case BUILT_IN_MEMCPY_CHK:
11483 case BUILT_IN_MEMMOVE_CHK:
071a31a5 11484 return "1cO313";
4f8cfb42
JH
11485 case BUILT_IN_MEMPCPY:
11486 case BUILT_IN_MEMPCPY_CHK:
071a31a5 11487 return ".cO313";
4f8cfb42
JH
11488 case BUILT_IN_STPNCPY:
11489 case BUILT_IN_STPNCPY_CHK:
071a31a5 11490 return ".cO313";
4f8cfb42 11491 case BUILT_IN_BCOPY:
071a31a5 11492 return ".c23O3";
b53f709d
JH
11493 case BUILT_IN_BZERO:
11494 return ".cO2";
11495 case BUILT_IN_MEMCMP:
11496 case BUILT_IN_MEMCMP_EQ:
11497 case BUILT_IN_BCMP:
11498 case BUILT_IN_STRNCMP:
11499 case BUILT_IN_STRNCMP_EQ:
11500 case BUILT_IN_STRNCASECMP:
11501 return ".cR3R3";
4f8cfb42
JH
11502
11503 /* The following functions read memory pointed to by their
11504 first argument. */
11505 CASE_BUILT_IN_TM_LOAD (1):
11506 CASE_BUILT_IN_TM_LOAD (2):
11507 CASE_BUILT_IN_TM_LOAD (4):
11508 CASE_BUILT_IN_TM_LOAD (8):
11509 CASE_BUILT_IN_TM_LOAD (FLOAT):
11510 CASE_BUILT_IN_TM_LOAD (DOUBLE):
11511 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11512 CASE_BUILT_IN_TM_LOAD (M64):
11513 CASE_BUILT_IN_TM_LOAD (M128):
11514 CASE_BUILT_IN_TM_LOAD (M256):
11515 case BUILT_IN_TM_LOG:
11516 case BUILT_IN_TM_LOG_1:
11517 case BUILT_IN_TM_LOG_2:
11518 case BUILT_IN_TM_LOG_4:
11519 case BUILT_IN_TM_LOG_8:
11520 case BUILT_IN_TM_LOG_FLOAT:
11521 case BUILT_IN_TM_LOG_DOUBLE:
11522 case BUILT_IN_TM_LOG_LDOUBLE:
11523 case BUILT_IN_TM_LOG_M64:
11524 case BUILT_IN_TM_LOG_M128:
11525 case BUILT_IN_TM_LOG_M256:
11526 return ".cR ";
11527
11528 case BUILT_IN_INDEX:
b53f709d 11529 case BUILT_IN_RINDEX:
4f8cfb42 11530 case BUILT_IN_STRCHR:
b53f709d 11531 case BUILT_IN_STRLEN:
4f8cfb42
JH
11532 case BUILT_IN_STRRCHR:
11533 return ".cR ";
b53f709d
JH
11534 case BUILT_IN_STRNLEN:
11535 return ".cR2";
4f8cfb42
JH
11536
11537 /* These read memory pointed to by the first argument.
11538 Allocating memory does not have any side-effects apart from
11539 being the definition point for the pointer.
11540 Unix98 specifies that errno is set on allocation failure. */
11541 case BUILT_IN_STRDUP:
11542 return "mCR ";
11543 case BUILT_IN_STRNDUP:
11544 return "mCR2";
11545 /* Allocating memory does not have any side-effects apart from
11546 being the definition point for the pointer. */
11547 case BUILT_IN_MALLOC:
11548 case BUILT_IN_ALIGNED_ALLOC:
11549 case BUILT_IN_CALLOC:
6fcc3cac 11550 case BUILT_IN_GOMP_ALLOC:
4f8cfb42
JH
11551 return "mC";
11552 CASE_BUILT_IN_ALLOCA:
11553 return "mc";
11554 /* These read memory pointed to by the first argument with size
11555 in the third argument. */
11556 case BUILT_IN_MEMCHR:
11557 return ".cR3";
11558 /* These read memory pointed to by the first and second arguments. */
11559 case BUILT_IN_STRSTR:
11560 case BUILT_IN_STRPBRK:
b53f709d
JH
11561 case BUILT_IN_STRCASECMP:
11562 case BUILT_IN_STRCSPN:
11563 case BUILT_IN_STRSPN:
11564 case BUILT_IN_STRCMP:
11565 case BUILT_IN_STRCMP_EQ:
4f8cfb42
JH
11566 return ".cR R ";
11567 /* Freeing memory kills the pointed-to memory. More importantly
11568 the call has to serve as a barrier for moving loads and stores
11569 across it. */
11570 case BUILT_IN_STACK_RESTORE:
11571 case BUILT_IN_FREE:
6fcc3cac 11572 case BUILT_IN_GOMP_FREE:
4f8cfb42
JH
11573 return ".co ";
11574 case BUILT_IN_VA_END:
11575 return ".cO ";
11576 /* Realloc serves both as allocation point and deallocation point. */
11577 case BUILT_IN_REALLOC:
57fcbe57 11578 return ".Cw ";
4f8cfb42
JH
11579 case BUILT_IN_GAMMA_R:
11580 case BUILT_IN_GAMMAF_R:
11581 case BUILT_IN_GAMMAL_R:
11582 case BUILT_IN_LGAMMA_R:
11583 case BUILT_IN_LGAMMAF_R:
11584 case BUILT_IN_LGAMMAL_R:
11585 return ".C. Ot";
11586 case BUILT_IN_FREXP:
11587 case BUILT_IN_FREXPF:
11588 case BUILT_IN_FREXPL:
11589 case BUILT_IN_MODF:
11590 case BUILT_IN_MODFF:
11591 case BUILT_IN_MODFL:
11592 return ".c. Ot";
11593 case BUILT_IN_REMQUO:
11594 case BUILT_IN_REMQUOF:
11595 case BUILT_IN_REMQUOL:
11596 return ".c. . Ot";
11597 case BUILT_IN_SINCOS:
11598 case BUILT_IN_SINCOSF:
11599 case BUILT_IN_SINCOSL:
11600 return ".c. OtOt";
11601 case BUILT_IN_MEMSET:
11602 case BUILT_IN_MEMSET_CHK:
11603 case BUILT_IN_TM_MEMSET:
11604 return "1cO3";
11605 CASE_BUILT_IN_TM_STORE (1):
11606 CASE_BUILT_IN_TM_STORE (2):
11607 CASE_BUILT_IN_TM_STORE (4):
11608 CASE_BUILT_IN_TM_STORE (8):
11609 CASE_BUILT_IN_TM_STORE (FLOAT):
11610 CASE_BUILT_IN_TM_STORE (DOUBLE):
11611 CASE_BUILT_IN_TM_STORE (LDOUBLE):
11612 CASE_BUILT_IN_TM_STORE (M64):
11613 CASE_BUILT_IN_TM_STORE (M128):
11614 CASE_BUILT_IN_TM_STORE (M256):
11615 return ".cO ";
11616 case BUILT_IN_STACK_SAVE:
33a7a632
JH
11617 case BUILT_IN_RETURN:
11618 case BUILT_IN_EH_POINTER:
11619 case BUILT_IN_EH_FILTER:
11620 case BUILT_IN_UNWIND_RESUME:
11621 case BUILT_IN_CXA_END_CLEANUP:
11622 case BUILT_IN_EH_COPY_VALUES:
11623 case BUILT_IN_FRAME_ADDRESS:
11624 case BUILT_IN_APPLY_ARGS:
11625 case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
11626 case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
11627 case BUILT_IN_PREFETCH:
11628 case BUILT_IN_DWARF_CFA:
11629 case BUILT_IN_RETURN_ADDRESS:
4f8cfb42
JH
11630 return ".c";
11631 case BUILT_IN_ASSUME_ALIGNED:
11632 return "1cX ";
11633 /* But posix_memalign stores a pointer into the memory pointed to
11634 by its first argument. */
11635 case BUILT_IN_POSIX_MEMALIGN:
11636 return ".cOt";
11637
11638 default:
11639 return "";
11640 }
11641}