/* Expand builtin functions.
   Copyright (C) 1988-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, scalar_int_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp, int endp);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or,
   when Cilk Plus is enabled, is one of the __cilkrts_ entry points
   recognized here.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}

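/* For illustration (added commentary, not in the original source):
   is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add_4") return true, while
   is_builtin_name ("memcpy") returns false -- only the "internal"
   double-underscore names are matched here.  */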

/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

static unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}

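/* For illustration (added commentary, not in the original source):
   string_length ("hello\0world", 1, 11) returns 5, stopping at the
   embedded NUL.  With ELTSIZE == 4 the loop instead compares ELTSIZE
   bytes at a time, so a wide character counts as the terminator only
   when all of its bytes are zero.  */
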
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}

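/* For illustration (added commentary, not in the original source):
   c_strlen on the constant "hello" yields ssize_int (5), and on
   &"hello"[2] (a known byte offset of 2) yields 3.  For "foo\0bar"
   with a non-constant offset, NULL_TREE is returned, since the
   embedded NUL makes the length depend on where the search starts.  */
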
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, scalar_int_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

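/* Worked example (added commentary, not in the original source): on a
   typical little-endian target, c_readstr ("abcd", SImode) places
   STR[i] at bit position i * BITS_PER_UNIT, producing the constant
   0x64636261; with WORDS_BIG_ENDIAN the byte order is reversed, giving
   0x61626364.  Once a NUL is seen, CH stays zero for the remaining
   bytes, so shorter strings are zero-padded.  */
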
/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  Otherwise return one.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

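/* For illustration (added commentary, not in the original source):
   target_char_cast on the constant 65 stores 'A' through P and returns
   0.  The value is first truncated to CHAR_TYPE_SIZE bits, so the
   failure return of 1 only happens for a non-INTEGER_CST, or when the
   target char is wider than the host char and the truncated value does
   not fit in the host char.  */
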
/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

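/* For illustration (added commentary, not in the original source):
   __builtin_return_address (0) can use the soft frame pointer, since
   target-specific definitions override the value anyway, whereas any
   __builtin_frame_address call, or a nonzero count, forces the hard
   frame pointer and disables its elimination; a count of 2 then
   follows DYNAMIC_CHAIN_ADDRESS twice before the return slot of the
   frame reached is read.  */
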
/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

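/* Buffer layout sketch (added commentary, not in the original source):
   word 0 of the setjmp buffer receives the frame value, word 1 the
   address of RECEIVER_LABEL, and everything from offset
   2 * GET_MODE_SIZE (Pmode) onwards is the machine-dependent stack
   save area written by emit_stack_save.  expand_builtin_longjmp below
   reads the values back from the same offsets.  */
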
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}

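/* For illustration (added commentary, not in the original source): a
   call such as validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
   VOID_TYPE) -- the form used for two-pointer builtins below --
   checks exactly two pointer arguments, while ending the list with 0
   instead of VOID_TYPE accepts any further arguments, as for a
   printf-like builtin.  */
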
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

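/* Layout sketch (added commentary, not in the original source): the
   block sized here starts with the incoming arg pointer (one Pmode
   word), optionally the structure value address (another Pmode word),
   followed by each argument-passing register in its raw mode, each
   slot rounded up to that mode's natural alignment -- the same layout
   expand_builtin_apply_args_1 below writes.  */
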
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

1464/* Save the state required to perform an untyped call with the same
1465 arguments as were passed to the current function. */
1466
1467static rtx
aecda0d6 1468expand_builtin_apply_args_1 (void)
53800dbe 1469{
1c7e61a7 1470 rtx registers, tem;
53800dbe 1471 int size, align, regno;
3754d046 1472 machine_mode mode;
6812c89e 1473 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1474
1475 /* Create a block where the arg-pointer, structure value address,
1476 and argument registers can be saved. */
1477 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1478
1479 /* Walk past the arg-pointer and structure value address. */
1480 size = GET_MODE_SIZE (Pmode);
6812c89e 1481 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1482 size += GET_MODE_SIZE (Pmode);
1483
1484 /* Save each register used in calling a function to the block. */
1485 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1486 if ((mode = apply_args_mode[regno]) != VOIDmode)
1487 {
53800dbe 1488 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1489 if (size % align != 0)
1490 size = CEIL (size, align) * align;
1491
1492 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1493
e513d163 1494 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1495 size += GET_MODE_SIZE (mode);
1496 }
1497
1498 /* Save the arg pointer to the block. */
27a7a23a 1499 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1500 /* We need the pointer as the caller actually passed the arguments to
9d4b544c 1501 us, not as we might have pretended they were passed. Make sure it's
1502 a valid operand, as emit_move_insn isn't expected to handle a PLUS. */
3764c94e 1503 if (STACK_GROWS_DOWNWARD)
1504 tem
1505 = force_operand (plus_constant (Pmode, tem,
1506 crtl->args.pretend_args_size),
1507 NULL_RTX);
1c7e61a7 1508 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1509
53800dbe 1510 size = GET_MODE_SIZE (Pmode);
1511
1512 /* Save the structure value address unless this is passed as an
1513 "invisible" first argument. */
45550790 1514 if (struct_incoming_value)
53800dbe 1515 {
e513d163 1516 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1517 copy_to_reg (struct_incoming_value));
53800dbe 1518 size += GET_MODE_SIZE (Pmode);
1519 }
1520
1521 /* Return the address of the block. */
1522 return copy_addr_to_reg (XEXP (registers, 0));
1523}
1524
1525/* __builtin_apply_args returns block of memory allocated on
1526 the stack into which is stored the arg pointer, structure
1527 value address, static chain, and all the registers that might
1528 possibly be used in performing a function call. The code is
1529 moved to the start of the function so the incoming values are
1530 saved. */
27d0c333 1531
53800dbe 1532static rtx
aecda0d6 1533expand_builtin_apply_args (void)
53800dbe 1534{
1535 /* Don't do __builtin_apply_args more than once in a function.
1536 Save the result of the first call and reuse it. */
1537 if (apply_args_value != 0)
1538 return apply_args_value;
1539 {
1540 /* When this function is called, it means that registers must be
1541 saved on entry to this function. So we migrate the
1542 call to the first insn of this function. */
1543 rtx temp;
53800dbe 1544
1545 start_sequence ();
1546 temp = expand_builtin_apply_args_1 ();
9ed997be 1547 rtx_insn *seq = get_insns ();
53800dbe 1548 end_sequence ();
1549
1550 apply_args_value = temp;
1551
31d3e01c 1552 /* Put the insns after the NOTE that starts the function.
1553 If this is inside a start_sequence, make the outer-level insn
53800dbe 1554 chain current, so the code is placed at the start of the
0ef1a651 1555 function. If internal_arg_pointer is a non-virtual pseudo,
1556 it needs to be placed after the function that initializes
1557 that pseudo. */
53800dbe 1558 push_topmost_sequence ();
0ef1a651 1559 if (REG_P (crtl->args.internal_arg_pointer)
1560 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1561 emit_insn_before (seq, parm_birth_insn);
1562 else
1563 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1564 pop_topmost_sequence ();
1565 return temp;
1566 }
1567}
1568
1569/* Perform an untyped call and save the state required to perform an
1570 untyped return of whatever value was returned by the given function. */
1571
1572static rtx
aecda0d6 1573expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1574{
1575 int size, align, regno;
3754d046 1576 machine_mode mode;
1e0c0b35 1577 rtx incoming_args, result, reg, dest, src;
1578 rtx_call_insn *call_insn;
53800dbe 1579 rtx old_stack_level = 0;
1580 rtx call_fusage = 0;
6812c89e 1581 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1582
85d654dd 1583 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1584
53800dbe 1585 /* Create a block where the return registers can be saved. */
1586 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1587
53800dbe 1588 /* Fetch the arg pointer from the ARGUMENTS block. */
1589 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1590 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1591 if (!STACK_GROWS_DOWNWARD)
1592 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1593 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1594
04a46d40 1595 /* Push a new argument block and copy the arguments. Do not allow
1596 the (potential) memcpy call below to interfere with our stack
1597 manipulations. */
53800dbe 1598 do_pending_stack_adjust ();
04a46d40 1599 NO_DEFER_POP;
53800dbe 1600
2358393e 1601 /* Save the stack with nonlocal if available. */
71512c05 1602 if (targetm.have_save_stack_nonlocal ())
e9c97615 1603 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1604 else
e9c97615 1605 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1606
59647703 1607 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1608 arguments to the outgoing arguments address. We can pass TRUE
1609 as the 4th argument because we just saved the stack pointer
1610 and will restore it right after the call. */
5be42b39 1611 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1612
1613 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1614 may have already set current_function_calls_alloca to true.
1615 current_function_calls_alloca won't be set if argsize is zero,
1616 so we have to guarantee need_drap is true here. */
1617 if (SUPPORTS_STACK_ALIGNMENT)
1618 crtl->need_drap = true;
1619
59647703 1620 dest = virtual_outgoing_args_rtx;
3764c94e 1621 if (!STACK_GROWS_DOWNWARD)
1622 {
1623 if (CONST_INT_P (argsize))
1624 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1625 else
1626 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1627 }
2a631e19 1628 dest = gen_rtx_MEM (BLKmode, dest);
1629 set_mem_align (dest, PARM_BOUNDARY);
1630 src = gen_rtx_MEM (BLKmode, incoming_args);
1631 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1632 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1633
1634 /* Refer to the argument block. */
1635 apply_args_size ();
1636 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1637 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1638
1639 /* Walk past the arg-pointer and structure value address. */
1640 size = GET_MODE_SIZE (Pmode);
45550790 1641 if (struct_value)
53800dbe 1642 size += GET_MODE_SIZE (Pmode);
1643
1644 /* Restore each of the registers previously saved. Make USE insns
1645 for each of these registers for use in making the call. */
1646 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1647 if ((mode = apply_args_mode[regno]) != VOIDmode)
1648 {
1649 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1650 if (size % align != 0)
1651 size = CEIL (size, align) * align;
1652 reg = gen_rtx_REG (mode, regno);
e513d163 1653 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1654 use_reg (&call_fusage, reg);
1655 size += GET_MODE_SIZE (mode);
1656 }
1657
1658 /* Restore the structure value address unless this is passed as an
1659 "invisible" first argument. */
1660 size = GET_MODE_SIZE (Pmode);
45550790 1661 if (struct_value)
53800dbe 1662 {
1663 rtx value = gen_reg_rtx (Pmode);
e513d163 1664 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1665 emit_move_insn (struct_value, value);
8ad4c111 1666 if (REG_P (struct_value))
45550790 1667 use_reg (&call_fusage, struct_value);
53800dbe 1668 size += GET_MODE_SIZE (Pmode);
1669 }
1670
1671 /* All arguments and registers used for the call are set up by now! */
82c7907c 1672 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1673
1674 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1675 and we don't want to load it into a register as an optimization,
1676 because prepare_call_address already did it if it should be done. */
1677 if (GET_CODE (function) != SYMBOL_REF)
1678 function = memory_address (FUNCTION_MODE, function);
1679
1680 /* Generate the actual call instruction and save the return value. */
1d99ab0a 1681 if (targetm.have_untyped_call ())
1682 {
1683 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1684 emit_call_insn (targetm.gen_untyped_call (mem, result,
1685 result_vector (1, result)));
1686 }
7f265a08 1687 else if (targetm.have_call_value ())
53800dbe 1688 {
1689 rtx valreg = 0;
1690
1691 /* Locate the unique return register. It is not possible to
1692 express a call that sets more than one return register using
1693 call_value; use untyped_call for that. In fact, untyped_call
1694 only needs to save the return registers in the given block. */
1695 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1696 if ((mode = apply_result_mode[regno]) != VOIDmode)
1697 {
7f265a08 1698 gcc_assert (!valreg); /* have_untyped_call required. */
7d3f6cc7 1699
53800dbe 1700 valreg = gen_rtx_REG (mode, regno);
1701 }
1702
7f265a08 1703 emit_insn (targetm.gen_call_value (valreg,
1704 gen_rtx_MEM (FUNCTION_MODE, function),
1705 const0_rtx, NULL_RTX, const0_rtx));
53800dbe 1706
e513d163 1707 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1708 }
1709 else
64db345d 1710 gcc_unreachable ();
53800dbe 1711
d5f9786f 1712 /* Find the CALL insn we just emitted, and attach the register usage
1713 information. */
1714 call_insn = last_call_insn ();
1715 add_function_usage_to (call_insn, call_fusage);
53800dbe 1716
1717 /* Restore the stack. */
71512c05 1718 if (targetm.have_save_stack_nonlocal ())
e9c97615 1719 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1720 else
e9c97615 1721 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1722 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1723
04a46d40 1724 OK_DEFER_POP;
1725
53800dbe 1726 /* Return the address of the result block. */
85d654dd 1727 result = copy_addr_to_reg (XEXP (result, 0));
1728 return convert_memory_address (ptr_mode, result);
53800dbe 1729}
1730
1731/* Perform an untyped return. */
1732
1733static void
aecda0d6 1734expand_builtin_return (rtx result)
53800dbe 1735{
1736 int size, align, regno;
3754d046 1737 machine_mode mode;
53800dbe 1738 rtx reg;
57c26b3a 1739 rtx_insn *call_fusage = 0;
53800dbe 1740
85d654dd 1741 result = convert_memory_address (Pmode, result);
726ec87c 1742
53800dbe 1743 apply_result_size ();
1744 result = gen_rtx_MEM (BLKmode, result);
1745
1d99ab0a 1746 if (targetm.have_untyped_return ())
53800dbe 1747 {
1d99ab0a 1748 rtx vector = result_vector (0, result);
1749 emit_jump_insn (targetm.gen_untyped_return (result, vector));
53800dbe 1750 emit_barrier ();
1751 return;
1752 }
53800dbe 1753
1754 /* Restore the return value and note that each value is used. */
1755 size = 0;
1756 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1757 if ((mode = apply_result_mode[regno]) != VOIDmode)
1758 {
1759 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1760 if (size % align != 0)
1761 size = CEIL (size, align) * align;
1762 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1763 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1764
1765 push_to_sequence (call_fusage);
18b42941 1766 emit_use (reg);
53800dbe 1767 call_fusage = get_insns ();
1768 end_sequence ();
1769 size += GET_MODE_SIZE (mode);
1770 }
1771
1772 /* Put the USE insns before the return. */
31d3e01c 1773 emit_insn (call_fusage);
53800dbe 1774
1775 /* Return whatever value was restored by jumping directly to the end
1776 of the function. */
62380d2d 1777 expand_naked_return ();
53800dbe 1778}
1779
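/* Editorial sketch (not part of the original source): together the three
   expanders above implement the GCC extension for forwarding a call,
   used at the C level roughly as

     void forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (res);
     }

   where target_fn and the 64-byte argument-block bound are hypothetical;
   the bound must cover the actual stack arguments.  */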
539a3a92 1780/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1781
539a3a92 1782static enum type_class
aecda0d6 1783type_to_class (tree type)
539a3a92 1784{
1785 switch (TREE_CODE (type))
1786 {
1787 case VOID_TYPE: return void_type_class;
1788 case INTEGER_TYPE: return integer_type_class;
539a3a92 1789 case ENUMERAL_TYPE: return enumeral_type_class;
1790 case BOOLEAN_TYPE: return boolean_type_class;
1791 case POINTER_TYPE: return pointer_type_class;
1792 case REFERENCE_TYPE: return reference_type_class;
1793 case OFFSET_TYPE: return offset_type_class;
1794 case REAL_TYPE: return real_type_class;
1795 case COMPLEX_TYPE: return complex_type_class;
1796 case FUNCTION_TYPE: return function_type_class;
1797 case METHOD_TYPE: return method_type_class;
1798 case RECORD_TYPE: return record_type_class;
1799 case UNION_TYPE:
1800 case QUAL_UNION_TYPE: return union_type_class;
1801 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1802 ? string_type_class : array_type_class);
539a3a92 1803 case LANG_TYPE: return lang_type_class;
1804 default: return no_type_class;
1805 }
1806}
bf8e3599 1807
c2f47e15 1808/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1809
53800dbe 1810static rtx
c2f47e15 1811expand_builtin_classify_type (tree exp)
53800dbe 1812{
c2f47e15 1813 if (call_expr_nargs (exp))
1814 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1815 return GEN_INT (no_type_class);
1816}
1817
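/* Editorial sketch (not part of the original source): the classification
   above is what users observe through the builtin, e.g.

     __builtin_classify_type (42)     ==> integer_type_class
     __builtin_classify_type (3.14)   ==> real_type_class

   with the numeric values taken from enum type_class in typeclass.h.  */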
07976da7 1818/* This helper macro, meant to be used in mathfn_built_in below,
1819 determines which among a set of three builtin math functions is
1820 appropriate for a given type mode. The `F' and `L' cases are
1821 automatically generated from the `double' case. */
e3240774 1822#define CASE_MATHFN(MATHFN) \
1823 CASE_CFN_##MATHFN: \
1824 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1825 fcodel = BUILT_IN_##MATHFN##L ; break;
cd2656b0 1826/* Similar to above, but appends _R after any F/L suffix. */
e3240774 1827#define CASE_MATHFN_REENT(MATHFN) \
1828 case CFN_BUILT_IN_##MATHFN##_R: \
1829 case CFN_BUILT_IN_##MATHFN##F_R: \
1830 case CFN_BUILT_IN_##MATHFN##L_R: \
1831 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1832 fcodel = BUILT_IN_##MATHFN##L_R ; break;
07976da7 1833
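/* Editorial note (not part of the original source): as an illustration,
   CASE_MATHFN (SQRT) expands to

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   i.e. one case for the whole sqrt/sqrtf/sqrtl family that records the
   three suffixed built-in codes.  */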
6c21be92 1834/* Return a function equivalent to FN but operating on floating-point
1835 values of type TYPE, or END_BUILTINS if no such function exists.
e3240774 1836 This is purely an operation on function codes; it does not guarantee
1837 that the target actually has an implementation of the function. */
c319d56a 1838
6c21be92 1839static built_in_function
e3240774 1840mathfn_built_in_2 (tree type, combined_fn fn)
0a68165a 1841{
6c21be92 1842 built_in_function fcode, fcodef, fcodel;
07976da7 1843
1844 switch (fn)
1845 {
e3240774 1846 CASE_MATHFN (ACOS)
1847 CASE_MATHFN (ACOSH)
1848 CASE_MATHFN (ASIN)
1849 CASE_MATHFN (ASINH)
1850 CASE_MATHFN (ATAN)
1851 CASE_MATHFN (ATAN2)
1852 CASE_MATHFN (ATANH)
1853 CASE_MATHFN (CBRT)
1854 CASE_MATHFN (CEIL)
1855 CASE_MATHFN (CEXPI)
1856 CASE_MATHFN (COPYSIGN)
1857 CASE_MATHFN (COS)
1858 CASE_MATHFN (COSH)
1859 CASE_MATHFN (DREM)
1860 CASE_MATHFN (ERF)
1861 CASE_MATHFN (ERFC)
1862 CASE_MATHFN (EXP)
1863 CASE_MATHFN (EXP10)
1864 CASE_MATHFN (EXP2)
1865 CASE_MATHFN (EXPM1)
1866 CASE_MATHFN (FABS)
1867 CASE_MATHFN (FDIM)
1868 CASE_MATHFN (FLOOR)
1869 CASE_MATHFN (FMA)
1870 CASE_MATHFN (FMAX)
1871 CASE_MATHFN (FMIN)
1872 CASE_MATHFN (FMOD)
1873 CASE_MATHFN (FREXP)
1874 CASE_MATHFN (GAMMA)
1875 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1876 CASE_MATHFN (HUGE_VAL)
1877 CASE_MATHFN (HYPOT)
1878 CASE_MATHFN (ILOGB)
1879 CASE_MATHFN (ICEIL)
1880 CASE_MATHFN (IFLOOR)
1881 CASE_MATHFN (INF)
1882 CASE_MATHFN (IRINT)
1883 CASE_MATHFN (IROUND)
1884 CASE_MATHFN (ISINF)
1885 CASE_MATHFN (J0)
1886 CASE_MATHFN (J1)
1887 CASE_MATHFN (JN)
1888 CASE_MATHFN (LCEIL)
1889 CASE_MATHFN (LDEXP)
1890 CASE_MATHFN (LFLOOR)
1891 CASE_MATHFN (LGAMMA)
1892 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1893 CASE_MATHFN (LLCEIL)
1894 CASE_MATHFN (LLFLOOR)
1895 CASE_MATHFN (LLRINT)
1896 CASE_MATHFN (LLROUND)
1897 CASE_MATHFN (LOG)
1898 CASE_MATHFN (LOG10)
1899 CASE_MATHFN (LOG1P)
1900 CASE_MATHFN (LOG2)
1901 CASE_MATHFN (LOGB)
1902 CASE_MATHFN (LRINT)
1903 CASE_MATHFN (LROUND)
1904 CASE_MATHFN (MODF)
1905 CASE_MATHFN (NAN)
1906 CASE_MATHFN (NANS)
1907 CASE_MATHFN (NEARBYINT)
1908 CASE_MATHFN (NEXTAFTER)
1909 CASE_MATHFN (NEXTTOWARD)
1910 CASE_MATHFN (POW)
1911 CASE_MATHFN (POWI)
1912 CASE_MATHFN (POW10)
1913 CASE_MATHFN (REMAINDER)
1914 CASE_MATHFN (REMQUO)
1915 CASE_MATHFN (RINT)
1916 CASE_MATHFN (ROUND)
1917 CASE_MATHFN (SCALB)
1918 CASE_MATHFN (SCALBLN)
1919 CASE_MATHFN (SCALBN)
1920 CASE_MATHFN (SIGNBIT)
1921 CASE_MATHFN (SIGNIFICAND)
1922 CASE_MATHFN (SIN)
1923 CASE_MATHFN (SINCOS)
1924 CASE_MATHFN (SINH)
1925 CASE_MATHFN (SQRT)
1926 CASE_MATHFN (TAN)
1927 CASE_MATHFN (TANH)
1928 CASE_MATHFN (TGAMMA)
1929 CASE_MATHFN (TRUNC)
1930 CASE_MATHFN (Y0)
1931 CASE_MATHFN (Y1)
1932 CASE_MATHFN (YN)
07976da7 1933
e3240774 1934 default:
1935 return END_BUILTINS;
1936 }
07976da7 1937
96b9f485 1938 if (TYPE_MAIN_VARIANT (type) == double_type_node)
6c21be92 1939 return fcode;
96b9f485 1940 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
6c21be92 1941 return fcodef;
96b9f485 1942 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
6c21be92 1943 return fcodel;
07976da7 1944 else
6c21be92 1945 return END_BUILTINS;
1946}
1947
1948/* Return the mathematical function equivalent to FN but operating directly on TYPE,
1949 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1950 otherwise use the explicit declaration. If we can't do the conversion,
1951 return null. */
1952
1953static tree
e3240774 1954mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 1955{
1956 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1957 if (fcode2 == END_BUILTINS)
c2f47e15 1958 return NULL_TREE;
b9a16870 1959
1960 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1961 return NULL_TREE;
1962
1963 return builtin_decl_explicit (fcode2);
0a68165a 1964}
1965
e3240774 1966/* Like mathfn_built_in_1, but always use the implicit builtins. */
c319d56a 1967
1968tree
e3240774 1969mathfn_built_in (tree type, combined_fn fn)
c319d56a 1970{
1971 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1972}
1973
e3240774 1974/* Like mathfn_built_in_1, but take a built_in_function and
1975 always use the implicit builtins. */
1976
1977tree
1978mathfn_built_in (tree type, enum built_in_function fn)
1979{
1980 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1981}
1982
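/* Editorial sketch (not part of the original source): a typical use of
   the interface above, assuming the C99 math built-ins are implicitly
   available on the target:

     tree sqrtf_decl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   which maps BUILT_IN_SQRT to BUILT_IN_SQRTF via mathfn_built_in_2 and
   returns the implicit declaration, or NULL_TREE if there is none.  */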
1f24b8e9 1983/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1984 return its code, otherwise return IFN_LAST. Note that this function
1985 only tests whether the function is defined in internals.def, not whether
1986 it is actually available on the target. */
1987
1988internal_fn
1989associated_internal_fn (tree fndecl)
1990{
1991 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1992 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1993 switch (DECL_FUNCTION_CODE (fndecl))
1994 {
1995#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1996 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
c9452b7c 1997#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1998 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1f24b8e9 1999#include "internal-fn.def"
2000
2001 CASE_FLT_FN (BUILT_IN_POW10):
2002 return IFN_EXP10;
2003
2004 CASE_FLT_FN (BUILT_IN_DREM):
2005 return IFN_REMAINDER;
2006
2007 CASE_FLT_FN (BUILT_IN_SCALBN):
2008 CASE_FLT_FN (BUILT_IN_SCALBLN):
2009 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2010 return IFN_LDEXP;
2011 return IFN_LAST;
2012
2013 default:
2014 return IFN_LAST;
2015 }
2016}
2017
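/* Editorial note (not part of the original source): concrete mappings
   produced by associated_internal_fn, read off the cases above:

     pow10/pow10f/pow10l        ==> IFN_EXP10
     drem/dremf/dreml           ==> IFN_REMAINDER
     scalbn family (radix 2)    ==> IFN_LDEXP
     anything unlisted          ==> IFN_LAST

   The bulk of the table comes from the DEF_INTERNAL_*_FN entries in
   internal-fn.def.  */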
2018/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2019 on the current target by a call to an internal function, return the
2020 code of that internal function, otherwise return IFN_LAST. The caller
2021 is responsible for ensuring that any side-effects of the built-in
2022 call are dealt with correctly. E.g. if CALL sets errno, the caller
2023 must decide that the errno result isn't needed or make it available
2024 in some other way. */
2025
2026internal_fn
2027replacement_internal_fn (gcall *call)
2028{
2029 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2030 {
2031 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2032 if (ifn != IFN_LAST)
2033 {
2034 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 2035 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2036 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 2037 return ifn;
2038 }
2039 }
2040 return IFN_LAST;
2041}
2042
7e0713b1 2043/* Expand a call to the builtin ternary math functions (fma).
2044 Return NULL_RTX if a normal call should be emitted rather than expanding the
2045 function in-line. EXP is the expression that is a call to the builtin
2046 function; if convenient, the result should be placed in TARGET.
2047 SUBTARGET may be used as the target for computing one of EXP's
2048 operands. */
2049
2050static rtx
2051expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2052{
2053 optab builtin_optab;
1e0c0b35 2054 rtx op0, op1, op2, result;
2055 rtx_insn *insns;
7e0713b1 2056 tree fndecl = get_callee_fndecl (exp);
2057 tree arg0, arg1, arg2;
3754d046 2058 machine_mode mode;
7e0713b1 2059
2060 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2061 return NULL_RTX;
2062
2063 arg0 = CALL_EXPR_ARG (exp, 0);
2064 arg1 = CALL_EXPR_ARG (exp, 1);
2065 arg2 = CALL_EXPR_ARG (exp, 2);
2066
2067 switch (DECL_FUNCTION_CODE (fndecl))
2068 {
2069 CASE_FLT_FN (BUILT_IN_FMA):
2070 builtin_optab = fma_optab; break;
2071 default:
2072 gcc_unreachable ();
2073 }
2074
2075 /* Make a suitable register to place result in. */
2076 mode = TYPE_MODE (TREE_TYPE (exp));
2077
2078 /* Before working hard, check whether the instruction is available. */
2079 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2080 return NULL_RTX;
2081
de2e453e 2082 result = gen_reg_rtx (mode);
7e0713b1 2083
2084 /* Always stabilize the argument list. */
2085 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2086 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2087 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2088
2089 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2090 op1 = expand_normal (arg1);
2091 op2 = expand_normal (arg2);
2092
2093 start_sequence ();
2094
de2e453e 2095 /* Compute into RESULT.
2096 Set RESULT to wherever the result comes back. */
2097 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2098 result, 0);
7e0713b1 2099
2100 /* If we were unable to expand via the builtin, stop the sequence
2101 (without outputting the insns) and call the library function
2102 with the stabilized argument list. */
de2e453e 2103 if (result == 0)
7e0713b1 2104 {
2105 end_sequence ();
2106 return expand_call (exp, target, target == const0_rtx);
2107 }
2108
2109 /* Output the entire sequence. */
2110 insns = get_insns ();
2111 end_sequence ();
2112 emit_insn (insns);
2113
de2e453e 2114 return result;
7e0713b1 2115}
2116
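/* Editorial sketch (not part of the original source): the expander above
   turns

     double d = __builtin_fma (a, b, c);

   into a single fused multiply-add through fma_optab when the target
   provides one, computing a * b + c with only one rounding step, and
   otherwise falls back to a library call to fma.  */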
6b43bae4 2117/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2118 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2119 function in-line. EXP is the expression that is a call to the builtin
2120 function; if convenient, the result should be placed in TARGET.
2121 SUBTARGET may be used as the target for computing one of EXP's
2122 operands. */
2123
2124static rtx
2125expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2126{
2127 optab builtin_optab;
1e0c0b35 2128 rtx op0;
2129 rtx_insn *insns;
6b43bae4 2130 tree fndecl = get_callee_fndecl (exp);
3754d046 2131 machine_mode mode;
abfea505 2132 tree arg;
6b43bae4 2133
c2f47e15 2134 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2135 return NULL_RTX;
6b43bae4 2136
c2f47e15 2137 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2138
2139 switch (DECL_FUNCTION_CODE (fndecl))
2140 {
4f35b1fc 2141 CASE_FLT_FN (BUILT_IN_SIN):
2142 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2143 builtin_optab = sincos_optab; break;
2144 default:
64db345d 2145 gcc_unreachable ();
6b43bae4 2146 }
2147
2148 /* Make a suitable register to place result in. */
2149 mode = TYPE_MODE (TREE_TYPE (exp));
2150
6b43bae4 2151 /* Check if sincos insn is available, otherwise fall back
0bed3869 2152 to sin or cos insn. */
d6bf3b14 2153 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
4f35b1fc 2156 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2157 builtin_optab = sin_optab; break;
4f35b1fc 2158 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2159 builtin_optab = cos_optab; break;
2160 default:
64db345d 2161 gcc_unreachable ();
6b43bae4 2162 }
6b43bae4 2163
2164 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2165 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2166 {
de2e453e 2167 rtx result = gen_reg_rtx (mode);
6b43bae4 2168
2169 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2170 need to expand the argument again. This way, we will not perform
2171 side-effects more than once. */
abfea505 2172 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2173
1db6d067 2174 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2175
6b43bae4 2176 start_sequence ();
2177
de2e453e 2178 /* Compute into RESULT.
2179 Set RESULT to wherever the result comes back. */
6b43bae4 2180 if (builtin_optab == sincos_optab)
2181 {
de2e453e 2182 int ok;
7d3f6cc7 2183
6b43bae4 2184 switch (DECL_FUNCTION_CODE (fndecl))
2185 {
4f35b1fc 2186 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2187 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2188 break;
4f35b1fc 2189 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2190 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2191 break;
2192 default:
64db345d 2193 gcc_unreachable ();
6b43bae4 2194 }
de2e453e 2195 gcc_assert (ok);
6b43bae4 2196 }
2197 else
de2e453e 2198 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2199
de2e453e 2200 if (result != 0)
6b43bae4 2201 {
6b43bae4 2202 /* Output the entire sequence. */
2203 insns = get_insns ();
2204 end_sequence ();
2205 emit_insn (insns);
de2e453e 2206 return result;
6b43bae4 2207 }
2208
2209 /* If we were unable to expand via the builtin, stop the sequence
2210 (without outputting the insns) and call the library function
2211 with the stabilized argument list. */
2212 end_sequence ();
2213 }
2214
de2e453e 2215 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2216}
2217
a65c4d64 2218/* Given an interclass math builtin decl FNDECL and its argument ARG
2219 return an RTL instruction code that implements the functionality.
2220 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2221
a65c4d64 2222static enum insn_code
2223interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2224{
a65c4d64 2225 bool errno_set = false;
6cdd383a 2226 optab builtin_optab = unknown_optab;
3754d046 2227 machine_mode mode;
a67a90e5 2228
2229 switch (DECL_FUNCTION_CODE (fndecl))
2230 {
2231 CASE_FLT_FN (BUILT_IN_ILOGB):
12f08300 2232 errno_set = true; builtin_optab = ilogb_optab; break;
2233 CASE_FLT_FN (BUILT_IN_ISINF):
2234 builtin_optab = isinf_optab; break;
2235 case BUILT_IN_ISNORMAL:
2236 case BUILT_IN_ISFINITE:
2237 CASE_FLT_FN (BUILT_IN_FINITE):
2238 case BUILT_IN_FINITED32:
2239 case BUILT_IN_FINITED64:
2240 case BUILT_IN_FINITED128:
2241 case BUILT_IN_ISINFD32:
2242 case BUILT_IN_ISINFD64:
2243 case BUILT_IN_ISINFD128:
2244 /* These builtins have no optabs (yet). */
cde061c1 2245 break;
a67a90e5 2246 default:
2247 gcc_unreachable ();
2248 }
2249
2250 /* There's no easy way to detect the case we need to set EDOM. */
2251 if (flag_errno_math && errno_set)
a65c4d64 2252 return CODE_FOR_nothing;
a67a90e5 2253
2254 /* Optab mode depends on the mode of the input argument. */
2255 mode = TYPE_MODE (TREE_TYPE (arg));
2256
cde061c1 2257 if (builtin_optab)
d6bf3b14 2258 return optab_handler (builtin_optab, mode);
a65c4d64 2259 return CODE_FOR_nothing;
2260}
2261
2262/* Expand a call to one of the builtin math functions that operate on
12f08300 2263 floating point argument and output an integer result (ilogb, isinf,
2264 isnan, etc).
a65c4d64 2265 Return 0 if a normal call should be emitted rather than expanding the
2266 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2267 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2268
2269static rtx
f97eea22 2270expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2271{
2272 enum insn_code icode = CODE_FOR_nothing;
2273 rtx op0;
2274 tree fndecl = get_callee_fndecl (exp);
3754d046 2275 machine_mode mode;
a65c4d64 2276 tree arg;
2277
2278 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2279 return NULL_RTX;
2280
2281 arg = CALL_EXPR_ARG (exp, 0);
2282 icode = interclass_mathfn_icode (arg, fndecl);
2283 mode = TYPE_MODE (TREE_TYPE (arg));
2284
a67a90e5 2285 if (icode != CODE_FOR_nothing)
2286 {
8786db1e 2287 struct expand_operand ops[1];
1e0c0b35 2288 rtx_insn *last = get_last_insn ();
4e2a2fb4 2289 tree orig_arg = arg;
a67a90e5 2290
2291 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2292 need to expand the argument again. This way, we will not perform
2293 side-effects more than once. */
abfea505 2294 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2295
f97eea22 2296 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2297
2298 if (mode != GET_MODE (op0))
2299 op0 = convert_to_mode (mode, op0, 0);
2300
8786db1e 2301 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2302 if (maybe_legitimize_operands (icode, 0, 1, ops)
2303 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2304 return ops[0].value;
2305
4e2a2fb4 2306 delete_insns_since (last);
2307 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2308 }
2309
a65c4d64 2310 return NULL_RTX;
a67a90e5 2311}
2312
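/* Editorial sketch (not part of the original source): the interclass
   path covers classifiers taking a float and returning an int, e.g.

     int k = __builtin_isinf (x);

   which goes through isinf_optab when the target defines it and
   otherwise (NULL_RTX return) becomes an ordinary library call.  */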
c3147c1a 2313/* Expand a call to the builtin sincos math function.
c2f47e15 2314 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2315 function in-line. EXP is the expression that is a call to the builtin
2316 function. */
2317
2318static rtx
2319expand_builtin_sincos (tree exp)
2320{
2321 rtx op0, op1, op2, target1, target2;
3754d046 2322 machine_mode mode;
c3147c1a 2323 tree arg, sinp, cosp;
2324 int result;
389dd41b 2325 location_t loc = EXPR_LOCATION (exp);
be5575b2 2326 tree alias_type, alias_off;
c3147c1a 2327
c2f47e15 2328 if (!validate_arglist (exp, REAL_TYPE,
2329 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2330 return NULL_RTX;
c3147c1a 2331
c2f47e15 2332 arg = CALL_EXPR_ARG (exp, 0);
2333 sinp = CALL_EXPR_ARG (exp, 1);
2334 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2335
2336 /* Make a suitable register to place result in. */
2337 mode = TYPE_MODE (TREE_TYPE (arg));
2338
2339 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2340 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2341 return NULL_RTX;
2342
2343 target1 = gen_reg_rtx (mode);
2344 target2 = gen_reg_rtx (mode);
2345
8ec3c5c2 2346 op0 = expand_normal (arg);
be5575b2 2347 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2348 alias_off = build_int_cst (alias_type, 0);
2349 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2350 sinp, alias_off));
2351 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2352 cosp, alias_off));
c3147c1a 2353
2354 /* Compute into target1 and target2.
2355 Set TARGET to wherever the result comes back. */
2356 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2357 gcc_assert (result);
2358
2359 /* Move target1 and target2 to the memory locations indicated
2360 by op1 and op2. */
2361 emit_move_insn (op1, target1);
2362 emit_move_insn (op2, target2);
2363
2364 return const0_rtx;
2365}
2366
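/* Editorial sketch (not part of the original source): the expander above
   handles the GNU extension

     double s, c;
     sincos (x, &s, &c);

   computing both values with one sincos instruction when available and
   storing them through the two pointer arguments.  */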
d735c391 2367/* Expand a call to the internal cexpi builtin to the sincos math function.
2368 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2369 the result should be placed in TARGET. */
d735c391 2370
2371static rtx
f97eea22 2372expand_builtin_cexpi (tree exp, rtx target)
d735c391 2373{
2374 tree fndecl = get_callee_fndecl (exp);
d735c391 2375 tree arg, type;
3754d046 2376 machine_mode mode;
d735c391 2377 rtx op0, op1, op2;
389dd41b 2378 location_t loc = EXPR_LOCATION (exp);
d735c391 2379
c2f47e15 2380 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2381 return NULL_RTX;
d735c391 2382
c2f47e15 2383 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2384 type = TREE_TYPE (arg);
2385 mode = TYPE_MODE (TREE_TYPE (arg));
2386
2387 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2388 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2389 is only generated from sincos, cexp or if we have either of them. */
d6bf3b14 2390 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2391 {
2392 op1 = gen_reg_rtx (mode);
2393 op2 = gen_reg_rtx (mode);
2394
f97eea22 2395 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2396
2397 /* Compute into op1 and op2. */
2398 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2399 }
30f690e0 2400 else if (targetm.libc_has_function (function_sincos))
d735c391 2401 {
c2f47e15 2402 tree call, fn = NULL_TREE;
d735c391 2403 tree top1, top2;
2404 rtx op1a, op2a;
2405
2406 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2407 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2409 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2410 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2411 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2412 else
2413 gcc_unreachable ();
48e1416a 2414
0ab48139 2415 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2416 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2417 op1a = copy_addr_to_reg (XEXP (op1, 0));
2418 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2419 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2420 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2421
d735c391 2422 /* Make sure not to fold the sincos call again. */
2423 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2424 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2425 call, 3, arg, top1, top2));
d735c391 2426 }
18b8d8ae 2427 else
2428 {
0ecbc158 2429 tree call, fn = NULL_TREE, narg;
18b8d8ae 2430 tree ctype = build_complex_type (type);
2431
0ecbc158 2432 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2433 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2435 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2436 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2437 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2438 else
2439 gcc_unreachable ();
fc0dfa6e 2440
2441 /* If we don't have a decl for cexp, create one. This is the
2442 friendliest fallback if the user calls __builtin_cexpi
2443 without full C99 function support on the target. */
2444 if (fn == NULL_TREE)
2445 {
2446 tree fntype;
2447 const char *name = NULL;
2448
2449 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2450 name = "cexpf";
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2452 name = "cexp";
2453 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2454 name = "cexpl";
2455
2456 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2457 fn = build_fn_decl (name, fntype);
2458 }
2459
389dd41b 2460 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2461 build_real (type, dconst0), arg);
2462
2463 /* Make sure not to fold the cexp call again. */
2464 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2465 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2466 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2467 }
d735c391 2468
2469 /* Now build the proper return type. */
2470 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2471 make_tree (TREE_TYPE (arg), op2),
2472 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2473 target, VOIDmode, EXPAND_NORMAL);
d735c391 2474}
2475
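/* Editorial note (not part of the original source): __builtin_cexpi (x)
   computes exp (i*x), so by Euler's formula all three strategies above
   produce

     cos (x) + i * sin (x)

   whether via the sincos optab, a sincos libcall, or a cexp libcall on
   the complex value 0 + i*x.  */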
a65c4d64 2476/* Conveniently construct a function call expression. FNDECL names the
2477 function to be called, N is the number of arguments, and the "..."
2478 parameters are the argument expressions. Unlike build_call_expr
2479 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2480
2481static tree
2482build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2483{
2484 va_list ap;
2485 tree fntype = TREE_TYPE (fndecl);
2486 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2487
2488 va_start (ap, n);
2489 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2490 va_end (ap);
2491 SET_EXPR_LOCATION (fn, loc);
2492 return fn;
2493}
a65c4d64 2494
7d3afc77 2495/* Expand a call to one of the builtin rounding functions gcc defines
2496 as an extension (lfloor and lceil). As these are gcc extensions we
2497 do not need to worry about setting errno to EDOM.
ad52b9b7 2498 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2499 EXP is the expression that is a call to the builtin function;
ff1b14e4 2500 if convenient, the result should be placed in TARGET. */
ad52b9b7 2501
2502static rtx
ff1b14e4 2503expand_builtin_int_roundingfn (tree exp, rtx target)
ad52b9b7 2504{
9c42dd28 2505 convert_optab builtin_optab;
1e0c0b35 2506 rtx op0, tmp;
2507 rtx_insn *insns;
ad52b9b7 2508 tree fndecl = get_callee_fndecl (exp);
ad52b9b7 2509 enum built_in_function fallback_fn;
2510 tree fallback_fndecl;
3754d046 2511 machine_mode mode;
4de0924f 2512 tree arg;
ad52b9b7 2513
c2f47e15 2514 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
ad52b9b7 2515 gcc_unreachable ();
2516
c2f47e15 2517 arg = CALL_EXPR_ARG (exp, 0);
ad52b9b7 2518
2519 switch (DECL_FUNCTION_CODE (fndecl))
2520 {
80ff6494 2521 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 2522 CASE_FLT_FN (BUILT_IN_LCEIL):
2523 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 2524 builtin_optab = lceil_optab;
2525 fallback_fn = BUILT_IN_CEIL;
2526 break;
2527
80ff6494 2528 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 2529 CASE_FLT_FN (BUILT_IN_LFLOOR):
2530 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 2531 builtin_optab = lfloor_optab;
2532 fallback_fn = BUILT_IN_FLOOR;
2533 break;
2534
2535 default:
2536 gcc_unreachable ();
2537 }
2538
2539 /* Make a suitable register to place result in. */
2540 mode = TYPE_MODE (TREE_TYPE (exp));
2541
9c42dd28 2542 target = gen_reg_rtx (mode);
ad52b9b7 2543
9c42dd28 2544 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2545 need to expand the argument again. This way, we will not perform
2546 side-effects more than once. */
abfea505 2547 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
ad52b9b7 2548
ff1b14e4 2549 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
ad52b9b7 2550
9c42dd28 2551 start_sequence ();
ad52b9b7 2552
9c42dd28 2553 /* Compute into TARGET. */
2554 if (expand_sfix_optab (target, op0, builtin_optab))
2555 {
2556 /* Output the entire sequence. */
2557 insns = get_insns ();
ad52b9b7 2558 end_sequence ();
9c42dd28 2559 emit_insn (insns);
2560 return target;
ad52b9b7 2561 }
2562
9c42dd28 2563 /* If we were unable to expand via the builtin, stop the sequence
2564 (without outputting the insns). */
2565 end_sequence ();
2566
ad52b9b7 2567 /* Fall back to floating point rounding optab. */
2568 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
fc0dfa6e 2569
2570 /* For non-C99 targets we may end up without a fallback fndecl here
2571 if the user called __builtin_lfloor directly. In this case emit
2572 a call to the floor/ceil variants nevertheless. This should result
2573 in the best user experience for targets lacking full C99 support. */
2574 if (fallback_fndecl == NULL_TREE)
2575 {
2576 tree fntype;
2577 const char *name = NULL;
2578
2579 switch (DECL_FUNCTION_CODE (fndecl))
2580 {
80ff6494 2581 case BUILT_IN_ICEIL:
fc0dfa6e 2582 case BUILT_IN_LCEIL:
2583 case BUILT_IN_LLCEIL:
2584 name = "ceil";
2585 break;
80ff6494 2586 case BUILT_IN_ICEILF:
fc0dfa6e 2587 case BUILT_IN_LCEILF:
2588 case BUILT_IN_LLCEILF:
2589 name = "ceilf";
2590 break;
80ff6494 2591 case BUILT_IN_ICEILL:
fc0dfa6e 2592 case BUILT_IN_LCEILL:
2593 case BUILT_IN_LLCEILL:
2594 name = "ceill";
2595 break;
80ff6494 2596 case BUILT_IN_IFLOOR:
fc0dfa6e 2597 case BUILT_IN_LFLOOR:
2598 case BUILT_IN_LLFLOOR:
2599 name = "floor";
2600 break;
80ff6494 2601 case BUILT_IN_IFLOORF:
fc0dfa6e 2602 case BUILT_IN_LFLOORF:
2603 case BUILT_IN_LLFLOORF:
2604 name = "floorf";
2605 break;
80ff6494 2606 case BUILT_IN_IFLOORL:
fc0dfa6e 2607 case BUILT_IN_LFLOORL:
2608 case BUILT_IN_LLFLOORL:
2609 name = "floorl";
2610 break;
2611 default:
2612 gcc_unreachable ();
2613 }
2614
2615 fntype = build_function_type_list (TREE_TYPE (arg),
2616 TREE_TYPE (arg), NULL_TREE);
2617 fallback_fndecl = build_fn_decl (name, fntype);
2618 }
2619
0568e9c1 2620 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
ad52b9b7 2621
d4c690af 2622 tmp = expand_normal (exp);
933eb13a 2623 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
ad52b9b7 2624
2625 /* Truncate the result of floating point optab to integer
2626 via expand_fix (). */
2627 target = gen_reg_rtx (mode);
2628 expand_fix (target, tmp, 0);
2629
2630 return target;
2631}
2632
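/* Editorial sketch (not part of the original source): when lceil_optab
   or lfloor_optab is unavailable, the fallback above lowers e.g.

     long l = __builtin_lfloor (x);

   to the equivalent of

     long l = (long) floor (x);

   by calling the floor/floorf/floorl variant and truncating the result
   with expand_fix.  */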
7d3afc77 2633/* Expand a call to one of the builtin math functions doing integer
2634 conversion (lrint).
2635 Return 0 if a normal call should be emitted rather than expanding the
2636 function in-line. EXP is the expression that is a call to the builtin
ff1b14e4 2637 function; if convenient, the result should be placed in TARGET. */
7d3afc77 2638
2639static rtx
ff1b14e4 2640expand_builtin_int_roundingfn_2 (tree exp, rtx target)
7d3afc77 2641{
5f51ee59 2642 convert_optab builtin_optab;
1e0c0b35 2643 rtx op0;
2644 rtx_insn *insns;
7d3afc77 2645 tree fndecl = get_callee_fndecl (exp);
4de0924f 2646 tree arg;
3754d046 2647 machine_mode mode;
e951f9a4 2648 enum built_in_function fallback_fn = BUILT_IN_NONE;
7d3afc77 2649
c2f47e15 2650 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2651 gcc_unreachable ();
48e1416a 2652
c2f47e15 2653 arg = CALL_EXPR_ARG (exp, 0);
7d3afc77 2654
2655 switch (DECL_FUNCTION_CODE (fndecl))
2656 {
80ff6494 2657 CASE_FLT_FN (BUILT_IN_IRINT):
e951f9a4 2658 fallback_fn = BUILT_IN_LRINT;
3c77f69c 2659 gcc_fallthrough ();
7d3afc77 2660 CASE_FLT_FN (BUILT_IN_LRINT):
2661 CASE_FLT_FN (BUILT_IN_LLRINT):
e951f9a4 2662 builtin_optab = lrint_optab;
2663 break;
80ff6494 2664
2665 CASE_FLT_FN (BUILT_IN_IROUND):
e951f9a4 2666 fallback_fn = BUILT_IN_LROUND;
3c77f69c 2667 gcc_fallthrough ();
ef2f1a10 2668 CASE_FLT_FN (BUILT_IN_LROUND):
2669 CASE_FLT_FN (BUILT_IN_LLROUND):
e951f9a4 2670 builtin_optab = lround_optab;
2671 break;
80ff6494 2672
7d3afc77 2673 default:
2674 gcc_unreachable ();
2675 }
2676
e951f9a4 2677 /* There's no easy way to detect the case we need to set EDOM. */
2678 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2679 return NULL_RTX;
2680
7d3afc77 2681 /* Make a suitable register to place result in. */
2682 mode = TYPE_MODE (TREE_TYPE (exp));
2683
e951f9a4 2684 /* Expand inline via the optab only when errno handling is not required. */
2685 if (!flag_errno_math)
2686 {
de2e453e 2687 rtx result = gen_reg_rtx (mode);
7d3afc77 2688
e951f9a4 2689 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2690 need to expand the argument again. This way, we will not perform
2691 side-effects more than once. */
2692 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7d3afc77 2693
e951f9a4 2694 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
7d3afc77 2695
e951f9a4 2696 start_sequence ();
7d3afc77 2697
de2e453e 2698 if (expand_sfix_optab (result, op0, builtin_optab))
e951f9a4 2699 {
2700 /* Output the entire sequence. */
2701 insns = get_insns ();
2702 end_sequence ();
2703 emit_insn (insns);
de2e453e 2704 return result;
e951f9a4 2705 }
2706
2707 /* If we were unable to expand via the builtin, stop the sequence
2708 (without outputting the insns) and call the library function
2709 with the stabilized argument list. */
7d3afc77 2710 end_sequence ();
2711 }
2712
e951f9a4 2713 if (fallback_fn != BUILT_IN_NONE)
2714 {
2715 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2716 targets, (int) round (x) should never be transformed into
2717 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2718 a call to lround in the hope that the target provides at least some
2719 C99 functions. This should result in the best user experience for
2720 targets lacking full C99 support. */
e3240774 2721 tree fallback_fndecl = mathfn_built_in_1
2722 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
e951f9a4 2723
2724 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2725 fallback_fndecl, 1, arg);
2726
2727 target = expand_call (exp, NULL_RTX, target == const0_rtx);
933eb13a 2728 target = maybe_emit_group_store (target, TREE_TYPE (exp));
e951f9a4 2729 return convert_to_mode (mode, target, 0);
2730 }
5f51ee59 2731
de2e453e 2732 return expand_call (exp, target, target == const0_rtx);
7d3afc77 2733}
2734
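/* Editorial sketch (not part of the original source): for the IROUND and
   IRINT variants without direct optab support, the fallback above emits
   e.g.

     int i = __builtin_iround (x);   ==> (int) lround (x)

   relying on convert_to_mode to narrow the lround result to int.  */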
c2f47e15 2735/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2736 a normal call should be emitted rather than expanding the function
2737 in-line. EXP is the expression that is a call to the builtin
2738 function; if convenient, the result should be placed in TARGET. */
2739
2740static rtx
f97eea22 2741expand_builtin_powi (tree exp, rtx target)
757c219d 2742{
757c219d 2743 tree arg0, arg1;
2744 rtx op0, op1;
3754d046 2745 machine_mode mode;
2746 machine_mode mode2;
757c219d 2747
c2f47e15 2748 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2749 return NULL_RTX;
757c219d 2750
c2f47e15 2751 arg0 = CALL_EXPR_ARG (exp, 0);
2752 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2753 mode = TYPE_MODE (TREE_TYPE (exp));
2754
757c219d 2755 /* Emit a libcall to libgcc. */
2756
c2f47e15 2757 /* Mode of the 2nd argument must match that of an int. */
d0405f40 2758 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2759
757c219d 2760 if (target == NULL_RTX)
2761 target = gen_reg_rtx (mode);
2762
f97eea22 2763 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2764 if (GET_MODE (op0) != mode)
2765 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2766 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2767 if (GET_MODE (op1) != mode2)
2768 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2769
f36b9f69 2770 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2dd6f9ed 2771 target, LCT_CONST, mode, 2,
d0405f40 2772 op0, mode, op1, mode2);
757c219d 2773
2774 return target;
2775}
2776
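/* Editorial sketch (not part of the original source): powi always goes
   through libgcc, so

     double y = __builtin_powi (x, n);

   becomes a call to the powi_optab libfunc for DFmode (conventionally
   __powidf2), with the exponent first converted to the mode of int.  */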
48e1416a 2777/* Expand expression EXP which is a call to the strlen builtin. Return
c2f47e15 2778 NULL_RTX if we failed the caller should emit a normal call, otherwise
aed0bd19 2779 try to get the result in TARGET, if convenient. */
f7c44134 2780
53800dbe 2781static rtx
c2f47e15 2782expand_builtin_strlen (tree exp, rtx target,
3754d046 2783 machine_mode target_mode)
53800dbe 2784{
c2f47e15 2785 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2786 return NULL_RTX;
53800dbe 2787 else
2788 {
8786db1e 2789 struct expand_operand ops[4];
911c0150 2790 rtx pat;
c2f47e15 2791 tree len;
2792 tree src = CALL_EXPR_ARG (exp, 0);
1e0c0b35 2793 rtx src_reg;
2794 rtx_insn *before_strlen;
19a4dce4 2795 machine_mode insn_mode;
ef2c4a29 2796 enum insn_code icode = CODE_FOR_nothing;
153c3b50 2797 unsigned int align;
6248e345 2798
2799 /* If the length can be computed at compile-time, return it. */
681fab1e 2800 len = c_strlen (src, 0);
6248e345 2801 if (len)
80cd7a5e 2802 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
6248e345 2803
681fab1e 2804 /* If the length can be computed at compile-time and is constant
2805 integer, but there are side-effects in src, evaluate
2806 src for side-effects, then return len.
2807 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2808 can be optimized into: i++; x = 3; */
2809 len = c_strlen (src, 1);
2810 if (len && TREE_CODE (len) == INTEGER_CST)
2811 {
2812 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2813 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2814 }
2815
957d0361 2816 align = get_pointer_alignment (src) / BITS_PER_UNIT;
53800dbe 2817
53800dbe 2818 /* If SRC is not a pointer type, don't do this operation inline. */
2819 if (align == 0)
c2f47e15 2820 return NULL_RTX;
53800dbe 2821
911c0150 2822 /* Bail out if we can't compute strlen in the right mode. */
19a4dce4 2823 FOR_EACH_MODE_FROM (insn_mode, target_mode)
53800dbe 2824 {
d6bf3b14 2825 icode = optab_handler (strlen_optab, insn_mode);
53800dbe 2826 if (icode != CODE_FOR_nothing)
c28ae87f 2827 break;
53800dbe 2828 }
2829 if (insn_mode == VOIDmode)
c2f47e15 2830 return NULL_RTX;
53800dbe 2831
911c0150 2832 /* Make a place to hold the source address. We will not expand
2833 the actual source until we are sure that the expansion will
2834 not fail -- there are trees that cannot be expanded twice. */
2835 src_reg = gen_reg_rtx (Pmode);
53800dbe 2836
911c0150 2837 /* Mark the beginning of the strlen sequence so we can emit the
2838 source operand later. */
f0ce3b1f 2839 before_strlen = get_last_insn ();
53800dbe 2840
8786db1e 2841 create_output_operand (&ops[0], target, insn_mode);
2842 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2843 create_integer_operand (&ops[2], 0);
2844 create_integer_operand (&ops[3], align);
2845 if (!maybe_expand_insn (icode, 4, ops))
c2f47e15 2846 return NULL_RTX;
911c0150 2847
2848 /* Now that we are assured of success, expand the source. */
2849 start_sequence ();
499eee58 2850 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
911c0150 2851 if (pat != src_reg)
499eee58 2852 {
2853#ifdef POINTERS_EXTEND_UNSIGNED
2854 if (GET_MODE (pat) != Pmode)
2855 pat = convert_to_mode (Pmode, pat,
2856 POINTERS_EXTEND_UNSIGNED);
2857#endif
2858 emit_move_insn (src_reg, pat);
2859 }
31d3e01c 2860 pat = get_insns ();
911c0150 2861 end_sequence ();
bceb0d1f 2862
2863 if (before_strlen)
2864 emit_insn_after (pat, before_strlen);
2865 else
2866 emit_insn_before (pat, get_insns ());
53800dbe 2867
2868 /* Return the value in the proper mode for this function. */
8786db1e 2869 if (GET_MODE (ops[0].value) == target_mode)
2870 target = ops[0].value;
53800dbe 2871 else if (target != 0)
8786db1e 2872 convert_move (target, ops[0].value, 0);
53800dbe 2873 else
8786db1e 2874 target = convert_to_mode (target_mode, ops[0].value, 0);
911c0150 2875
2876 return target;
53800dbe 2877 }
2878}
2879
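/* Editorial sketch (not part of the original source): the c_strlen calls
   above implement compile-time folding such as

     size_t n = strlen ("hello");   ==> n = 5 with no call emitted

   while a constant-length argument with side effects is evaluated for
   its side effects and then replaced by the constant, as the comment in
   the function illustrates.  */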
6840589f 2880/* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2881 bytes from constant string DATA + OFFSET and return it as target
2882 constant. */
2883
2884static rtx
aecda0d6 2885builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 2886 scalar_int_mode mode)
6840589f 2887{
2888 const char *str = (const char *) data;
2889
64db345d 2890 gcc_assert (offset >= 0
2891 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2892 <= strlen (str) + 1));
6840589f 2893
2894 return c_readstr (str + offset, mode);
2895}
2896
36d63243 2897/* LEN specify length of the block of memcpy/memset operation.
9db0f34d 2898 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2899 In some cases we can make very likely guess on max size, then we
2900 set it into PROBABLE_MAX_SIZE. */
36d63243 2901
2902static void
2903determine_block_size (tree len, rtx len_rtx,
2904 unsigned HOST_WIDE_INT *min_size,
9db0f34d 2905 unsigned HOST_WIDE_INT *max_size,
2906 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 2907{
2908 if (CONST_INT_P (len_rtx))
2909 {
4e140a5c 2910 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 2911 return;
2912 }
2913 else
2914 {
9c1be15e 2915 wide_int min, max;
9db0f34d 2916 enum value_range_type range_type = VR_UNDEFINED;
2917
2918 /* Determine bounds from the type. */
2919 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2920 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2921 else
2922 *min_size = 0;
2923 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 2924 *probable_max_size = *max_size
2925 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 2926 else
2927 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2928
2929 if (TREE_CODE (len) == SSA_NAME)
2930 range_type = get_range_info (len, &min, &max);
2931 if (range_type == VR_RANGE)
36d63243 2932 {
fe5ad926 2933 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 2934 *min_size = min.to_uhwi ();
fe5ad926 2935 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 2936 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 2937 }
9db0f34d 2938 else if (range_type == VR_ANTI_RANGE)
36d63243 2939 {
4a474a5a 2940 /* An anti-range 0...N lets us determine the minimal size as N+1. */
fe5ad926 2941 if (min == 0)
9db0f34d 2942 {
9c1be15e 2943 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2944 *min_size = max.to_uhwi () + 1;
9db0f34d 2945 }
2946 /* Code like
2947
2948 int n;
2949 if (n < 100)
4a474a5a 2950 memcpy (a, b, n)
9db0f34d 2951
2952 produces an anti-range allowing negative values of N. We can
2953 still use that information to guess that N is not negative.
2954 */
fe5ad926 2955 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2956 *probable_max_size = min.to_uhwi () - 1;
36d63243 2957 }
2958 }
2959 gcc_checking_assert (*max_size <=
2960 (unsigned HOST_WIDE_INT)
2961 GET_MODE_MASK (GET_MODE (len_rtx)));
2962}
2963
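/* Editorial note (not part of the original source): an example of the
   range logic above, assuming VRP has recorded n in [16, 128] for

     memcpy (a, b, n);

   determine_block_size then sets *min_size = 16 and
   *max_size = *probable_max_size = 128, letting the block-move expander
   choose a strategy suited to that size range.  */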
5aef8938 2964/* Try to verify that the sizes and lengths of the arguments to a string
2965 manipulation function given by EXP are within valid bounds and that
2966 the operation does not lead to buffer overflow. Arguments other than
2967 EXP may be null. When non-null, the arguments have the following
2968 meaning:
2969 SIZE is the user-supplied size argument to the function (such as in
2970 memcpy(d, s, SIZE) or strncpy(d, s, SIZE). It specifies the exact
2971 number of bytes to write.
2972 MAXLEN is the user-supplied bound on the length of the source sequence
2973 (such as in strncat(d, s, N)). It specifies the upper limit on the number
2974 of bytes to write.
ae33f654 2975 SRC is the source string (such as in strcpy(d, s)) when the expression
5aef8938 2976 EXP is a string function call (as opposed to a memory call like memcpy).
8d6c6ef5 2977 As an exception, SRC can also be an integer denoting the precomputed
2978 size of the source string or object (for functions like memcpy).
5aef8938 2979 OBJSIZE is the size of the destination object specified by the last
2980 argument to the _chk builtins, typically resulting from the expansion
2981 of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
2982 OBJSIZE).
2983
2984 When SIZE is null MAXLEN is checked to verify that it doesn't exceed
2985 SIZE_MAX.
2986
2987 If the call is successfully verified as safe from buffer overflow
2988 the function returns true, otherwise false. */
2989
2990static bool
8d6c6ef5 2991check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
5aef8938 2992{
2993 /* The size of the largest object is half the address space, or
2994 SSIZE_MAX. (This is way too permissive.) */
2995 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
2996
2997 tree slen = NULL_TREE;
2998
8d6c6ef5 2999 tree range[2] = { NULL_TREE, NULL_TREE };
3000
5aef8938 3001 /* Set to true when the exact number of bytes written by a string
3002 function like strcpy is not known and the only thing that is
3003 known is that it must be at least one (for the terminating nul). */
3004 bool at_least_one = false;
8d6c6ef5 3005 if (src)
5aef8938 3006 {
8d6c6ef5 3007 /* SRC is normally a pointer to string but as a special case
5aef8938 3008 it can be an integer denoting the length of a string. */
8d6c6ef5 3009 if (POINTER_TYPE_P (TREE_TYPE (src)))
5aef8938 3010 {
3011 /* Try to determine the range of lengths the source string
8d6c6ef5 3012 refers to. If it can be determined and is less than
3013 the upper bound given by MAXLEN add one to it for
5aef8938 3014 the terminating nul. Otherwise, set it to one for
8d6c6ef5 3015 the same reason, or to MAXLEN as appropriate. */
3016 get_range_strlen (src, range);
3017 if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
3018 {
3019 if (maxlen && tree_int_cst_le (maxlen, range[0]))
3020 range[0] = range[1] = maxlen;
3021 else
3022 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3023 range[0], size_one_node);
3024
3025 if (maxlen && tree_int_cst_le (maxlen, range[1]))
3026 range[1] = maxlen;
3027 else if (!integer_all_onesp (range[1]))
3028 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3029 range[1], size_one_node);
3030
3031 slen = range[0];
3032 }
5aef8938 3033 else
3034 {
3035 at_least_one = true;
3036 slen = size_one_node;
3037 }
3038 }
3039 else
8d6c6ef5 3040 slen = src;
5aef8938 3041 }
3042
3043 if (!size && !maxlen)
3044 {
3045 /* When the only available piece of data is the object size
3046 there is nothing to do. */
3047 if (!slen)
3048 return true;
3049
3050 /* Otherwise, when the length of the source sequence is known
3051	 (as with strlen), set SIZE to it.  */
8d6c6ef5 3052 if (!range[0])
3053 size = slen;
5aef8938 3054 }
3055
3056 if (!objsize)
3057 objsize = maxobjsize;
3058
3059 /* The SIZE is exact if it's non-null, constant, and in range of
3060 unsigned HOST_WIDE_INT. */
3061 bool exactsize = size && tree_fits_uhwi_p (size);
3062
5aef8938 3063 if (size)
3064 get_size_range (size, range);
3065
3066 /* First check the number of bytes to be written against the maximum
3067 object size. */
3068 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3069 {
3070 location_t loc = tree_nonartificial_location (exp);
4d317237 3071 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3072
3073 if (range[0] == range[1])
3074 warning_at (loc, opt,
9098b938 3075 "%K%qD specified size %E "
8d6c6ef5 3076 "exceeds maximum object size %E",
3077 exp, get_callee_fndecl (exp), range[0], maxobjsize);
5aef8938 3078 else
3079 warning_at (loc, opt,
9098b938 3080 "%K%qD specified size between %E and %E "
8d6c6ef5 3081 "exceeds maximum object size %E",
5aef8938 3082 exp, get_callee_fndecl (exp),
8d6c6ef5 3083 range[0], range[1], maxobjsize);
5aef8938 3084 return false;
3085 }
3086
3087 /* Next check the number of bytes to be written against the destination
3088 object size. */
3089 if (range[0] || !exactsize || integer_all_onesp (size))
3090 {
3091 if (range[0]
3092 && ((tree_fits_uhwi_p (objsize)
3093 && tree_int_cst_lt (objsize, range[0]))
3094 || (tree_fits_uhwi_p (size)
3095 && tree_int_cst_lt (size, range[0]))))
3096 {
5aef8938 3097 location_t loc = tree_nonartificial_location (exp);
4d317237 3098 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3099
8d6c6ef5 3100 if (size == slen && at_least_one)
3101 {
3102 /* This is a call to strcpy with a destination of 0 size
3103 and a source of unknown length. The call will write
3104 at least one byte past the end of the destination. */
3105 warning_at (loc, opt,
9098b938 3106 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3107 "of size %E overflows the destination",
3108 exp, get_callee_fndecl (exp), range[0], objsize);
3109 }
3110 else if (tree_int_cst_equal (range[0], range[1]))
5aef8938 3111 warning_at (loc, opt,
8d6c6ef5 3112 (integer_onep (range[0])
9098b938 3113 ? G_("%K%qD writing %E byte into a region "
8d6c6ef5 3114 "of size %E overflows the destination")
3115 : G_("%K%qD writing %E bytes into a region "
3116 "of size %E overflows the destination")),
3117 exp, get_callee_fndecl (exp), range[0], objsize);
3118 else if (tree_int_cst_sign_bit (range[1]))
3119 {
3120 /* Avoid printing the upper bound if it's invalid. */
3121 warning_at (loc, opt,
9098b938 3122 "%K%qD writing %E or more bytes into a region "
8d6c6ef5 3123 "of size %E overflows the destination",
3124 exp, get_callee_fndecl (exp), range[0], objsize);
3125 }
5aef8938 3126 else
3127 warning_at (loc, opt,
9098b938 3128 "%K%qD writing between %E and %E bytes into "
8d6c6ef5 3129 "a region of size %E overflows the destination",
3130 exp, get_callee_fndecl (exp), range[0], range[1],
3131 objsize);
5aef8938 3132
3133 /* Return error when an overflow has been detected. */
3134 return false;
3135 }
3136 }
3137
3138 /* Check the maximum length of the source sequence against the size
3139 of the destination object if known, or against the maximum size
3140 of an object. */
3141 if (maxlen)
3142 {
3143 get_size_range (maxlen, range);
3144
3145 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3146 {
3147 location_t loc = tree_nonartificial_location (exp);
4d317237 3148 loc = expansion_point_location_if_in_system_header (loc);
5aef8938 3149
3150 if (tree_int_cst_lt (maxobjsize, range[0]))
3151 {
3152 /* Warn about crazy big sizes first since that's more
3153 likely to be meaningful than saying that the bound
3154 is greater than the object size if both are big. */
3155 if (range[0] == range[1])
3156 warning_at (loc, opt,
9098b938 3157 "%K%qD specified bound %E "
8d6c6ef5 3158 "exceeds maximum object size %E",
5aef8938 3159 exp, get_callee_fndecl (exp),
8d6c6ef5 3160 range[0], maxobjsize);
5aef8938 3161 else
3162 warning_at (loc, opt,
9098b938 3163 "%K%qD specified bound between %E and %E "
8d6c6ef5 3164 "exceeds maximum object size %E",
5aef8938 3165 exp, get_callee_fndecl (exp),
8d6c6ef5 3166 range[0], range[1], maxobjsize);
5aef8938 3167
3168 return false;
3169 }
3170
3171 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3172 {
8d6c6ef5 3173 if (tree_int_cst_equal (range[0], range[1]))
5aef8938 3174 warning_at (loc, opt,
9098b938 3175 "%K%qD specified bound %E "
8d6c6ef5 3176 "exceeds destination size %E",
5aef8938 3177 exp, get_callee_fndecl (exp),
8d6c6ef5 3178 range[0], objsize);
5aef8938 3179 else
3180 warning_at (loc, opt,
9098b938 3181 "%K%qD specified bound between %E and %E "
8d6c6ef5 3182 "exceeds destination size %E",
5aef8938 3183 exp, get_callee_fndecl (exp),
8d6c6ef5 3184 range[0], range[1], objsize);
5aef8938 3185 return false;
3186 }
3187 }
3188 }
3189
8d6c6ef5 3190 if (slen
3191 && slen == src
3192 && size && range[0]
3193 && tree_int_cst_lt (slen, range[0]))
3194 {
3195 location_t loc = tree_nonartificial_location (exp);
3196
3197 if (tree_int_cst_equal (range[0], range[1]))
3198 warning_at (loc, opt,
3199 (tree_int_cst_equal (range[0], integer_one_node)
9098b938 3200 ? G_("%K%qD reading %E byte from a region of size %E")
8d6c6ef5 3201 : G_("%K%qD reading %E bytes from a region of size %E")),
3202 exp, get_callee_fndecl (exp), range[0], slen);
3203 else if (tree_int_cst_sign_bit (range[1]))
3204 {
3205 /* Avoid printing the upper bound if it's invalid. */
3206 warning_at (loc, opt,
9098b938 3207 "%K%qD reading %E or more bytes from a region "
8d6c6ef5 3208 "of size %E",
3209 exp, get_callee_fndecl (exp), range[0], slen);
3210 }
3211 else
3212 warning_at (loc, opt,
9098b938 3213 "%K%qD reading between %E and %E bytes from a region "
8d6c6ef5 3214 "of size %E",
3215 exp, get_callee_fndecl (exp), range[0], range[1], slen);
3216 return false;
3217 }
3218
5aef8938 3219 return true;
3220}
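/* Illustrative example of a call check_sizes diagnoses, assuming
   -Wstringop-overflow= is enabled:

     char d[4];
     strcpy (d, "abcde");

   The source length range is [5, 5]; adding one for the terminating
   nul gives 6 bytes to write, which exceeds the destination size 4,
   so a "writing 6 bytes into a region of size 4" warning is issued
   and the function returns false.  */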
3221
3222/* Helper to compute the size of the object referenced by the DEST
3223   expression, which must be of pointer type, using Object Size type
3224 OSTYPE (only the least significant 2 bits are used). Return
3225 the size of the object if successful or NULL when the size cannot
3226 be determined. */
3227
3228static inline tree
8d6c6ef5 3229compute_objsize (tree dest, int ostype)
5aef8938 3230{
3231 unsigned HOST_WIDE_INT size;
3232 if (compute_builtin_object_size (dest, ostype & 3, &size))
3233 return build_int_cst (sizetype, size);
3234
3235 return NULL_TREE;
3236}
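/* For instance, given

     char a[7];

   compute_objsize (&a[2], 0) returns 5, the number of bytes from the
   pointer to the end of the object, matching what
   __builtin_object_size (&a[2], 0) would yield.  */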
3237
3238/* Helper to determine and check the sizes of the source and the destination
8d6c6ef5 3239   of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
3240 call expression, DEST is the destination argument, SRC is the source
3241 argument or null, and LEN is the number of bytes. Use Object Size type-0
3242 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5aef8938 3243 (no overflow or invalid sizes), false otherwise. */
3244
3245static bool
8d6c6ef5 3246check_memop_sizes (tree exp, tree dest, tree src, tree size)
5aef8938 3247{
3248 if (!warn_stringop_overflow)
3249 return true;
3250
3251 /* For functions like memset and memcpy that operate on raw memory
8d6c6ef5 3252 try to determine the size of the largest source and destination
3253 object using type-0 Object Size regardless of the object size
3254 type specified by the option. */
3255 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3256 tree dstsize = compute_objsize (dest, 0);
5aef8938 3257
3258 return check_sizes (OPT_Wstringop_overflow_, exp,
8d6c6ef5 3259 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3260}
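/* Illustrative example of an over-read this helper catches:

     char s[3], d[8];
     memcpy (d, s, 8);

   The type-0 size of S is 3, less than the 8 bytes to be copied, so
   check_sizes reports reading 8 bytes from a region of size 3.  */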
3261
3262/* Validate memchr arguments without performing any expansion.
3263 Return NULL_RTX. */
3264
3265static rtx
3266expand_builtin_memchr (tree exp, rtx)
3267{
3268 if (!validate_arglist (exp,
3269 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3270 return NULL_RTX;
3271
3272 tree arg1 = CALL_EXPR_ARG (exp, 0);
3273 tree len = CALL_EXPR_ARG (exp, 2);
3274
3275 /* Diagnose calls where the specified length exceeds the size
3276 of the object. */
3277 if (warn_stringop_overflow)
3278 {
3279 tree size = compute_objsize (arg1, 0);
3280 check_sizes (OPT_Wstringop_overflow_,
3281 exp, len, /*maxlen=*/NULL_TREE,
3282 size, /*objsize=*/NULL_TREE);
3283 }
3284
3285 return NULL_RTX;
5aef8938 3286}
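/* Illustrative example: a call such as

     char buf[8];
     memchr (buf, '\0', 16);

   is diagnosed above because the length 16 exceeds the size of BUF;
   the call itself is still emitted as a normal library call.  */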
3287
c2f47e15 3288/* Expand a call EXP to the memcpy builtin.
3289   Return NULL_RTX if we failed; the caller should emit a normal call,
3b824fa6 3290 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3291 mode MODE if that's convenient). */
c2f47e15 3292
53800dbe 3293static rtx
a65c4d64 3294expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3295{
c2f47e15 3296 if (!validate_arglist (exp,
3297 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3298 return NULL_RTX;
5aef8938 3299
3300 tree dest = CALL_EXPR_ARG (exp, 0);
3301 tree src = CALL_EXPR_ARG (exp, 1);
3302 tree len = CALL_EXPR_ARG (exp, 2);
3303
8d6c6ef5 3304 check_memop_sizes (exp, dest, src, len);
5aef8938 3305
d0fbba1a 3306 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3307 /*endp=*/ 0);
f21337ef 3308}
6840589f 3309
4d317237 3310/* Check a call EXP to the memmove built-in for validity.
3311 Return NULL_RTX on both success and failure. */
3312
3313static rtx
3314expand_builtin_memmove (tree exp, rtx)
3315{
3316 if (!validate_arglist (exp,
3317 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3318 return NULL_RTX;
3319
3320 tree dest = CALL_EXPR_ARG (exp, 0);
8d6c6ef5 3321 tree src = CALL_EXPR_ARG (exp, 1);
4d317237 3322 tree len = CALL_EXPR_ARG (exp, 2);
3323
8d6c6ef5 3324 check_memop_sizes (exp, dest, src, len);
4d317237 3325
3326 return NULL_RTX;
3327}
3328
f21337ef 3329/* Expand an instrumented call EXP to the memcpy builtin.
3330   Return NULL_RTX if we failed; the caller should emit a normal call,
3331 otherwise try to get the result in TARGET, if convenient (and in
3332 mode MODE if that's convenient). */
53800dbe 3333
f21337ef 3334static rtx
3335expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3336{
3337 if (!validate_arglist (exp,
3338 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3339 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3340 INTEGER_TYPE, VOID_TYPE))
3341 return NULL_RTX;
3342 else
3343 {
3344 tree dest = CALL_EXPR_ARG (exp, 0);
3345 tree src = CALL_EXPR_ARG (exp, 2);
3346 tree len = CALL_EXPR_ARG (exp, 4);
d0fbba1a 3347 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3348 /*end_p=*/ 0);
53800dbe 3349
f21337ef 3350 /* Return src bounds with the result. */
3351 if (res)
e5716f7e 3352 {
17d388d8 3353 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3354 expand_normal (CALL_EXPR_ARG (exp, 1)));
3355 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3356 }
f21337ef 3357 return res;
53800dbe 3358 }
3359}
3360
c2f47e15 3361/* Expand a call EXP to the mempcpy builtin.
3362 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3363 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3364 mode MODE if that's convenient). If ENDP is 0 return the
3365 destination pointer, if ENDP is 1 return the end pointer ala
3366 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3367 stpcpy. */
647661c6 3368
3369static rtx
d0fbba1a 3370expand_builtin_mempcpy (tree exp, rtx target)
647661c6 3371{
c2f47e15 3372 if (!validate_arglist (exp,
3373 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3374 return NULL_RTX;
5aef8938 3375
3376 tree dest = CALL_EXPR_ARG (exp, 0);
3377 tree src = CALL_EXPR_ARG (exp, 1);
3378 tree len = CALL_EXPR_ARG (exp, 2);
3379
3380 /* Avoid expanding mempcpy into memcpy when the call is determined
3381 to overflow the buffer. This also prevents the same overflow
3382 from being diagnosed again when expanding memcpy. */
8d6c6ef5 3383 if (!check_memop_sizes (exp, dest, src, len))
5aef8938 3384 return NULL_RTX;
3385
3386 return expand_builtin_mempcpy_args (dest, src, len,
d0fbba1a 3387 target, exp, /*endp=*/ 1);
f21337ef 3388}
3389
3390/* Expand an instrumented call EXP to the mempcpy builtin.
3391   Return NULL_RTX if we failed; the caller should emit a normal call,
3392 otherwise try to get the result in TARGET, if convenient (and in
3393 mode MODE if that's convenient). */
3394
3395static rtx
d0fbba1a 3396expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
f21337ef 3397{
3398 if (!validate_arglist (exp,
3399 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3400 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3401 INTEGER_TYPE, VOID_TYPE))
3402 return NULL_RTX;
3403 else
3404 {
3405 tree dest = CALL_EXPR_ARG (exp, 0);
3406 tree src = CALL_EXPR_ARG (exp, 2);
3407 tree len = CALL_EXPR_ARG (exp, 4);
3408 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
d0fbba1a 3409 exp, 1);
f21337ef 3410
3411 /* Return src bounds with the result. */
3412 if (res)
3413 {
17d388d8 3414 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3415 expand_normal (CALL_EXPR_ARG (exp, 1)));
3416 res = chkp_join_splitted_slot (res, bnd);
3417 }
3418 return res;
c2f47e15 3419 }
3420}
3421
d0fbba1a 3422/* Helper function to do the actual work of expanding the memory copy
3423   family of functions (memcpy, mempcpy, stpcpy).  The expansion copies
3424   LEN bytes from SRC to DEST and assigns the result to TARGET if convenient.
3425 If ENDP is 0 return the
3426 destination pointer, if ENDP is 1 return the end pointer ala
3427 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3428 stpcpy. */
c2f47e15 3429
3430static rtx
d0fbba1a 3431expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3432 rtx target, tree exp, int endp)
c2f47e15 3433{
d0fbba1a 3434 const char *src_str;
3435 unsigned int src_align = get_pointer_alignment (src);
3436 unsigned int dest_align = get_pointer_alignment (dest);
3437 rtx dest_mem, src_mem, dest_addr, len_rtx;
3438 HOST_WIDE_INT expected_size = -1;
3439 unsigned int expected_align = 0;
3440 unsigned HOST_WIDE_INT min_size;
3441 unsigned HOST_WIDE_INT max_size;
3442 unsigned HOST_WIDE_INT probable_max_size;
f21337ef 3443
d0fbba1a 3444 /* If DEST is not a pointer type, call the normal function. */
3445 if (dest_align == 0)
3446 return NULL_RTX;
a0c938f0 3447
d0fbba1a 3448  /* Likewise, if SRC is not a pointer type, don't do this
3449 operation in-line. */
3450 if (src_align == 0)
3451 return NULL_RTX;
9fe0e1b8 3452
d0fbba1a 3453 if (currently_expanding_gimple_stmt)
3454 stringop_block_profile (currently_expanding_gimple_stmt,
3455 &expected_align, &expected_size);
0862b7e9 3456
d0fbba1a 3457 if (expected_align < dest_align)
3458 expected_align = dest_align;
3459 dest_mem = get_memory_rtx (dest, len);
3460 set_mem_align (dest_mem, dest_align);
3461 len_rtx = expand_normal (len);
3462 determine_block_size (len, len_rtx, &min_size, &max_size,
3463 &probable_max_size);
3464 src_str = c_getstr (src);
647661c6 3465
d0fbba1a 3466 /* If SRC is a string constant and block move would be done
3467 by pieces, we can avoid loading the string from memory
3468     and need only store the computed constants.  */
3469 if (src_str
3470 && CONST_INT_P (len_rtx)
3471 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3472 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3473 CONST_CAST (char *, src_str),
3474 dest_align, false))
3475 {
3476 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3477 builtin_memcpy_read_str,
d72123ce 3478 CONST_CAST (char *, src_str),
d0fbba1a 3479 dest_align, false, endp);
3480 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3481 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3482 return dest_mem;
3483 }
647661c6 3484
d0fbba1a 3485 src_mem = get_memory_rtx (src, len);
3486 set_mem_align (src_mem, src_align);
9fe0e1b8 3487
d0fbba1a 3488  /* Copy the block of memory most expediently.  */
3489 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3490 CALL_EXPR_TAILCALL (exp)
3491 && (endp == 0 || target == const0_rtx)
3492 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3493 expected_align, expected_size,
3494 min_size, max_size, probable_max_size);
3495
3496 if (dest_addr == 0)
3497 {
3498 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3499 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3500 }
3501
3502 if (endp && target != const0_rtx)
3503 {
3504 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3505	      /* For stpcpy, point at the last byte written (the nul).  */
3506 if (endp == 2)
3507 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
d72123ce 3508 }
d0fbba1a 3509
3510 return dest_addr;
3511}
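/* Illustrative summary of the ENDP convention, assuming constant
   arguments:

     memcpy (d, "abc", 4);    ENDP == 0: result is D
     mempcpy (d, "abc", 4);   ENDP == 1: result is D + 4
     stpcpy (d, "abc");       ENDP == 2: result is D + 3 (the nul)

   The expansion computes DEST + LEN and, for ENDP == 2, subtracts one
   so the result points at the terminating nul.  */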
3512
3513static rtx
3514expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3515 rtx target, tree orig_exp, int endp)
3516{
3517 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3518 endp);
647661c6 3519}
3520
c2f47e15 3521/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3522 we failed, the caller should emit a normal call, otherwise try to
3523 get the result in TARGET, if convenient. If ENDP is 0 return the
3524 destination pointer, if ENDP is 1 return the end pointer ala
3525 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3526 stpcpy. */
3527
3528static rtx
3529expand_movstr (tree dest, tree src, rtx target, int endp)
3530{
8786db1e 3531 struct expand_operand ops[3];
727c62dd 3532 rtx dest_mem;
3533 rtx src_mem;
727c62dd 3534
8d74dc42 3535 if (!targetm.have_movstr ())
c2f47e15 3536 return NULL_RTX;
727c62dd 3537
d8ae1baa 3538 dest_mem = get_memory_rtx (dest, NULL);
3539 src_mem = get_memory_rtx (src, NULL);
727c62dd 3540 if (!endp)
3541 {
3542 target = force_reg (Pmode, XEXP (dest_mem, 0));
3543 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3544 }
3545
8786db1e 3546 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3547 create_fixed_operand (&ops[1], dest_mem);
3548 create_fixed_operand (&ops[2], src_mem);
8d74dc42 3549 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
1e1d5623 3550 return NULL_RTX;
727c62dd 3551
8786db1e 3552 if (endp && target != const0_rtx)
c5aba89c 3553 {
8786db1e 3554 target = ops[0].value;
3555 /* movstr is supposed to set end to the address of the NUL
3556 terminator. If the caller requested a mempcpy-like return value,
3557 adjust it. */
3558 if (endp == 1)
3559 {
29c05e22 3560 rtx tem = plus_constant (GET_MODE (target),
3561 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3562 emit_move_insn (target, force_operand (tem, NULL_RTX));
3563 }
c5aba89c 3564 }
727c62dd 3565 return target;
3566}
3567
5aef8938 3568/* Do some very basic size validation of a call to the strcat builtin
3569 given by EXP. Return NULL_RTX to have the built-in expand to a call
3570 to the library function. */
3571
3572static rtx
3573expand_builtin_strcat (tree exp, rtx)
3574{
3575 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3576 || !warn_stringop_overflow)
3577 return NULL_RTX;
3578
3579 tree dest = CALL_EXPR_ARG (exp, 0);
3580 tree src = CALL_EXPR_ARG (exp, 1);
3581
3582 /* There is no way here to determine the length of the string in
3583 the destination to which the SRC string is being appended so
3584     just diagnose cases when the source string is longer than
3585 the destination object. */
3586
8d6c6ef5 3587 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3588
3589 check_sizes (OPT_Wstringop_overflow_,
3590 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3591
3592 return NULL_RTX;
3593}
3594
48e1416a 3595/* Expand expression EXP, which is a call to the strcpy builtin. Return
3596   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
c2f47e15 3597 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3598 convenient). */
902de8ed 3599
53800dbe 3600static rtx
a65c4d64 3601expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3602{
5aef8938 3603 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3604 return NULL_RTX;
3605
3606 tree dest = CALL_EXPR_ARG (exp, 0);
3607 tree src = CALL_EXPR_ARG (exp, 1);
3608
3609 if (warn_stringop_overflow)
3610 {
8d6c6ef5 3611 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3612 check_sizes (OPT_Wstringop_overflow_,
3613 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3614 }
3615
3616 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3617}
3618
3619/* Helper function to do the actual work for expand_builtin_strcpy. The
3620 arguments to the builtin_strcpy call DEST and SRC are broken out
3621 so that this can also be called without constructing an actual CALL_EXPR.
3622 The other arguments and return value are the same as for
3623 expand_builtin_strcpy. */
3624
3625static rtx
a65c4d64 3626expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3627{
c2f47e15 3628 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3629}
3630
c2f47e15 3631/* Expand a call EXP to the stpcpy builtin.
3632   Return NULL_RTX if we failed; the caller should emit a normal call,
3b824fa6 3633 otherwise try to get the result in TARGET, if convenient (and in
3634 mode MODE if that's convenient). */
3635
3636static rtx
3754d046 3637expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3638{
c2f47e15 3639 tree dst, src;
389dd41b 3640 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3641
3642 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3643 return NULL_RTX;
3644
3645 dst = CALL_EXPR_ARG (exp, 0);
3646 src = CALL_EXPR_ARG (exp, 1);
3647
4d317237 3648 if (warn_stringop_overflow)
3649 {
8d6c6ef5 3650 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4d317237 3651 check_sizes (OPT_Wstringop_overflow_,
3652 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3653 }
3654
727c62dd 3655 /* If return value is ignored, transform stpcpy into strcpy. */
b9a16870 3656 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3657 {
b9a16870 3658 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3659 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3660 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3661 }
3b824fa6 3662 else
3663 {
c2f47e15 3664 tree len, lenp1;
727c62dd 3665 rtx ret;
647661c6 3666
9fe0e1b8 3667 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3668 compile-time, not an expression containing a string. This is
3669 because the latter will potentially produce pessimized code
3670 when used to produce the return value. */
681fab1e 3671 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3672 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3673
389dd41b 3674 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3675 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
d0fbba1a 3676 target, exp, /*endp=*/2);
727c62dd 3677
3678 if (ret)
3679 return ret;
3680
3681 if (TREE_CODE (len) == INTEGER_CST)
3682 {
8ec3c5c2 3683 rtx len_rtx = expand_normal (len);
727c62dd 3684
971ba038 3685 if (CONST_INT_P (len_rtx))
727c62dd 3686 {
a65c4d64 3687 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3688
3689 if (ret)
3690 {
3691 if (! target)
7ac87324 3692 {
3693 if (mode != VOIDmode)
3694 target = gen_reg_rtx (mode);
3695 else
3696 target = gen_reg_rtx (GET_MODE (ret));
3697 }
727c62dd 3698 if (GET_MODE (target) != GET_MODE (ret))
3699 ret = gen_lowpart (GET_MODE (target), ret);
3700
29c05e22 3701 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3702 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3703 gcc_assert (ret);
727c62dd 3704
3705 return target;
3706 }
3707 }
3708 }
3709
c2f47e15 3710 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3711 }
3712}
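/* Illustrative example: when the result is unused, as in

     (void) stpcpy (d, s);

   the code above rewrites the call into strcpy (d, s), since the two
   functions differ only in their return value.  */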
3713
4d317237 3714/* Check a call EXP to the stpncpy built-in for validity.
3715 Return NULL_RTX on both success and failure. */
3716
3717static rtx
3718expand_builtin_stpncpy (tree exp, rtx)
3719{
3720 if (!validate_arglist (exp,
3721 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3722 || !warn_stringop_overflow)
3723 return NULL_RTX;
3724
aca1a787 3725 /* The source and destination of the call. */
4d317237 3726 tree dest = CALL_EXPR_ARG (exp, 0);
3727 tree src = CALL_EXPR_ARG (exp, 1);
3728
aca1a787 3729 /* The exact number of bytes to write (not the maximum). */
4d317237 3730 tree len = CALL_EXPR_ARG (exp, 2);
4d317237 3731
aca1a787 3732 /* The size of the destination object. */
8d6c6ef5 3733 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4d317237 3734
4d317237 3735 check_sizes (OPT_Wstringop_overflow_,
aca1a787 3736 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
4d317237 3737
3738 return NULL_RTX;
3739}
3740
6840589f 3741/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3742 bytes from constant string DATA + OFFSET and return it as target
3743 constant. */
3744
09879952 3745rtx
aecda0d6 3746builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 3747 scalar_int_mode mode)
6840589f 3748{
3749 const char *str = (const char *) data;
3750
3751 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3752 return const0_rtx;
3753
3754 return c_readstr (str + offset, mode);
3755}
3756
5aef8938 3757/* Helper to check the sizes of sequences and the destination of calls
3758 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3759 success (no overflow or invalid sizes), false otherwise. */
3760
3761static bool
3762check_strncat_sizes (tree exp, tree objsize)
3763{
3764 tree dest = CALL_EXPR_ARG (exp, 0);
3765 tree src = CALL_EXPR_ARG (exp, 1);
3766 tree maxlen = CALL_EXPR_ARG (exp, 2);
3767
3768 /* Try to determine the range of lengths that the source expression
3769 refers to. */
3770 tree lenrange[2];
3771 get_range_strlen (src, lenrange);
3772
3773 /* Try to verify that the destination is big enough for the shortest
3774 string. */
3775
3776 if (!objsize && warn_stringop_overflow)
3777 {
3778 /* If it hasn't been provided by __strncat_chk, try to determine
3779 the size of the destination object into which the source is
3780 being copied. */
8d6c6ef5 3781 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3782 }
3783
3784 /* Add one for the terminating nul. */
3785 tree srclen = (lenrange[0]
3786 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3787 size_one_node)
3788 : NULL_TREE);
3789
3790 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3791 nul so the specified upper bound should never be equal to (or greater
3792 than) the size of the destination. */
3793 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3794 && tree_int_cst_equal (objsize, maxlen))
3795 {
4d317237 3796 location_t loc = tree_nonartificial_location (exp);
3797 loc = expansion_point_location_if_in_system_header (loc);
3798
3799 warning_at (loc, OPT_Wstringop_overflow_,
9098b938 3800 "%K%qD specified bound %E equals destination size",
8d6c6ef5 3801 exp, get_callee_fndecl (exp), maxlen);
5aef8938 3802
3803 return false;
3804 }
3805
3806 if (!srclen
3807 || (maxlen && tree_fits_uhwi_p (maxlen)
3808 && tree_fits_uhwi_p (srclen)
3809 && tree_int_cst_lt (maxlen, srclen)))
3810 srclen = maxlen;
3811
3812 /* The number of bytes to write is LEN but check_sizes will also
3813 check SRCLEN if LEN's value isn't known. */
3814 return check_sizes (OPT_Wstringop_overflow_,
3815 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3816}
3817
3818/* Similar to expand_builtin_strcat, do some very basic size validation
3819   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
3820 the built-in expand to a call to the library function. */
3821
3822static rtx
3823expand_builtin_strncat (tree exp, rtx)
3824{
3825 if (!validate_arglist (exp,
3826 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3827 || !warn_stringop_overflow)
3828 return NULL_RTX;
3829
3830 tree dest = CALL_EXPR_ARG (exp, 0);
3831 tree src = CALL_EXPR_ARG (exp, 1);
3832 /* The upper bound on the number of bytes to write. */
3833 tree maxlen = CALL_EXPR_ARG (exp, 2);
3834 /* The length of the source sequence. */
3835 tree slen = c_strlen (src, 1);
3836
3837 /* Try to determine the range of lengths that the source expression
3838 refers to. */
3839 tree lenrange[2];
3840 if (slen)
3841 lenrange[0] = lenrange[1] = slen;
3842 else
3843 get_range_strlen (src, lenrange);
3844
3845 /* Try to verify that the destination is big enough for the shortest
3846 string. First try to determine the size of the destination object
3847 into which the source is being copied. */
8d6c6ef5 3848 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3849
3850 /* Add one for the terminating nul. */
3851 tree srclen = (lenrange[0]
3852 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3853 size_one_node)
3854 : NULL_TREE);
3855
3856 /* Strncat copies at most MAXLEN bytes and always appends the terminating
3857 nul so the specified upper bound should never be equal to (or greater
3858 than) the size of the destination. */
3859 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3860 && tree_int_cst_equal (destsize, maxlen))
3861 {
4d317237 3862 location_t loc = tree_nonartificial_location (exp);
3863 loc = expansion_point_location_if_in_system_header (loc);
3864
3865 warning_at (loc, OPT_Wstringop_overflow_,
9098b938 3866 "%K%qD specified bound %E equals destination size",
8d6c6ef5 3867 exp, get_callee_fndecl (exp), maxlen);
5aef8938 3868
3869 return NULL_RTX;
3870 }
3871
3872 if (!srclen
3873 || (maxlen && tree_fits_uhwi_p (maxlen)
3874 && tree_fits_uhwi_p (srclen)
3875 && tree_int_cst_lt (maxlen, srclen)))
3876 srclen = maxlen;
3877
3878 /* The number of bytes to write is LEN but check_sizes will also
3879 check SRCLEN if LEN's value isn't known. */
3880 check_sizes (OPT_Wstringop_overflow_,
3881 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3882
3883 return NULL_RTX;
3884}
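/* Illustrative example of the diagnostic above:

     char d[8];
     strncat (d, s, sizeof d);

   Because strncat always appends a terminating nul after at most N
   bytes, a bound equal to the destination size can write one byte
   past the end, so the call is flagged.  */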
3885
48e1416a 3886/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 3887   NULL_RTX if we failed; the caller should emit a normal call.  */
ed09096d 3888
3889static rtx
a65c4d64 3890expand_builtin_strncpy (tree exp, rtx target)
ed09096d 3891{
389dd41b 3892 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3893
3894 if (validate_arglist (exp,
3895 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 3896 {
c2f47e15 3897 tree dest = CALL_EXPR_ARG (exp, 0);
3898 tree src = CALL_EXPR_ARG (exp, 1);
5aef8938 3899 /* The number of bytes to write (not the maximum). */
c2f47e15 3900 tree len = CALL_EXPR_ARG (exp, 2);
5aef8938 3901 /* The length of the source sequence. */
c2f47e15 3902 tree slen = c_strlen (src, 1);
6840589f 3903
5aef8938 3904 if (warn_stringop_overflow)
3905 {
8d6c6ef5 3906 tree destsize = compute_objsize (dest,
3907 warn_stringop_overflow - 1);
5aef8938 3908
3909 /* The number of bytes to write is LEN but check_sizes will also
3910 check SLEN if LEN's value isn't known. */
3911 check_sizes (OPT_Wstringop_overflow_,
8d6c6ef5 3912 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
5aef8938 3913 }
3914
8ff6a5cd 3915 /* We must be passed a constant len and src parameter. */
e913b5cd 3916 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 3917 return NULL_RTX;
ed09096d 3918
389dd41b 3919 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 3920
3921 /* We're required to pad with trailing zeros if the requested
a0c938f0 3922 len is greater than strlen(s2)+1. In that case try to
6840589f 3923	 use store_by_pieces; if that fails, punt.  */
ed09096d 3924 if (tree_int_cst_lt (slen, len))
6840589f 3925 {
957d0361 3926 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 3927 const char *p = c_getstr (src);
6840589f 3928 rtx dest_mem;
3929
e913b5cd 3930 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3931 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 3932 builtin_strncpy_read_str,
364c0c59 3933 CONST_CAST (char *, p),
3934 dest_align, false))
c2f47e15 3935 return NULL_RTX;
6840589f 3936
d8ae1baa 3937 dest_mem = get_memory_rtx (dest, len);
e913b5cd 3938 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 3939 builtin_strncpy_read_str,
364c0c59 3940 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 3941 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 3942 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 3943 return dest_mem;
6840589f 3944 }
ed09096d 3945 }
c2f47e15 3946 return NULL_RTX;
ed09096d 3947}
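/* Illustrative example of the padding case handled above:

     char d[5];
     strncpy (d, "ab", 5);

   The requested length 5 exceeds strlen ("ab") + 1 == 3, so the
   expansion stores "ab\0\0\0" via store_by_pieces instead of
   emitting a library call.  */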
3948
ecc318ff 3949/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3950 bytes from constant string DATA + OFFSET and return it as target
3951 constant. */
3952
f656b751 3953rtx
aecda0d6 3954builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 3955 scalar_int_mode mode)
ecc318ff 3956{
3957 const char *c = (const char *) data;
364c0c59 3958 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3959
3960 memset (p, *c, GET_MODE_SIZE (mode));
3961
3962 return c_readstr (p, mode);
3963}
3964
a7ec6974 3965/* Callback routine for store_by_pieces. Return the RTL of a register
3966 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3967 char value given in the RTL register data. For example, if mode is
3968 4 bytes wide, return the RTL for 0x01010101*data. */
3969
3970static rtx
aecda0d6 3971builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 3972 scalar_int_mode mode)
a7ec6974 3973{
3974 rtx target, coeff;
3975 size_t size;
3976 char *p;
3977
3978 size = GET_MODE_SIZE (mode);
f0ce3b1f 3979 if (size == 1)
3980 return (rtx) data;
a7ec6974 3981
364c0c59 3982 p = XALLOCAVEC (char, size);
a7ec6974 3983 memset (p, 1, size);
3984 coeff = c_readstr (p, mode);
3985
f0ce3b1f 3986 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3987 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3988 return force_reg (mode, target);
3989}
3990
48e1416a 3991/* Expand expression EXP, which is a call to the memset builtin. Return
3992   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
c2f47e15 3993 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3994 convenient). */
902de8ed 3995
53800dbe 3996static rtx
3754d046 3997expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3998{
c2f47e15 3999 if (!validate_arglist (exp,
4000 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4001 return NULL_RTX;
5aef8938 4002
4003 tree dest = CALL_EXPR_ARG (exp, 0);
4004 tree val = CALL_EXPR_ARG (exp, 1);
4005 tree len = CALL_EXPR_ARG (exp, 2);
4006
8d6c6ef5 4007 check_memop_sizes (exp, dest, NULL_TREE, len);
5aef8938 4008
4009 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
c2f47e15 4010}
53800dbe 4011
f21337ef 4012/* Expand expression EXP, which is an instrumented call to the memset builtin.
4013 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
4014 try to get the result in TARGET, if convenient (and in mode MODE if that's
4015 convenient). */
4016
4017static rtx
4018expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4019{
4020 if (!validate_arglist (exp,
4021 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4022 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4023 return NULL_RTX;
4024 else
4025 {
4026 tree dest = CALL_EXPR_ARG (exp, 0);
4027 tree val = CALL_EXPR_ARG (exp, 2);
4028 tree len = CALL_EXPR_ARG (exp, 3);
4029 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4030
4031 /* Return src bounds with the result. */
4032 if (res)
4033 {
17d388d8 4034 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 4035 expand_normal (CALL_EXPR_ARG (exp, 1)));
4036 res = chkp_join_splitted_slot (res, bnd);
4037 }
4038 return res;
4039 }
4040}
4041
c2f47e15 4042/* Helper function to do the actual work for expand_builtin_memset. The
4043 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4044 so that this can also be called without constructing an actual CALL_EXPR.
4045 The other arguments and return value are the same as for
4046 expand_builtin_memset. */
6b961939 4047
c2f47e15 4048static rtx
4049expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 4050 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 4051{
4052 tree fndecl, fn;
4053 enum built_in_function fcode;
3754d046 4054 machine_mode val_mode;
c2f47e15 4055 char c;
4056 unsigned int dest_align;
4057 rtx dest_mem, dest_addr, len_rtx;
4058 HOST_WIDE_INT expected_size = -1;
4059 unsigned int expected_align = 0;
36d63243 4060 unsigned HOST_WIDE_INT min_size;
4061 unsigned HOST_WIDE_INT max_size;
9db0f34d 4062 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 4063
957d0361 4064 dest_align = get_pointer_alignment (dest);
162719b3 4065
c2f47e15 4066 /* If DEST is not a pointer type, don't do this operation in-line. */
4067 if (dest_align == 0)
4068 return NULL_RTX;
6f428e8b 4069
8cee8dc0 4070 if (currently_expanding_gimple_stmt)
4071 stringop_block_profile (currently_expanding_gimple_stmt,
4072 &expected_align, &expected_size);
75a70cf9 4073
c2f47e15 4074 if (expected_align < dest_align)
4075 expected_align = dest_align;
6b961939 4076
c2f47e15 4077 /* If the LEN parameter is zero, return DEST. */
4078 if (integer_zerop (len))
4079 {
4080 /* Evaluate and ignore VAL in case it has side-effects. */
4081 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4082 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4083 }
7a3e5564 4084
c2f47e15 4085 /* Stabilize the arguments in case we fail. */
4086 dest = builtin_save_expr (dest);
4087 val = builtin_save_expr (val);
4088 len = builtin_save_expr (len);
a7ec6974 4089
c2f47e15 4090 len_rtx = expand_normal (len);
9db0f34d 4091 determine_block_size (len, len_rtx, &min_size, &max_size,
4092 &probable_max_size);
c2f47e15 4093 dest_mem = get_memory_rtx (dest, len);
03a5dda9 4094 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 4095
c2f47e15 4096 if (TREE_CODE (val) != INTEGER_CST)
4097 {
4098 rtx val_rtx;
a7ec6974 4099
c2f47e15 4100 val_rtx = expand_normal (val);
03a5dda9 4101 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 4102
c2f47e15 4103 /* Assume that we can memset by pieces if we can store
4104	 the coefficients by pieces (in the required modes).
4105	 We can't pass builtin_memset_gen_str as that emits RTL.  */
4106 c = 1;
e913b5cd 4107 if (tree_fits_uhwi_p (len)
4108 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 4109 builtin_memset_read_str, &c, dest_align,
4110 true))
c2f47e15 4111 {
03a5dda9 4112 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 4113 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 4114 builtin_memset_gen_str, val_rtx, dest_align,
4115 true, 0);
c2f47e15 4116 }
4117 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4118 dest_align, expected_align,
9db0f34d 4119 expected_size, min_size, max_size,
4120 probable_max_size))
6b961939 4121 goto do_libcall;
48e1416a 4122
c2f47e15 4123 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4124 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4125 return dest_mem;
4126 }
53800dbe 4127
c2f47e15 4128 if (target_char_cast (val, &c))
4129 goto do_libcall;
ecc318ff 4130
c2f47e15 4131 if (c)
4132 {
e913b5cd 4133 if (tree_fits_uhwi_p (len)
4134 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 4135 builtin_memset_read_str, &c, dest_align,
4136 true))
e913b5cd 4137 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 4138 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 4139 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4140 gen_int_mode (c, val_mode),
c2f47e15 4141 dest_align, expected_align,
9db0f34d 4142 expected_size, min_size, max_size,
4143 probable_max_size))
c2f47e15 4144 goto do_libcall;
48e1416a 4145
c2f47e15 4146 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4147 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4148 return dest_mem;
4149 }
ecc318ff 4150
c2f47e15 4151 set_mem_align (dest_mem, dest_align);
4152 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4153 CALL_EXPR_TAILCALL (orig_exp)
4154 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 4155 expected_align, expected_size,
9db0f34d 4156 min_size, max_size,
4157 probable_max_size);
53800dbe 4158
c2f47e15 4159 if (dest_addr == 0)
4160 {
4161 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4162 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4163 }
53800dbe 4164
c2f47e15 4165 return dest_addr;
6b961939 4166
c2f47e15 4167 do_libcall:
4168 fndecl = get_callee_fndecl (orig_exp);
4169 fcode = DECL_FUNCTION_CODE (fndecl);
f21337ef 4170 if (fcode == BUILT_IN_MEMSET
4171 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
0568e9c1 4172 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4173 dest, val, len);
c2f47e15 4174 else if (fcode == BUILT_IN_BZERO)
0568e9c1 4175 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4176 dest, len);
c2f47e15 4177 else
4178 gcc_unreachable ();
a65c4d64 4179 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4180 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 4181 return expand_call (fn, target, target == const0_rtx);
53800dbe 4182}
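/* Illustrative example of the non-constant VAL path above: for

     memset (p, c, 32);

   with C unknown at compile time, the code probes (using a dummy
   constant byte) whether a 32-byte store_by_pieces is possible, and
   if so broadcasts C across a word with builtin_memset_gen_str and
   stores copies of that word directly.  */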
4183
48e1416a 4184/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 4185   NULL_RTX if we failed; the caller should emit a normal call.  */
27d0c333 4186
ffc83088 4187static rtx
0b25db21 4188expand_builtin_bzero (tree exp)
ffc83088 4189{
c2f47e15 4190 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 4191 return NULL_RTX;
ffc83088 4192
5aef8938 4193 tree dest = CALL_EXPR_ARG (exp, 0);
4194 tree size = CALL_EXPR_ARG (exp, 1);
4195
8d6c6ef5 4196 check_memop_sizes (exp, dest, NULL_TREE, size);
bf8e3599 4197
7369e7ba 4198 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 4199 memset(ptr x, int 0, size_t y). This is done this way
4200 so that if it isn't expanded inline, we fallback to
4201 calling bzero instead of memset. */
bf8e3599 4202
5aef8938 4203 location_t loc = EXPR_LOCATION (exp);
4204
c2f47e15 4205 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 4206 fold_convert_loc (loc,
4207 size_type_node, size),
c2f47e15 4208 const0_rtx, VOIDmode, exp);
ffc83088 4209}
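/* Illustrative example: the argument rewriting above turns

     bzero (p, n);

   into the equivalent of memset (p, 0, (size_t) n), while still
   falling back to a call to bzero itself if the memset expansion
   punts.  */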
4210
d6f01a40 4211/* Try to expand cmpstr operation ICODE with the given operands.
4212 Return the result rtx on success, otherwise return null. */
4213
4214static rtx
4215expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4216 HOST_WIDE_INT align)
4217{
4218 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4219
4220 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4221 target = NULL_RTX;
4222
4223 struct expand_operand ops[4];
4224 create_output_operand (&ops[0], target, insn_mode);
4225 create_fixed_operand (&ops[1], arg1_rtx);
4226 create_fixed_operand (&ops[2], arg2_rtx);
4227 create_integer_operand (&ops[3], align);
4228 if (maybe_expand_insn (icode, 4, ops))
4229 return ops[0].value;
4230 return NULL_RTX;
4231}
4232
7a3f89b5 4233/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 4234 Return NULL_RTX if we failed and the caller should emit a normal call,
3e346f54 4235 otherwise try to get the result in TARGET, if convenient.
4236 RESULT_EQ is true if we can relax the returned value to be either zero
4237 or nonzero, without caring about the sign. */
27d0c333 4238
53800dbe 4239static rtx
3e346f54 4240expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
53800dbe 4241{
c2f47e15 4242 if (!validate_arglist (exp,
4243 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4244 return NULL_RTX;
6f428e8b 4245
ea368aac 4246 tree arg1 = CALL_EXPR_ARG (exp, 0);
4247 tree arg2 = CALL_EXPR_ARG (exp, 1);
4248 tree len = CALL_EXPR_ARG (exp, 2);
8d6c6ef5 4249
4250 /* Diagnose calls where the specified length exceeds the size of either
4251 object. */
4252 if (warn_stringop_overflow)
4253 {
4254 tree size = compute_objsize (arg1, 0);
4255 if (check_sizes (OPT_Wstringop_overflow_,
4256 exp, len, /*maxlen=*/NULL_TREE,
4257 size, /*objsize=*/NULL_TREE))
4258 {
4259 size = compute_objsize (arg2, 0);
4260 check_sizes (OPT_Wstringop_overflow_,
4261 exp, len, /*maxlen=*/NULL_TREE,
4262 size, /*objsize=*/NULL_TREE);
4263 }
4264 }
4265
3e346f54 4266 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4267 location_t loc = EXPR_LOCATION (exp);
b428c0a5 4268
ea368aac 4269 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4270 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
53800dbe 4271
ea368aac 4272 /* If we don't have POINTER_TYPE, call the function. */
4273 if (arg1_align == 0 || arg2_align == 0)
4274 return NULL_RTX;
53800dbe 4275
ea368aac 4276 rtx arg1_rtx = get_memory_rtx (arg1, len);
4277 rtx arg2_rtx = get_memory_rtx (arg2, len);
3e346f54 4278 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
53800dbe 4279
ea368aac 4280 /* Set MEM_SIZE as appropriate. */
3e346f54 4281 if (CONST_INT_P (len_rtx))
ea368aac 4282 {
3e346f54 4283 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4284 set_mem_size (arg2_rtx, INTVAL (len_rtx));
ea368aac 4285 }
83f88f8e 4286
3e346f54 4287 by_pieces_constfn constfn = NULL;
4288
719f3058 4289 const char *src_str = c_getstr (arg2);
4290 if (result_eq && src_str == NULL)
4291 {
4292 src_str = c_getstr (arg1);
4293 if (src_str != NULL)
092db747 4294 std::swap (arg1_rtx, arg2_rtx);
719f3058 4295 }
3e346f54 4296
4297 /* If SRC is a string constant and block move would be done
4298 by pieces, we can avoid loading the string from memory
4299     and need only store the computed constants.  */
4300 if (src_str
4301 && CONST_INT_P (len_rtx)
4302 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4303 constfn = builtin_memcpy_read_str;
4304
4305 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4306 TREE_TYPE (len), target,
4307 result_eq, constfn,
4308 CONST_CAST (char *, src_str));
4309
ea368aac 4310 if (result)
4311 {
4312 /* Return the value in the proper mode for this function. */
4313 if (GET_MODE (result) == mode)
4314 return result;
83f88f8e 4315
ea368aac 4316 if (target != 0)
4317 {
4318 convert_move (target, result, 0);
4319 return target;
4320 }
0cd832f0 4321
53800dbe 4322 return convert_to_mode (mode, result, 0);
ea368aac 4323 }
53800dbe 4324
61ffc71a 4325 return NULL_RTX;
6f428e8b 4326}
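/* Illustrative example of RESULT_EQ: in a use such as

     if (memcmp (a, b, n) == 0) ...

   only equality matters, so the expansion may return any nonzero
   value on a mismatch instead of a sign-correct difference, which
   permits cheaper block-compare sequences.  */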
4327
c2f47e15 4328/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
53800dbe 4329   if we failed; the caller should emit a normal call.  Otherwise try to get
4330 the result in TARGET, if convenient. */
902de8ed 4331
53800dbe 4332static rtx
a65c4d64 4333expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 4334{
c2f47e15 4335 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4336 return NULL_RTX;
bf8e3599 4337
d6f01a40 4338 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4339 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4340 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 4341 {
4342 rtx arg1_rtx, arg2_rtx;
6ac5504b 4343 tree fndecl, fn;
c2f47e15 4344 tree arg1 = CALL_EXPR_ARG (exp, 0);
4345 tree arg2 = CALL_EXPR_ARG (exp, 1);
d6f01a40 4346 rtx result = NULL_RTX;
a0c938f0 4347
957d0361 4348 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4349 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6ac5504b 4350
4351 /* If we don't have POINTER_TYPE, call the function. */
4352 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4353 return NULL_RTX;
7a3f89b5 4354
6ac5504b 4355 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4356 arg1 = builtin_save_expr (arg1);
4357 arg2 = builtin_save_expr (arg2);
7a3f89b5 4358
d8ae1baa 4359 arg1_rtx = get_memory_rtx (arg1, NULL);
4360 arg2_rtx = get_memory_rtx (arg2, NULL);
53800dbe 4361
6ac5504b 4362 /* Try to call cmpstrsi. */
d6f01a40 4363 if (cmpstr_icode != CODE_FOR_nothing)
4364 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4365 MIN (arg1_align, arg2_align));
4366
6ac5504b 4367 /* Try to determine at least one length and call cmpstrnsi. */
d6f01a40 4368 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 4369 {
4370 tree len;
4371 rtx arg3_rtx;
4372
6ac5504b 4373 tree len1 = c_strlen (arg1, 1);
4374 tree len2 = c_strlen (arg2, 1);
4375
4376 if (len1)
4377 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4378 if (len2)
4379 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4380
4381 /* If we don't have a constant length for the first, use the length
4382 of the second, if we know it. We don't require a constant for
4383 this case; some cost analysis could be done if both are available
4384 but neither is constant. For now, assume they're equally cheap,
4385 unless one has side effects. If both strings have constant lengths,
4386 use the smaller. */
4387
4388 if (!len1)
4389 len = len2;
4390 else if (!len2)
4391 len = len1;
4392 else if (TREE_SIDE_EFFECTS (len1))
4393 len = len2;
4394 else if (TREE_SIDE_EFFECTS (len2))
4395 len = len1;
4396 else if (TREE_CODE (len1) != INTEGER_CST)
4397 len = len2;
4398 else if (TREE_CODE (len2) != INTEGER_CST)
4399 len = len1;
4400 else if (tree_int_cst_lt (len1, len2))
4401 len = len1;
4402 else
4403 len = len2;
4404
4405 /* If both arguments have side effects, we cannot optimize. */
d6f01a40 4406 if (len && !TREE_SIDE_EFFECTS (len))
4407 {
4408 arg3_rtx = expand_normal (len);
ea368aac 4409 result = expand_cmpstrn_or_cmpmem
4410 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4411 arg3_rtx, MIN (arg1_align, arg2_align));
d6f01a40 4412 }
6ac5504b 4413 }
3f8aefe2 4414
d6f01a40 4415 if (result)
6ac5504b 4416 {
6ac5504b 4417 /* Return the value in the proper mode for this function. */
d6f01a40 4418 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6ac5504b 4419 if (GET_MODE (result) == mode)
4420 return result;
4421 if (target == 0)
4422 return convert_to_mode (mode, result, 0);
4423 convert_move (target, result, 0);
4424 return target;
4425 }
902de8ed 4426
6ac5504b 4427 /* Expand the library call ourselves using a stabilized argument
4428 list to avoid re-evaluating the function's arguments twice. */
6ac5504b 4429 fndecl = get_callee_fndecl (exp);
0568e9c1 4430 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
a65c4d64 4431 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4432 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6ac5504b 4433 return expand_call (fn, target, target == const0_rtx);
4434 }
c2f47e15 4435 return NULL_RTX;
83d79705 4436}
53800dbe 4437
48e1416a 4438/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 4439   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
ed09096d 4440 the result in TARGET, if convenient. */
27d0c333 4441
ed09096d 4442static rtx
a65c4d64 4443expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4444 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4445{
a65c4d64 4446 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 4447
c2f47e15 4448 if (!validate_arglist (exp,
4449 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4450 return NULL_RTX;
ed09096d 4451
6e34e617 4452 /* If c_strlen can determine an expression for one of the string
6ac5504b 4453 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4454 using length MIN(strlen(string)+1, arg3). */
d6f01a40 4455 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4456 if (cmpstrn_icode != CODE_FOR_nothing)
7a3f89b5 4457 {
175cdef4 4458 tree len, len1, len2, len3;
7a3f89b5 4459 rtx arg1_rtx, arg2_rtx, arg3_rtx;
d6f01a40 4460 rtx result;
0b25db21 4461 tree fndecl, fn;
c2f47e15 4462 tree arg1 = CALL_EXPR_ARG (exp, 0);
4463 tree arg2 = CALL_EXPR_ARG (exp, 1);
4464 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 4465
957d0361 4466 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4467 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
bf8e3599 4468
681fab1e 4469 len1 = c_strlen (arg1, 1);
4470 len2 = c_strlen (arg2, 1);
7a3f89b5 4471
4472 if (len1)
389dd41b 4473 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 4474 if (len2)
389dd41b 4475 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 4476
175cdef4 4477 len3 = fold_convert_loc (loc, sizetype, arg3);
4478
7a3f89b5 4479 /* If we don't have a constant length for the first, use the length
175cdef4 4480 of the second, if we know it. If neither string is constant length,
4481 use the given length argument. We don't require a constant for
7a3f89b5 4482 this case; some cost analysis could be done if both are available
4483 but neither is constant. For now, assume they're equally cheap,
4484 unless one has side effects. If both strings have constant lengths,
4485 use the smaller. */
4486
175cdef4 4487 if (!len1 && !len2)
4488 len = len3;
4489 else if (!len1)
7a3f89b5 4490 len = len2;
4491 else if (!len2)
4492 len = len1;
4493 else if (TREE_SIDE_EFFECTS (len1))
4494 len = len2;
4495 else if (TREE_SIDE_EFFECTS (len2))
4496 len = len1;
4497 else if (TREE_CODE (len1) != INTEGER_CST)
4498 len = len2;
4499 else if (TREE_CODE (len2) != INTEGER_CST)
4500 len = len1;
4501 else if (tree_int_cst_lt (len1, len2))
4502 len = len1;
4503 else
4504 len = len2;
6e34e617 4505
175cdef4 4506 /* If we are not using the given length, we must incorporate it here.
4507 The actual new length parameter will be MIN(len,arg3) in this case. */
4508 if (len != len3)
4509 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
a65c4d64 4510 arg1_rtx = get_memory_rtx (arg1, len);
4511 arg2_rtx = get_memory_rtx (arg2, len);
4512 arg3_rtx = expand_normal (len);
ea368aac 4513 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4514 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4515 MIN (arg1_align, arg2_align));
d6f01a40 4516 if (result)
a65c4d64 4517 {
a65c4d64 4518 /* Return the value in the proper mode for this function. */
4519 mode = TYPE_MODE (TREE_TYPE (exp));
4520 if (GET_MODE (result) == mode)
4521 return result;
4522 if (target == 0)
4523 return convert_to_mode (mode, result, 0);
4524 convert_move (target, result, 0);
4525 return target;
4526 }
27d0c333 4527
a65c4d64 4528 /* Expand the library call ourselves using a stabilized argument
4529 list to avoid re-evaluating the function's arguments twice. */
4530 fndecl = get_callee_fndecl (exp);
0568e9c1 4531 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4532 arg1, arg2, len);
a65c4d64 4533 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4534 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4535 return expand_call (fn, target, target == const0_rtx);
4536 }
c2f47e15 4537 return NULL_RTX;
49f0327b 4538}
4539
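/* Example for the expansion above: given a call such as
     int r = __builtin_strncmp (s, "abc", n);
   c_strlen can determine strlen ("abc") == 3, so cmpstrnsi is emitted
   with length MIN (3 + 1, n); including the terminating NUL lets the
   comparison stop at the end of the literal without reading past it.  */
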
a66c9326 4540/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4541 if that's convenient. */
902de8ed 4542
a66c9326 4543rtx
aecda0d6 4544expand_builtin_saveregs (void)
53800dbe 4545{
1e0c0b35 4546 rtx val;
4547 rtx_insn *seq;
53800dbe 4548
4549 /* Don't do __builtin_saveregs more than once in a function.
4550 Save the result of the first call and reuse it. */
4551 if (saveregs_value != 0)
4552 return saveregs_value;
53800dbe 4553
a66c9326 4554 /* When this function is called, it means that registers must be
4555 saved on entry to this function. So we migrate the call to the
4556 first insn of this function. */
4557
4558 start_sequence ();
53800dbe 4559
a66c9326 4560 /* Do whatever the machine needs done in this case. */
45550790 4561 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4562
a66c9326 4563 seq = get_insns ();
4564 end_sequence ();
53800dbe 4565
a66c9326 4566 saveregs_value = val;
53800dbe 4567
31d3e01c 4568 /* Put the insns after the NOTE that starts the function. If this
4569 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4570 the code is placed at the start of the function. */
4571 push_topmost_sequence ();
0ec80471 4572 emit_insn_after (seq, entry_of_function ());
a66c9326 4573 pop_topmost_sequence ();
4574
4575 return val;
53800dbe 4576}
4577
79012a9d 4578/* Expand a call to __builtin_next_arg. */
27d0c333 4579
53800dbe 4580static rtx
79012a9d 4581expand_builtin_next_arg (void)
53800dbe 4582{
79012a9d 4583 /* Checking arguments is already done in fold_builtin_next_arg
4584 that must be called before this function. */
940ddc5c 4585 return expand_binop (ptr_mode, add_optab,
abe32cce 4586 crtl->args.internal_arg_pointer,
4587 crtl->args.arg_offset_rtx,
53800dbe 4588 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4589}
4590
a66c9326 4591/* Make it easier for the backends by protecting the valist argument
4592 from multiple evaluations. */
4593
4594static tree
389dd41b 4595stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4596{
5f57a8b1 4597 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4598
182cf5a9 4599 /* The current way of determining the type of valist is completely
4600 bogus. We should have the information on the va builtin instead. */
4601 if (!vatype)
4602 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4603
4604 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4605 {
2d47cc32 4606 if (TREE_SIDE_EFFECTS (valist))
4607 valist = save_expr (valist);
11a61dea 4608
2d47cc32 4609 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4610 vatype, but it's possible we've actually been given an array
4611 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4612 So fix it. */
4613 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4614 {
5f57a8b1 4615 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4616 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4617 }
a66c9326 4618 }
11a61dea 4619 else
a66c9326 4620 {
182cf5a9 4621 tree pt = build_pointer_type (vatype);
11a61dea 4622
2d47cc32 4623 if (! needs_lvalue)
4624 {
11a61dea 4625 if (! TREE_SIDE_EFFECTS (valist))
4626 return valist;
bf8e3599 4627
389dd41b 4628 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4629 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4630 }
2d47cc32 4631
11a61dea 4632 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4633 valist = save_expr (valist);
182cf5a9 4634 valist = fold_build2_loc (loc, MEM_REF,
4635 vatype, valist, build_int_cst (pt, 0));
a66c9326 4636 }
4637
4638 return valist;
4639}
4640
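/* Example for stabilize_va_list_loc: with a hypothetical array
     va_list aps[2];
   a use such as va_copy (aps[i++], src) has a valist operand with side
   effects; the SAVE_EXPR wrapping above guarantees that i++ is
   evaluated only once even though the expansion refers to the operand
   more than once.  */
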
2e15d750 4641/* The "standard" definition of va_list is void*. */
4642
4643tree
4644std_build_builtin_va_list (void)
4645{
4646 return ptr_type_node;
4647}
4648
5f57a8b1 4649/* The "standard" abi va_list is va_list_type_node. */
4650
4651tree
4652std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4653{
4654 return va_list_type_node;
4655}
4656
4657/* The "standard" type of va_list is va_list_type_node. */
4658
4659tree
4660std_canonical_va_list_type (tree type)
4661{
4662 tree wtype, htype;
4663
5f57a8b1 4664 wtype = va_list_type_node;
4665 htype = type;
b6da2e41 4666
4667 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4668 {
4669 /* If va_list is an array type, the argument may have decayed
4670 to a pointer type, e.g. by being passed to another function.
4671 In that case, unwrap both types so that we can compare the
4672 underlying records. */
4673 if (TREE_CODE (htype) == ARRAY_TYPE
4674 || POINTER_TYPE_P (htype))
4675 {
4676 wtype = TREE_TYPE (wtype);
4677 htype = TREE_TYPE (htype);
4678 }
4679 }
4680 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4681 return va_list_type_node;
4682
4683 return NULL_TREE;
4684}
4685
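/* Example for std_canonical_va_list_type: on targets where va_list is
   an array type, e.g.
     typedef struct __va_list_tag va_list[1];
   (as on x86_64), a va_list argument passed to another function decays
   to struct __va_list_tag *; unwrapping both types above lets either
   form match the canonical record.  */
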
a66c9326 4686/* The "standard" implementation of va_start: just assign `nextarg' to
4687 the variable. */
27d0c333 4688
a66c9326 4689void
aecda0d6 4690std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4691{
f03c17bc 4692 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4693 convert_move (va_r, nextarg, 0);
058a1b7a 4694
4695 /* We do not have any valid bounds for the pointer, so
4696 just store zero bounds for it. */
4697 if (chkp_function_instrumented_p (current_function_decl))
4698 chkp_expand_bounds_reset_for_mem (valist,
4699 make_tree (TREE_TYPE (valist),
4700 nextarg));
a66c9326 4701}
4702
c2f47e15 4703/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4704
a66c9326 4705static rtx
c2f47e15 4706expand_builtin_va_start (tree exp)
a66c9326 4707{
4708 rtx nextarg;
c2f47e15 4709 tree valist;
389dd41b 4710 location_t loc = EXPR_LOCATION (exp);
a66c9326 4711
c2f47e15 4712 if (call_expr_nargs (exp) < 2)
cb166087 4713 {
389dd41b 4714 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4715 return const0_rtx;
4716 }
a66c9326 4717
c2f47e15 4718 if (fold_builtin_next_arg (exp, true))
79012a9d 4719 return const0_rtx;
7c2f0500 4720
79012a9d 4721 nextarg = expand_builtin_next_arg ();
389dd41b 4722 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4723
8a58ed0a 4724 if (targetm.expand_builtin_va_start)
4725 targetm.expand_builtin_va_start (valist, nextarg);
4726 else
4727 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4728
4729 return const0_rtx;
4730}
4731
c2f47e15 4732/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4733
a66c9326 4734static rtx
c2f47e15 4735expand_builtin_va_end (tree exp)
a66c9326 4736{
c2f47e15 4737 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4738
8a15c04a 4739 /* Evaluate for side effects, if needed. I hate macros that don't
4740 do that. */
4741 if (TREE_SIDE_EFFECTS (valist))
4742 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4743
4744 return const0_rtx;
4745}
4746
c2f47e15 4747/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4748 builtin rather than just as an assignment in stdarg.h because of the
4749 nastiness of array-type va_list types. */
f7c44134 4750
a66c9326 4751static rtx
c2f47e15 4752expand_builtin_va_copy (tree exp)
a66c9326 4753{
4754 tree dst, src, t;
389dd41b 4755 location_t loc = EXPR_LOCATION (exp);
a66c9326 4756
c2f47e15 4757 dst = CALL_EXPR_ARG (exp, 0);
4758 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4759
389dd41b 4760 dst = stabilize_va_list_loc (loc, dst, 1);
4761 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4762
5f57a8b1 4763 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4764
4765 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4766 {
5f57a8b1 4767 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4768 TREE_SIDE_EFFECTS (t) = 1;
4769 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4770 }
4771 else
4772 {
11a61dea 4773 rtx dstb, srcb, size;
4774
4775 /* Evaluate to pointers. */
4776 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4777 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4778 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4779 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4780
85d654dd 4781 dstb = convert_memory_address (Pmode, dstb);
4782 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4783
11a61dea 4784 /* "Dereference" to BLKmode memories. */
4785 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4786 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4787 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4788 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4789 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4790 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4791
4792 /* Copy. */
0378dbdc 4793 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4794 }
4795
4796 return const0_rtx;
4797}
4798
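/* Example for expand_builtin_va_copy: with an array-type va_list the
   copy above is a block move, roughly equivalent to
     __builtin_memcpy (dst, src, sizeof (va_list));
   on the two decayed pointers, rather than the simple assignment used
   when va_list is a plain pointer type.  */
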
53800dbe 4799/* Expand a call to one of the builtin functions __builtin_frame_address or
4800 __builtin_return_address. */
27d0c333 4801
53800dbe 4802static rtx
c2f47e15 4803expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4804{
53800dbe 4805 /* The argument must be a nonnegative integer constant.
4806 It counts the number of frames to scan up the stack.
5b252e95 4807 The value is either the frame pointer value or the return
4808 address saved in that frame. */
c2f47e15 4809 if (call_expr_nargs (exp) == 0)
53800dbe 4810 /* Warning about missing arg was already issued. */
4811 return const0_rtx;
e913b5cd 4812 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4813 {
5b252e95 4814 error ("invalid argument to %qD", fndecl);
53800dbe 4815 return const0_rtx;
4816 }
4817 else
4818 {
5b252e95 4819 /* Number of frames to scan up the stack. */
4820 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4821
4822 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 4823
4824 /* Some ports cannot access arbitrary stack frames. */
4825 if (tem == NULL)
4826 {
5b252e95 4827 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 4828 return const0_rtx;
4829 }
4830
5b252e95 4831 if (count)
4832 {
4833 /* Warn since no effort is made to ensure that any frame
4834 beyond the current one exists or can be safely reached. */
4835 warning (OPT_Wframe_address, "calling %qD with "
4836 "a nonzero argument is unsafe", fndecl);
4837 }
4838
53800dbe 4839 /* For __builtin_frame_address, return what we've got. */
4840 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4841 return tem;
4842
8ad4c111 4843 if (!REG_P (tem)
53800dbe 4844 && ! CONSTANT_P (tem))
99182918 4845 tem = copy_addr_to_reg (tem);
53800dbe 4846 return tem;
4847 }
4848}
4849
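/* Example for expand_builtin_frame_address:
     void *fp = __builtin_frame_address (0);
     void *ra = __builtin_return_address (0);
   yield the current frame pointer and the current function's return
   address; any nonzero count triggers the -Wframe-address warning
   below, since the existence of outer frames cannot be verified.  */
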
990495a7 4850/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
2b29cc6a 4851 failed and the caller should emit a normal call. */
15c6cf6b 4852
53800dbe 4853static rtx
2b29cc6a 4854expand_builtin_alloca (tree exp)
53800dbe 4855{
4856 rtx op0;
15c6cf6b 4857 rtx result;
581bf1c2 4858 unsigned int align;
370e45b9 4859 tree fndecl = get_callee_fndecl (exp);
4860 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
581bf1c2 4861 == BUILT_IN_ALLOCA_WITH_ALIGN);
2b29cc6a 4862 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
370e45b9 4863 bool valid_arglist
581bf1c2 4864 = (alloca_with_align
4865 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4866 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4867
4868 if (!valid_arglist)
c2f47e15 4869 return NULL_RTX;
53800dbe 4870
370e45b9 4871 if ((alloca_with_align && !warn_vla_limit)
4872 || (!alloca_with_align && !warn_alloca_limit))
4873 {
4874 /* -Walloca-larger-than and -Wvla-larger-than settings override
4875 the more general -Walloc-size-larger-than so unless either of
4876 the former options is specified check the alloca arguments for
4877 overflow. */
4878 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4879 int idx[] = { 0, -1 };
4880 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4881 }
4882
53800dbe 4883 /* Compute the argument. */
c2f47e15 4884 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4885
581bf1c2 4886 /* Compute the alignment. */
4887 align = (alloca_with_align
f9ae6f95 4888 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4889 : BIGGEST_ALIGNMENT);
4890
2b29cc6a 4891 /* Allocate the desired space. If the allocation stems from the declaration
4892 of a variable-sized object, it cannot accumulate. */
4893 result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
85d654dd 4894 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4895
4896 return result;
53800dbe 4897}
4898
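/* Example for expand_builtin_alloca: a call such as
     p = __builtin_alloca_with_align (n, 256);
   allocates N bytes aligned to 256 bits (32 bytes); the alignment
   argument is in bits and must be constant, which is why it can be
   read with TREE_INT_CST_LOW above.  Plain __builtin_alloca uses
   BIGGEST_ALIGNMENT instead.  */
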
d08919a7 4899/* Emit a call to __asan_allocas_unpoison for EXP.  Replace the second
 4900 argument of the call with virtual_stack_dynamic_rtx, because in the asan
 4901 pass we emit a dummy value as that argument, relying on this function to
 4902 perform the replacement.  See the motivation in the comment on
 4903 handle_builtin_stack_restore.  */
4904
4905static rtx
4906expand_asan_emit_allocas_unpoison (tree exp)
4907{
4908 tree arg0 = CALL_EXPR_ARG (exp, 0);
cd2ee6ee 4909 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4910 rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
d08919a7 4911 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4912 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2, top,
cd2ee6ee 4913 ptr_mode, bot, ptr_mode);
d08919a7 4914 return ret;
4915}
4916
74bdbe96 4917/* Expand a call to bswap builtin in EXP.
4918 Return NULL_RTX if a normal call should be emitted rather than expanding the
4919 function in-line. If convenient, the result should be placed in TARGET.
4920 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4921
4922static rtx
3754d046 4923expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4924 rtx subtarget)
42791117 4925{
42791117 4926 tree arg;
4927 rtx op0;
4928
c2f47e15 4929 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4930 return NULL_RTX;
42791117 4931
c2f47e15 4932 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4933 op0 = expand_expr (arg,
4934 subtarget && GET_MODE (subtarget) == target_mode
4935 ? subtarget : NULL_RTX,
4936 target_mode, EXPAND_NORMAL);
4937 if (GET_MODE (op0) != target_mode)
4938 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4939
74bdbe96 4940 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4941
4942 gcc_assert (target);
4943
74bdbe96 4944 return convert_to_mode (target_mode, target, 1);
42791117 4945}
4946
c2f47e15 4947/* Expand a call to a unary builtin in EXP.
4948 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4949 function in-line. If convenient, the result should be placed in TARGET.
4950 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4951
53800dbe 4952static rtx
3754d046 4953expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4954 rtx subtarget, optab op_optab)
53800dbe 4955{
4956 rtx op0;
c2f47e15 4957
4958 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4959 return NULL_RTX;
53800dbe 4960
4961 /* Compute the argument. */
f97eea22 4962 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4963 (subtarget
4964 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4965 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4966 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4967 /* Compute op, into TARGET if possible.
53800dbe 4968 Set TARGET to wherever the result comes back. */
c2f47e15 4969 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4970 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4971 gcc_assert (target);
7d3f6cc7 4972
efb070c8 4973 return convert_to_mode (target_mode, target, 0);
53800dbe 4974}
89cfe6e5 4975
48e1416a 4976/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4977 as the builtin_expect semantics should have already been handled by
 4978 the tree branch prediction pass. */
89cfe6e5 4979
4980static rtx
c2f47e15 4981expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4982{
1e4adcfc 4983 tree arg;
89cfe6e5 4984
c2f47e15 4985 if (call_expr_nargs (exp) < 2)
89cfe6e5 4986 return const0_rtx;
c2f47e15 4987 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4988
c2f47e15 4989 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4990 /* When guessing was done, the hints should be already stripped away. */
07311427 4991 gcc_assert (!flag_guess_branch_prob
852f689e 4992 || optimize == 0 || seen_error ());
89cfe6e5 4993 return target;
4994}
689df48e 4995
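/* Example for expand_builtin_expect: in user code such as
     if (__builtin_expect (ptr == NULL, 0))
       return;
   the hint has already been consumed by the tree-level branch
   predictor, so only the first argument is expanded here.  */
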
fca0886c 4996/* Expand a call to __builtin_assume_aligned. We just return our first
 4997 argument, as the builtin_assume_aligned semantics should have already
 4998 been handled by CCP. */
4999
5000static rtx
5001expand_builtin_assume_aligned (tree exp, rtx target)
5002{
5003 if (call_expr_nargs (exp) < 2)
5004 return const0_rtx;
5005 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5006 EXPAND_NORMAL);
5007 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5008 && (call_expr_nargs (exp) < 3
5009 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5010 return target;
5011}
5012
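/* Example for expand_builtin_assume_aligned: after
     double *q = __builtin_assume_aligned (p, 32);
   the alignment fact has already been recorded by CCP, so the call
   simply expands to its first argument, P.  */
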
c22de3f0 5013void
aecda0d6 5014expand_builtin_trap (void)
a0ef1725 5015{
4db8dd0c 5016 if (targetm.have_trap ())
f73960eb 5017 {
4db8dd0c 5018 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 5019 /* For trap insns when not accumulating outgoing args force
5020 REG_ARGS_SIZE note to prevent crossjumping of calls with
5021 different args sizes. */
5022 if (!ACCUMULATE_OUTGOING_ARGS)
5023 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
5024 }
a0ef1725 5025 else
61ffc71a 5026 {
5027 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5028 tree call_expr = build_call_expr (fn, 0);
5029 expand_call (call_expr, NULL_RTX, false);
5030 }
5031
a0ef1725 5032 emit_barrier ();
5033}
78a74442 5034
d2b48f0c 5035/* Expand a call to __builtin_unreachable. We do nothing except emit
5036 a barrier saying that control flow will not pass here.
5037
5038 It is the responsibility of the program being compiled to ensure
 5039 that control flow never reaches __builtin_unreachable. */
5040static void
5041expand_builtin_unreachable (void)
5042{
5043 emit_barrier ();
5044}
5045
c2f47e15 5046/* Expand EXP, a call to fabs, fabsf or fabsl.
5047 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 5048 the function inline. If convenient, the result should be placed
5049 in TARGET. SUBTARGET may be used as the target for computing
5050 the operand. */
5051
5052static rtx
c2f47e15 5053expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 5054{
3754d046 5055 machine_mode mode;
78a74442 5056 tree arg;
5057 rtx op0;
5058
c2f47e15 5059 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5060 return NULL_RTX;
78a74442 5061
c2f47e15 5062 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 5063 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 5064 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 5065 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 5066 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5067}
5068
c2f47e15 5069/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 5070 Return NULL if a normal call should be emitted rather than expanding the
5071 function inline. If convenient, the result should be placed in TARGET.
5072 SUBTARGET may be used as the target for computing the operand. */
5073
5074static rtx
c2f47e15 5075expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 5076{
5077 rtx op0, op1;
5078 tree arg;
5079
c2f47e15 5080 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5081 return NULL_RTX;
270436f3 5082
c2f47e15 5083 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 5084 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 5085
c2f47e15 5086 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 5087 op1 = expand_normal (arg);
270436f3 5088
5089 return expand_copysign (op0, op1, target);
5090}
5091
ac8fb6db 5092/* Expand a call to __builtin___clear_cache. */
5093
5094static rtx
32e17df0 5095expand_builtin___clear_cache (tree exp)
ac8fb6db 5096{
32e17df0 5097 if (!targetm.code_for_clear_cache)
5098 {
ac8fb6db 5099#ifdef CLEAR_INSN_CACHE
32e17df0 5100 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5101 does something. Just do the default expansion to a call to
5102 __clear_cache(). */
5103 return NULL_RTX;
ac8fb6db 5104#else
32e17df0 5105 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5106 does nothing. There is no need to call it. Do nothing. */
5107 return const0_rtx;
ac8fb6db 5108#endif /* CLEAR_INSN_CACHE */
32e17df0 5109 }
5110
ac8fb6db 5111 /* We have a "clear_cache" insn, and it will handle everything. */
5112 tree begin, end;
5113 rtx begin_rtx, end_rtx;
ac8fb6db 5114
5115 /* We must not expand to a library call. If we did, any
5116 fallback library function in libgcc that might contain a call to
5117 __builtin___clear_cache() would recurse infinitely. */
5118 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5119 {
5120 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5121 return const0_rtx;
5122 }
5123
32e17df0 5124 if (targetm.have_clear_cache ())
ac8fb6db 5125 {
8786db1e 5126 struct expand_operand ops[2];
ac8fb6db 5127
5128 begin = CALL_EXPR_ARG (exp, 0);
5129 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5130
5131 end = CALL_EXPR_ARG (exp, 1);
5132 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5133
8786db1e 5134 create_address_operand (&ops[0], begin_rtx);
5135 create_address_operand (&ops[1], end_rtx);
32e17df0 5136 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 5137 return const0_rtx;
ac8fb6db 5138 }
5139 return const0_rtx;
ac8fb6db 5140}
5141
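/* Example for expand_builtin___clear_cache: a JIT that writes
   instructions into BUF must call
     __builtin___clear_cache (buf, buf + len);
   before executing them; both arguments must be pointers, hence the
   arglist check above.  */
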
4ee9c684 5142/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5143
5144static rtx
5145round_trampoline_addr (rtx tramp)
5146{
5147 rtx temp, addend, mask;
5148
5149 /* If we don't need too much alignment, we'll have been guaranteed
5150 proper alignment by get_trampoline_type. */
5151 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5152 return tramp;
5153
5154 /* Round address up to desired boundary. */
5155 temp = gen_reg_rtx (Pmode);
0359f9f5 5156 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5157 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 5158
5159 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5160 temp, 0, OPTAB_LIB_WIDEN);
5161 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5162 temp, 0, OPTAB_LIB_WIDEN);
5163
5164 return tramp;
5165}
5166
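/* Example for round_trampoline_addr: with a TRAMPOLINE_ALIGNMENT of
   64 bits, ADDEND is 7 and MASK is -8, so the two binops above compute
     tramp = (tramp + 7) & ~7;
   the usual round-up-to-a-power-of-two idiom, carried out in Pmode.  */
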
5167static rtx
c307f106 5168expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 5169{
5170 tree t_tramp, t_func, t_chain;
82c7907c 5171 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 5172
c2f47e15 5173 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 5174 POINTER_TYPE, VOID_TYPE))
5175 return NULL_RTX;
5176
c2f47e15 5177 t_tramp = CALL_EXPR_ARG (exp, 0);
5178 t_func = CALL_EXPR_ARG (exp, 1);
5179 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 5180
8ec3c5c2 5181 r_tramp = expand_normal (t_tramp);
82c7907c 5182 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5183 MEM_NOTRAP_P (m_tramp) = 1;
5184
c307f106 5185 /* If ONSTACK, the TRAMP argument should be the address of a field
5186 within the local function's FRAME decl. Either way, let's see if
5187 we can fill in the MEM_ATTRs for this memory. */
82c7907c 5188 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 5189 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 5190
c307f106 5191 /* Creator of a heap trampoline is responsible for making sure the
5192 address is aligned to at least STACK_BOUNDARY. Normally malloc
5193 will ensure this anyhow. */
82c7907c 5194 tmp = round_trampoline_addr (r_tramp);
5195 if (tmp != r_tramp)
5196 {
5197 m_tramp = change_address (m_tramp, BLKmode, tmp);
5198 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 5199 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 5200 }
5201
5202 /* The FUNC argument should be the address of the nested function.
5203 Extract the actual function decl to pass to the hook. */
5204 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5205 t_func = TREE_OPERAND (t_func, 0);
5206 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5207
8ec3c5c2 5208 r_chain = expand_normal (t_chain);
4ee9c684 5209
5210 /* Generate insns to initialize the trampoline. */
82c7907c 5211 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 5212
c307f106 5213 if (onstack)
5214 {
5215 trampolines_created = 1;
8bc8a8f4 5216
a27e3913 5217 if (targetm.calls.custom_function_descriptors != 0)
5218 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5219 "trampoline generated for nested function %qD", t_func);
c307f106 5220 }
8bc8a8f4 5221
4ee9c684 5222 return const0_rtx;
5223}
5224
5225static rtx
c2f47e15 5226expand_builtin_adjust_trampoline (tree exp)
4ee9c684 5227{
5228 rtx tramp;
5229
c2f47e15 5230 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 5231 return NULL_RTX;
5232
c2f47e15 5233 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 5234 tramp = round_trampoline_addr (tramp);
82c7907c 5235 if (targetm.calls.trampoline_adjust_address)
5236 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 5237
5238 return tramp;
5239}
5240
a27e3913 5241/* Expand a call to the builtin descriptor initialization routine.
 5242 A descriptor is made up of a pair of pointers: the static
 5243 chain and the code entry, in that order. */
5244
5245static rtx
5246expand_builtin_init_descriptor (tree exp)
5247{
5248 tree t_descr, t_func, t_chain;
5249 rtx m_descr, r_descr, r_func, r_chain;
5250
5251 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5252 VOID_TYPE))
5253 return NULL_RTX;
5254
5255 t_descr = CALL_EXPR_ARG (exp, 0);
5256 t_func = CALL_EXPR_ARG (exp, 1);
5257 t_chain = CALL_EXPR_ARG (exp, 2);
5258
5259 r_descr = expand_normal (t_descr);
5260 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5261 MEM_NOTRAP_P (m_descr) = 1;
5262
5263 r_func = expand_normal (t_func);
5264 r_chain = expand_normal (t_chain);
5265
5266 /* Generate insns to initialize the descriptor. */
5267 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5268 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5269 POINTER_SIZE / BITS_PER_UNIT), r_func);
5270
5271 return const0_rtx;
5272}
5273
5274/* Expand a call to the builtin descriptor adjustment routine. */
5275
5276static rtx
5277expand_builtin_adjust_descriptor (tree exp)
5278{
5279 rtx tramp;
5280
5281 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5282 return NULL_RTX;
5283
5284 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5285
5286 /* Unalign the descriptor to allow runtime identification. */
5287 tramp = plus_constant (ptr_mode, tramp,
5288 targetm.calls.custom_function_descriptors);
5289
5290 return force_operand (tramp, NULL_RTX);
5291}
5292
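/* Example of the descriptor layout built above, assuming 64-bit
   pointers:
     bytes 0..7  : static chain value
     bytes 8..15 : code entry point
   expand_builtin_adjust_descriptor then adds the target's nonzero
   custom_function_descriptors offset, leaving the value misaligned so
   that descriptor pointers can be told apart from ordinary, aligned
   function addresses at run time.  */
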
93f564d6 5293/* Expand the call EXP to the built-in signbit, signbitf or signbitl
5294 function. The function first checks whether the back end provides
5295 an insn to implement signbit for the respective mode. If not, it
5296 checks whether the floating point format of the value is such that
10902624 5297 the sign bit can be extracted. If that is not the case, error out.
5298 EXP is the expression that is a call to the builtin function; if
5299 convenient, the result should be placed in TARGET. */
27f261ef 5300static rtx
5301expand_builtin_signbit (tree exp, rtx target)
5302{
5303 const struct real_format *fmt;
299dd9fa 5304 scalar_float_mode fmode;
f77c4496 5305 scalar_int_mode rmode, imode;
c2f47e15 5306 tree arg;
ca4f1f5b 5307 int word, bitpos;
27eda240 5308 enum insn_code icode;
27f261ef 5309 rtx temp;
389dd41b 5310 location_t loc = EXPR_LOCATION (exp);
27f261ef 5311
c2f47e15 5312 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5313 return NULL_RTX;
27f261ef 5314
c2f47e15 5315 arg = CALL_EXPR_ARG (exp, 0);
299dd9fa 5316 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
03b7a719 5317 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
27f261ef 5318 fmt = REAL_MODE_FORMAT (fmode);
5319
93f564d6 5320 arg = builtin_save_expr (arg);
5321
5322 /* Expand the argument yielding a RTX expression. */
5323 temp = expand_normal (arg);
5324
5325 /* Check if the back end provides an insn that handles signbit for the
5326 argument's mode. */
d6bf3b14 5327 icode = optab_handler (signbit_optab, fmode);
27eda240 5328 if (icode != CODE_FOR_nothing)
93f564d6 5329 {
1e0c0b35 5330 rtx_insn *last = get_last_insn ();
93f564d6 5331 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 5332 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5333 return target;
5334 delete_insns_since (last);
93f564d6 5335 }
5336
27f261ef 5337 /* For floating point formats without a sign bit, implement signbit
5338 as "ARG < 0.0". */
8d564692 5339 bitpos = fmt->signbit_ro;
ca4f1f5b 5340 if (bitpos < 0)
27f261ef 5341 {
5342 /* But we can't do this if the format supports signed zero. */
10902624 5343 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
27f261ef 5344
389dd41b 5345 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5346 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5347 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5348 }
5349
ca4f1f5b 5350 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5351 {
2cf1bb25 5352 imode = int_mode_for_mode (fmode).require ();
ca4f1f5b 5353 temp = gen_lowpart (imode, temp);
24fd4260 5354 }
5355 else
5356 {
ca4f1f5b 5357 imode = word_mode;
5358 /* Handle targets with different FP word orders. */
5359 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5360 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5361 else
a0c938f0 5362 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5363 temp = operand_subword_force (temp, word, fmode);
5364 bitpos = bitpos % BITS_PER_WORD;
5365 }
5366
44b0f1d0 5367 /* Force the intermediate word_mode (or narrower) result into a
5368 register. This avoids attempting to create paradoxical SUBREGs
5369 of floating point modes below. */
5370 temp = force_reg (imode, temp);
5371
ca4f1f5b 5372 /* If the bitpos is within the "result mode" lowpart, the operation
 5373 can be implemented with a single bitwise AND. Otherwise, we need
5374 a right shift and an AND. */
5375
5376 if (bitpos < GET_MODE_BITSIZE (rmode))
5377 {
796b6678 5378 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5379
4a46f016 5380 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5381 temp = gen_lowpart (rmode, temp);
24fd4260 5382 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5383 immed_wide_int_const (mask, rmode),
ca4f1f5b 5384 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5385 }
ca4f1f5b 5386 else
5387 {
5388 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5389 significant bit, then truncate the result to the desired mode
ca4f1f5b 5390 and mask just this bit. */
f5ff0b21 5391 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5392 temp = gen_lowpart (rmode, temp);
5393 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5394 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5395 }
5396
27f261ef 5397 return temp;
5398}
73673831 5399
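/* Example for expand_builtin_signbit: for IEEE single precision with a
   32-bit int result mode, the sign is bit 31, which fits in the
   lowpart, so the expansion is a single
     bits & 0x80000000
   (any nonzero result means the sign is set), where BITS stands for
   the integer reinterpretation of the argument; for double the sign
   sits in bit 63, so a logical shift right precedes the AND.  */
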
5400/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5401 call. EXP is the call. FN is the
73673831 5402 identifier of the actual function. IGNORE is nonzero if the
5403 value is to be ignored. */
5404
5405static rtx
c2f47e15 5406expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5407{
5408 tree id, decl;
5409 tree call;
5410
5411 /* If we are not profiling, just call the function. */
5412 if (!profile_arc_flag)
5413 return NULL_RTX;
5414
5415 /* Otherwise call the wrapper. This should be equivalent for the rest of
 5416 the compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5417 code necessary for keeping the profiling sane. */
73673831 5418
5419 switch (DECL_FUNCTION_CODE (fn))
5420 {
5421 case BUILT_IN_FORK:
5422 id = get_identifier ("__gcov_fork");
5423 break;
5424
5425 case BUILT_IN_EXECL:
5426 id = get_identifier ("__gcov_execl");
5427 break;
5428
5429 case BUILT_IN_EXECV:
5430 id = get_identifier ("__gcov_execv");
5431 break;
5432
5433 case BUILT_IN_EXECLP:
5434 id = get_identifier ("__gcov_execlp");
5435 break;
5436
5437 case BUILT_IN_EXECLE:
5438 id = get_identifier ("__gcov_execle");
5439 break;
5440
5441 case BUILT_IN_EXECVP:
5442 id = get_identifier ("__gcov_execvp");
5443 break;
5444
5445 case BUILT_IN_EXECVE:
5446 id = get_identifier ("__gcov_execve");
5447 break;
5448
5449 default:
64db345d 5450 gcc_unreachable ();
73673831 5451 }
5452
e60a6f7b 5453 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5454 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5455 DECL_EXTERNAL (decl) = 1;
5456 TREE_PUBLIC (decl) = 1;
5457 DECL_ARTIFICIAL (decl) = 1;
5458 TREE_NOTHROW (decl) = 1;
e82d310b 5459 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5460 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5461 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5462 return expand_call (call, target, ignore);
c2f47e15 5463 }
48e1416a 5464
b6a5fc45 5465
5466\f
3e272de8 5467/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5468 the pointer in these functions is void*, the tree optimizers may remove
5469 casts. The mode computed in expand_builtin isn't reliable either, due
5470 to __sync_bool_compare_and_swap.
5471
5472 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5473 group of builtins. This gives us log2 of the mode size. */
5474
3754d046 5475static inline machine_mode
3e272de8 5476get_builtin_sync_mode (int fcode_diff)
5477{
ad3a13b5 5478 /* The size is not negotiable, so ask not to get BLKmode in return
5479 if the target indicates that a smaller size would be better. */
5480 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 5481}
5482
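/* Example for get_builtin_sync_mode: the sync builtins come in _1, _2,
   _4, _8 and _16 variants numbered consecutively, so FCODE_DIFF values
   0..4 select QImode, HImode, SImode, DImode and TImode respectively
   (8 << 0 through 8 << 4 bits).  */
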
041e0215 5483/* Expand the memory expression LOC and return the appropriate memory operand
5484 for the builtin_sync operations. */
5485
5486static rtx
3754d046 5487get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5488{
5489 rtx addr, mem;
5490
7f4d56ad 5491 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5492 addr = convert_memory_address (Pmode, addr);
041e0215 5493
5494 /* Note that we explicitly do not want any alias information for this
5495 memory, so that we kill all other live memories. Otherwise we don't
5496 satisfy the full barrier semantics of the intrinsic. */
5497 mem = validize_mem (gen_rtx_MEM (mode, addr));
5498
153c3b50 5499 /* The alignment needs to be at least as strict as that of the mode. */
5500 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5501 get_pointer_alignment (loc)));
c94cfd1c 5502 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5503 MEM_VOLATILE_P (mem) = 1;
5504
5505 return mem;
5506}
5507
1cd6e20d 5508/* Make sure an argument is in the right mode.
5509 EXP is the tree argument.
5510 MODE is the mode it should be in. */
5511
5512static rtx
3754d046 5513expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5514{
5515 rtx val;
3754d046 5516 machine_mode old_mode;
1cd6e20d 5517
5518 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5519 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5520 of CONST_INTs, where we know the old_mode only from the call argument. */
5521
5522 old_mode = GET_MODE (val);
5523 if (old_mode == VOIDmode)
5524 old_mode = TYPE_MODE (TREE_TYPE (exp));
5525 val = convert_modes (mode, old_mode, val, 1);
5526 return val;
5527}
5528
5529
b6a5fc45 5530/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5531 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5532 that corresponds to the arithmetic or logical operation from the name;
5533 an exception here is that NOT actually means NAND. TARGET is an optional
5534 place for us to store the results; AFTER is true if this is the
1cd6e20d 5535 xxx_and_fetch form, i.e. the value after the operation is returned. */
b6a5fc45 5536
5537static rtx
3754d046 5538expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5539 enum rtx_code code, bool after,
1cd6e20d 5540 rtx target)
b6a5fc45 5541{
041e0215 5542 rtx val, mem;
e60a6f7b 5543 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5544
cf73e559 5545 if (code == NOT && warn_sync_nand)
5546 {
5547 tree fndecl = get_callee_fndecl (exp);
5548 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5549
5550 static bool warned_f_a_n, warned_n_a_f;
5551
5552 switch (fcode)
5553 {
2797f13a 5554 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5555 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5556 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5557 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5558 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5559 if (warned_f_a_n)
5560 break;
5561
b9a16870 5562 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5563 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5564 warned_f_a_n = true;
5565 break;
5566
2797f13a 5567 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5568 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5569 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5570 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5571 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5572 if (warned_n_a_f)
5573 break;
5574
b9a16870 5575 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5576 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5577 warned_n_a_f = true;
5578 break;
5579
5580 default:
5581 gcc_unreachable ();
5582 }
5583 }
5584
b6a5fc45 5585 /* Expand the operands. */
c2f47e15 5586 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5587 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5588
a372f7ca 5589 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 5590 after);
b6a5fc45 5591}
5592
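/* Example of the GCC 4.4 semantic change warned about above: for
     __sync_fetch_and_nand (p, v)
   GCC 4.3 and earlier implemented the operation as *p = ~*p & v,
   whereas GCC 4.4 and later compute *p = ~(*p & v), the conventional
   NAND.  */
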
5593/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5594 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5595 true if this is the boolean form. TARGET is a place for us to store the
5596 results; this is NOT optional if IS_BOOL is true. */
5597
5598static rtx
3754d046 5599expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5600 bool is_bool, rtx target)
b6a5fc45 5601{
041e0215 5602 rtx old_val, new_val, mem;
ba885f6a 5603 rtx *pbool, *poval;
b6a5fc45 5604
5605 /* Expand the operands. */
c2f47e15 5606 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5607 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5608 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5609
ba885f6a 5610 pbool = poval = NULL;
5611 if (target != const0_rtx)
5612 {
5613 if (is_bool)
5614 pbool = &target;
5615 else
5616 poval = &target;
5617 }
5618 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 5619 false, MEMMODEL_SYNC_SEQ_CST,
5620 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 5621 return NULL_RTX;
c2f47e15 5622
1cd6e20d 5623 return target;
b6a5fc45 5624}
5625
5626/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5627 general form is actually an atomic exchange, and some targets only
5628 support a reduced form with the second argument being a constant 1.
48e1416a 5629 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5630 the results. */
b6a5fc45 5631
5632static rtx
3754d046 5633expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5634 rtx target)
b6a5fc45 5635{
041e0215 5636 rtx val, mem;
b6a5fc45 5637
5638 /* Expand the operands. */
c2f47e15 5639 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5640 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5641
7821cde1 5642 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5643}
5644
5645/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5646
5647static void
3754d046 5648expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5649{
5650 rtx mem;
5651
5652 /* Expand the operands. */
5653 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5654
a372f7ca 5655 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5656}
5657
5658/* Given an integer representing an ``enum memmodel'', verify its
5659 correctness and return the memory model enum. */
5660
5661static enum memmodel
5662get_memmodel (tree exp)
5663{
5664 rtx op;
7f738025 5665 unsigned HOST_WIDE_INT val;
2cb724f9 5666 source_location loc
5667 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5668
5669 /* If the parameter is not a constant, it's a run time value so we'll just
5670 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5671 if (TREE_CODE (exp) != INTEGER_CST)
5672 return MEMMODEL_SEQ_CST;
5673
5674 op = expand_normal (exp);
7f738025 5675
5676 val = INTVAL (op);
5677 if (targetm.memmodel_check)
5678 val = targetm.memmodel_check (val);
5679 else if (val & ~MEMMODEL_MASK)
5680 {
2cb724f9 5681 warning_at (loc, OPT_Winvalid_memory_model,
5682 "unknown architecture specifier in memory model to builtin");
7f738025 5683 return MEMMODEL_SEQ_CST;
5684 }
5685
a372f7ca 5686 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5687 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5688 {
2cb724f9 5689 warning_at (loc, OPT_Winvalid_memory_model,
5690 "invalid memory model argument to builtin");
1cd6e20d 5691 return MEMMODEL_SEQ_CST;
5692 }
7f738025 5693
3070f133 5694 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5695 be conservative and promote consume to acquire. */
5696 if (val == MEMMODEL_CONSUME)
5697 val = MEMMODEL_ACQUIRE;
5698
7f738025 5699 return (enum memmodel) val;
1cd6e20d 5700}
5701
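/* Example for get_memmodel: a run-time model argument, as in
     __atomic_load_n (p, model_var);
   is not an INTEGER_CST and is conservatively treated as
   MEMMODEL_SEQ_CST above, while a constant __ATOMIC_CONSUME is
   promoted to MEMMODEL_ACQUIRE per the PR 59448 workaround.  */
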
5702/* Expand the __atomic_exchange intrinsic:
5703 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5704 EXP is the CALL_EXPR.
5705 TARGET is an optional place for us to store the results. */
5706
5707static rtx
3754d046 5708expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5709{
5710 rtx val, mem;
5711 enum memmodel model;
5712
5713 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5714
5715 if (!flag_inline_atomics)
5716 return NULL_RTX;
5717
5718 /* Expand the operands. */
5719 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5720 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5721
7821cde1 5722 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5723}
5724
5725/* Expand the __atomic_compare_exchange intrinsic:
5726 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5727 TYPE desired, BOOL weak,
5728 enum memmodel success,
5729 enum memmodel failure)
5730 EXP is the CALL_EXPR.
5731 TARGET is an optional place for us to store the results. */
5732
5733static rtx
3754d046 5734expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5735 rtx target)
5736{
1e0c0b35 5737 rtx expect, desired, mem, oldval;
5738 rtx_code_label *label;
1cd6e20d 5739 enum memmodel success, failure;
5740 tree weak;
5741 bool is_weak;
2cb724f9 5742 source_location loc
5743 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5744
5745 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5746 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5747
086f4e33 5748 if (failure > success)
5749 {
2cb724f9 5750 warning_at (loc, OPT_Winvalid_memory_model,
5751 "failure memory model cannot be stronger than success "
5752 "memory model for %<__atomic_compare_exchange%>");
086f4e33 5753 success = MEMMODEL_SEQ_CST;
5754 }
5755
a372f7ca 5756 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5757 {
2cb724f9 5758 warning_at (loc, OPT_Winvalid_memory_model,
5759 "invalid failure memory model for "
5760 "%<__atomic_compare_exchange%>");
086f4e33 5761 failure = MEMMODEL_SEQ_CST;
5762 success = MEMMODEL_SEQ_CST;
1cd6e20d 5763 }
5764
086f4e33 5765
1cd6e20d 5766 if (!flag_inline_atomics)
5767 return NULL_RTX;
5768
5769 /* Expand the operands. */
5770 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5771
5772 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5773 expect = convert_memory_address (Pmode, expect);
c401b131 5774 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5775 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5776
5777 weak = CALL_EXPR_ARG (exp, 3);
5778 is_weak = false;
e913b5cd 5779 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5780 is_weak = true;
5781
d86e3752 5782 if (target == const0_rtx)
5783 target = NULL;
d86e3752 5784
3c29a9ea 5785 /* Lest the rtl backend create a race condition with an improper store
5786 to memory, always create a new pseudo for OLDVAL. */
5787 oldval = NULL;
5788
5789 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5790 is_weak, success, failure))
1cd6e20d 5791 return NULL_RTX;
5792
d86e3752 5793 /* Conditionally store back to EXPECT, lest we create a race condition
5794 with an improper store to memory. */
5795 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5796 the normal case where EXPECT is totally private, i.e. a register. At
5797 which point the store can be unconditional. */
5798 label = gen_label_rtx ();
62589f76 5799 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5800 GET_MODE (target), 1, label);
d86e3752 5801 emit_move_insn (expect, oldval);
5802 emit_label (label);
c401b131 5803
1cd6e20d 5804 return target;
5805}
5806
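/* Example for expand_builtin_atomic_compare_exchange: the canonical
   update loop
     long oldv = __atomic_load_n (p, __ATOMIC_RELAXED), newv;
     do
       newv = oldv + 1;
     while (!__atomic_compare_exchange_n (p, &oldv, newv, 0,
					  __ATOMIC_SEQ_CST,
					  __ATOMIC_SEQ_CST));
   depends on the conditional store-back emitted above: OLDV is
   refreshed from memory only when the exchange fails.  */
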
5a5ef659 5807/* Helper function for expand_ifn_atomic_compare_exchange - expand
5808 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5809 call. The weak parameter must be dropped to match the expected parameter
5810 list and the expected argument changed from value to pointer to memory
5811 slot. */
5812
5813static void
5814expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5815{
5816 unsigned int z;
5817 vec<tree, va_gc> *vec;
5818
5819 vec_alloc (vec, 5);
5820 vec->quick_push (gimple_call_arg (call, 0));
5821 tree expected = gimple_call_arg (call, 1);
5822 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5823 TREE_TYPE (expected));
5824 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5825 if (expd != x)
5826 emit_move_insn (x, expd);
5827 tree v = make_tree (TREE_TYPE (expected), x);
5828 vec->quick_push (build1 (ADDR_EXPR,
5829 build_pointer_type (TREE_TYPE (expected)), v));
5830 vec->quick_push (gimple_call_arg (call, 2));
5831 /* Skip the boolean weak parameter. */
5832 for (z = 4; z < 6; z++)
5833 vec->quick_push (gimple_call_arg (call, z));
5834 built_in_function fncode
5835 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5836 + exact_log2 (GET_MODE_SIZE (mode)));
5837 tree fndecl = builtin_decl_explicit (fncode);
5838 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5839 fndecl);
5840 tree exp = build_call_vec (boolean_type_node, fn, vec);
5841 tree lhs = gimple_call_lhs (call);
5842 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5843 if (lhs)
5844 {
5845 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5846 if (GET_MODE (boolret) != mode)
5847 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5848 x = force_reg (mode, x);
5849 write_complex_part (target, boolret, true);
5850 write_complex_part (target, x, false);
5851 }
5852}
5853
5854/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5855
5856void
5857expand_ifn_atomic_compare_exchange (gcall *call)
5858{
5859 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5860 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5861 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5862 rtx expect, desired, mem, oldval, boolret;
5863 enum memmodel success, failure;
5864 tree lhs;
5865 bool is_weak;
5866 source_location loc
5867 = expansion_point_location_if_in_system_header (gimple_location (call));
5868
5869 success = get_memmodel (gimple_call_arg (call, 4));
5870 failure = get_memmodel (gimple_call_arg (call, 5));
5871
5872 if (failure > success)
5873 {
5874 warning_at (loc, OPT_Winvalid_memory_model,
5875 "failure memory model cannot be stronger than success "
5876 "memory model for %<__atomic_compare_exchange%>");
5877 success = MEMMODEL_SEQ_CST;
5878 }
5879
5880 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5881 {
5882 warning_at (loc, OPT_Winvalid_memory_model,
5883 "invalid failure memory model for "
5884 "%<__atomic_compare_exchange%>");
5885 failure = MEMMODEL_SEQ_CST;
5886 success = MEMMODEL_SEQ_CST;
5887 }
5888
5889 if (!flag_inline_atomics)
5890 {
5891 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5892 return;
5893 }
5894
5895 /* Expand the operands. */
5896 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5897
5898 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5899 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5900
5901 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5902
5903 boolret = NULL;
5904 oldval = NULL;
5905
5906 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5907 is_weak, success, failure))
5908 {
5909 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5910 return;
5911 }
5912
5913 lhs = gimple_call_lhs (call);
5914 if (lhs)
5915 {
5916 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5917 if (GET_MODE (boolret) != mode)
5918 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5919 write_complex_part (target, boolret, true);
5920 write_complex_part (target, oldval, false);
5921 }
5922}
5923
1cd6e20d 5924/* Expand the __atomic_load intrinsic:
5925 TYPE __atomic_load (TYPE *object, enum memmodel)
5926 EXP is the CALL_EXPR.
5927 TARGET is an optional place for us to store the results. */
5928
5929static rtx
3754d046 5930expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5931{
5932 rtx mem;
5933 enum memmodel model;
5934
5935 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5936 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5937 {
2cb724f9 5938 source_location loc
5939 = expansion_point_location_if_in_system_header (input_location);
5940 warning_at (loc, OPT_Winvalid_memory_model,
5941 "invalid memory model for %<__atomic_load%>");
086f4e33 5942 model = MEMMODEL_SEQ_CST;
1cd6e20d 5943 }
5944
5945 if (!flag_inline_atomics)
5946 return NULL_RTX;
5947
5948 /* Expand the operand. */
5949 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5950
5951 return expand_atomic_load (target, mem, model);
5952}
5953
5954
5955/* Expand the __atomic_store intrinsic:
5956 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5957 EXP is the CALL_EXPR.
5958 TARGET is an optional place for us to store the results. */
5959
5960static rtx
3754d046 5961expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5962{
5963 rtx mem, val;
5964 enum memmodel model;
5965
5966 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 5967 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5968 || is_mm_release (model)))
1cd6e20d 5969 {
2cb724f9 5970 source_location loc
5971 = expansion_point_location_if_in_system_header (input_location);
5972 warning_at (loc, OPT_Winvalid_memory_model,
5973 "invalid memory model for %<__atomic_store%>");
086f4e33 5974 model = MEMMODEL_SEQ_CST;
1cd6e20d 5975 }
5976
5977 if (!flag_inline_atomics)
5978 return NULL_RTX;
5979
5980 /* Expand the operands. */
5981 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5982 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5983
8808bf16 5984 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5985}
5986
5987/* Expand the __atomic_fetch_XXX intrinsic:
5988 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5989 EXP is the CALL_EXPR.
5990 TARGET is an optional place for us to store the results.
 5991 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR.
5992 FETCH_AFTER is true if returning the result of the operation.
5993 FETCH_AFTER is false if returning the value before the operation.
5994 IGNORE is true if the result is not used.
5995 EXT_CALL is the correct builtin for an external call if this cannot be
5996 resolved to an instruction sequence. */
5997
5998static rtx
3754d046 5999expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 6000 enum rtx_code code, bool fetch_after,
6001 bool ignore, enum built_in_function ext_call)
6002{
6003 rtx val, mem, ret;
6004 enum memmodel model;
6005 tree fndecl;
6006 tree addr;
6007
6008 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6009
6010 /* Expand the operands. */
6011 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6012 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6013
6014 /* Only try generating instructions if inlining is turned on. */
6015 if (flag_inline_atomics)
6016 {
6017 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6018 if (ret)
6019 return ret;
6020 }
6021
6022 /* Return if a different routine isn't needed for the library call. */
6023 if (ext_call == BUILT_IN_NONE)
6024 return NULL_RTX;
6025
6026 /* Change the call to the specified function. */
6027 fndecl = get_callee_fndecl (exp);
6028 addr = CALL_EXPR_FN (exp);
6029 STRIP_NOPS (addr);
6030
6031 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 6032 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 6033
a2f95d97 6034 /* If we will emit code after the call, the call cannot be a tail call.
6035 If it is emitted as a tail call, a barrier is emitted after it, and
6036 then all trailing code is removed. */
6037 if (!ignore)
6038 CALL_EXPR_TAILCALL (exp) = 0;
6039
1cd6e20d 6040 /* Expand the call here so we can emit trailing code. */
6041 ret = expand_call (exp, target, ignore);
6042
6043 /* Replace the original function just in case it matters. */
6044 TREE_OPERAND (addr, 0) = fndecl;
6045
6046 /* Then issue the arithmetic correction to return the right result. */
6047 if (!ignore)
c449f851 6048 {
6049 if (code == NOT)
6050 {
6051 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6052 OPTAB_LIB_WIDEN);
6053 ret = expand_simple_unop (mode, NOT, ret, target, true);
6054 }
6055 else
6056 ret = expand_simple_binop (mode, code, ret, val, target, true,
6057 OPTAB_LIB_WIDEN);
6058 }
1cd6e20d 6059 return ret;
6060}
6061
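/* Example of the arithmetic correction above: if an __atomic_nand_fetch
   call falls back to the __atomic_fetch_nand library routine
   (EXT_CALL), the library returns the old value, so the result is
   rebuilt as ~(old & val) to yield the value after the operation.  */
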
9c1a31e4 6062/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6063
6064void
6065expand_ifn_atomic_bit_test_and (gcall *call)
6066{
6067 tree ptr = gimple_call_arg (call, 0);
6068 tree bit = gimple_call_arg (call, 1);
6069 tree flag = gimple_call_arg (call, 2);
6070 tree lhs = gimple_call_lhs (call);
6071 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6072 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6073 enum rtx_code code;
6074 optab optab;
6075 struct expand_operand ops[5];
6076
6077 gcc_assert (flag_inline_atomics);
6078
6079 if (gimple_call_num_args (call) == 4)
6080 model = get_memmodel (gimple_call_arg (call, 3));
6081
6082 rtx mem = get_builtin_sync_mem (ptr, mode);
6083 rtx val = expand_expr_force_mode (bit, mode);
6084
6085 switch (gimple_call_internal_fn (call))
6086 {
6087 case IFN_ATOMIC_BIT_TEST_AND_SET:
6088 code = IOR;
6089 optab = atomic_bit_test_and_set_optab;
6090 break;
6091 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6092 code = XOR;
6093 optab = atomic_bit_test_and_complement_optab;
6094 break;
6095 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6096 code = AND;
6097 optab = atomic_bit_test_and_reset_optab;
6098 break;
6099 default:
6100 gcc_unreachable ();
6101 }
6102
6103 if (lhs == NULL_TREE)
6104 {
6105 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6106 val, NULL_RTX, true, OPTAB_DIRECT);
6107 if (code == AND)
6108 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6109 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6110 return;
6111 }
6112
6113 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6114 enum insn_code icode = direct_optab_handler (optab, mode);
6115 gcc_assert (icode != CODE_FOR_nothing);
6116 create_output_operand (&ops[0], target, mode);
6117 create_fixed_operand (&ops[1], mem);
6118 create_convert_operand_to (&ops[2], val, mode, true);
6119 create_integer_operand (&ops[3], model);
6120 create_integer_operand (&ops[4], integer_onep (flag));
6121 if (maybe_expand_insn (icode, 5, ops))
6122 return;
6123
6124 rtx bitval = val;
6125 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6126 val, NULL_RTX, true, OPTAB_DIRECT);
6127 rtx maskval = val;
6128 if (code == AND)
6129 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6130 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6131 code, model, false);
6132 if (integer_onep (flag))
6133 {
6134 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6135 NULL_RTX, true, OPTAB_DIRECT);
6136 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6137 true, OPTAB_DIRECT);
6138 }
6139 else
6140 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6141 OPTAB_DIRECT);
6142 if (result != target)
6143 emit_move_insn (target, result);
6144}
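
/* For illustration, the source-level idioms that earlier gimple passes
   are expected to match and rewrite into these internal functions before
   this expander runs (a sketch):

     becomes IFN_ATOMIC_BIT_TEST_AND_SET:
       _Bool set_bit (unsigned *p, int bit)
       {
         return (__atomic_fetch_or (p, 1u << bit, __ATOMIC_SEQ_CST)
                 >> bit) & 1;
       }

     becomes IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
       _Bool flip_bit (unsigned *p, int bit)
       {
         return (__atomic_fetch_xor (p, 1u << bit, __ATOMIC_SEQ_CST)
                 >> bit) & 1;
       }
*/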
6145
10b744a3 6146/* Expand an atomic clear operation.
 6147	   void __atomic_clear (BOOL *obj, enum memmodel)
6148 EXP is the call expression. */
6149
6150static rtx
6151expand_builtin_atomic_clear (tree exp)
6152{
3754d046 6153 machine_mode mode;
10b744a3 6154 rtx mem, ret;
6155 enum memmodel model;
6156
6157 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6158 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6159 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6160
a372f7ca 6161 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 6162 {
2cb724f9 6163 source_location loc
6164 = expansion_point_location_if_in_system_header (input_location);
6165 warning_at (loc, OPT_Winvalid_memory_model,
6166 "invalid memory model for %<__atomic_store%>");
086f4e33 6167 model = MEMMODEL_SEQ_CST;
10b744a3 6168 }
6169
 6170	  /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
 6171	     Failing both, issue a plain store.  The only way the atomic forms can
 6172	     fail is if the bool type is larger than a word size.  Unlikely, but
 6173	     handle it anyway for completeness.  Assume a single-threaded model since
 6174	     there is no atomic support in this case, and no barriers are required.  */
6175 ret = expand_atomic_store (mem, const0_rtx, model, true);
6176 if (!ret)
6177 emit_move_insn (mem, const0_rtx);
6178 return const0_rtx;
6179}
6180
6181/* Expand an atomic test_and_set operation.
 6182	   bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6183 EXP is the call expression. */
6184
6185static rtx
7821cde1 6186expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 6187{
7821cde1 6188 rtx mem;
10b744a3 6189 enum memmodel model;
3754d046 6190 machine_mode mode;
10b744a3 6191
6192 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6193 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6194 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6195
7821cde1 6196 return expand_atomic_test_and_set (target, mem, model);
10b744a3 6197}
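
/* For illustration, a minimal source-level use of the two builtins
   above, sketched as a one-byte spinlock:

     static volatile _Bool lock_taken;

     void acquire (void)
     {
       while (__atomic_test_and_set (&lock_taken, __ATOMIC_ACQUIRE))
         ;  // spin until the previous value was clear
     }

     void release (void)
     {
       __atomic_clear (&lock_taken, __ATOMIC_RELEASE);
     }
*/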
6198
6199
1cd6e20d 6200/* Return true if the object of size ARG0, optionally pointed to by ARG1,
 6201   is always lock free on this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
6202
6203static tree
6204fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6205{
6206 int size;
3754d046 6207 machine_mode mode;
1cd6e20d 6208 unsigned int mode_align, type_align;
6209
6210 if (TREE_CODE (arg0) != INTEGER_CST)
6211 return NULL_TREE;
b6a5fc45 6212
1cd6e20d 6213 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6214 mode = mode_for_size (size, MODE_INT, 0);
6215 mode_align = GET_MODE_ALIGNMENT (mode);
6216
4ca99588 6217 if (TREE_CODE (arg1) == INTEGER_CST)
6218 {
6219 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6220
6221 /* Either this argument is null, or it's a fake pointer encoding
6222 the alignment of the object. */
ac29ece2 6223 val = least_bit_hwi (val);
4ca99588 6224 val *= BITS_PER_UNIT;
6225
6226 if (val == 0 || mode_align < val)
6227 type_align = mode_align;
6228 else
6229 type_align = val;
6230 }
1cd6e20d 6231 else
6232 {
6233 tree ttype = TREE_TYPE (arg1);
6234
6235 /* This function is usually invoked and folded immediately by the front
6236 end before anything else has a chance to look at it. The pointer
6237 parameter at this point is usually cast to a void *, so check for that
6238 and look past the cast. */
2f8a2ead 6239 if (CONVERT_EXPR_P (arg1)
6240 && POINTER_TYPE_P (ttype)
6241 && VOID_TYPE_P (TREE_TYPE (ttype))
6242 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
1cd6e20d 6243 arg1 = TREE_OPERAND (arg1, 0);
6244
6245 ttype = TREE_TYPE (arg1);
6246 gcc_assert (POINTER_TYPE_P (ttype));
6247
6248 /* Get the underlying type of the object. */
6249 ttype = TREE_TYPE (ttype);
6250 type_align = TYPE_ALIGN (ttype);
6251 }
6252
47ae02b7 6253 /* If the object has smaller alignment, the lock free routines cannot
1cd6e20d 6254 be used. */
6255 if (type_align < mode_align)
06308d2a 6256 return boolean_false_node;
1cd6e20d 6257
6258 /* Check if a compare_and_swap pattern exists for the mode which represents
6259 the required size. The pattern is not allowed to fail, so the existence
d5f5fa27 6260 of the pattern indicates support is present. Also require that an
6261 atomic load exists for the required size. */
6262 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
06308d2a 6263 return boolean_true_node;
1cd6e20d 6264 else
06308d2a 6265 return boolean_false_node;
1cd6e20d 6266}
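
/* For illustration, the two ARG1 forms this folder handles, written as
   user calls (a sketch; both fold to a constant here):

     int obj;

     ARG1 == 0, so typical alignment for the size is used:
       _Bool a1 = __atomic_always_lock_free (sizeof (int), 0);

     ARG1 is a real pointer, so the alignment of its target type is used
     (an INTEGER_CST ARG1 instead encodes the alignment directly, as the
     least_bit_hwi handling above shows):
       _Bool a2 = __atomic_always_lock_free (sizeof obj, &obj);
*/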
6267
6268/* Return true if the parameters to call EXP represent an object which will
6269 always generate lock free instructions. The first argument represents the
6270 size of the object, and the second parameter is a pointer to the object
6271 itself. If NULL is passed for the object, then the result is based on
6272 typical alignment for an object of the specified size. Otherwise return
6273 false. */
6274
6275static rtx
6276expand_builtin_atomic_always_lock_free (tree exp)
6277{
6278 tree size;
6279 tree arg0 = CALL_EXPR_ARG (exp, 0);
6280 tree arg1 = CALL_EXPR_ARG (exp, 1);
6281
6282 if (TREE_CODE (arg0) != INTEGER_CST)
6283 {
6284 error ("non-constant argument 1 to __atomic_always_lock_free");
6285 return const0_rtx;
6286 }
6287
6288 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 6289 if (size == boolean_true_node)
1cd6e20d 6290 return const1_rtx;
6291 return const0_rtx;
6292}
6293
 6294/* Return one or zero if it can be determined that object ARG1 of size ARG0
 6295   is lock free on this architecture.  */
6296
6297static tree
6298fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6299{
6300 if (!flag_inline_atomics)
6301 return NULL_TREE;
6302
6303 /* If it isn't always lock free, don't generate a result. */
06308d2a 6304 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6305 return boolean_true_node;
1cd6e20d 6306
6307 return NULL_TREE;
6308}
6309
6310/* Return true if the parameters to call EXP represent an object which will
6311 always generate lock free instructions. The first argument represents the
6312 size of the object, and the second parameter is a pointer to the object
6313 itself. If NULL is passed for the object, then the result is based on
6314 typical alignment for an object of the specified size. Otherwise return
 6315	   NULL.  */
6316
6317static rtx
6318expand_builtin_atomic_is_lock_free (tree exp)
6319{
6320 tree size;
6321 tree arg0 = CALL_EXPR_ARG (exp, 0);
6322 tree arg1 = CALL_EXPR_ARG (exp, 1);
6323
6324 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6325 {
6326 error ("non-integer argument 1 to __atomic_is_lock_free");
6327 return NULL_RTX;
6328 }
6329
6330 if (!flag_inline_atomics)
6331 return NULL_RTX;
6332
6333 /* If the value is known at compile time, return the RTX for it. */
6334 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 6335 if (size == boolean_true_node)
1cd6e20d 6336 return const1_rtx;
6337
6338 return NULL_RTX;
6339}
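
/* For illustration, the user-visible contrast between the two builtins
   expanded above (a sketch):

     _Bool r1 = __atomic_always_lock_free (sizeof (long), 0);
        (always folds to 0 or 1 at compile time; a non-constant size
         is rejected with an error)

     _Bool r2 = __atomic_is_lock_free (sizeof (long), 0);
        (folds to 1 only when always-lock-free is provable; otherwise
         it becomes a library call resolved at run time)
*/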
6340
1cd6e20d 6341/* Expand the __atomic_thread_fence intrinsic:
6342 void __atomic_thread_fence (enum memmodel)
6343 EXP is the CALL_EXPR. */
6344
6345static void
6346expand_builtin_atomic_thread_fence (tree exp)
6347{
fe54c06b 6348 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6349 expand_mem_thread_fence (model);
1cd6e20d 6350}
6351
6352/* Expand the __atomic_signal_fence intrinsic:
6353 void __atomic_signal_fence (enum memmodel)
6354 EXP is the CALL_EXPR. */
6355
6356static void
6357expand_builtin_atomic_signal_fence (tree exp)
6358{
fe54c06b 6359 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6360 expand_mem_signal_fence (model);
b6a5fc45 6361}
6362
6363/* Expand the __sync_synchronize intrinsic. */
6364
6365static void
2797f13a 6366expand_builtin_sync_synchronize (void)
b6a5fc45 6367{
a372f7ca 6368 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 6369}
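
/* For illustration, source-level forms of the three fences expanded
   above (a sketch):

     __atomic_thread_fence (__ATOMIC_RELEASE);   // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);   // compiler-only fence,
                                                 // e.g. against a signal
                                                 // handler on this thread
     __sync_synchronize ();                      // legacy full barrier,
                                                 // MEMMODEL_SYNC_SEQ_CST
*/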
6370
badaa04c 6371static rtx
6372expand_builtin_thread_pointer (tree exp, rtx target)
6373{
6374 enum insn_code icode;
6375 if (!validate_arglist (exp, VOID_TYPE))
6376 return const0_rtx;
6377 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6378 if (icode != CODE_FOR_nothing)
6379 {
6380 struct expand_operand op;
3ed779c3 6381      /* If the target is not suitable, create a new target. */
6382 if (target == NULL_RTX
6383 || !REG_P (target)
6384 || GET_MODE (target) != Pmode)
badaa04c 6385 target = gen_reg_rtx (Pmode);
6386 create_output_operand (&op, target, Pmode);
6387 expand_insn (icode, 1, &op);
6388 return target;
6389 }
6390 error ("__builtin_thread_pointer is not supported on this target");
6391 return const0_rtx;
6392}
6393
6394static void
6395expand_builtin_set_thread_pointer (tree exp)
6396{
6397 enum insn_code icode;
6398 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6399 return;
6400 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6401 if (icode != CODE_FOR_nothing)
6402 {
6403 struct expand_operand op;
6404 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6405 Pmode, EXPAND_NORMAL);
6f343c10 6406 create_input_operand (&op, val, Pmode);
badaa04c 6407 expand_insn (icode, 1, &op);
6408 return;
6409 }
6410 error ("__builtin_set_thread_pointer is not supported on this target");
6411}
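
/* For illustration, source-level uses of the pair above (a sketch;
   these expand only on targets that provide the corresponding optabs,
   typically where the thread pointer lives in a dedicated register):

     void *tp = __builtin_thread_pointer ();
     __builtin_set_thread_pointer (tp);
*/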
6412
53800dbe 6413\f
0e80b01d 6414/* Emit code to restore the current value of stack. */
6415
6416static void
6417expand_stack_restore (tree var)
6418{
1e0c0b35 6419 rtx_insn *prev;
6420 rtx sa = expand_normal (var);
0e80b01d 6421
6422 sa = convert_memory_address (Pmode, sa);
6423
6424 prev = get_last_insn ();
6425 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 6426
6427 record_new_stack_level ();
6428
0e80b01d 6429 fixup_args_size_notes (prev, get_last_insn (), 0);
6430}
6431
0e80b01d 6432/* Emit code to save the current value of stack. */
6433
6434static rtx
6435expand_stack_save (void)
6436{
6437 rtx ret = NULL_RTX;
6438
0e80b01d 6439 emit_stack_save (SAVE_BLOCK, &ret);
6440 return ret;
6441}
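
/* For illustration, the pairing these two expanders serve; a block
   containing a variable-length array is lowered roughly as follows
   (a sketch; "use" is a placeholder for arbitrary code):

     extern void use (char *);

     void f (int n)
     {
       void *sp = __builtin_stack_save ();
       {
         char vla[n];
         use (vla);
       }
       __builtin_stack_restore (sp);  // release the VLA's stack block
     }
*/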
6442
ca4c3545 6443
53800dbe 6444/* Expand an expression EXP that calls a built-in function,
6445 with result going to TARGET if that's convenient
6446 (and in mode MODE if that's convenient).
6447 SUBTARGET may be used as the target for computing one of EXP's operands.
6448 IGNORE is nonzero if the value is to be ignored. */
6449
6450rtx
3754d046 6451expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 6452 int ignore)
53800dbe 6453{
c6e6ecb1 6454 tree fndecl = get_callee_fndecl (exp);
53800dbe 6455 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 6456 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 6457 int flags;
53800dbe 6458
4e2f4ed5 6459 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6460 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6461
f9acf11a 6462  /* When ASan is enabled, we don't want to expand some memory/string
 6463     builtins inline; instead we rely on libsanitizer's hooks.  This lets us
 6464     avoid redundant checks and be sure that a possible overflow will be
 6465     detected by ASan.  */
6466
6467 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6468 return expand_call (exp, target, ignore);
6469
53800dbe 6470 /* When not optimizing, generate calls to library functions for a certain
6471 set of builtins. */
cd9ff771 6472 if (!optimize
b6a5fc45 6473 && !called_as_built_in (fndecl)
73037a1e 6474 && fcode != BUILT_IN_FORK
6475 && fcode != BUILT_IN_EXECL
6476 && fcode != BUILT_IN_EXECV
6477 && fcode != BUILT_IN_EXECLP
6478 && fcode != BUILT_IN_EXECLE
6479 && fcode != BUILT_IN_EXECVP
6480 && fcode != BUILT_IN_EXECVE
2c281b15 6481 && fcode != BUILT_IN_ALLOCA
581bf1c2 6482 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 6483 && fcode != BUILT_IN_FREE
6484 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6485 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6486 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6487 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6488 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6489 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6490 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6491 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6492 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6493 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6494 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6495 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 6496 return expand_call (exp, target, ignore);
53800dbe 6497
8d6d7930 6498 /* The built-in function expanders test for target == const0_rtx
6499 to determine whether the function's result will be ignored. */
6500 if (ignore)
6501 target = const0_rtx;
6502
6503 /* If the result of a pure or const built-in function is ignored, and
6504 none of its arguments are volatile, we can avoid expanding the
6505 built-in call and just evaluate the arguments for side-effects. */
6506 if (target == const0_rtx
67fa4078 6507 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6508 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 6509 {
6510 bool volatilep = false;
6511 tree arg;
c2f47e15 6512 call_expr_arg_iterator iter;
8d6d7930 6513
c2f47e15 6514 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6515 if (TREE_THIS_VOLATILE (arg))
8d6d7930 6516 {
6517 volatilep = true;
6518 break;
6519 }
6520
6521 if (! volatilep)
6522 {
c2f47e15 6523 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6524 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 6525 return const0_rtx;
6526 }
6527 }
6528
f21337ef 6529 /* expand_builtin_with_bounds is supposed to be used for
6530 instrumented builtin calls. */
058a1b7a 6531 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6532
53800dbe 6533 switch (fcode)
6534 {
4f35b1fc 6535 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 6536 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 6537 case BUILT_IN_FABSD32:
6538 case BUILT_IN_FABSD64:
6539 case BUILT_IN_FABSD128:
c2f47e15 6540 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6541 if (target)
a0c938f0 6542 return target;
78a74442 6543 break;
6544
4f35b1fc 6545 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 6546 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 6547 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6548 if (target)
6549 return target;
6550 break;
6551
7d3f6cc7 6552 /* Just do a normal library call if we were unable to fold
6553 the values. */
4f35b1fc 6554 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6555 break;
53800dbe 6556
7e0713b1 6557 CASE_FLT_FN (BUILT_IN_FMA):
6558 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6559 if (target)
6560 return target;
6561 break;
6562
a67a90e5 6563 CASE_FLT_FN (BUILT_IN_ILOGB):
6564 if (! flag_unsafe_math_optimizations)
6565 break;
12f08300 6566 gcc_fallthrough ();
6567 CASE_FLT_FN (BUILT_IN_ISINF):
6568 CASE_FLT_FN (BUILT_IN_FINITE):
6569 case BUILT_IN_ISFINITE:
6570 case BUILT_IN_ISNORMAL:
f97eea22 6571 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6572 if (target)
6573 return target;
6574 break;
6575
80ff6494 6576 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6577 CASE_FLT_FN (BUILT_IN_LCEIL):
6578 CASE_FLT_FN (BUILT_IN_LLCEIL):
6579 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6580 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6581 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6582 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6583 if (target)
6584 return target;
6585 break;
6586
80ff6494 6587 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6588 CASE_FLT_FN (BUILT_IN_LRINT):
6589 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6590 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6591 CASE_FLT_FN (BUILT_IN_LROUND):
6592 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6593 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6594 if (target)
6595 return target;
6596 break;
6597
4f35b1fc 6598 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6599 target = expand_builtin_powi (exp, target);
757c219d 6600 if (target)
6601 return target;
6602 break;
6603
d735c391 6604 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6605 target = expand_builtin_cexpi (exp, target);
d735c391 6606 gcc_assert (target);
6607 return target;
6608
4f35b1fc 6609 CASE_FLT_FN (BUILT_IN_SIN):
6610 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6611 if (! flag_unsafe_math_optimizations)
6612 break;
6613 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6614 if (target)
6615 return target;
6616 break;
6617
c3147c1a 6618 CASE_FLT_FN (BUILT_IN_SINCOS):
6619 if (! flag_unsafe_math_optimizations)
6620 break;
6621 target = expand_builtin_sincos (exp);
6622 if (target)
6623 return target;
6624 break;
6625
53800dbe 6626 case BUILT_IN_APPLY_ARGS:
6627 return expand_builtin_apply_args ();
6628
6629 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6630 FUNCTION with a copy of the parameters described by
6631 ARGUMENTS, and ARGSIZE. It returns a block of memory
6632 allocated on the stack into which is stored all the registers
6633 that might possibly be used for returning the result of a
6634 function. ARGUMENTS is the value returned by
6635 __builtin_apply_args. ARGSIZE is the number of bytes of
6636 arguments that must be copied. ??? How should this value be
6637 computed? We'll also need a safe worst case value for varargs
6638 functions. */
6639 case BUILT_IN_APPLY:
c2f47e15 6640 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6641 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6642 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6643 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6644 return const0_rtx;
6645 else
6646 {
53800dbe 6647 rtx ops[3];
6648
c2f47e15 6649 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6650 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6651 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6652
6653 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6654 }
6655
6656 /* __builtin_return (RESULT) causes the function to return the
6657 value described by RESULT. RESULT is address of the block of
6658 memory returned by __builtin_apply. */
6659 case BUILT_IN_RETURN:
c2f47e15 6660 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6661 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6662 return const0_rtx;
6663
6664 case BUILT_IN_SAVEREGS:
a66c9326 6665 return expand_builtin_saveregs ();
53800dbe 6666
48dc2227 6667 case BUILT_IN_VA_ARG_PACK:
6668 /* All valid uses of __builtin_va_arg_pack () are removed during
6669 inlining. */
b8c23db3 6670 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6671 return const0_rtx;
6672
4e1d7ea4 6673 case BUILT_IN_VA_ARG_PACK_LEN:
6674 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6675 inlining. */
b8c23db3 6676 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6677 return const0_rtx;
6678
53800dbe 6679 /* Return the address of the first anonymous stack arg. */
6680 case BUILT_IN_NEXT_ARG:
c2f47e15 6681 if (fold_builtin_next_arg (exp, false))
a0c938f0 6682 return const0_rtx;
79012a9d 6683 return expand_builtin_next_arg ();
53800dbe 6684
ac8fb6db 6685 case BUILT_IN_CLEAR_CACHE:
6686 target = expand_builtin___clear_cache (exp);
6687 if (target)
6688 return target;
6689 break;
6690
53800dbe 6691 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6692 return expand_builtin_classify_type (exp);
53800dbe 6693
6694 case BUILT_IN_CONSTANT_P:
4ee9c684 6695 return const0_rtx;
53800dbe 6696
6697 case BUILT_IN_FRAME_ADDRESS:
6698 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6699 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6700
6701 /* Returns the address of the area where the structure is returned.
6702 0 otherwise. */
6703 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6704 if (call_expr_nargs (exp) != 0
9342ee68 6705 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6706 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6707 return const0_rtx;
53800dbe 6708 else
9342ee68 6709 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6710
6711 case BUILT_IN_ALLOCA:
581bf1c2 6712 case BUILT_IN_ALLOCA_WITH_ALIGN:
2b29cc6a 6713 target = expand_builtin_alloca (exp);
53800dbe 6714 if (target)
6715 return target;
6716 break;
6717
d08919a7 6718 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6719 return expand_asan_emit_allocas_unpoison (exp);
6720
4ee9c684 6721 case BUILT_IN_STACK_SAVE:
6722 return expand_stack_save ();
6723
6724 case BUILT_IN_STACK_RESTORE:
c2f47e15 6725 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6726 return const0_rtx;
6727
74bdbe96 6728 case BUILT_IN_BSWAP16:
42791117 6729 case BUILT_IN_BSWAP32:
6730 case BUILT_IN_BSWAP64:
74bdbe96 6731 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6732 if (target)
6733 return target;
6734 break;
6735
4f35b1fc 6736 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6737 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6738 subtarget, ffs_optab);
6a08d0ab 6739 if (target)
6740 return target;
6741 break;
6742
4f35b1fc 6743 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6744 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6745 subtarget, clz_optab);
6a08d0ab 6746 if (target)
6747 return target;
6748 break;
6749
4f35b1fc 6750 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6751 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6752 subtarget, ctz_optab);
6a08d0ab 6753 if (target)
6754 return target;
6755 break;
6756
d8492bd3 6757 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6758 target = expand_builtin_unop (target_mode, exp, target,
6759 subtarget, clrsb_optab);
6760 if (target)
6761 return target;
6762 break;
6763
4f35b1fc 6764 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6765 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6766 subtarget, popcount_optab);
6a08d0ab 6767 if (target)
6768 return target;
6769 break;
6770
4f35b1fc 6771 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6772 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6773 subtarget, parity_optab);
53800dbe 6774 if (target)
6775 return target;
6776 break;
6777
6778 case BUILT_IN_STRLEN:
c2f47e15 6779 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6780 if (target)
6781 return target;
6782 break;
6783
5aef8938 6784 case BUILT_IN_STRCAT:
6785 target = expand_builtin_strcat (exp, target);
6786 if (target)
6787 return target;
6788 break;
6789
53800dbe 6790 case BUILT_IN_STRCPY:
a65c4d64 6791 target = expand_builtin_strcpy (exp, target);
53800dbe 6792 if (target)
6793 return target;
6794 break;
bf8e3599 6795
5aef8938 6796 case BUILT_IN_STRNCAT:
6797 target = expand_builtin_strncat (exp, target);
6798 if (target)
6799 return target;
6800 break;
6801
ed09096d 6802 case BUILT_IN_STRNCPY:
a65c4d64 6803 target = expand_builtin_strncpy (exp, target);
ed09096d 6804 if (target)
6805 return target;
6806 break;
bf8e3599 6807
3b824fa6 6808 case BUILT_IN_STPCPY:
dc369150 6809 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6810 if (target)
6811 return target;
6812 break;
6813
4d317237 6814 case BUILT_IN_STPNCPY:
6815 target = expand_builtin_stpncpy (exp, target);
6816 if (target)
6817 return target;
6818 break;
6819
8d6c6ef5 6820 case BUILT_IN_MEMCHR:
6821 target = expand_builtin_memchr (exp, target);
6822 if (target)
6823 return target;
6824 break;
6825
53800dbe 6826 case BUILT_IN_MEMCPY:
a65c4d64 6827 target = expand_builtin_memcpy (exp, target);
3b824fa6 6828 if (target)
6829 return target;
6830 break;
6831
4d317237 6832 case BUILT_IN_MEMMOVE:
6833 target = expand_builtin_memmove (exp, target);
6834 if (target)
6835 return target;
6836 break;
6837
3b824fa6 6838 case BUILT_IN_MEMPCPY:
d0fbba1a 6839 target = expand_builtin_mempcpy (exp, target);
53800dbe 6840 if (target)
6841 return target;
6842 break;
6843
6844 case BUILT_IN_MEMSET:
c2f47e15 6845 target = expand_builtin_memset (exp, target, mode);
53800dbe 6846 if (target)
6847 return target;
6848 break;
6849
ffc83088 6850 case BUILT_IN_BZERO:
0b25db21 6851 target = expand_builtin_bzero (exp);
ffc83088 6852 if (target)
6853 return target;
6854 break;
6855
53800dbe 6856 case BUILT_IN_STRCMP:
a65c4d64 6857 target = expand_builtin_strcmp (exp, target);
53800dbe 6858 if (target)
6859 return target;
6860 break;
6861
ed09096d 6862 case BUILT_IN_STRNCMP:
6863 target = expand_builtin_strncmp (exp, target, mode);
6864 if (target)
6865 return target;
6866 break;
6867
071f1696 6868 case BUILT_IN_BCMP:
53800dbe 6869 case BUILT_IN_MEMCMP:
3e346f54 6870 case BUILT_IN_MEMCMP_EQ:
6871 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 6872 if (target)
6873 return target;
3e346f54 6874 if (fcode == BUILT_IN_MEMCMP_EQ)
6875 {
6876 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6877 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6878 }
53800dbe 6879 break;
53800dbe 6880
6881 case BUILT_IN_SETJMP:
12f08300 6882 /* This should have been lowered to the builtins below. */
2c8a1497 6883 gcc_unreachable ();
6884
6885 case BUILT_IN_SETJMP_SETUP:
6886 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6887 and the receiver label. */
c2f47e15 6888 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6889 {
c2f47e15 6890 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6891 VOIDmode, EXPAND_NORMAL);
c2f47e15 6892 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6893 rtx_insn *label_r = label_rtx (label);
2c8a1497 6894
6895 /* This is copied from the handling of non-local gotos. */
6896 expand_builtin_setjmp_setup (buf_addr, label_r);
6897 nonlocal_goto_handler_labels
a4de1c23 6898 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6899 nonlocal_goto_handler_labels);
6900 /* ??? Do not let expand_label treat us as such since we would
6901 not want to be both on the list of non-local labels and on
6902 the list of forced labels. */
6903 FORCED_LABEL (label) = 0;
6904 return const0_rtx;
6905 }
6906 break;
6907
2c8a1497 6908 case BUILT_IN_SETJMP_RECEIVER:
6909 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6910 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6911 {
c2f47e15 6912 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6913 rtx_insn *label_r = label_rtx (label);
2c8a1497 6914
6915 expand_builtin_setjmp_receiver (label_r);
6916 return const0_rtx;
6917 }
6b7f6858 6918 break;
53800dbe 6919
6920 /* __builtin_longjmp is passed a pointer to an array of five words.
6921 It's similar to the C library longjmp function but works with
6922 __builtin_setjmp above. */
6923 case BUILT_IN_LONGJMP:
c2f47e15 6924 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6925 {
c2f47e15 6926 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6927 VOIDmode, EXPAND_NORMAL);
c2f47e15 6928 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6929
6930 if (value != const1_rtx)
6931 {
1e5fcbe2 6932 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6933 return const0_rtx;
6934 }
6935
6936 expand_builtin_longjmp (buf_addr, value);
6937 return const0_rtx;
6938 }
2c8a1497 6939 break;
53800dbe 6940
4ee9c684 6941 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6942 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6943 if (target)
6944 return target;
6945 break;
6946
843d08a9 6947 /* This updates the setjmp buffer that is its argument with the value
6948 of the current stack pointer. */
6949 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6950 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6951 {
6952 rtx buf_addr
c2f47e15 6953 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6954
6955 expand_builtin_update_setjmp_buf (buf_addr);
6956 return const0_rtx;
6957 }
6958 break;
6959
53800dbe 6960 case BUILT_IN_TRAP:
a0ef1725 6961 expand_builtin_trap ();
53800dbe 6962 return const0_rtx;
6963
d2b48f0c 6964 case BUILT_IN_UNREACHABLE:
6965 expand_builtin_unreachable ();
6966 return const0_rtx;
6967
4f35b1fc 6968 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6969 case BUILT_IN_SIGNBITD32:
6970 case BUILT_IN_SIGNBITD64:
6971 case BUILT_IN_SIGNBITD128:
27f261ef 6972 target = expand_builtin_signbit (exp, target);
6973 if (target)
6974 return target;
6975 break;
6976
53800dbe 6977 /* Various hooks for the DWARF 2 __throw routine. */
6978 case BUILT_IN_UNWIND_INIT:
6979 expand_builtin_unwind_init ();
6980 return const0_rtx;
6981 case BUILT_IN_DWARF_CFA:
6982 return virtual_cfa_rtx;
6983#ifdef DWARF2_UNWIND_INFO
f8f023a5 6984 case BUILT_IN_DWARF_SP_COLUMN:
6985 return expand_builtin_dwarf_sp_column ();
695e919b 6986 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6987 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6988 return const0_rtx;
53800dbe 6989#endif
6990 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6991 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6992 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6993 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6994 case BUILT_IN_EH_RETURN:
c2f47e15 6995 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6996 CALL_EXPR_ARG (exp, 1));
53800dbe 6997 return const0_rtx;
df4b504c 6998 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6999 return expand_builtin_eh_return_data_regno (exp);
26093bf4 7000 case BUILT_IN_EXTEND_POINTER:
c2f47e15 7001 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 7002 case BUILT_IN_EH_POINTER:
7003 return expand_builtin_eh_pointer (exp);
7004 case BUILT_IN_EH_FILTER:
7005 return expand_builtin_eh_filter (exp);
7006 case BUILT_IN_EH_COPY_VALUES:
7007 return expand_builtin_eh_copy_values (exp);
26093bf4 7008
7ccc713a 7009 case BUILT_IN_VA_START:
c2f47e15 7010 return expand_builtin_va_start (exp);
a66c9326 7011 case BUILT_IN_VA_END:
c2f47e15 7012 return expand_builtin_va_end (exp);
a66c9326 7013 case BUILT_IN_VA_COPY:
c2f47e15 7014 return expand_builtin_va_copy (exp);
89cfe6e5 7015 case BUILT_IN_EXPECT:
c2f47e15 7016 return expand_builtin_expect (exp, target);
fca0886c 7017 case BUILT_IN_ASSUME_ALIGNED:
7018 return expand_builtin_assume_aligned (exp, target);
5e3608d8 7019 case BUILT_IN_PREFETCH:
c2f47e15 7020 expand_builtin_prefetch (exp);
5e3608d8 7021 return const0_rtx;
7022
4ee9c684 7023 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 7024 return expand_builtin_init_trampoline (exp, true);
7025 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7026 return expand_builtin_init_trampoline (exp, false);
4ee9c684 7027 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 7028 return expand_builtin_adjust_trampoline (exp);
4ee9c684 7029
a27e3913 7030 case BUILT_IN_INIT_DESCRIPTOR:
7031 return expand_builtin_init_descriptor (exp);
7032 case BUILT_IN_ADJUST_DESCRIPTOR:
7033 return expand_builtin_adjust_descriptor (exp);
7034
73673831 7035 case BUILT_IN_FORK:
7036 case BUILT_IN_EXECL:
7037 case BUILT_IN_EXECV:
7038 case BUILT_IN_EXECLP:
7039 case BUILT_IN_EXECLE:
7040 case BUILT_IN_EXECVP:
7041 case BUILT_IN_EXECVE:
c2f47e15 7042 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 7043 if (target)
7044 return target;
7045 break;
53800dbe 7046
2797f13a 7047 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7048 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7049 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7050 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7051 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7052 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 7053 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 7054 if (target)
7055 return target;
7056 break;
7057
2797f13a 7058 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7059 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7060 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7061 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7062 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7063 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 7064 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 7065 if (target)
7066 return target;
7067 break;
7068
2797f13a 7069 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7070 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7071 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7072 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7073 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7074 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 7075 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 7076 if (target)
7077 return target;
7078 break;
7079
2797f13a 7080 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7081 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7082 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7083 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7084 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7085 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 7086 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 7087 if (target)
7088 return target;
7089 break;
7090
2797f13a 7091 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7092 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7093 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7094 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7095 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7096 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 7097 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 7098 if (target)
7099 return target;
7100 break;
7101
2797f13a 7102 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7103 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7104 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7105 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7106 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7107 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 7108 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 7109 if (target)
7110 return target;
7111 break;
7112
2797f13a 7113 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7114 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7115 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7116 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7117 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7118 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 7119 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 7120 if (target)
7121 return target;
7122 break;
7123
2797f13a 7124 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7125 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7126 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7127 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7128 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7129 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 7130 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 7131 if (target)
7132 return target;
7133 break;
7134
2797f13a 7135 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7136 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7137 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7138 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7139 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7140 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 7141 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 7142 if (target)
7143 return target;
7144 break;
7145
2797f13a 7146 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7147 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7148 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7149 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7150 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7151 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 7152 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 7153 if (target)
7154 return target;
7155 break;
7156
2797f13a 7157 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7158 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7159 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7160 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7161 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7162 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 7163 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 7164 if (target)
7165 return target;
7166 break;
7167
2797f13a 7168 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7169 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7170 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7171 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7172 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7173 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 7174 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 7175 if (target)
7176 return target;
7177 break;
7178
2797f13a 7179 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7180 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7181 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7182 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7183 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 7184 if (mode == VOIDmode)
7185 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 7186 if (!target || !register_operand (target, mode))
7187 target = gen_reg_rtx (mode);
3e272de8 7188
2797f13a 7189 mode = get_builtin_sync_mode
7190 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 7191 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 7192 if (target)
7193 return target;
7194 break;
7195
2797f13a 7196 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7197 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7198 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7199 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7200 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7201 mode = get_builtin_sync_mode
7202 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 7203 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 7204 if (target)
7205 return target;
7206 break;
7207
2797f13a 7208 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7209 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7210 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7211 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7212 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7213 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7214 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 7215 if (target)
7216 return target;
7217 break;
7218
2797f13a 7219 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7220 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7221 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7222 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7223 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7224 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7225 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 7226 return const0_rtx;
7227
2797f13a 7228 case BUILT_IN_SYNC_SYNCHRONIZE:
7229 expand_builtin_sync_synchronize ();
b6a5fc45 7230 return const0_rtx;
7231
1cd6e20d 7232 case BUILT_IN_ATOMIC_EXCHANGE_1:
7233 case BUILT_IN_ATOMIC_EXCHANGE_2:
7234 case BUILT_IN_ATOMIC_EXCHANGE_4:
7235 case BUILT_IN_ATOMIC_EXCHANGE_8:
7236 case BUILT_IN_ATOMIC_EXCHANGE_16:
7237 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7238 target = expand_builtin_atomic_exchange (mode, exp, target);
7239 if (target)
7240 return target;
7241 break;
7242
7243 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7244 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7245 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7246 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7247 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 7248 {
7249 unsigned int nargs, z;
f1f41a6c 7250 vec<tree, va_gc> *vec;
2c201ad1 7251
7252 mode =
7253 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7254 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7255 if (target)
7256 return target;
7257
7258 /* If this is turned into an external library call, the weak parameter
7259 must be dropped to match the expected parameter list. */
7260 nargs = call_expr_nargs (exp);
f1f41a6c 7261 vec_alloc (vec, nargs - 1);
2c201ad1 7262 for (z = 0; z < 3; z++)
f1f41a6c 7263 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7264 /* Skip the boolean weak parameter. */
7265 for (z = 4; z < 6; z++)
f1f41a6c 7266 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7267 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7268 break;
7269 }
1cd6e20d 7270
7271 case BUILT_IN_ATOMIC_LOAD_1:
7272 case BUILT_IN_ATOMIC_LOAD_2:
7273 case BUILT_IN_ATOMIC_LOAD_4:
7274 case BUILT_IN_ATOMIC_LOAD_8:
7275 case BUILT_IN_ATOMIC_LOAD_16:
7276 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7277 target = expand_builtin_atomic_load (mode, exp, target);
7278 if (target)
7279 return target;
7280 break;
7281
7282 case BUILT_IN_ATOMIC_STORE_1:
7283 case BUILT_IN_ATOMIC_STORE_2:
7284 case BUILT_IN_ATOMIC_STORE_4:
7285 case BUILT_IN_ATOMIC_STORE_8:
7286 case BUILT_IN_ATOMIC_STORE_16:
7287 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7288 target = expand_builtin_atomic_store (mode, exp);
7289 if (target)
7290 return const0_rtx;
7291 break;
7292
7293 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7294 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7295 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7296 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7297 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7298 {
7299 enum built_in_function lib;
7300 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7301 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7302 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7303 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7304 ignore, lib);
7305 if (target)
7306 return target;
7307 break;
7308 }
7309 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7310 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7311 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7312 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7313 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7314 {
7315 enum built_in_function lib;
7316 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7317 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7318 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7319 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7320 ignore, lib);
7321 if (target)
7322 return target;
7323 break;
7324 }
7325 case BUILT_IN_ATOMIC_AND_FETCH_1:
7326 case BUILT_IN_ATOMIC_AND_FETCH_2:
7327 case BUILT_IN_ATOMIC_AND_FETCH_4:
7328 case BUILT_IN_ATOMIC_AND_FETCH_8:
7329 case BUILT_IN_ATOMIC_AND_FETCH_16:
7330 {
7331 enum built_in_function lib;
7332 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7333 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7334 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7335 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7336 ignore, lib);
7337 if (target)
7338 return target;
7339 break;
7340 }
7341 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7342 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7343 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7344 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7345 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7346 {
7347 enum built_in_function lib;
7348 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7349 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7350 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7351 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7352 ignore, lib);
7353 if (target)
7354 return target;
7355 break;
7356 }
7357 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7358 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7359 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7360 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7361 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7362 {
7363 enum built_in_function lib;
7364 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7365 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7366 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7367 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7368 ignore, lib);
7369 if (target)
7370 return target;
7371 break;
7372 }
7373 case BUILT_IN_ATOMIC_OR_FETCH_1:
7374 case BUILT_IN_ATOMIC_OR_FETCH_2:
7375 case BUILT_IN_ATOMIC_OR_FETCH_4:
7376 case BUILT_IN_ATOMIC_OR_FETCH_8:
7377 case BUILT_IN_ATOMIC_OR_FETCH_16:
7378 {
7379 enum built_in_function lib;
7380 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7381 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7382 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7383 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7384 ignore, lib);
7385 if (target)
7386 return target;
7387 break;
7388 }
7389 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7390 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7391 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7392 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7393 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7394 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7395 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7396 ignore, BUILT_IN_NONE);
7397 if (target)
7398 return target;
7399 break;
7400
7401 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7402 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7403 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7404 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7405 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7406 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7407 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7408 ignore, BUILT_IN_NONE);
7409 if (target)
7410 return target;
7411 break;
7412
7413 case BUILT_IN_ATOMIC_FETCH_AND_1:
7414 case BUILT_IN_ATOMIC_FETCH_AND_2:
7415 case BUILT_IN_ATOMIC_FETCH_AND_4:
7416 case BUILT_IN_ATOMIC_FETCH_AND_8:
7417 case BUILT_IN_ATOMIC_FETCH_AND_16:
7418 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7419 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7420 ignore, BUILT_IN_NONE);
7421 if (target)
7422 return target;
7423 break;
7424
7425 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7426 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7427 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7428 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7429 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7430 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7431 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7432 ignore, BUILT_IN_NONE);
7433 if (target)
7434 return target;
7435 break;
7436
7437 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7438 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7439 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7440 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7441 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7442 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7443 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7444 ignore, BUILT_IN_NONE);
7445 if (target)
7446 return target;
7447 break;
7448
7449 case BUILT_IN_ATOMIC_FETCH_OR_1:
7450 case BUILT_IN_ATOMIC_FETCH_OR_2:
7451 case BUILT_IN_ATOMIC_FETCH_OR_4:
7452 case BUILT_IN_ATOMIC_FETCH_OR_8:
7453 case BUILT_IN_ATOMIC_FETCH_OR_16:
7454 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7455 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7456 ignore, BUILT_IN_NONE);
7457 if (target)
7458 return target;
7459 break;
10b744a3 7460
7461 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 7462 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 7463
7464 case BUILT_IN_ATOMIC_CLEAR:
7465 return expand_builtin_atomic_clear (exp);
1cd6e20d 7466
7467 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7468 return expand_builtin_atomic_always_lock_free (exp);
7469
7470 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7471 target = expand_builtin_atomic_is_lock_free (exp);
7472 if (target)
7473 return target;
7474 break;
7475
7476 case BUILT_IN_ATOMIC_THREAD_FENCE:
7477 expand_builtin_atomic_thread_fence (exp);
7478 return const0_rtx;
7479
7480 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7481 expand_builtin_atomic_signal_fence (exp);
7482 return const0_rtx;
7483
0a39fd54 7484 case BUILT_IN_OBJECT_SIZE:
7485 return expand_builtin_object_size (exp);
7486
7487 case BUILT_IN_MEMCPY_CHK:
7488 case BUILT_IN_MEMPCPY_CHK:
7489 case BUILT_IN_MEMMOVE_CHK:
7490 case BUILT_IN_MEMSET_CHK:
7491 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7492 if (target)
7493 return target;
7494 break;
7495
7496 case BUILT_IN_STRCPY_CHK:
7497 case BUILT_IN_STPCPY_CHK:
7498 case BUILT_IN_STRNCPY_CHK:
1063acde 7499 case BUILT_IN_STPNCPY_CHK:
0a39fd54 7500 case BUILT_IN_STRCAT_CHK:
b356dfef 7501 case BUILT_IN_STRNCAT_CHK:
0a39fd54 7502 case BUILT_IN_SNPRINTF_CHK:
7503 case BUILT_IN_VSNPRINTF_CHK:
7504 maybe_emit_chk_warning (exp, fcode);
7505 break;
7506
7507 case BUILT_IN_SPRINTF_CHK:
7508 case BUILT_IN_VSPRINTF_CHK:
7509 maybe_emit_sprintf_chk_warning (exp, fcode);
7510 break;
7511
2c281b15 7512 case BUILT_IN_FREE:
f74ea1c2 7513 if (warn_free_nonheap_object)
7514 maybe_emit_free_warning (exp);
2c281b15 7515 break;
7516
badaa04c 7517 case BUILT_IN_THREAD_POINTER:
7518 return expand_builtin_thread_pointer (exp, target);
7519
7520 case BUILT_IN_SET_THREAD_POINTER:
7521 expand_builtin_set_thread_pointer (exp);
7522 return const0_rtx;
7523
d037099f 7524 case BUILT_IN_CILK_DETACH:
7525 expand_builtin_cilk_detach (exp);
7526 return const0_rtx;
7527
7528 case BUILT_IN_CILK_POP_FRAME:
7529 expand_builtin_cilk_pop_frame (exp);
7530 return const0_rtx;
7531
058a1b7a 7532 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7533 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7534 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7535 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7536 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7537 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7538 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7539 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7540 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7541 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7542 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7543 /* We allow user CHKP builtins if Pointer Bounds
7544 Checker is off. */
7545 if (!chkp_function_instrumented_p (current_function_decl))
7546 {
7547 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7548 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7549 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7550 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7551 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7552 return expand_normal (CALL_EXPR_ARG (exp, 0));
7553 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7554 return expand_normal (size_zero_node);
7555 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7556 return expand_normal (size_int (-1));
7557 else
7558 return const0_rtx;
7559 }
7560 /* FALLTHROUGH */
7561
7562 case BUILT_IN_CHKP_BNDMK:
7563 case BUILT_IN_CHKP_BNDSTX:
7564 case BUILT_IN_CHKP_BNDCL:
7565 case BUILT_IN_CHKP_BNDCU:
7566 case BUILT_IN_CHKP_BNDLDX:
7567 case BUILT_IN_CHKP_BNDRET:
7568 case BUILT_IN_CHKP_INTERSECT:
7569 case BUILT_IN_CHKP_NARROW:
7570 case BUILT_IN_CHKP_EXTRACT_LOWER:
7571 case BUILT_IN_CHKP_EXTRACT_UPPER:
7572 /* Software implementation of Pointer Bounds Checker is NYI.
7573 Target support is required. */
7574 error ("Your target platform does not support -fcheck-pointer-bounds");
7575 break;
7576
ca4c3545 7577 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 7578      /* Fall back to a library call if we failed to expand the
 7579	 builtin when folding.  */
ca4c3545 7580 break;
7581
92482ee0 7582    default:	/* Just do a library call for any unknown builtin.  */
146c1b4f 7583 break;
53800dbe 7584 }
7585
7586 /* The switch statement above can drop through to cause the function
7587 to be called normally. */
7588 return expand_call (exp, target, ignore);
7589}
650e4c94 7590
f21337ef 7591/* Similar to expand_builtin but is used for instrumented calls. */
7592
7593rtx
7594expand_builtin_with_bounds (tree exp, rtx target,
7595 rtx subtarget ATTRIBUTE_UNUSED,
7596 machine_mode mode, int ignore)
7597{
7598 tree fndecl = get_callee_fndecl (exp);
7599 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7600
7601 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7602
7603 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7604 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7605
7606 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7607 && fcode < END_CHKP_BUILTINS);
7608
7609 switch (fcode)
7610 {
7611 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7612 target = expand_builtin_memcpy_with_bounds (exp, target);
7613 if (target)
7614 return target;
7615 break;
7616
7617 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
d0fbba1a 7618 target = expand_builtin_mempcpy_with_bounds (exp, target);
f21337ef 7619 if (target)
7620 return target;
7621 break;
7622
7623 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7624 target = expand_builtin_memset_with_bounds (exp, target, mode);
7625 if (target)
7626 return target;
7627 break;
7628
7629 default:
7630 break;
7631 }
7632
7633 /* The switch statement above can drop through to cause the function
7634 to be called normally. */
7635 return expand_call (exp, target, ignore);
7636 }
7637
805e22b2 7638/* Determine whether a tree node represents a call to a built-in
52203a9d 7639 function. If the tree T is a call to a built-in function with
7640 the right number of arguments of the appropriate types, return
7641 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7642 Otherwise the return value is END_BUILTINS. */
aecda0d6 7643
805e22b2 7644enum built_in_function
b7bf20db 7645builtin_mathfn_code (const_tree t)
805e22b2 7646{
b7bf20db 7647 const_tree fndecl, arg, parmlist;
7648 const_tree argtype, parmtype;
7649 const_call_expr_arg_iterator iter;
805e22b2 7650
7651 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 7652 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 7653 return END_BUILTINS;
7654
c6e6ecb1 7655 fndecl = get_callee_fndecl (t);
7656 if (fndecl == NULL_TREE
52203a9d 7657 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7658 || ! DECL_BUILT_IN (fndecl)
7659 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7660 return END_BUILTINS;
7661
52203a9d 7662 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7663 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7664 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7665 {
52203a9d 7666 /* If a function doesn't take a variable number of arguments,
7667 the last element in the list will have type `void'. */
7668 parmtype = TREE_VALUE (parmlist);
7669 if (VOID_TYPE_P (parmtype))
7670 {
b7bf20db 7671 if (more_const_call_expr_args_p (&iter))
52203a9d 7672 return END_BUILTINS;
7673 return DECL_FUNCTION_CODE (fndecl);
7674 }
7675
b7bf20db 7676 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7677 return END_BUILTINS;
48e1416a 7678
b7bf20db 7679 arg = next_const_call_expr_arg (&iter);
c2f47e15 7680 argtype = TREE_TYPE (arg);
52203a9d 7681
7682 if (SCALAR_FLOAT_TYPE_P (parmtype))
7683 {
7684 if (! SCALAR_FLOAT_TYPE_P (argtype))
7685 return END_BUILTINS;
7686 }
7687 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7688 {
7689 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7690 return END_BUILTINS;
7691 }
7692 else if (POINTER_TYPE_P (parmtype))
7693 {
7694 if (! POINTER_TYPE_P (argtype))
7695 return END_BUILTINS;
7696 }
7697 else if (INTEGRAL_TYPE_P (parmtype))
7698 {
7699 if (! INTEGRAL_TYPE_P (argtype))
7700 return END_BUILTINS;
7701 }
7702 else
e9f80ff5 7703 return END_BUILTINS;
e9f80ff5 7704 }
7705
52203a9d 7706 /* Variable-length argument list. */
805e22b2 7707 return DECL_FUNCTION_CODE (fndecl);
7708}
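
/* For illustration (a sketch): given the C source

     extern double sqrt (double);
     double y = sqrt (x);

   the CALL_EXPR for "sqrt (x)" passes the checks above and yields
   BUILT_IN_SQRT, whereas a call whose argument fails the matching test
   for its parameter type (say, a pointer where a double is expected)
   yields END_BUILTINS.  */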
7709
c2f47e15 7710/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7711 evaluate to a constant. */
650e4c94 7712
7713static tree
c2f47e15 7714fold_builtin_constant_p (tree arg)
650e4c94 7715{
650e4c94 7716 /* We return 1 for a numeric type that's known to be a constant
7717 value at compile-time or for an aggregate type that's a
7718 literal constant. */
c2f47e15 7719 STRIP_NOPS (arg);
650e4c94 7720
7721 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7722 if (CONSTANT_CLASS_P (arg)
7723 || (TREE_CODE (arg) == CONSTRUCTOR
7724 && TREE_CONSTANT (arg)))
650e4c94 7725 return integer_one_node;
c2f47e15 7726 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7727 {
c2f47e15 7728 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7729 if (TREE_CODE (op) == STRING_CST
7730 || (TREE_CODE (op) == ARRAY_REF
7731 && integer_zerop (TREE_OPERAND (op, 1))
7732 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7733 return integer_one_node;
7734 }
650e4c94 7735
1fb4300c 7736 /* If this expression has side effects, show we don't know it to be a
 7737     constant.  Likewise if it's a pointer or aggregate type, since in
 7738     those cases we only want literals, as those are only optimized
f97c71a1 7739 when generating RTL, not later.
7740 And finally, if we are compiling an initializer, not code, we
7741 need to return a definite result now; there's not going to be any
7742 more optimization done. */
c2f47e15 7743 if (TREE_SIDE_EFFECTS (arg)
7744 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7745 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7746 || cfun == 0
0b049e15 7747 || folding_initializer
7748 || force_folding_builtin_constant_p)
650e4c94 7749 return integer_zero_node;
7750
c2f47e15 7751 return NULL_TREE;
650e4c94 7752}
7753
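/* Editor's illustrative sketch (not part of the GCC sources), showing
   the folds above on user-level arguments:

     __builtin_constant_p (42)     -> 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")  -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x++)    -> 0   (side effects)

   A plain variable yields NULL_TREE here, deferring the decision to a
   later folding pass.  */
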
76f5a783 7754/* Create builtin_expect with PRED and EXPECTED as its arguments and
7755 return it as a truthvalue. */
4ee9c684 7756
7757static tree
c83059be 7758build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7759 tree predictor)
4ee9c684 7760{
76f5a783 7761 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7762
b9a16870 7763 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7764 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7765 ret_type = TREE_TYPE (TREE_TYPE (fn));
7766 pred_type = TREE_VALUE (arg_types);
7767 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7768
389dd41b 7769 pred = fold_convert_loc (loc, pred_type, pred);
7770 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7771 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7772 predictor);
76f5a783 7773
7774 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7775 build_int_cst (ret_type, 0));
7776}
7777
7778/* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2 (a
 7779 predictor hint, or NULL_TREE). Return NULL_TREE if no simplification is possible. */
7780
c83059be 7781tree
7782fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
76f5a783 7783{
083bada9 7784 tree inner, fndecl, inner_arg0;
76f5a783 7785 enum tree_code code;
7786
083bada9 7787 /* Distribute the expected value over short-circuiting operators.
7788 See through the cast from truthvalue_type_node to long. */
7789 inner_arg0 = arg0;
d09ef31a 7790 while (CONVERT_EXPR_P (inner_arg0)
083bada9 7791 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7792 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7793 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7794
76f5a783 7795 /* If this is a builtin_expect within a builtin_expect keep the
7796 inner one. See through a comparison against a constant. It
7797 might have been added to create a thruthvalue. */
083bada9 7798 inner = inner_arg0;
7799
76f5a783 7800 if (COMPARISON_CLASS_P (inner)
7801 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7802 inner = TREE_OPERAND (inner, 0);
7803
7804 if (TREE_CODE (inner) == CALL_EXPR
7805 && (fndecl = get_callee_fndecl (inner))
7806 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7807 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7808 return arg0;
7809
083bada9 7810 inner = inner_arg0;
76f5a783 7811 code = TREE_CODE (inner);
7812 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7813 {
7814 tree op0 = TREE_OPERAND (inner, 0);
7815 tree op1 = TREE_OPERAND (inner, 1);
7816
c83059be 7817 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7818 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
76f5a783 7819 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7820
389dd41b 7821 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 7822 }
7823
7824 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 7825 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 7826 return NULL_TREE;
4ee9c684 7827
76f5a783 7828 /* If we expect that a comparison against the argument will fold to
7829 a constant return the constant. In practice, this means a true
7830 constant or the address of a non-weak symbol. */
083bada9 7831 inner = inner_arg0;
4ee9c684 7832 STRIP_NOPS (inner);
7833 if (TREE_CODE (inner) == ADDR_EXPR)
7834 {
7835 do
7836 {
7837 inner = TREE_OPERAND (inner, 0);
7838 }
7839 while (TREE_CODE (inner) == COMPONENT_REF
7840 || TREE_CODE (inner) == ARRAY_REF);
53e9c5c4 7841 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
c2f47e15 7842 return NULL_TREE;
4ee9c684 7843 }
7844
76f5a783 7845 /* Otherwise, ARG0 already has the proper type for the return value. */
7846 return arg0;
4ee9c684 7847}
7848
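/* Editor's illustrative sketch (not part of the GCC sources): the
   distribution step above rewrites

     __builtin_expect (a && b, 1)

   into the equivalent of

     __builtin_expect (a, 1) && __builtin_expect (b, 1)

   so that each short-circuit arm carries the branch expectation.  */
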
c2f47e15 7849/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7850
539a3a92 7851static tree
c2f47e15 7852fold_builtin_classify_type (tree arg)
539a3a92 7853{
c2f47e15 7854 if (arg == 0)
7002a1c8 7855 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7856
7002a1c8 7857 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7858}
7859
c2f47e15 7860/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7861
7862static tree
c7cbde74 7863fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7864{
c2f47e15 7865 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7866 return NULL_TREE;
7867 else
7868 {
c2f47e15 7869 tree len = c_strlen (arg, 0);
e6e27594 7870
7871 if (len)
c7cbde74 7872 return fold_convert_loc (loc, type, len);
e6e27594 7873
7874 return NULL_TREE;
7875 }
7876}
7877
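/* Editor's illustrative sketch (not part of the GCC sources): through
   c_strlen, __builtin_strlen ("abc") folds to the constant 3 converted
   to the call's return type; a non-constant argument is left alone.  */
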
92c43e3c 7878/* Fold a call to __builtin_inf or __builtin_huge_val. */
7879
7880static tree
389dd41b 7881fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7882{
aa870c1b 7883 REAL_VALUE_TYPE real;
7884
40f4dbd5 7885 /* __builtin_inff is intended to be usable to define INFINITY on all
7886 targets. If an infinity is not available, INFINITY expands "to a
7887 positive constant of type float that overflows at translation
7888 time", footnote "In this case, using INFINITY will violate the
7889 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7890 Thus we pedwarn to ensure this constraint violation is
7891 diagnosed. */
92c43e3c 7892 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7893 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7894
aa870c1b 7895 real_inf (&real);
7896 return build_real (type, real);
92c43e3c 7897}
7898
d735c391 7899/* Fold a call to builtin sincos, sincosf, or sincosl. Return
7900 NULL_TREE if no simplification can be made. */
7901
7902static tree
389dd41b 7903fold_builtin_sincos (location_t loc,
7904 tree arg0, tree arg1, tree arg2)
d735c391 7905{
c2f47e15 7906 tree type;
6c21be92 7907 tree fndecl, call = NULL_TREE;
d735c391 7908
c2f47e15 7909 if (!validate_arg (arg0, REAL_TYPE)
7910 || !validate_arg (arg1, POINTER_TYPE)
7911 || !validate_arg (arg2, POINTER_TYPE))
d735c391 7912 return NULL_TREE;
7913
d735c391 7914 type = TREE_TYPE (arg0);
d735c391 7915
7916 /* Canonicalize sincos to cexpi. */
e3240774 7917 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
6c21be92 7918 if (fn == END_BUILTINS)
d735c391 7919 return NULL_TREE;
7920
6c21be92 7921 /* Calculate the result when the argument is a constant. */
7922 if (TREE_CODE (arg0) == REAL_CST)
7923 {
7924 tree complex_type = build_complex_type (type);
744fe358 7925 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
6c21be92 7926 }
7927 if (!call)
7928 {
7929 if (!targetm.libc_has_function (function_c99_math_complex)
7930 || !builtin_decl_implicit_p (fn))
7931 return NULL_TREE;
7932 fndecl = builtin_decl_explicit (fn);
7933 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7934 call = builtin_save_expr (call);
7935 }
d735c391 7936
a75b1c71 7937 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 7938 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7939 build_fold_indirect_ref_loc (loc, arg1),
6c21be92 7940 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
d735c391 7941 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7942 build_fold_indirect_ref_loc (loc, arg2),
6c21be92 7943 fold_build1_loc (loc, REALPART_EXPR, type, call)));
d735c391 7944}
7945
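/* Editor's illustrative sketch (not part of the GCC sources): for
   sincos (x, &s, &c) the code above builds the equivalent of

     tmp = cexpi (x), s = __imag__ tmp, c = __real__ tmp;

   assuming the C99 complex runtime is available; when x is a REAL_CST
   the cexpi value is computed at compile time instead.  */
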
c2f47e15 7946/* Fold a call to builtin memcmp with arguments ARG1, ARG2 and LEN.
 7947 Return NULL_TREE if no simplification can be made. */
9c8a1629 7948
7949static tree
389dd41b 7950fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 7951{
c2f47e15 7952 if (!validate_arg (arg1, POINTER_TYPE)
7953 || !validate_arg (arg2, POINTER_TYPE)
7954 || !validate_arg (len, INTEGER_TYPE))
7955 return NULL_TREE;
9c8a1629 7956
7957 /* If the LEN parameter is zero, return zero. */
7958 if (integer_zerop (len))
389dd41b 7959 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 7960 arg1, arg2);
9c8a1629 7961
7962 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7963 if (operand_equal_p (arg1, arg2, 0))
389dd41b 7964 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 7965
c4fef134 7966 /* If len parameter is one, return an expression corresponding to
7967 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
e913b5cd 7968 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 7969 {
7970 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 7971 tree cst_uchar_ptr_node
7972 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7973
389dd41b 7974 tree ind1
7975 = fold_convert_loc (loc, integer_type_node,
7976 build1 (INDIRECT_REF, cst_uchar_node,
7977 fold_convert_loc (loc,
7978 cst_uchar_ptr_node,
c4fef134 7979 arg1)));
389dd41b 7980 tree ind2
7981 = fold_convert_loc (loc, integer_type_node,
7982 build1 (INDIRECT_REF, cst_uchar_node,
7983 fold_convert_loc (loc,
7984 cst_uchar_ptr_node,
c4fef134 7985 arg2)));
389dd41b 7986 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 7987 }
9c8a1629 7988
c2f47e15 7989 return NULL_TREE;
9c8a1629 7990}
7991
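/* Editor's illustrative sketch (not part of the GCC sources): given a
   constant length of 1, a call such as

     __builtin_memcmp (a, b, 1)

   folds to the equivalent of

     *(const unsigned char *) a - *(const unsigned char *) b

   while a zero length folds to 0 with both pointer arguments kept for
   their side effects.  */
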
c2f47e15 7992/* Fold a call to builtin isascii with argument ARG. */
d49367d4 7993
7994static tree
389dd41b 7995fold_builtin_isascii (location_t loc, tree arg)
d49367d4 7996{
c2f47e15 7997 if (!validate_arg (arg, INTEGER_TYPE))
7998 return NULL_TREE;
d49367d4 7999 else
8000 {
8001 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 8002 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8003 build_int_cst (integer_type_node,
c90b5d40 8004 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 8005 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 8006 arg, integer_zero_node);
d49367d4 8007 }
8008}
8009
c2f47e15 8010/* Fold a call to builtin toascii with argument ARG. */
d49367d4 8011
8012static tree
389dd41b 8013fold_builtin_toascii (location_t loc, tree arg)
d49367d4 8014{
c2f47e15 8015 if (!validate_arg (arg, INTEGER_TYPE))
8016 return NULL_TREE;
48e1416a 8017
c2f47e15 8018 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 8019 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8020 build_int_cst (integer_type_node, 0x7f));
d49367d4 8021}
8022
c2f47e15 8023/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 8024
8025static tree
389dd41b 8026fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 8027{
c2f47e15 8028 if (!validate_arg (arg, INTEGER_TYPE))
8029 return NULL_TREE;
df1cf42e 8030 else
8031 {
8032 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 8033 /* According to the C standard, isdigit is unaffected by locale.
8034 However, it definitely is affected by the target character set. */
624d37a6 8035 unsigned HOST_WIDE_INT target_digit0
8036 = lang_hooks.to_target_charset ('0');
8037
8038 if (target_digit0 == 0)
8039 return NULL_TREE;
8040
389dd41b 8041 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 8042 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8043 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 8044 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 8045 build_int_cst (unsigned_type_node, 9));
df1cf42e 8046 }
8047}
27f261ef 8048
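/* Editor's note (not part of the GCC sources): the isdigit fold relies
   on unsigned wraparound: (unsigned) c - '0' <= 9 holds exactly for
   '0' <= c <= '9', because values below '0' wrap to huge unsigned
   numbers.  E.g. in ASCII, c = '/' (47) gives 47 - 48 == UINT_MAX,
   which is not <= 9.  */
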
c2f47e15 8049/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 8050
8051static tree
389dd41b 8052fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 8053{
c2f47e15 8054 if (!validate_arg (arg, REAL_TYPE))
8055 return NULL_TREE;
d1aade50 8056
389dd41b 8057 arg = fold_convert_loc (loc, type, arg);
389dd41b 8058 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8059}
8060
c2f47e15 8061/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 8062
8063static tree
389dd41b 8064fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 8065{
c2f47e15 8066 if (!validate_arg (arg, INTEGER_TYPE))
8067 return NULL_TREE;
d1aade50 8068
389dd41b 8069 arg = fold_convert_loc (loc, type, arg);
389dd41b 8070 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8071}
8072
b9be572e 8073/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8074
8075static tree
8076fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8077{
866b3d58 8078 /* ??? Only expand to FMA_EXPR if it's directly supported. */
b9be572e 8079 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8080 && validate_arg (arg1, REAL_TYPE)
866b3d58 8081 && validate_arg (arg2, REAL_TYPE)
8082 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8083 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
b9be572e 8084
b9be572e 8085 return NULL_TREE;
8086}
8087
abe4dcf6 8088/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8089
8090static tree
389dd41b 8091fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 8092{
239d491a 8093 if (validate_arg (arg, COMPLEX_TYPE)
8094 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 8095 {
8096 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 8097
abe4dcf6 8098 if (atan2_fn)
8099 {
c2f47e15 8100 tree new_arg = builtin_save_expr (arg);
389dd41b 8101 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8102 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8103 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 8104 }
8105 }
48e1416a 8106
abe4dcf6 8107 return NULL_TREE;
8108}
8109
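/* Editor's illustrative sketch (not part of the GCC sources): for a
   complex z = a + b*i, carg (z) folds to atan2 (b, a); the code above
   builds atan2 (__imag__ z, __real__ z) with z wrapped in a SAVE_EXPR
   so it is evaluated only once.  */
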
3838b9ae 8110/* Fold a call to builtin frexp; we can assume the base is 2. */
8111
8112static tree
389dd41b 8113fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 8114{
8115 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8116 return NULL_TREE;
48e1416a 8117
3838b9ae 8118 STRIP_NOPS (arg0);
48e1416a 8119
3838b9ae 8120 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8121 return NULL_TREE;
48e1416a 8122
389dd41b 8123 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 8124
8125 /* Proceed if a valid pointer type was passed in. */
8126 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8127 {
8128 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8129 tree frac, exp;
48e1416a 8130
3838b9ae 8131 switch (value->cl)
8132 {
8133 case rvc_zero:
8134 /* For +-0, return (*exp = 0, +-0). */
8135 exp = integer_zero_node;
8136 frac = arg0;
8137 break;
8138 case rvc_nan:
8139 case rvc_inf:
8140 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 8141 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 8142 case rvc_normal:
8143 {
8144 /* Since the frexp function always expects base 2, and in
8145 GCC normalized significands are already in the range
8146 [0.5, 1.0), we have exactly what frexp wants. */
8147 REAL_VALUE_TYPE frac_rvt = *value;
8148 SET_REAL_EXP (&frac_rvt, 0);
8149 frac = build_real (rettype, frac_rvt);
7002a1c8 8150 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 8151 }
8152 break;
8153 default:
8154 gcc_unreachable ();
8155 }
48e1416a 8156
3838b9ae 8157 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
389dd41b 8158 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 8159 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8160 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 8161 }
8162
8163 return NULL_TREE;
8164}
8165
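/* Editor's worked example (not part of the GCC sources): since GCC
   keeps normalized significands in [0.5, 1.0), frexp (8.0, &e) folds
   to the pair frac = 0.5, *e = 4, matching 8.0 == 0.5 * 2**4.  */
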
ebf8b4f5 8166/* Fold a call to builtin modf. */
8167
8168static tree
389dd41b 8169fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 8170{
8171 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8172 return NULL_TREE;
48e1416a 8173
ebf8b4f5 8174 STRIP_NOPS (arg0);
48e1416a 8175
ebf8b4f5 8176 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8177 return NULL_TREE;
48e1416a 8178
389dd41b 8179 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 8180
8181 /* Proceed if a valid pointer type was passed in. */
8182 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8183 {
8184 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8185 REAL_VALUE_TYPE trunc, frac;
8186
8187 switch (value->cl)
8188 {
8189 case rvc_nan:
8190 case rvc_zero:
8191 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8192 trunc = frac = *value;
8193 break;
8194 case rvc_inf:
8195 /* For +-Inf, return (*arg1 = arg0, +-0). */
8196 frac = dconst0;
8197 frac.sign = value->sign;
8198 trunc = *value;
8199 break;
8200 case rvc_normal:
8201 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8202 real_trunc (&trunc, VOIDmode, value);
8203 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8204 /* If the original number was negative and already
8205 integral, then the fractional part is -0.0. */
8206 if (value->sign && frac.cl == rvc_zero)
8207 frac.sign = value->sign;
8208 break;
8209 }
48e1416a 8210
ebf8b4f5 8211 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 8212 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 8213 build_real (rettype, trunc));
8214 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8215 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 8216 build_real (rettype, frac));
8217 }
48e1416a 8218
ebf8b4f5 8219 return NULL_TREE;
8220}
8221
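/* Editor's worked example (not part of the GCC sources): per the
   rvc_normal case above, modf (-2.5, &ip) folds to *ip = -2.0 with
   result -0.5, and modf (-2.0, &ip) yields *ip = -2.0 with a -0.0
   fractional part, the sign being copied explicitly.  */
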
12f08300 8222/* Given a location LOC, an interclass builtin function decl FNDECL
 8223 and its single argument ARG, return a folded expression computing
 8224 the same, or NULL_TREE if we either couldn't or didn't want to fold
 8225 (the latter happens if there's an RTL instruction available). */
8226
8227static tree
8228fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8229{
8230 machine_mode mode;
8231
8232 if (!validate_arg (arg, REAL_TYPE))
8233 return NULL_TREE;
8234
8235 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8236 return NULL_TREE;
8237
8238 mode = TYPE_MODE (TREE_TYPE (arg));
8239
8240 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7f38718f 8241
12f08300 8242 /* If there is no optab, try generic code. */
8243 switch (DECL_FUNCTION_CODE (fndecl))
8244 {
8245 tree result;
a65c4d64 8246
12f08300 8247 CASE_FLT_FN (BUILT_IN_ISINF):
8248 {
8249 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8250 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8251 tree type = TREE_TYPE (arg);
8252 REAL_VALUE_TYPE r;
8253 char buf[128];
8254
8255 if (is_ibm_extended)
8256 {
8257 /* NaN and Inf are encoded in the high-order double value
8258 only. The low-order value is not significant. */
8259 type = double_type_node;
8260 mode = DFmode;
8261 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8262 }
8263 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8264 real_from_string (&r, buf);
8265 result = build_call_expr (isgr_fn, 2,
8266 fold_build1_loc (loc, ABS_EXPR, type, arg),
8267 build_real (type, r));
8268 return result;
8269 }
8270 CASE_FLT_FN (BUILT_IN_FINITE):
8271 case BUILT_IN_ISFINITE:
8272 {
8273 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8274 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8275 tree type = TREE_TYPE (arg);
8276 REAL_VALUE_TYPE r;
8277 char buf[128];
8278
8279 if (is_ibm_extended)
8280 {
8281 /* NaN and Inf are encoded in the high-order double value
8282 only. The low-order value is not significant. */
8283 type = double_type_node;
8284 mode = DFmode;
8285 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8286 }
8287 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8288 real_from_string (&r, buf);
8289 result = build_call_expr (isle_fn, 2,
8290 fold_build1_loc (loc, ABS_EXPR, type, arg),
8291 build_real (type, r));
8292 /*result = fold_build2_loc (loc, UNGT_EXPR,
8293 TREE_TYPE (TREE_TYPE (fndecl)),
8294 fold_build1_loc (loc, ABS_EXPR, type, arg),
8295 build_real (type, r));
8296 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8297 TREE_TYPE (TREE_TYPE (fndecl)),
8298 result);*/
8299 return result;
8300 }
8301 case BUILT_IN_ISNORMAL:
8302 {
8303 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8304 islessequal(fabs(x),DBL_MAX). */
8305 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8306 tree type = TREE_TYPE (arg);
8307 tree orig_arg, max_exp, min_exp;
8308 machine_mode orig_mode = mode;
8309 REAL_VALUE_TYPE rmax, rmin;
8310 char buf[128];
8311
8312 orig_arg = arg = builtin_save_expr (arg);
8313 if (is_ibm_extended)
8314 {
8315 /* Use double to test the normal range of IBM extended
8316 precision. Emin for IBM extended precision is
8317 different to emin for IEEE double, being 53 higher
8318 since the low double exponent is at least 53 lower
8319 than the high double exponent. */
8320 type = double_type_node;
8321 mode = DFmode;
8322 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8323 }
8324 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8325
8326 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8327 real_from_string (&rmax, buf);
8328 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8329 real_from_string (&rmin, buf);
8330 max_exp = build_real (type, rmax);
8331 min_exp = build_real (type, rmin);
8332
8333 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8334 if (is_ibm_extended)
8335 {
8336 /* Testing the high end of the range is done just using
8337 the high double, using the same test as isfinite().
8338 For the subnormal end of the range we first test the
8339 high double, then if its magnitude is equal to the
8340 limit of 0x1p-969, we test whether the low double is
8341 non-zero and opposite sign to the high double. */
8342 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8343 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8344 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8345 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8346 arg, min_exp);
8347 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8348 complex_double_type_node, orig_arg);
8349 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8350 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8351 tree zero = build_real (type, dconst0);
8352 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8353 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8354 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8355 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8356 fold_build3 (COND_EXPR,
8357 integer_type_node,
8358 hilt, logt, lolt));
8359 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8360 eq_min, ok_lo);
8361 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8362 gt_min, eq_min);
8363 }
8364 else
8365 {
8366 tree const isge_fn
8367 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8368 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8369 }
8370 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8371 max_exp, min_exp);
8372 return result;
8373 }
8374 default:
8375 break;
8376 }
8377
8378 return NULL_TREE;
8379}
8380
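/* Editor's illustrative sketch (not part of the GCC sources): absent
   an RTL pattern, the generic code above expands

     __builtin_isinf (x)     ->  isgreater (fabs (x), MAX)
     __builtin_isfinite (x)  ->  islessequal (fabs (x), MAX)

   where MAX is the largest finite value of x's mode as produced by
   get_max_float; the unordered comparisons stay quiet on NaNs.  */
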
8381/* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
c2f47e15 8382 ARG is the argument for the call; BUILTIN_INDEX selects the builtin. */
726069ba 8383
8384static tree
12f08300 8385fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 8386{
12f08300 8387 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8388
c2f47e15 8389 if (!validate_arg (arg, REAL_TYPE))
d43cee80 8390 return NULL_TREE;
726069ba 8391
726069ba 8392 switch (builtin_index)
8393 {
12f08300 8394 case BUILT_IN_ISINF:
8395 if (!HONOR_INFINITIES (arg))
8396 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8397
8398 return NULL_TREE;
8399
c319d56a 8400 case BUILT_IN_ISINF_SIGN:
8401 {
8402 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8403 /* In a boolean context, GCC will fold the inner COND_EXPR to
8404 1. So e.g. "if (isinf_sign(x))" would be folded to just
8405 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
6cfc7001 8406 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
b9a16870 8407 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 8408 tree tmp = NULL_TREE;
8409
8410 arg = builtin_save_expr (arg);
8411
8412 if (signbit_fn && isinf_fn)
8413 {
389dd41b 8414 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8415 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 8416
389dd41b 8417 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 8418 signbit_call, integer_zero_node);
389dd41b 8419 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 8420 isinf_call, integer_zero_node);
48e1416a 8421
389dd41b 8422 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 8423 integer_minus_one_node, integer_one_node);
389dd41b 8424 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8425 isinf_call, tmp,
c319d56a 8426 integer_zero_node);
8427 }
8428
8429 return tmp;
8430 }
8431
12f08300 8432 case BUILT_IN_ISFINITE:
8433 if (!HONOR_NANS (arg)
8434 && !HONOR_INFINITIES (arg))
8435 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8436
8437 return NULL_TREE;
8438
8439 case BUILT_IN_ISNAN:
8440 if (!HONOR_NANS (arg))
8441 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8442
8443 {
8444 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8445 if (is_ibm_extended)
8446 {
8447 /* NaN and Inf are encoded in the high-order double value
8448 only. The low-order value is not significant. */
8449 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8450 }
8451 }
8452 arg = builtin_save_expr (arg);
8453 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8454
726069ba 8455 default:
64db345d 8456 gcc_unreachable ();
726069ba 8457 }
8458}
8459
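/* Editor's illustrative sketch (not part of the GCC sources): when
   NaNs need not be honored, __builtin_isnan (x) folds straight to 0;
   otherwise it becomes the self-comparison UNORDERED_EXPR <x, x>,
   which is true exactly for NaN.  */
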
12f08300 8460/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8461 This builtin will generate code to return the appropriate floating
8462 point classification depending on the value of the floating point
8463 number passed in. The possible return values must be supplied as
8464 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
 8465 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
 8466 one floating-point argument, which is "type generic". */
8467
8468static tree
8469fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8470{
8471 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8472 arg, type, res, tmp;
8473 machine_mode mode;
8474 REAL_VALUE_TYPE r;
8475 char buf[128];
8476
8477 /* Verify the required arguments in the original call. */
8478 if (nargs != 6
8479 || !validate_arg (args[0], INTEGER_TYPE)
8480 || !validate_arg (args[1], INTEGER_TYPE)
8481 || !validate_arg (args[2], INTEGER_TYPE)
8482 || !validate_arg (args[3], INTEGER_TYPE)
8483 || !validate_arg (args[4], INTEGER_TYPE)
8484 || !validate_arg (args[5], REAL_TYPE))
8485 return NULL_TREE;
8486
8487 fp_nan = args[0];
8488 fp_infinite = args[1];
8489 fp_normal = args[2];
8490 fp_subnormal = args[3];
8491 fp_zero = args[4];
8492 arg = args[5];
8493 type = TREE_TYPE (arg);
8494 mode = TYPE_MODE (type);
8495 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8496
8497 /* fpclassify(x) ->
8498 isnan(x) ? FP_NAN :
8499 (fabs(x) == Inf ? FP_INFINITE :
8500 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8501 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8502
8503 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8504 build_real (type, dconst0));
8505 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8506 tmp, fp_zero, fp_subnormal);
8507
8508 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8509 real_from_string (&r, buf);
8510 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8511 arg, build_real (type, r));
8512 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8513
8514 if (HONOR_INFINITIES (mode))
8515 {
8516 real_inf (&r);
8517 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8518 build_real (type, r));
8519 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8520 fp_infinite, res);
8521 }
8522
8523 if (HONOR_NANS (mode))
8524 {
8525 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8526 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8527 }
8528
8529 return res;
8530}
8531
9bc9f15f 8532/* Fold a call to an unordered comparison function such as
d5019fe8 8533 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 8534 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 8535 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8536 the opposite of the desired result. UNORDERED_CODE is used
8537 for modes that can hold NaNs and ORDERED_CODE is used for
8538 the rest. */
9bc9f15f 8539
8540static tree
389dd41b 8541fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 8542 enum tree_code unordered_code,
8543 enum tree_code ordered_code)
8544{
859f903a 8545 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 8546 enum tree_code code;
6978db0d 8547 tree type0, type1;
8548 enum tree_code code0, code1;
8549 tree cmp_type = NULL_TREE;
9bc9f15f 8550
6978db0d 8551 type0 = TREE_TYPE (arg0);
8552 type1 = TREE_TYPE (arg1);
a0c938f0 8553
6978db0d 8554 code0 = TREE_CODE (type0);
8555 code1 = TREE_CODE (type1);
a0c938f0 8556
6978db0d 8557 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8558 /* Choose the wider of two real types. */
8559 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8560 ? type0 : type1;
8561 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8562 cmp_type = type0;
8563 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8564 cmp_type = type1;
a0c938f0 8565
389dd41b 8566 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8567 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 8568
8569 if (unordered_code == UNORDERED_EXPR)
8570 {
93633022 8571 if (!HONOR_NANS (arg0))
389dd41b 8572 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8573 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 8574 }
9bc9f15f 8575
93633022 8576 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 8577 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8578 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 8579}
8580
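/* Editor's illustrative sketch (not part of the GCC sources):
   __builtin_isgreater (x, y) folds to !(x UNLE y), the negation of the
   quiet "unordered or less-or-equal" comparison, so NaN operands do
   not raise the invalid exception as a raw x > y would; if NaNs need
   not be honored it is simply !(x <= y).  */
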
0c93c8a9 8581/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8582 arithmetics if it can never overflow, or into internal functions that
8583 return both result of arithmetics and overflowed boolean flag in
732905bb 8584 a complex integer result, or some other check for overflow.
8585 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8586 checking part of that. */
0c93c8a9 8587
8588static tree
8589fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8590 tree arg0, tree arg1, tree arg2)
8591{
8592 enum internal_fn ifn = IFN_LAST;
732905bb 8593 /* The code of the expression corresponding to the type-generic
8594 built-in, or ERROR_MARK for the type-specific ones. */
8595 enum tree_code opcode = ERROR_MARK;
8596 bool ovf_only = false;
8597
0c93c8a9 8598 switch (fcode)
8599 {
732905bb 8600 case BUILT_IN_ADD_OVERFLOW_P:
8601 ovf_only = true;
8602 /* FALLTHRU */
0c93c8a9 8603 case BUILT_IN_ADD_OVERFLOW:
732905bb 8604 opcode = PLUS_EXPR;
8605 /* FALLTHRU */
0c93c8a9 8606 case BUILT_IN_SADD_OVERFLOW:
8607 case BUILT_IN_SADDL_OVERFLOW:
8608 case BUILT_IN_SADDLL_OVERFLOW:
8609 case BUILT_IN_UADD_OVERFLOW:
8610 case BUILT_IN_UADDL_OVERFLOW:
8611 case BUILT_IN_UADDLL_OVERFLOW:
8612 ifn = IFN_ADD_OVERFLOW;
8613 break;
732905bb 8614 case BUILT_IN_SUB_OVERFLOW_P:
8615 ovf_only = true;
8616 /* FALLTHRU */
0c93c8a9 8617 case BUILT_IN_SUB_OVERFLOW:
732905bb 8618 opcode = MINUS_EXPR;
8619 /* FALLTHRU */
0c93c8a9 8620 case BUILT_IN_SSUB_OVERFLOW:
8621 case BUILT_IN_SSUBL_OVERFLOW:
8622 case BUILT_IN_SSUBLL_OVERFLOW:
8623 case BUILT_IN_USUB_OVERFLOW:
8624 case BUILT_IN_USUBL_OVERFLOW:
8625 case BUILT_IN_USUBLL_OVERFLOW:
8626 ifn = IFN_SUB_OVERFLOW;
8627 break;
732905bb 8628 case BUILT_IN_MUL_OVERFLOW_P:
8629 ovf_only = true;
8630 /* FALLTHRU */
0c93c8a9 8631 case BUILT_IN_MUL_OVERFLOW:
732905bb 8632 opcode = MULT_EXPR;
8633 /* FALLTHRU */
0c93c8a9 8634 case BUILT_IN_SMUL_OVERFLOW:
8635 case BUILT_IN_SMULL_OVERFLOW:
8636 case BUILT_IN_SMULLL_OVERFLOW:
8637 case BUILT_IN_UMUL_OVERFLOW:
8638 case BUILT_IN_UMULL_OVERFLOW:
8639 case BUILT_IN_UMULLL_OVERFLOW:
8640 ifn = IFN_MUL_OVERFLOW;
8641 break;
8642 default:
8643 gcc_unreachable ();
8644 }
732905bb 8645
8646 /* For the "generic" overloads, the first two arguments can have different
8647 types and the last argument determines the target type to use to check
8648 for overflow. The arguments of the other overloads all have the same
8649 type. */
8650 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8651
8652 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8653 arguments are constant, attempt to fold the built-in call into a constant
8654 expression indicating whether or not it detected an overflow. */
8655 if (ovf_only
8656 && TREE_CODE (arg0) == INTEGER_CST
8657 && TREE_CODE (arg1) == INTEGER_CST)
8658 /* Perform the computation in the target type and check for overflow. */
8659 return omit_one_operand_loc (loc, boolean_type_node,
8660 arith_overflowed_p (opcode, type, arg0, arg1)
8661 ? boolean_true_node : boolean_false_node,
8662 arg2);
8663
0c93c8a9 8664 tree ctype = build_complex_type (type);
8665 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8666 2, arg0, arg1);
8667 tree tgt = save_expr (call);
8668 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8669 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8670 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
732905bb 8671
8672 if (ovf_only)
8673 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8674
8675 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
0c93c8a9 8676 tree store
8677 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8678 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8679}
8680
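/* Editor's illustrative sketch (not part of the GCC sources):
   __builtin_add_overflow (a, b, &r) becomes roughly

     c = .ADD_OVERFLOW (a, b);   (a complex value: result + overflow bit)
     r = __real__ c;
     (_Bool) __imag__ c;

   while __builtin_add_overflow_p with two INTEGER_CST operands folds
   all the way to a boolean constant via arith_overflowed_p.  */
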
c388a0cf 8681/* Fold a call to __builtin_FILE to a constant string. */
8682
8683static inline tree
8684fold_builtin_FILE (location_t loc)
8685{
8686 if (const char *fname = LOCATION_FILE (loc))
8687 return build_string_literal (strlen (fname) + 1, fname);
8688
8689 return build_string_literal (1, "");
8690}
8691
8692/* Fold a call to __builtin_FUNCTION to a constant string. */
8693
8694static inline tree
8695fold_builtin_FUNCTION ()
8696{
c2d38635 8697 const char *name = "";
8698
c388a0cf 8699 if (current_function_decl)
c2d38635 8700 name = lang_hooks.decl_printable_name (current_function_decl, 0);
c388a0cf 8701
c2d38635 8702 return build_string_literal (strlen (name) + 1, name);
c388a0cf 8703}
8704
8705/* Fold a call to __builtin_LINE to an integer constant. */
8706
8707static inline tree
8708fold_builtin_LINE (location_t loc, tree type)
8709{
8710 return build_int_cst (type, LOCATION_LINE (loc));
8711}
8712
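/* Editor's illustrative sketch (not part of the GCC sources; the file
   and function names below are hypothetical): the three folds above
   replace __builtin_FILE (), __builtin_FUNCTION () and
   __builtin_LINE () with constants drawn from the call's location,
   e.g. "foo.c", "main" and 42, so they can seed default arguments at
   no runtime cost.  */
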
c2f47e15 8713/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 8714 This function returns NULL_TREE if no simplification was possible. */
650e4c94 8715
4ee9c684 8716static tree
e80cc485 8717fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 8718{
e9f80ff5 8719 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 8720 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 8721 switch (fcode)
650e4c94 8722 {
c388a0cf 8723 case BUILT_IN_FILE:
8724 return fold_builtin_FILE (loc);
8725
8726 case BUILT_IN_FUNCTION:
8727 return fold_builtin_FUNCTION ();
8728
8729 case BUILT_IN_LINE:
8730 return fold_builtin_LINE (loc, type);
8731
c2f47e15 8732 CASE_FLT_FN (BUILT_IN_INF):
012f068a 8733 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
c2f47e15 8734 case BUILT_IN_INFD32:
8735 case BUILT_IN_INFD64:
8736 case BUILT_IN_INFD128:
389dd41b 8737 return fold_builtin_inf (loc, type, true);
7c2f0500 8738
c2f47e15 8739 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
012f068a 8740 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
389dd41b 8741 return fold_builtin_inf (loc, type, false);
7c2f0500 8742
c2f47e15 8743 case BUILT_IN_CLASSIFY_TYPE:
8744 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 8745
c2f47e15 8746 default:
8747 break;
8748 }
8749 return NULL_TREE;
8750}
7c2f0500 8751
c2f47e15 8752/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 8753 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 8754
c2f47e15 8755static tree
e80cc485 8756fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 8757{
8758 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8759 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 8760
8761 if (TREE_CODE (arg0) == ERROR_MARK)
8762 return NULL_TREE;
8763
744fe358 8764 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
6c21be92 8765 return ret;
8766
c2f47e15 8767 switch (fcode)
8768 {
650e4c94 8769 case BUILT_IN_CONSTANT_P:
7c2f0500 8770 {
c2f47e15 8771 tree val = fold_builtin_constant_p (arg0);
7c2f0500 8772
7c2f0500 8773 /* Gimplification will pull the CALL_EXPR for the builtin out of
8774 an if condition. When not optimizing, we'll not CSE it back.
8775 To avoid link error types of regressions, return false now. */
8776 if (!val && !optimize)
8777 val = integer_zero_node;
8778
8779 return val;
8780 }
650e4c94 8781
539a3a92 8782 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 8783 return fold_builtin_classify_type (arg0);
539a3a92 8784
650e4c94 8785 case BUILT_IN_STRLEN:
c7cbde74 8786 return fold_builtin_strlen (loc, type, arg0);
650e4c94 8787
4f35b1fc 8788 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 8789 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 8790 case BUILT_IN_FABSD32:
8791 case BUILT_IN_FABSD64:
8792 case BUILT_IN_FABSD128:
389dd41b 8793 return fold_builtin_fabs (loc, arg0, type);
d1aade50 8794
8795 case BUILT_IN_ABS:
8796 case BUILT_IN_LABS:
8797 case BUILT_IN_LLABS:
8798 case BUILT_IN_IMAXABS:
389dd41b 8799 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 8800
4f35b1fc 8801 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 8802 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 8803 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 8804 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 8805 break;
36d3581d 8806
4f35b1fc 8807 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 8808 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 8809 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 8810 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 8811 break;
36d3581d 8812
4f35b1fc 8813 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 8814 if (validate_arg (arg0, COMPLEX_TYPE)
8815 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 8816 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 8817 break;
36d3581d 8818
6c21be92 8819 CASE_FLT_FN (BUILT_IN_CARG):
8820 return fold_builtin_carg (loc, arg0, type);
c2373fdb 8821
6c21be92 8822 case BUILT_IN_ISASCII:
8823 return fold_builtin_isascii (loc, arg0);
48e1416a 8824
6c21be92 8825 case BUILT_IN_TOASCII:
8826 return fold_builtin_toascii (loc, arg0);
48e1416a 8827
6c21be92 8828 case BUILT_IN_ISDIGIT:
8829 return fold_builtin_isdigit (loc, arg0);
48e1416a 8830
12f08300 8831 CASE_FLT_FN (BUILT_IN_FINITE):
8832 case BUILT_IN_FINITED32:
8833 case BUILT_IN_FINITED64:
8834 case BUILT_IN_FINITED128:
8835 case BUILT_IN_ISFINITE:
8836 {
8837 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8838 if (ret)
8839 return ret;
8840 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8841 }
8842
8843 CASE_FLT_FN (BUILT_IN_ISINF):
8844 case BUILT_IN_ISINFD32:
8845 case BUILT_IN_ISINFD64:
8846 case BUILT_IN_ISINFD128:
8847 {
8848 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8849 if (ret)
8850 return ret;
8851 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8852 }
8853
8854 case BUILT_IN_ISNORMAL:
8855 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8856
6c21be92 8857 case BUILT_IN_ISINF_SIGN:
12f08300 8858 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8859
8860 CASE_FLT_FN (BUILT_IN_ISNAN):
8861 case BUILT_IN_ISNAND32:
8862 case BUILT_IN_ISNAND64:
8863 case BUILT_IN_ISNAND128:
8864 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
48e1416a 8865
6c21be92 8866 case BUILT_IN_FREE:
8867 if (integer_zerop (arg0))
8868 return build_empty_stmt (loc);
d064d976 8869 break;
c63f4ad3 8870
6c21be92 8871 default:
8b4af95f 8872 break;
6c21be92 8873 }
805e22b2 8874
6c21be92 8875 return NULL_TREE;
3bc5c41b 8876
6c21be92 8877}
728bac60 8878
6c21be92 8879/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8880 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 8881
8882static tree
e80cc485 8883fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 8884{
8885 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8886 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8887
6c21be92 8888 if (TREE_CODE (arg0) == ERROR_MARK
8889 || TREE_CODE (arg1) == ERROR_MARK)
8890 return NULL_TREE;
e5407ca6 8891
744fe358 8892 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
6c21be92 8893 return ret;
e84da7c1 8894
6c21be92 8895 switch (fcode)
8896 {
e84da7c1 8897 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8898 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8899 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8900 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 8901 return do_mpfr_lgamma_r (arg0, arg1, type);
8902 break;
c2f47e15 8903
3838b9ae 8904 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 8905 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 8906
ebf8b4f5 8907 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 8908 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 8909
c2f47e15 8910 case BUILT_IN_STRSPN:
389dd41b 8911 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 8912
8913 case BUILT_IN_STRCSPN:
389dd41b 8914 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 8915
c2f47e15 8916 case BUILT_IN_STRPBRK:
389dd41b 8917 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 8918
8919 case BUILT_IN_EXPECT:
c83059be 8920 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 8921
9bc9f15f 8922 case BUILT_IN_ISGREATER:
389dd41b 8923 return fold_builtin_unordered_cmp (loc, fndecl,
8924 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 8925 case BUILT_IN_ISGREATEREQUAL:
389dd41b 8926 return fold_builtin_unordered_cmp (loc, fndecl,
8927 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 8928 case BUILT_IN_ISLESS:
389dd41b 8929 return fold_builtin_unordered_cmp (loc, fndecl,
8930 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 8931 case BUILT_IN_ISLESSEQUAL:
389dd41b 8932 return fold_builtin_unordered_cmp (loc, fndecl,
8933 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 8934 case BUILT_IN_ISLESSGREATER:
389dd41b 8935 return fold_builtin_unordered_cmp (loc, fndecl,
8936 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 8937 case BUILT_IN_ISUNORDERED:
389dd41b 8938 return fold_builtin_unordered_cmp (loc, fndecl,
8939 arg0, arg1, UNORDERED_EXPR,
d5019fe8 8940 NOP_EXPR);
9bc9f15f 8941
7c2f0500 8942 /* We do the folding for va_start in the expander. */
8943 case BUILT_IN_VA_START:
8944 break;
f0613857 8945
0a39fd54 8946 case BUILT_IN_OBJECT_SIZE:
c2f47e15 8947 return fold_builtin_object_size (arg0, arg1);
0a39fd54 8948
1cd6e20d 8949 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8950 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8951
8952 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8953 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8954
c2f47e15 8955 default:
8956 break;
8957 }
8958 return NULL_TREE;
8959}
8960
8961/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 8962 and ARG2.
c2f47e15 8963 This function returns NULL_TREE if no simplification was possible. */
8964
8965static tree
389dd41b 8966fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 8967 tree arg0, tree arg1, tree arg2)
c2f47e15 8968{
8969 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8970 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 8971
8972 if (TREE_CODE (arg0) == ERROR_MARK
8973 || TREE_CODE (arg1) == ERROR_MARK
8974 || TREE_CODE (arg2) == ERROR_MARK)
8975 return NULL_TREE;
8976
744fe358 8977 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8978 arg0, arg1, arg2))
6c21be92 8979 return ret;
8980
c2f47e15 8981 switch (fcode)
8982 {
8984 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 8985 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 8986
8987 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 8988 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 8989
e5407ca6 8990 CASE_FLT_FN (BUILT_IN_REMQUO):
8991 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8992 && validate_arg (arg1, REAL_TYPE)
8993 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 8994 return do_mpfr_remquo (arg0, arg1, arg2);
8995 break;
e5407ca6 8996
c2f47e15 8997 case BUILT_IN_MEMCMP:
389dd41b 8998 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 8999
c83059be 9000 case BUILT_IN_EXPECT:
9001 return fold_builtin_expect (loc, arg0, arg1, arg2);
9002
0c93c8a9 9003 case BUILT_IN_ADD_OVERFLOW:
9004 case BUILT_IN_SUB_OVERFLOW:
9005 case BUILT_IN_MUL_OVERFLOW:
732905bb 9006 case BUILT_IN_ADD_OVERFLOW_P:
9007 case BUILT_IN_SUB_OVERFLOW_P:
9008 case BUILT_IN_MUL_OVERFLOW_P:
0c93c8a9 9009 case BUILT_IN_SADD_OVERFLOW:
9010 case BUILT_IN_SADDL_OVERFLOW:
9011 case BUILT_IN_SADDLL_OVERFLOW:
9012 case BUILT_IN_SSUB_OVERFLOW:
9013 case BUILT_IN_SSUBL_OVERFLOW:
9014 case BUILT_IN_SSUBLL_OVERFLOW:
9015 case BUILT_IN_SMUL_OVERFLOW:
9016 case BUILT_IN_SMULL_OVERFLOW:
9017 case BUILT_IN_SMULLL_OVERFLOW:
9018 case BUILT_IN_UADD_OVERFLOW:
9019 case BUILT_IN_UADDL_OVERFLOW:
9020 case BUILT_IN_UADDLL_OVERFLOW:
9021 case BUILT_IN_USUB_OVERFLOW:
9022 case BUILT_IN_USUBL_OVERFLOW:
9023 case BUILT_IN_USUBLL_OVERFLOW:
9024 case BUILT_IN_UMUL_OVERFLOW:
9025 case BUILT_IN_UMULL_OVERFLOW:
9026 case BUILT_IN_UMULLL_OVERFLOW:
9027 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9028
650e4c94 9029 default:
9030 break;
9031 }
c2f47e15 9032 return NULL_TREE;
9033}
650e4c94 9034
c2f47e15 9035/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 9036 arguments. The trailing bool parameter, historically IGNORE (true if
 9037 the result of the call is ignored), is currently unused. This function
 9038 returns NULL_TREE if no simplification was possible. */
48e1416a 9039
2165588a 9040tree
e80cc485 9041fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 9042{
9043 tree ret = NULL_TREE;
a7f5bb2d 9044
c2f47e15 9045 switch (nargs)
9046 {
9047 case 0:
e80cc485 9048 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 9049 break;
9050 case 1:
e80cc485 9051 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 9052 break;
9053 case 2:
e80cc485 9054 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 9055 break;
9056 case 3:
e80cc485 9057 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 9058 break;
c2f47e15 9059 default:
12f08300 9060 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 9061 break;
9062 }
9063 if (ret)
9064 {
75a70cf9 9065 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 9066 SET_EXPR_LOCATION (ret, loc);
c2f47e15 9067 TREE_NO_WARNING (ret) = 1;
9068 return ret;
9069 }
9070 return NULL_TREE;
9071}
9072
0e80b01d 9073/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9074 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9075 of arguments in ARGS to be omitted. OLDNARGS is the number of
9076 elements in ARGS. */
c2f47e15 9077
9078static tree
0e80b01d 9079rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9080 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 9081{
0e80b01d 9082 int nargs = oldnargs - skip + n;
9083 tree *buffer;
c2f47e15 9084
0e80b01d 9085 if (n > 0)
c2f47e15 9086 {
0e80b01d 9087 int i, j;
c2f47e15 9088
0e80b01d 9089 buffer = XALLOCAVEC (tree, nargs);
9090 for (i = 0; i < n; i++)
9091 buffer[i] = va_arg (newargs, tree);
9092 for (j = skip; j < oldnargs; j++, i++)
9093 buffer[i] = args[j];
9094 }
9095 else
9096 buffer = args + skip;
19fbe3a4 9097
0e80b01d 9098 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9099}
c2f47e15 9100
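/* Editor's worked example (not part of the GCC sources): with
   OLDNARGS = 3, SKIP = 1 and N = 1, a call f (a, b, c) is rebuilt as
   g (x, b, c): the new argument x comes first, followed by the old
   arguments past the skipped prefix.  */
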
198622c0 9101/* Return true if FNDECL shouldn't be folded right now.
9102 If a built-in function has an inline attribute always_inline
9103 wrapper, defer folding it after always_inline functions have
9104 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9105 might not be performed. */
9106
51d2c51e 9107bool
198622c0 9108avoid_folding_inline_builtin (tree fndecl)
9109{
9110 return (DECL_DECLARED_INLINE_P (fndecl)
9111 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9112 && cfun
9113 && !cfun->always_inline_functions_inlined
9114 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9115}
9116
4ee9c684 9117/* A wrapper function for builtin folding that prevents warnings for
491e04ef 9118 "statement without effect" and the like, caused by removing the
4ee9c684 9119 call node earlier than the warning is generated. */
9120
9121tree
389dd41b 9122fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 9123{
c2f47e15 9124 tree ret = NULL_TREE;
9125 tree fndecl = get_callee_fndecl (exp);
9126 if (fndecl
9127 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 9128 && DECL_BUILT_IN (fndecl)
9129 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9130 yet. Defer folding until we see all the arguments
9131 (after inlining). */
9132 && !CALL_EXPR_VA_ARG_PACK (exp))
9133 {
9134 int nargs = call_expr_nargs (exp);
9135
9136 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9137 instead last argument is __builtin_va_arg_pack (). Defer folding
9138 even in that case, until arguments are finalized. */
9139 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9140 {
9141 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9142 if (fndecl2
9143 && TREE_CODE (fndecl2) == FUNCTION_DECL
9144 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9145 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9146 return NULL_TREE;
9147 }
9148
198622c0 9149 if (avoid_folding_inline_builtin (fndecl))
9150 return NULL_TREE;
9151
c2f47e15 9152 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 9153 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9154 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 9155 else
9156 {
9d884767 9157 tree *args = CALL_EXPR_ARGP (exp);
9158 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 9159 if (ret)
389dd41b 9160 return ret;
c2f47e15 9161 }
4ee9c684 9162 }
c2f47e15 9163 return NULL_TREE;
9164}
48e1416a 9165
9d884767 9166/* Fold a CALL_EXPR with FN as the function expression.
9167 N arguments are passed in the array ARGARRAY. Return a folded
9168 expression or NULL_TREE if no simplification was possible. */
805e22b2 9169
9170tree
9d884767 9171fold_builtin_call_array (location_t loc, tree,
d01f58f9 9172 tree fn,
9173 int n,
9174 tree *argarray)
7e15618b 9175{
9d884767 9176 if (TREE_CODE (fn) != ADDR_EXPR)
9177 return NULL_TREE;
c2f47e15 9178
9d884767 9179 tree fndecl = TREE_OPERAND (fn, 0);
9180 if (TREE_CODE (fndecl) == FUNCTION_DECL
9181 && DECL_BUILT_IN (fndecl))
9182 {
9183 /* If last argument is __builtin_va_arg_pack (), arguments to this
9184 function are not finalized yet. Defer folding until they are. */
9185 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9186 {
9187 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9188 if (fndecl2
9189 && TREE_CODE (fndecl2) == FUNCTION_DECL
9190 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9191 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9192 return NULL_TREE;
9193 }
9194 if (avoid_folding_inline_builtin (fndecl))
9195 return NULL_TREE;
9196 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9197 return targetm.fold_builtin (fndecl, n, argarray, false);
9198 else
9199 return fold_builtin_n (loc, fndecl, argarray, n, false);
9200 }
c2f47e15 9201
9d884767 9202 return NULL_TREE;
c2f47e15 9203}
9204
af1409ad 9205/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9206 along with N new arguments specified as the "..." parameters. SKIP
9207 is the number of arguments in EXP to be omitted. This function is used
9208 to do varargs-to-varargs transformations. */
9209
9210static tree
9211rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9212{
9213 va_list ap;
9214 tree t;
9215
9216 va_start (ap, n);
9217 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9218 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9219 va_end (ap);
c2f47e15 9220
af1409ad 9221 return t;
c2f47e15 9222}
9223
9224/* Validate a single argument ARG against a tree code CODE representing
184fac50 9225 a type. Return true when argument is valid. */
48e1416a 9226
c2f47e15 9227static bool
184fac50 9228validate_arg (const_tree arg, enum tree_code code)
c2f47e15 9229{
9230 if (!arg)
9231 return false;
9232 else if (code == POINTER_TYPE)
184fac50 9233 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 9234 else if (code == INTEGER_TYPE)
9235 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 9236 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 9237}
0eb671f7 9238
75a70cf9 9239/* This function validates the types of a function call argument list
9240 against a specified list of tree_codes. If the last specifier is a 0,
 9241 that represents an ellipsis, otherwise the last specifier must be a
9242 VOID_TYPE.
9243
9244 This is the GIMPLE version of validate_arglist. Eventually we want to
9245 completely convert builtins.c to work from GIMPLEs and the tree based
9246 validate_arglist will then be removed. */
9247
9248bool
1a91d914 9249validate_gimple_arglist (const gcall *call, ...)
75a70cf9 9250{
9251 enum tree_code code;
 9252 bool res = false;
9253 va_list ap;
9254 const_tree arg;
9255 size_t i;
9256
9257 va_start (ap, call);
9258 i = 0;
9259
9260 do
9261 {
d62e827b 9262 code = (enum tree_code) va_arg (ap, int);
75a70cf9 9263 switch (code)
9264 {
9265 case 0:
 9266 /* This signifies an ellipsis; any further arguments are all ok. */
9267 res = true;
9268 goto end;
9269 case VOID_TYPE:
9270 /* This signifies an endlink, if no arguments remain, return
9271 true, otherwise return false. */
9272 res = (i == gimple_call_num_args (call));
9273 goto end;
9274 default:
9275 /* If no parameters remain or the parameter's code does not
9276 match the specified code, return false. Otherwise continue
9277 checking any remaining arguments. */
9278 arg = gimple_call_arg (call, i++);
9279 if (!validate_arg (arg, code))
9280 goto end;
9281 break;
9282 }
9283 }
9284 while (1);
9285
9286 /* We need gotos here since we can only have one VA_CLOSE in a
9287 function. */
9288 end: ;
9289 va_end (ap);
9290
9291 return res;
9292}
9293
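/* Editor's illustrative sketch (a hypothetical call site, not from the
   GCC sources): checking a two-argument builtin would look like

     if (!validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
				   VOID_TYPE))
       return NULL_TREE;

   where the trailing VOID_TYPE ends a fixed argument list and a
   trailing 0 would instead permit further varargs.  */
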
fc2a2dcb 9294/* Default target-specific builtin expander that does nothing. */
9295
9296rtx
aecda0d6 9297default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9298 rtx target ATTRIBUTE_UNUSED,
9299 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 9300 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 9301 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 9302{
9303 return NULL_RTX;
9304}
c7926a82 9305
01537105 9306/* Returns true if EXP represents data that would potentially reside
9307 in a readonly section. */
9308
b9ea678c 9309bool
01537105 9310readonly_data_expr (tree exp)
9311{
9312 STRIP_NOPS (exp);
9313
9ff0637e 9314 if (TREE_CODE (exp) != ADDR_EXPR)
9315 return false;
9316
9317 exp = get_base_address (TREE_OPERAND (exp, 0));
9318 if (!exp)
9319 return false;
9320
9321 /* Make sure we call decl_readonly_section only for trees it
9322 can handle (since it returns true for everything it doesn't
9323 understand). */
491e04ef 9324 if (TREE_CODE (exp) == STRING_CST
9ff0637e 9325 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 9326 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 9327 return decl_readonly_section (exp, 0);
01537105 9328 else
9329 return false;
9330}
4ee9c684 9331
c2f47e15 9332/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9333 to the call, and TYPE is its return type.
4ee9c684 9334
c2f47e15 9335 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9336 simplified form of the call as a tree.
9337
9338 The simplified form may be a constant or other expression which
9339 computes the same value, but in a more efficient manner (including
9340 calls to other builtin functions).
9341
9342 The call may contain arguments which need to be evaluated, but
9343 which are not useful to determine the result of the call. In
9344 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9345 COMPOUND_EXPR will be an argument which must be evaluated.
9346 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9347 COMPOUND_EXPR in the chain will contain the tree for the simplified
9348 form of the builtin function call. */
9349
9350static tree
389dd41b 9351fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 9352{
c2f47e15 9353 if (!validate_arg (s1, POINTER_TYPE)
9354 || !validate_arg (s2, POINTER_TYPE))
9355 return NULL_TREE;
4ee9c684 9356 else
9357 {
4ee9c684 9358 tree fn;
9359 const char *p1, *p2;
9360
9361 p2 = c_getstr (s2);
9362 if (p2 == NULL)
c2f47e15 9363 return NULL_TREE;
4ee9c684 9364
9365 p1 = c_getstr (s1);
9366 if (p1 != NULL)
9367 {
9368 const char *r = strpbrk (p1, p2);
daa1d5f5 9369 tree tem;
4ee9c684 9370
9371 if (r == NULL)
779b4c41 9372 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 9373
9374 /* Return an offset into the constant string argument. */
2cc66f2a 9375 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 9376 return fold_convert_loc (loc, type, tem);
4ee9c684 9377 }
9378
9379 if (p2[0] == '\0')
05abc81b 9380 /* strpbrk(x, "") == NULL.
9381 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 9382 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 9383
9384 if (p2[1] != '\0')
c2f47e15 9385 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 9386
b9a16870 9387 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 9388 if (!fn)
c2f47e15 9389 return NULL_TREE;
4ee9c684 9390
9391 /* New argument list transforming strpbrk(s1, s2) to
9392 strchr(s1, s2[0]). */
7002a1c8 9393 return build_call_expr_loc (loc, fn, 2, s1,
9394 build_int_cst (integer_type_node, p2[0]));
4ee9c684 9395 }
9396}
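
/* Illustrative examples of the strpbrk folds above (added for
   exposition):
     strpbrk (s, "")         -> (char *) 0, with S still evaluated
     strpbrk (s, "h")        -> strchr (s, 'h')
     strpbrk ("hello", "lo") -> "hello" + 2  (both strings constant)  */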
9397
c2f47e15 9398/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9399 to the call.
4ee9c684 9400
c2f47e15 9401 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9402 simplified form of the call as a tree.
9403
9404 The simplified form may be a constant or other expression which
9405 computes the same value, but in a more efficient manner (including
9406 calls to other builtin functions).
9407
9408 The call may contain arguments which need to be evaluated, but
9409 which are not useful to determine the result of the call. In
9410 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9411 COMPOUND_EXPR will be an argument which must be evaluated.
9412 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9413 COMPOUND_EXPR in the chain will contain the tree for the simplified
9414 form of the builtin function call. */
9415
9416static tree
389dd41b 9417fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 9418{
c2f47e15 9419 if (!validate_arg (s1, POINTER_TYPE)
9420 || !validate_arg (s2, POINTER_TYPE))
9421 return NULL_TREE;
4ee9c684 9422 else
9423 {
4ee9c684 9424 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9425
c2f47e15 9426 /* If either argument is "", return NULL_TREE. */
4ee9c684 9427 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 9428 /* Evaluate and ignore both arguments in case either one has
9429 side-effects. */
389dd41b 9430 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 9431 s1, s2);
c2f47e15 9432 return NULL_TREE;
4ee9c684 9433 }
9434}
9435
c2f47e15 9436/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9437 to the call.
4ee9c684 9438
c2f47e15 9439 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9440 simplified form of the call as a tree.
9441
9442 The simplified form may be a constant or other expression which
9443 computes the same value, but in a more efficient manner (including
9444 calls to other builtin functions).
9445
9446 The call may contain arguments which need to be evaluated, but
9447 which are not useful to determine the result of the call. In
9448 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9449 COMPOUND_EXPR will be an argument which must be evaluated.
9450 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9451 COMPOUND_EXPR in the chain will contain the tree for the simplified
9452 form of the builtin function call. */
9453
9454static tree
389dd41b 9455fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 9456{
c2f47e15 9457 if (!validate_arg (s1, POINTER_TYPE)
9458 || !validate_arg (s2, POINTER_TYPE))
9459 return NULL_TREE;
4ee9c684 9460 else
9461 {
c2f47e15 9462 /* If the first argument is "", return NULL_TREE. */
b5e46e2c 9463 const char *p1 = c_getstr (s1);
4ee9c684 9464 if (p1 && *p1 == '\0')
9465 {
9466 /* Evaluate and ignore argument s2 in case it has
9467 side-effects. */
389dd41b 9468 return omit_one_operand_loc (loc, size_type_node,
39761420 9469 size_zero_node, s2);
4ee9c684 9470 }
9471
9472 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 9473 const char *p2 = c_getstr (s2);
4ee9c684 9474 if (p2 && *p2 == '\0')
9475 {
b9a16870 9476 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 9477
9478 /* If the replacement _DECL isn't initialized, don't do the
9479 transformation. */
9480 if (!fn)
c2f47e15 9481 return NULL_TREE;
4ee9c684 9482
389dd41b 9483 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 9484 }
c2f47e15 9485 return NULL_TREE;
4ee9c684 9486 }
9487}
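
/* Illustrative examples of the strspn/strcspn folds above (added for
   exposition):
     strspn (s, "")   -> 0, with both arguments still evaluated
     strcspn ("", s)  -> 0, with S still evaluated
     strcspn (s, "")  -> strlen (s)  */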
9488
c2f47e15 9489/* Fold the next_arg or va_start call EXP. Return true if an error was
743b0c6a 9490 produced, false otherwise. This is done so that we don't output the
 9491 error or warning twice or three times. */
75a70cf9 9492
743b0c6a 9493bool
c2f47e15 9494fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 9495{
9496 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 9497 int nargs = call_expr_nargs (exp);
9498 tree arg;
d98fd4a4 9499 /* There is a good chance the current input_location points inside the
 9500 definition of the va_start macro (perhaps on the token for the
 9501 builtin) in a system header, so warnings will not be emitted.
 9502 Use the location in real source code instead. */
9503 source_location current_location =
9504 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9505 NULL);
4ee9c684 9506
257d99c3 9507 if (!stdarg_p (fntype))
743b0c6a 9508 {
9509 error ("%<va_start%> used in function with fixed args");
9510 return true;
9511 }
c2f47e15 9512
9513 if (va_start_p)
79012a9d 9514 {
c2f47e15 9515 if (nargs != 2)
9516 {
9517 error ("wrong number of arguments to function %<va_start%>");
9518 return true;
9519 }
9520 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 9521 }
9522 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9523 when we checked the arguments and if needed issued a warning. */
c2f47e15 9524 else
4ee9c684 9525 {
c2f47e15 9526 if (nargs == 0)
9527 {
9528 /* Evidently an out of date version of <stdarg.h>; can't validate
9529 va_start's second argument, but can still work as intended. */
d98fd4a4 9530 warning_at (current_location,
7edb1062 9531 OPT_Wvarargs,
9532 "%<__builtin_next_arg%> called without an argument");
c2f47e15 9533 return true;
9534 }
9535 else if (nargs > 1)
a0c938f0 9536 {
c2f47e15 9537 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 9538 return true;
9539 }
c2f47e15 9540 arg = CALL_EXPR_ARG (exp, 0);
9541 }
9542
a8dd994c 9543 if (TREE_CODE (arg) == SSA_NAME)
9544 arg = SSA_NAME_VAR (arg);
9545
c2f47e15 9546 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 9547 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 9548 the arguments and if needed issuing a warning. */
9549 if (!integer_zerop (arg))
9550 {
9551 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 9552
4ee9c684 9553 /* Strip off all nops for the sake of the comparison. This
9554 is not quite the same as STRIP_NOPS. It does more.
9555 We must also strip off INDIRECT_EXPR for C++ reference
9556 parameters. */
72dd6141 9557 while (CONVERT_EXPR_P (arg)
4ee9c684 9558 || TREE_CODE (arg) == INDIRECT_REF)
9559 arg = TREE_OPERAND (arg, 0);
9560 if (arg != last_parm)
a0c938f0 9561 {
b08cf617 9562 /* FIXME: Sometimes with the tree optimizers we can end up with
 9563 something other than the last argument even though the user
 9564 used the last argument. We just warn and set the arg to be
 9565 the last argument so that we will not get wrong code because
 9566 of it. */
d98fd4a4 9567 warning_at (current_location,
7edb1062 9568 OPT_Wvarargs,
d98fd4a4 9569 "second parameter of %<va_start%> not last named argument");
743b0c6a 9570 }
24158ad7 9571
9572 /* Undefined by C99 7.15.1.4p4 (va_start):
9573 "If the parameter parmN is declared with the register storage
9574 class, with a function or array type, or with a type that is
9575 not compatible with the type that results after application of
9576 the default argument promotions, the behavior is undefined."
9577 */
9578 else if (DECL_REGISTER (arg))
d98fd4a4 9579 {
9580 warning_at (current_location,
7edb1062 9581 OPT_Wvarargs,
67cf9b55 9582 "undefined behavior when second parameter of "
d98fd4a4 9583 "%<va_start%> is declared with %<register%> storage");
9584 }
24158ad7 9585
79012a9d 9586 /* We want to verify the second parameter just once before the tree
a0c938f0 9587 optimizers are run and then avoid keeping it in the tree,
9588 as otherwise we could warn even for correct code like:
9589 void foo (int i, ...)
9590 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 9591 if (va_start_p)
9592 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9593 else
9594 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 9595 }
9596 return false;
4ee9c684 9597}
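
/* Illustrative example (added for exposition): in
     void bar (int a, int b, ...)
     { va_list ap; va_start (ap, a); va_end (ap); }
   the second argument of va_start is not the last named parameter,
   so the "second parameter of va_start not last named argument"
   warning above is issued.  */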
9598
9599
c2f47e15 9600/* Expand a call EXP to __builtin_object_size. */
0a39fd54 9601
f7715905 9602static rtx
0a39fd54 9603expand_builtin_object_size (tree exp)
9604{
9605 tree ost;
9606 int object_size_type;
9607 tree fndecl = get_callee_fndecl (exp);
0a39fd54 9608
c2f47e15 9609 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 9610 {
8c41abe8 9611 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
b8c23db3 9612 exp, fndecl);
0a39fd54 9613 expand_builtin_trap ();
9614 return const0_rtx;
9615 }
9616
c2f47e15 9617 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 9618 STRIP_NOPS (ost);
9619
9620 if (TREE_CODE (ost) != INTEGER_CST
9621 || tree_int_cst_sgn (ost) < 0
9622 || compare_tree_int (ost, 3) > 0)
9623 {
8c41abe8 9624 error ("%Klast argument of %qD is not integer constant between 0 and 3",
b8c23db3 9625 exp, fndecl);
0a39fd54 9626 expand_builtin_trap ();
9627 return const0_rtx;
9628 }
9629
e913b5cd 9630 object_size_type = tree_to_shwi (ost);
0a39fd54 9631
9632 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9633}
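
/* For example (illustrative): when the object size cannot be
   determined at compile time, __builtin_object_size (p, 0) expands
   to (size_t) -1 while __builtin_object_size (p, 2) expands to
   (size_t) 0, the documented fallback values for the maximum and
   minimum modes respectively.  */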
9634
9635/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9636 FCODE is the BUILT_IN_* to use.
c2f47e15 9637 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 9638 otherwise try to get the result in TARGET, if convenient (and in
9639 mode MODE if that's convenient). */
9640
9641static rtx
3754d046 9642expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 9643 enum built_in_function fcode)
9644{
0a39fd54 9645 tree dest, src, len, size;
9646
c2f47e15 9647 if (!validate_arglist (exp,
0a39fd54 9648 POINTER_TYPE,
9649 fcode == BUILT_IN_MEMSET_CHK
9650 ? INTEGER_TYPE : POINTER_TYPE,
9651 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 9652 return NULL_RTX;
0a39fd54 9653
c2f47e15 9654 dest = CALL_EXPR_ARG (exp, 0);
9655 src = CALL_EXPR_ARG (exp, 1);
9656 len = CALL_EXPR_ARG (exp, 2);
9657 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 9658
5aef8938 9659 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9660 exp, len, /*maxlen=*/NULL_TREE,
9661 /*str=*/NULL_TREE, size);
9662
9663 if (!tree_fits_uhwi_p (size))
c2f47e15 9664 return NULL_RTX;
0a39fd54 9665
e913b5cd 9666 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 9667 {
5aef8938 9668 /* Avoid transforming the checking call to an ordinary one when
9669 an overflow has been detected or when the call couldn't be
9670 validated because the size is not constant. */
9671 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9672 return NULL_RTX;
0a39fd54 9673
5aef8938 9674 tree fn = NULL_TREE;
0a39fd54 9675 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9676 mem{cpy,pcpy,move,set} is available. */
9677 switch (fcode)
9678 {
9679 case BUILT_IN_MEMCPY_CHK:
b9a16870 9680 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 9681 break;
9682 case BUILT_IN_MEMPCPY_CHK:
b9a16870 9683 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 9684 break;
9685 case BUILT_IN_MEMMOVE_CHK:
b9a16870 9686 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 9687 break;
9688 case BUILT_IN_MEMSET_CHK:
b9a16870 9689 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 9690 break;
9691 default:
9692 break;
9693 }
9694
9695 if (! fn)
c2f47e15 9696 return NULL_RTX;
0a39fd54 9697
0568e9c1 9698 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 9699 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9700 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9701 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9702 }
9703 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 9704 return NULL_RTX;
0a39fd54 9705 else
9706 {
957d0361 9707 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 9708
9709 /* If DEST is not a pointer type, call the normal function. */
9710 if (dest_align == 0)
c2f47e15 9711 return NULL_RTX;
0a39fd54 9712
9713 /* If SRC and DEST are the same (and not volatile), do nothing. */
9714 if (operand_equal_p (src, dest, 0))
9715 {
9716 tree expr;
9717
9718 if (fcode != BUILT_IN_MEMPCPY_CHK)
9719 {
9720 /* Evaluate and ignore LEN in case it has side-effects. */
9721 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9722 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9723 }
9724
2cc66f2a 9725 expr = fold_build_pointer_plus (dest, len);
0a39fd54 9726 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9727 }
9728
9729 /* __memmove_chk special case. */
9730 if (fcode == BUILT_IN_MEMMOVE_CHK)
9731 {
957d0361 9732 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 9733
9734 if (src_align == 0)
c2f47e15 9735 return NULL_RTX;
0a39fd54 9736
9737 /* If src is categorized for a readonly section we can use
9738 normal __memcpy_chk. */
9739 if (readonly_data_expr (src))
9740 {
b9a16870 9741 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 9742 if (!fn)
c2f47e15 9743 return NULL_RTX;
0568e9c1 9744 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9745 dest, src, len, size);
a65c4d64 9746 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9747 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9748 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9749 }
9750 }
c2f47e15 9751 return NULL_RTX;
0a39fd54 9752 }
9753}
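
/* Illustrative example (added for exposition): given
     char buf[8];
     __builtin___memcpy_chk (buf, src, 4, __builtin_object_size (buf, 0));
   the length 4 is a known constant that fits in the 8-byte object, so
   the checking call is expanded as a plain memcpy (buf, src, 4).  */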
9754
9755/* Emit warning if a buffer overflow is detected at compile time. */
9756
9757static void
9758maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9759{
5aef8938 9760 /* The source string. */
9761 tree srcstr = NULL_TREE;
9762 /* The size of the destination object. */
9763 tree objsize = NULL_TREE;
9764 /* The string that is being concatenated with (as in __strcat_chk)
9765 or null if it isn't. */
9766 tree catstr = NULL_TREE;
9767 /* The maximum length of the source sequence in a bounded operation
9768 (such as __strncat_chk) or null if the operation isn't bounded
9769 (such as __strcat_chk). */
9770 tree maxlen = NULL_TREE;
0a39fd54 9771
9772 switch (fcode)
9773 {
9774 case BUILT_IN_STRCPY_CHK:
9775 case BUILT_IN_STPCPY_CHK:
5aef8938 9776 srcstr = CALL_EXPR_ARG (exp, 1);
9777 objsize = CALL_EXPR_ARG (exp, 2);
9778 break;
9779
0a39fd54 9780 case BUILT_IN_STRCAT_CHK:
5aef8938 9781 /* For __strcat_chk the warning will be emitted only if overflowing
9782 by at least strlen (dest) + 1 bytes. */
9783 catstr = CALL_EXPR_ARG (exp, 0);
9784 srcstr = CALL_EXPR_ARG (exp, 1);
9785 objsize = CALL_EXPR_ARG (exp, 2);
0a39fd54 9786 break;
5aef8938 9787
b356dfef 9788 case BUILT_IN_STRNCAT_CHK:
5aef8938 9789 catstr = CALL_EXPR_ARG (exp, 0);
9790 srcstr = CALL_EXPR_ARG (exp, 1);
9791 maxlen = CALL_EXPR_ARG (exp, 2);
9792 objsize = CALL_EXPR_ARG (exp, 3);
9793 break;
9794
0a39fd54 9795 case BUILT_IN_STRNCPY_CHK:
1063acde 9796 case BUILT_IN_STPNCPY_CHK:
5aef8938 9797 srcstr = CALL_EXPR_ARG (exp, 1);
9798 maxlen = CALL_EXPR_ARG (exp, 2);
9799 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 9800 break;
5aef8938 9801
0a39fd54 9802 case BUILT_IN_SNPRINTF_CHK:
9803 case BUILT_IN_VSNPRINTF_CHK:
5aef8938 9804 maxlen = CALL_EXPR_ARG (exp, 1);
9805 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 9806 break;
9807 default:
9808 gcc_unreachable ();
9809 }
9810
5aef8938 9811 if (catstr && maxlen)
0a39fd54 9812 {
5aef8938 9813 /* Check __strncat_chk. There is no way to determine the length
9814 of the string to which the source string is being appended so
9815 just warn when the length of the source string is not known. */
8d6c6ef5 9816 check_strncat_sizes (exp, objsize);
9817 return;
0a39fd54 9818 }
0a39fd54 9819
5aef8938 9820 check_sizes (OPT_Wstringop_overflow_, exp,
9821 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
0a39fd54 9822}
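
/* Illustrative example (added for exposition): for
     char d[4];
     __builtin___strcpy_chk (d, "abcdefg", __builtin_object_size (d, 0));
   the source including the terminating nul is known to be 8 bytes,
   larger than the 4-byte destination, so a -Wstringop-overflow
   warning is emitted at compile time.  */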
9823
9824/* Emit warning if a buffer overflow is detected at compile time
9825 in __sprintf_chk/__vsprintf_chk calls. */
9826
9827static void
9828maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9829{
1e4adcfc 9830 tree size, len, fmt;
0a39fd54 9831 const char *fmt_str;
c2f47e15 9832 int nargs = call_expr_nargs (exp);
0a39fd54 9833
9834 /* Verify the required arguments in the original call. */
48e1416a 9835
c2f47e15 9836 if (nargs < 4)
0a39fd54 9837 return;
c2f47e15 9838 size = CALL_EXPR_ARG (exp, 2);
9839 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 9840
e913b5cd 9841 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 9842 return;
9843
9844 /* Check whether the format is a literal string constant. */
9845 fmt_str = c_getstr (fmt);
9846 if (fmt_str == NULL)
9847 return;
9848
d4473c84 9849 if (!init_target_chars ())
99eabcc1 9850 return;
9851
0a39fd54 9852 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 9853 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 9854 len = build_int_cstu (size_type_node, strlen (fmt_str));
9855 /* If the format is "%s" and first ... argument is a string literal,
9856 we know it too. */
c2f47e15 9857 else if (fcode == BUILT_IN_SPRINTF_CHK
9858 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 9859 {
9860 tree arg;
9861
c2f47e15 9862 if (nargs < 5)
0a39fd54 9863 return;
c2f47e15 9864 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 9865 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9866 return;
9867
9868 len = c_strlen (arg, 1);
e913b5cd 9869 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 9870 return;
9871 }
9872 else
9873 return;
9874
5aef8938 9875 /* Add one for the terminating nul. */
9876 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9877 check_sizes (OPT_Wstringop_overflow_,
9878 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
0a39fd54 9879}
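
/* For example (illustrative): in
     __builtin___sprintf_chk (d, 0, bos, "%s", "abc")
   the format is the literal "%s" and the first variadic argument is a
   string literal, so the output length of 3 plus the terminating nul
   is known and can be checked against BOS at compile time.  */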
9880
2c281b15 9881/* Emit a warning if free is called with the address of a variable. */
9882
9883static void
9884maybe_emit_free_warning (tree exp)
9885{
9886 tree arg = CALL_EXPR_ARG (exp, 0);
9887
9888 STRIP_NOPS (arg);
9889 if (TREE_CODE (arg) != ADDR_EXPR)
9890 return;
9891
9892 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 9893 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 9894 return;
9895
9896 if (SSA_VAR_P (arg))
f74ea1c2 9897 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9898 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 9899 else
f74ea1c2 9900 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9901 "%Kattempt to free a non-heap object", exp);
2c281b15 9902}
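
/* For example (illustrative): in
     int i;
     free (&i);
   the argument is the address of a non-heap object, so the
   -Wfree-nonheap-object warning above is issued.  */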
9903
c2f47e15 9904/* Fold a call to __builtin_object_size with arguments PTR and OST,
9905 if possible. */
0a39fd54 9906
f7715905 9907static tree
c2f47e15 9908fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 9909{
a6caa15f 9910 unsigned HOST_WIDE_INT bytes;
0a39fd54 9911 int object_size_type;
9912
c2f47e15 9913 if (!validate_arg (ptr, POINTER_TYPE)
9914 || !validate_arg (ost, INTEGER_TYPE))
9915 return NULL_TREE;
0a39fd54 9916
0a39fd54 9917 STRIP_NOPS (ost);
9918
9919 if (TREE_CODE (ost) != INTEGER_CST
9920 || tree_int_cst_sgn (ost) < 0
9921 || compare_tree_int (ost, 3) > 0)
c2f47e15 9922 return NULL_TREE;
0a39fd54 9923
e913b5cd 9924 object_size_type = tree_to_shwi (ost);
0a39fd54 9925
9926 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9927 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9928 and (size_t) 0 for types 2 and 3. */
9929 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 9930 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 9931
9932 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 9933 {
4e91a07b 9934 compute_builtin_object_size (ptr, object_size_type, &bytes);
6da74b21 9935 if (wi::fits_to_tree_p (bytes, size_type_node))
9936 return build_int_cstu (size_type_node, bytes);
a6caa15f 9937 }
0a39fd54 9938 else if (TREE_CODE (ptr) == SSA_NAME)
9939 {
0a39fd54 9940 /* If object size is not known yet, delay folding until
9941 later. Maybe subsequent passes will help determining
9942 it. */
4e91a07b 9943 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9944 && wi::fits_to_tree_p (bytes, size_type_node))
6da74b21 9945 return build_int_cstu (size_type_node, bytes);
0a39fd54 9946 }
9947
a6caa15f 9948 return NULL_TREE;
0a39fd54 9949}
9950
12f08300 9951/* Builtins with folding operations that operate on "..." arguments
9952 need special handling; we need to store the arguments in a convenient
9953 data structure before attempting any folding. Fortunately there are
9954 only a few builtins that fall into this category. FNDECL is the
9955 function, EXP is the CALL_EXPR for the call. */
9956
9957static tree
9958fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9959{
9960 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9961 tree ret = NULL_TREE;
9962
9963 switch (fcode)
9964 {
9965 case BUILT_IN_FPCLASSIFY:
9966 ret = fold_builtin_fpclassify (loc, args, nargs);
9967 break;
9968
9969 default:
9970 break;
9971 }
9972 if (ret)
9973 {
9974 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9975 SET_EXPR_LOCATION (ret, loc);
9976 TREE_NO_WARNING (ret) = 1;
9977 return ret;
9978 }
9979 return NULL_TREE;
9980}
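
/* For example (illustrative): a call such as
     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, 1.0)
   reaches fold_builtin_fpclassify through this wrapper and folds to
   the FP_NORMAL value.  */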
9981
99eabcc1 9982/* Initialize format string characters in the target charset. */
9983
b9ea678c 9984bool
99eabcc1 9985init_target_chars (void)
9986{
9987 static bool init;
9988 if (!init)
9989 {
9990 target_newline = lang_hooks.to_target_charset ('\n');
9991 target_percent = lang_hooks.to_target_charset ('%');
9992 target_c = lang_hooks.to_target_charset ('c');
9993 target_s = lang_hooks.to_target_charset ('s');
9994 if (target_newline == 0 || target_percent == 0 || target_c == 0
9995 || target_s == 0)
9996 return false;
9997
9998 target_percent_c[0] = target_percent;
9999 target_percent_c[1] = target_c;
10000 target_percent_c[2] = '\0';
10001
10002 target_percent_s[0] = target_percent;
10003 target_percent_s[1] = target_s;
10004 target_percent_s[2] = '\0';
10005
10006 target_percent_s_newline[0] = target_percent;
10007 target_percent_s_newline[1] = target_s;
10008 target_percent_s_newline[2] = target_newline;
10009 target_percent_s_newline[3] = '\0';
a0c938f0 10010
99eabcc1 10011 init = true;
10012 }
10013 return true;
10014}
bffb7645 10015
f0c477f2 10016/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10017 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 10018 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 10019 function assumes that you cleared the MPFR flags immediately
 10020 before calculating M, so that any flag set on entry reflects
 10021 that calculation. Return NULL_TREE if any checks fail. */
10022
10023static tree
d4473c84 10024do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 10025{
10026 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10027 overflow/underflow occurred. If -frounding-math, proceed iff the
10028 result of calling FUNC was exact. */
d4473c84 10029 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 10030 && (!flag_rounding_math || !inexact))
10031 {
10032 REAL_VALUE_TYPE rr;
10033
66fa16e6 10034 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 10035 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10036 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 10037 but the mpfr_t is not, then we underflowed in the
10038 conversion. */
776a7bab 10039 if (real_isfinite (&rr)
f0c477f2 10040 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10041 {
10042 REAL_VALUE_TYPE rmode;
10043
10044 real_convert (&rmode, TYPE_MODE (type), &rr);
10045 /* Proceed iff the specified mode can hold the value. */
10046 if (real_identical (&rmode, &rr))
10047 return build_real (type, rmode);
10048 }
10049 }
10050 return NULL_TREE;
10051}
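
/* The expected caller pattern (as in do_mpfr_lgamma_r below) is:

     mpfr_clear_flags ();
     inexact = mpfr_lgamma (m, &sg, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);

   The flags are cleared immediately before the MPFR call so that the
   overflow/underflow tests above reflect only that computation.  */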
10052
239d491a 10053/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10054 number and no overflow/underflow occurred. INEXACT is true if M
10055 was not exactly calculated. TYPE is the tree type for the result.
10056 This function assumes that you cleared the MPFR flags and then
10057 calculated M to see if anything subsequently set a flag prior to
652d9409 10058 entering this function. Return NULL_TREE if any checks fail, if
10059 FORCE_CONVERT is true, then bypass the checks. */
239d491a 10060
10061static tree
652d9409 10062do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 10063{
10064 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10065 overflow/underflow occurred. If -frounding-math, proceed iff the
10066 result of calling FUNC was exact. */
652d9409 10067 if (force_convert
10068 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10069 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10070 && (!flag_rounding_math || !inexact)))
239d491a 10071 {
10072 REAL_VALUE_TYPE re, im;
10073
b0e7c4d4 10074 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10075 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 10076 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10077 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 10078 but the mpfr_t is not, then we underflowed in the
10079 conversion. */
652d9409 10080 if (force_convert
10081 || (real_isfinite (&re) && real_isfinite (&im)
10082 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10083 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 10084 {
10085 REAL_VALUE_TYPE re_mode, im_mode;
10086
10087 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10088 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10089 /* Proceed iff the specified mode can hold the value. */
652d9409 10090 if (force_convert
10091 || (real_identical (&re_mode, &re)
10092 && real_identical (&im_mode, &im)))
239d491a 10093 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10094 build_real (TREE_TYPE (type), im_mode));
10095 }
10096 }
10097 return NULL_TREE;
10098}
239d491a 10099
e5407ca6 10100/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10101 the pointer *(ARG_QUO) and return the result. The type is taken
10102 from the type of ARG0 and is used for setting the precision of the
10103 calculation and results. */
10104
10105static tree
10106do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10107{
10108 tree const type = TREE_TYPE (arg0);
10109 tree result = NULL_TREE;
48e1416a 10110
e5407ca6 10111 STRIP_NOPS (arg0);
10112 STRIP_NOPS (arg1);
48e1416a 10113
e5407ca6 10114 /* To proceed, MPFR must exactly represent the target floating point
10115 format, which only happens when the target base equals two. */
10116 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10117 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10118 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10119 {
10120 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10121 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10122
776a7bab 10123 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 10124 {
e2eb2b7f 10125 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10126 const int prec = fmt->p;
 10127 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
e5407ca6 10128 tree result_rem;
10129 long integer_quo;
10130 mpfr_t m0, m1;
10131
10132 mpfr_inits2 (prec, m0, m1, NULL);
10133 mpfr_from_real (m0, ra0, GMP_RNDN);
10134 mpfr_from_real (m1, ra1, GMP_RNDN);
10135 mpfr_clear_flags ();
e2eb2b7f 10136 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 10137 /* Remquo is independent of the rounding mode, so pass
10138 inexact=0 to do_mpfr_ckconv(). */
10139 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10140 mpfr_clears (m0, m1, NULL);
10141 if (result_rem)
10142 {
10143 /* MPFR calculates quo in the host's long so it may
10144 return more bits in quo than the target int can hold
10145 if sizeof(host long) > sizeof(target int). This can
10146 happen even for native compilers in LP64 mode. In
10147 these cases, modulo the quo value with the largest
10148 number that the target int can hold while leaving one
10149 bit for the sign. */
10150 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10151 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10152
10153 /* Dereference the quo pointer argument. */
10154 arg_quo = build_fold_indirect_ref (arg_quo);
10155 /* Proceed iff a valid pointer type was passed in. */
10156 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10157 {
10158 /* Set the value. */
7002a1c8 10159 tree result_quo
10160 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10161 build_int_cst (TREE_TYPE (arg_quo),
10162 integer_quo));
e5407ca6 10163 TREE_SIDE_EFFECTS (result_quo) = 1;
10164 /* Combine the quo assignment with the rem. */
10165 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10166 result_quo, result_rem));
10167 }
10168 }
10169 }
10170 }
10171 return result;
10172}
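
/* For example (illustrative): with a 64-bit host long and a 32-bit
   target int, a quotient of 0x123456789 returned by mpfr_remquo is
   reduced modulo 2^31 above so that the value assigned through the
   quo pointer fits in the target int.  */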
e84da7c1 10173
10174/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10175 resulting value as a tree with type TYPE. The mpfr precision is
10176 set to the precision of TYPE. We assume that this mpfr function
10177 returns zero if the result could be calculated exactly within the
10178 requested precision. In addition, the integer pointer represented
10179 by ARG_SG will be dereferenced and set to the appropriate signgam
10180 (-1,1) value. */
10181
10182static tree
10183do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10184{
10185 tree result = NULL_TREE;
10186
10187 STRIP_NOPS (arg);
48e1416a 10188
e84da7c1 10189 /* To proceed, MPFR must exactly represent the target floating point
10190 format, which only happens when the target base equals two. Also
10191 verify ARG is a constant and that ARG_SG is an int pointer. */
10192 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10193 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10194 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10195 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10196 {
10197 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10198
10199 /* In addition to NaN and Inf, the argument cannot be zero or a
10200 negative integer. */
776a7bab 10201 if (real_isfinite (ra)
e84da7c1 10202 && ra->cl != rvc_zero
9af5ce0c 10203 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 10204 {
e2eb2b7f 10205 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10206 const int prec = fmt->p;
 10207 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
e84da7c1 10208 int inexact, sg;
10209 mpfr_t m;
10210 tree result_lg;
10211
10212 mpfr_init2 (m, prec);
10213 mpfr_from_real (m, ra, GMP_RNDN);
10214 mpfr_clear_flags ();
e2eb2b7f 10215 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 10216 result_lg = do_mpfr_ckconv (m, type, inexact);
10217 mpfr_clear (m);
10218 if (result_lg)
10219 {
10220 tree result_sg;
10221
10222 /* Dereference the arg_sg pointer argument. */
10223 arg_sg = build_fold_indirect_ref (arg_sg);
10224 /* Assign the signgam value into *arg_sg. */
10225 result_sg = fold_build2 (MODIFY_EXPR,
10226 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 10227 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 10228 TREE_SIDE_EFFECTS (result_sg) = 1;
10229 /* Combine the signgam assignment with the lgamma result. */
10230 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10231 result_sg, result_lg));
10232 }
10233 }
10234 }
10235
10236 return result;
10237}
75a70cf9 10238
c699fab8 10239/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10240 mpc function FUNC on it and return the resulting value as a tree
10241 with type TYPE. The mpfr precision is set to the precision of
10242 TYPE. We assume that function FUNC returns zero if the result
652d9409 10243 could be calculated exactly within the requested precision. If
10244 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10245 in the arguments and/or results. */
c699fab8 10246
63e89698 10247tree
652d9409 10248do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 10249 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10250{
10251 tree result = NULL_TREE;
48e1416a 10252
c699fab8 10253 STRIP_NOPS (arg0);
10254 STRIP_NOPS (arg1);
10255
10256 /* To proceed, MPFR must exactly represent the target floating point
10257 format, which only happens when the target base equals two. */
10258 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10259 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10260 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10261 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10262 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10263 {
10264 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10265 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10266 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10267 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10268
652d9409 10269 if (do_nonfinite
10270 || (real_isfinite (re0) && real_isfinite (im0)
10271 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 10272 {
10273 const struct real_format *const fmt =
10274 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10275 const int prec = fmt->p;
10276 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10277 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10278 int inexact;
10279 mpc_t m0, m1;
48e1416a 10280
c699fab8 10281 mpc_init2 (m0, prec);
10282 mpc_init2 (m1, prec);
9af5ce0c 10283 mpfr_from_real (mpc_realref (m0), re0, rnd);
10284 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10285 mpfr_from_real (mpc_realref (m1), re1, rnd);
10286 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 10287 mpfr_clear_flags ();
10288 inexact = func (m0, m0, m1, crnd);
652d9409 10289 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 10290 mpc_clear (m0);
10291 mpc_clear (m1);
10292 }
10293 }
10294
10295 return result;
10296}
239d491a 10297
75a70cf9 10298/* A wrapper function for builtin folding that prevents warnings for
10299 "statement without effect" and the like, caused by removing the
10300 call node earlier than the warning is generated. */
10301
10302tree
1a91d914 10303fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 10304{
10305 tree ret = NULL_TREE;
10306 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 10307 location_t loc = gimple_location (stmt);
75a70cf9 10308 if (fndecl
10309 && TREE_CODE (fndecl) == FUNCTION_DECL
10310 && DECL_BUILT_IN (fndecl)
10311 && !gimple_call_va_arg_pack_p (stmt))
10312 {
10313 int nargs = gimple_call_num_args (stmt);
9845fb99 10314 tree *args = (nargs > 0
10315 ? gimple_call_arg_ptr (stmt, 0)
10316 : &error_mark_node);
75a70cf9 10317
198622c0 10318 if (avoid_folding_inline_builtin (fndecl))
10319 return NULL_TREE;
75a70cf9 10320 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10321 {
9845fb99 10322 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 10323 }
10324 else
10325 {
9d884767 10326 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 10327 if (ret)
10328 {
10329 /* Propagate location information from original call to
10330 expansion of builtin. Otherwise things like
10331 maybe_emit_chk_warning, that operate on the expansion
10332 of a builtin, will use the wrong location information. */
10333 if (gimple_has_location (stmt))
10334 {
10335 tree realret = ret;
10336 if (TREE_CODE (ret) == NOP_EXPR)
10337 realret = TREE_OPERAND (ret, 0);
10338 if (CAN_HAVE_LOCATION_P (realret)
10339 && !EXPR_HAS_LOCATION (realret))
389dd41b 10340 SET_EXPR_LOCATION (realret, loc);
75a70cf9 10341 return realret;
10342 }
10343 return ret;
10344 }
10345 }
10346 }
10347 return NULL_TREE;
10348}
7bfefa9d 10349
b9a16870 10350/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 10351 and set ASMSPEC as its user assembler name. DECL must be a
10352 function decl that declares a builtin. */
10353
10354void
10355set_builtin_user_assembler_name (tree decl, const char *asmspec)
10356{
7bfefa9d 10357 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10358 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10359 && asmspec != 0);
10360
61ffc71a 10361 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 10362 set_user_assembler_name (builtin, asmspec);
61ffc71a 10363
10364 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10365 && INT_TYPE_SIZE < BITS_PER_WORD)
7bfefa9d 10366 {
44504d18 10367 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
61ffc71a 10368 set_user_assembler_libfunc ("ffs", asmspec);
44504d18 10369 set_optab_libfunc (ffs_optab, mode, "ffs");
7bfefa9d 10370 }
10371}
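
/* Illustrative example (added for exposition): given a declaration
     int ffs (int) __asm__ ("hw_ffs");
   this renames the builtin's assembler name to hw_ffs; when
   INT_TYPE_SIZE < BITS_PER_WORD it also redirects the ffs optab
   libfunc so that expanders emit calls to hw_ffs.  */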
a6b74a67 10372
10373/* Return true if DECL is a builtin that expands to a constant or similarly
10374 simple code. */
10375bool
10376is_simple_builtin (tree decl)
10377{
10378 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10379 switch (DECL_FUNCTION_CODE (decl))
10380 {
10381 /* Builtins that expand to constants. */
10382 case BUILT_IN_CONSTANT_P:
10383 case BUILT_IN_EXPECT:
10384 case BUILT_IN_OBJECT_SIZE:
10385 case BUILT_IN_UNREACHABLE:
10386 /* Simple register moves or loads from stack. */
fca0886c 10387 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 10388 case BUILT_IN_RETURN_ADDRESS:
10389 case BUILT_IN_EXTRACT_RETURN_ADDR:
10390 case BUILT_IN_FROB_RETURN_ADDR:
10391 case BUILT_IN_RETURN:
10392 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10393 case BUILT_IN_FRAME_ADDRESS:
10394 case BUILT_IN_VA_END:
10395 case BUILT_IN_STACK_SAVE:
10396 case BUILT_IN_STACK_RESTORE:
10397 /* Exception state returns or moves registers around. */
10398 case BUILT_IN_EH_FILTER:
10399 case BUILT_IN_EH_POINTER:
10400 case BUILT_IN_EH_COPY_VALUES:
10401 return true;
10402
10403 default:
10404 return false;
10405 }
10406
10407 return false;
10408}
10409
 10410/* Return true if DECL is a builtin that is not expensive, i.e., one that is
10411 most probably expanded inline into reasonably simple code. This is a
10412 superset of is_simple_builtin. */
10413bool
10414is_inexpensive_builtin (tree decl)
10415{
10416 if (!decl)
10417 return false;
10418 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10419 return true;
10420 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10421 switch (DECL_FUNCTION_CODE (decl))
10422 {
10423 case BUILT_IN_ABS:
10424 case BUILT_IN_ALLOCA:
581bf1c2 10425 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 10426 case BUILT_IN_BSWAP16:
a6b74a67 10427 case BUILT_IN_BSWAP32:
10428 case BUILT_IN_BSWAP64:
10429 case BUILT_IN_CLZ:
10430 case BUILT_IN_CLZIMAX:
10431 case BUILT_IN_CLZL:
10432 case BUILT_IN_CLZLL:
10433 case BUILT_IN_CTZ:
10434 case BUILT_IN_CTZIMAX:
10435 case BUILT_IN_CTZL:
10436 case BUILT_IN_CTZLL:
10437 case BUILT_IN_FFS:
10438 case BUILT_IN_FFSIMAX:
10439 case BUILT_IN_FFSL:
10440 case BUILT_IN_FFSLL:
10441 case BUILT_IN_IMAXABS:
10442 case BUILT_IN_FINITE:
10443 case BUILT_IN_FINITEF:
10444 case BUILT_IN_FINITEL:
10445 case BUILT_IN_FINITED32:
10446 case BUILT_IN_FINITED64:
10447 case BUILT_IN_FINITED128:
10448 case BUILT_IN_FPCLASSIFY:
10449 case BUILT_IN_ISFINITE:
10450 case BUILT_IN_ISINF_SIGN:
10451 case BUILT_IN_ISINF:
10452 case BUILT_IN_ISINFF:
10453 case BUILT_IN_ISINFL:
10454 case BUILT_IN_ISINFD32:
10455 case BUILT_IN_ISINFD64:
10456 case BUILT_IN_ISINFD128:
10457 case BUILT_IN_ISNAN:
10458 case BUILT_IN_ISNANF:
10459 case BUILT_IN_ISNANL:
10460 case BUILT_IN_ISNAND32:
10461 case BUILT_IN_ISNAND64:
10462 case BUILT_IN_ISNAND128:
10463 case BUILT_IN_ISNORMAL:
10464 case BUILT_IN_ISGREATER:
10465 case BUILT_IN_ISGREATEREQUAL:
10466 case BUILT_IN_ISLESS:
10467 case BUILT_IN_ISLESSEQUAL:
10468 case BUILT_IN_ISLESSGREATER:
10469 case BUILT_IN_ISUNORDERED:
10470 case BUILT_IN_VA_ARG_PACK:
10471 case BUILT_IN_VA_ARG_PACK_LEN:
10472 case BUILT_IN_VA_COPY:
10473 case BUILT_IN_TRAP:
10474 case BUILT_IN_SAVEREGS:
10475 case BUILT_IN_POPCOUNTL:
10476 case BUILT_IN_POPCOUNTLL:
10477 case BUILT_IN_POPCOUNTIMAX:
10478 case BUILT_IN_POPCOUNT:
10479 case BUILT_IN_PARITYL:
10480 case BUILT_IN_PARITYLL:
10481 case BUILT_IN_PARITYIMAX:
10482 case BUILT_IN_PARITY:
10483 case BUILT_IN_LABS:
10484 case BUILT_IN_LLABS:
10485 case BUILT_IN_PREFETCH:
ca4c3545 10486 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 10487 return true;
10488
10489 default:
10490 return is_simple_builtin (decl);
10491 }
10492
10493 return false;
10494}
507a998e 10495
10496/* Return true if T is a constant and the value cast to a target char
10497 can be represented by a host char.
 10498 Store the cast char constant in *P if so. */
10499
10500bool
10501target_char_cst_p (tree t, char *p)
10502{
10503 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10504 return false;
10505
10506 *p = (char)tree_to_uhwi (t);
10507 return true;
10508}