]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
* runtime/config/sparc/os-unix-sysdep.c (__cilkrts_getticks): Adjust
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
53800dbe 1/* Expand builtin functions.
aad93da1 2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
7c2ecb89 20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
53800dbe 24#include "config.h"
25#include "system.h"
805e22b2 26#include "coretypes.h"
9ef16211 27#include "backend.h"
7c29e30e 28#include "target.h"
29#include "rtl.h"
9ef16211 30#include "tree.h"
ea36272b 31#include "memmodel.h"
9ef16211 32#include "gimple.h"
7c29e30e 33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
c296f633 36#include "tree-vrp.h"
7c29e30e 37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
7c29e30e 40#include "emit-rtl.h"
41#include "recog.h"
7c29e30e 42#include "diagnostic-core.h"
b20a8bb4 43#include "alias.h"
b20a8bb4 44#include "fold-const.h"
6c21be92 45#include "fold-const-call.h"
9ed99284 46#include "stor-layout.h"
47#include "calls.h"
48#include "varasm.h"
49#include "tree-object-size.h"
dae0b5cb 50#include "realmpfr.h"
94ea8568 51#include "cfgrtl.h"
53800dbe 52#include "except.h"
d53441c8 53#include "dojump.h"
54#include "explow.h"
d53441c8 55#include "stmt.h"
53800dbe 56#include "expr.h"
d8fc4d0b 57#include "libfuncs.h"
53800dbe 58#include "output.h"
59#include "typeclass.h"
63c62881 60#include "langhooks.h"
162719b3 61#include "value-prof.h"
3b9c3a16 62#include "builtins.h"
f9acf11a 63#include "asan.h"
d037099f 64#include "cilk.h"
058a1b7a 65#include "tree-chkp.h"
66#include "rtl-chkp.h"
1f24b8e9 67#include "internal-fn.h"
e3240774 68#include "case-cfn-macros.h"
732905bb 69#include "gimple-fold.h"
5aef8938 70#include "intl.h"
5383fb56 71
3b9c3a16 72struct target_builtins default_target_builtins;
73#if SWITCHABLE_TARGET
74struct target_builtins *this_target_builtins = &default_target_builtins;
75#endif
76
ab7943b9 77/* Define the names of the builtin function types and codes. */
96423453 78const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
9cfddb70 81#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 82const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 83{
84#include "builtins.def"
85};
ab7943b9 86
cffdfb3d 87/* Setup an array of builtin_info_type, make sure each element decl is
df94cd3b 88 initialized to NULL_TREE. */
cffdfb3d 89builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 90
0b049e15 91/* Non-zero if __builtin_constant_p should be folded right away. */
92bool force_folding_builtin_constant_p;
93
3754d046 94static rtx c_readstr (const char *, machine_mode);
aecda0d6 95static int target_char_cast (tree, char *);
d8ae1baa 96static rtx get_memory_rtx (tree, tree);
aecda0d6 97static int apply_args_size (void);
98static int apply_result_size (void);
aecda0d6 99static rtx result_vector (int, rtx);
aecda0d6 100static void expand_builtin_prefetch (tree);
101static rtx expand_builtin_apply_args (void);
102static rtx expand_builtin_apply_args_1 (void);
103static rtx expand_builtin_apply (rtx, rtx, rtx);
104static void expand_builtin_return (rtx);
105static enum type_class type_to_class (tree);
106static rtx expand_builtin_classify_type (tree);
6b43bae4 107static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 108static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 109static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 110static rtx expand_builtin_sincos (tree);
f97eea22 111static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 112static rtx expand_builtin_int_roundingfn (tree, rtx);
113static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 114static rtx expand_builtin_next_arg (void);
aecda0d6 115static rtx expand_builtin_va_start (tree);
116static rtx expand_builtin_va_end (tree);
117static rtx expand_builtin_va_copy (tree);
a65c4d64 118static rtx expand_builtin_strcmp (tree, rtx);
3754d046 119static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
8d6c6ef5 121static rtx expand_builtin_memchr (tree, rtx);
a65c4d64 122static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 123static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
124static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
4d317237 125static rtx expand_builtin_memmove (tree, rtx);
3754d046 126static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
f21337ef 127static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
48e1416a 128static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
f21337ef 129 machine_mode, int, tree);
5aef8938 130static rtx expand_builtin_strcat (tree, rtx);
a65c4d64 131static rtx expand_builtin_strcpy (tree, rtx);
132static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 133static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
4d317237 134static rtx expand_builtin_stpncpy (tree, rtx);
5aef8938 135static rtx expand_builtin_strncat (tree, rtx);
a65c4d64 136static rtx expand_builtin_strncpy (tree, rtx);
3754d046 137static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
138static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 139static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 140static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 141static rtx expand_builtin_bzero (tree);
3754d046 142static rtx expand_builtin_strlen (tree, rtx, machine_mode);
2b29cc6a 143static rtx expand_builtin_alloca (tree);
3754d046 144static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 145static rtx expand_builtin_frame_address (tree, tree);
389dd41b 146static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 147static rtx expand_builtin_expect (tree, rtx);
148static tree fold_builtin_constant_p (tree);
149static tree fold_builtin_classify_type (tree);
c7cbde74 150static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 151static tree fold_builtin_inf (location_t, tree, int);
389dd41b 152static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
184fac50 153static bool validate_arg (const_tree, enum tree_code code);
aecda0d6 154static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 155static rtx expand_builtin_signbit (tree, rtx);
389dd41b 156static tree fold_builtin_memcmp (location_t, tree, tree, tree);
389dd41b 157static tree fold_builtin_isascii (location_t, tree);
158static tree fold_builtin_toascii (location_t, tree);
159static tree fold_builtin_isdigit (location_t, tree);
160static tree fold_builtin_fabs (location_t, tree, tree);
161static tree fold_builtin_abs (location_t, tree, tree);
162static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 163 enum tree_code);
e80cc485 164static tree fold_builtin_0 (location_t, tree);
165static tree fold_builtin_1 (location_t, tree, tree);
166static tree fold_builtin_2 (location_t, tree, tree, tree);
167static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
12f08300 168static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 169
170static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
389dd41b 171static tree fold_builtin_strspn (location_t, tree, tree);
172static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 173
0a39fd54 174static rtx expand_builtin_object_size (tree);
3754d046 175static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 176 enum built_in_function);
177static void maybe_emit_chk_warning (tree, enum built_in_function);
178static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 179static void maybe_emit_free_warning (tree);
c2f47e15 180static tree fold_builtin_object_size (tree, tree);
99eabcc1 181
e788f202 182unsigned HOST_WIDE_INT target_newline;
b9ea678c 183unsigned HOST_WIDE_INT target_percent;
99eabcc1 184static unsigned HOST_WIDE_INT target_c;
185static unsigned HOST_WIDE_INT target_s;
aea88c77 186char target_percent_c[3];
b9ea678c 187char target_percent_s[3];
e788f202 188char target_percent_s_newline[4];
e5407ca6 189static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 190static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 191static void expand_builtin_sync_synchronize (void);
0a39fd54 192
7bfefa9d 193/* Return true if NAME starts with __builtin_ or __sync_. */
194
b29139ad 195static bool
1c47b3e8 196is_builtin_name (const char *name)
b6a5fc45 197{
b6a5fc45 198 if (strncmp (name, "__builtin_", 10) == 0)
199 return true;
200 if (strncmp (name, "__sync_", 7) == 0)
201 return true;
1cd6e20d 202 if (strncmp (name, "__atomic_", 9) == 0)
203 return true;
a89e6c15 204 if (flag_cilkplus
d037099f 205 && (!strcmp (name, "__cilkrts_detach")
206 || !strcmp (name, "__cilkrts_pop_frame")))
207 return true;
b6a5fc45 208 return false;
209}
4ee9c684 210
7bfefa9d 211
212/* Return true if DECL is a function symbol representing a built-in. */
213
214bool
215is_builtin_fn (tree decl)
216{
217 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
218}
219
1c47b3e8 220/* Return true if NODE should be considered for inline expansion regardless
221 of the optimization level. This means whenever a function is invoked with
222 its "internal" name, which normally contains the prefix "__builtin". */
223
ae62deea 224bool
1c47b3e8 225called_as_built_in (tree node)
226{
227 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
228 we want the name used to call the function, not the name it
229 will have. */
230 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
231 return is_builtin_name (name);
232}
233
ceea063b 234/* Compute values M and N such that M divides (address of EXP - N) and such
235 that N < M. If these numbers can be determined, store M in alignp and N in
236 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
237 *alignp and any bit-offset to *bitposp.
0d8f7716 238
239 Note that the address (and thus the alignment) computed here is based
240 on the address to which a symbol resolves, whereas DECL_ALIGN is based
241 on the address at which an object is actually located. These two
242 addresses are not always the same. For example, on ARM targets,
243 the address &foo of a Thumb function foo() has the lowest bit set,
3482bf13 244 whereas foo() itself starts on an even address.
698537d1 245
3482bf13 246 If ADDR_P is true we are taking the address of the memory reference EXP
247 and thus cannot rely on the access taking place. */
248
249static bool
250get_object_alignment_2 (tree exp, unsigned int *alignp,
251 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
698537d1 252{
98ab9e8f 253 HOST_WIDE_INT bitsize, bitpos;
254 tree offset;
3754d046 255 machine_mode mode;
292237f3 256 int unsignedp, reversep, volatilep;
c8a2b4ff 257 unsigned int align = BITS_PER_UNIT;
ceea063b 258 bool known_alignment = false;
698537d1 259
98ab9e8f 260 /* Get the innermost object and the constant (bitpos) and possibly
261 variable (offset) offset of the access. */
292237f3 262 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
b3b6e4b5 263 &unsignedp, &reversep, &volatilep);
98ab9e8f 264
265 /* Extract alignment information from the innermost object and
266 possibly adjust bitpos and offset. */
3482bf13 267 if (TREE_CODE (exp) == FUNCTION_DECL)
0d8f7716 268 {
3482bf13 269 /* Function addresses can encode extra information besides their
270 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
271 allows the low bit to be used as a virtual bit, we know
272 that the address itself must be at least 2-byte aligned. */
273 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
274 align = 2 * BITS_PER_UNIT;
0d8f7716 275 }
3482bf13 276 else if (TREE_CODE (exp) == LABEL_DECL)
277 ;
278 else if (TREE_CODE (exp) == CONST_DECL)
98ab9e8f 279 {
3482bf13 280 /* The alignment of a CONST_DECL is determined by its initializer. */
281 exp = DECL_INITIAL (exp);
98ab9e8f 282 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 283 if (CONSTANT_CLASS_P (exp))
284 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 285
3482bf13 286 known_alignment = true;
98ab9e8f 287 }
3482bf13 288 else if (DECL_P (exp))
ceea063b 289 {
3482bf13 290 align = DECL_ALIGN (exp);
ceea063b 291 known_alignment = true;
ceea063b 292 }
3482bf13 293 else if (TREE_CODE (exp) == INDIRECT_REF
294 || TREE_CODE (exp) == MEM_REF
295 || TREE_CODE (exp) == TARGET_MEM_REF)
98ab9e8f 296 {
297 tree addr = TREE_OPERAND (exp, 0);
ceea063b 298 unsigned ptr_align;
299 unsigned HOST_WIDE_INT ptr_bitpos;
ab1e78e5 300 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
ceea063b 301
ab1e78e5 302 /* If the address is explicitely aligned, handle that. */
98ab9e8f 303 if (TREE_CODE (addr) == BIT_AND_EXPR
304 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
305 {
ab1e78e5 306 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
307 ptr_bitmask *= BITS_PER_UNIT;
ac29ece2 308 align = least_bit_hwi (ptr_bitmask);
98ab9e8f 309 addr = TREE_OPERAND (addr, 0);
310 }
ceea063b 311
3482bf13 312 known_alignment
313 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
3482bf13 314 align = MAX (ptr_align, align);
315
ab1e78e5 316 /* Re-apply explicit alignment to the bitpos. */
317 ptr_bitpos &= ptr_bitmask;
318
4083990a 319 /* The alignment of the pointer operand in a TARGET_MEM_REF
320 has to take the variable offset parts into account. */
3482bf13 321 if (TREE_CODE (exp) == TARGET_MEM_REF)
153c3b50 322 {
3482bf13 323 if (TMR_INDEX (exp))
324 {
325 unsigned HOST_WIDE_INT step = 1;
326 if (TMR_STEP (exp))
f9ae6f95 327 step = TREE_INT_CST_LOW (TMR_STEP (exp));
ac29ece2 328 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
3482bf13 329 }
330 if (TMR_INDEX2 (exp))
331 align = BITS_PER_UNIT;
332 known_alignment = false;
153c3b50 333 }
ceea063b 334
3482bf13 335 /* When EXP is an actual memory reference then we can use
336 TYPE_ALIGN of a pointer indirection to derive alignment.
337 Do so only if get_pointer_alignment_1 did not reveal absolute
4083990a 338 alignment knowledge and if using that alignment would
339 improve the situation. */
700a9760 340 unsigned int talign;
4083990a 341 if (!addr_p && !known_alignment
700a9760 342 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
343 && talign > align)
344 align = talign;
4083990a 345 else
346 {
347 /* Else adjust bitpos accordingly. */
348 bitpos += ptr_bitpos;
349 if (TREE_CODE (exp) == MEM_REF
350 || TREE_CODE (exp) == TARGET_MEM_REF)
e913b5cd 351 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
4083990a 352 }
98ab9e8f 353 }
3482bf13 354 else if (TREE_CODE (exp) == STRING_CST)
153c3b50 355 {
3482bf13 356 /* STRING_CST are the only constant objects we allow to be not
357 wrapped inside a CONST_DECL. */
358 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 359 if (CONSTANT_CLASS_P (exp))
360 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 361
3482bf13 362 known_alignment = true;
98ab9e8f 363 }
98ab9e8f 364
365 /* If there is a non-constant offset part extract the maximum
366 alignment that can prevail. */
c8a2b4ff 367 if (offset)
98ab9e8f 368 {
ad464c56 369 unsigned int trailing_zeros = tree_ctz (offset);
c8a2b4ff 370 if (trailing_zeros < HOST_BITS_PER_INT)
98ab9e8f 371 {
c8a2b4ff 372 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
373 if (inner)
374 align = MIN (align, inner);
98ab9e8f 375 }
98ab9e8f 376 }
377
3482bf13 378 *alignp = align;
379 *bitposp = bitpos & (*alignp - 1);
ceea063b 380 return known_alignment;
0c883ef3 381}
382
3482bf13 383/* For a memory reference expression EXP compute values M and N such that M
384 divides (&EXP - N) and such that N < M. If these numbers can be determined,
385 store M in alignp and N in *BITPOSP and return true. Otherwise return false
386 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
387
388bool
389get_object_alignment_1 (tree exp, unsigned int *alignp,
390 unsigned HOST_WIDE_INT *bitposp)
391{
392 return get_object_alignment_2 (exp, alignp, bitposp, false);
393}
394
957d0361 395/* Return the alignment in bits of EXP, an object. */
0c883ef3 396
397unsigned int
957d0361 398get_object_alignment (tree exp)
0c883ef3 399{
400 unsigned HOST_WIDE_INT bitpos = 0;
401 unsigned int align;
402
ceea063b 403 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 404
98ab9e8f 405 /* align and bitpos now specify known low bits of the pointer.
406 ptr & (align - 1) == bitpos. */
407
408 if (bitpos != 0)
ac29ece2 409 align = least_bit_hwi (bitpos);
957d0361 410 return align;
698537d1 411}
412
ceea063b 413/* For a pointer valued expression EXP compute values M and N such that M
414 divides (EXP - N) and such that N < M. If these numbers can be determined,
3482bf13 415 store M in alignp and N in *BITPOSP and return true. Return false if
416 the results are just a conservative approximation.
53800dbe 417
ceea063b 418 If EXP is not a pointer, false is returned too. */
53800dbe 419
ceea063b 420bool
421get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 unsigned HOST_WIDE_INT *bitposp)
53800dbe 423{
153c3b50 424 STRIP_NOPS (exp);
535e2026 425
153c3b50 426 if (TREE_CODE (exp) == ADDR_EXPR)
3482bf13 427 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 alignp, bitposp, true);
906a9403 429 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
430 {
431 unsigned int align;
432 unsigned HOST_WIDE_INT bitpos;
433 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 &align, &bitpos);
435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437 else
438 {
439 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 if (trailing_zeros < HOST_BITS_PER_INT)
441 {
442 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 if (inner)
444 align = MIN (align, inner);
445 }
446 }
447 *alignp = align;
448 *bitposp = bitpos & (align - 1);
449 return res;
450 }
153c3b50 451 else if (TREE_CODE (exp) == SSA_NAME
452 && POINTER_TYPE_P (TREE_TYPE (exp)))
53800dbe 453 {
ceea063b 454 unsigned int ptr_align, ptr_misalign;
153c3b50 455 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
ceea063b 456
457 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
458 {
459 *bitposp = ptr_misalign * BITS_PER_UNIT;
460 *alignp = ptr_align * BITS_PER_UNIT;
d10da77a 461 /* Make sure to return a sensible alignment when the multiplication
462 by BITS_PER_UNIT overflowed. */
463 if (*alignp == 0)
464 *alignp = 1u << (HOST_BITS_PER_INT - 1);
3482bf13 465 /* We cannot really tell whether this result is an approximation. */
b428654a 466 return false;
ceea063b 467 }
468 else
69fbc3aa 469 {
470 *bitposp = 0;
ceea063b 471 *alignp = BITS_PER_UNIT;
472 return false;
69fbc3aa 473 }
53800dbe 474 }
0bb8b39a 475 else if (TREE_CODE (exp) == INTEGER_CST)
476 {
477 *alignp = BIGGEST_ALIGNMENT;
f9ae6f95 478 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
0bb8b39a 479 & (BIGGEST_ALIGNMENT - 1));
480 return true;
481 }
153c3b50 482
69fbc3aa 483 *bitposp = 0;
ceea063b 484 *alignp = BITS_PER_UNIT;
485 return false;
53800dbe 486}
487
69fbc3aa 488/* Return the alignment in bits of EXP, a pointer valued expression.
489 The alignment returned is, by default, the alignment of the thing that
490 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
491
492 Otherwise, look at the expression to see if we can do better, i.e., if the
493 expression is actually pointing at an object whose alignment is tighter. */
494
495unsigned int
496get_pointer_alignment (tree exp)
497{
498 unsigned HOST_WIDE_INT bitpos = 0;
499 unsigned int align;
ceea063b 500
501 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 502
503 /* align and bitpos now specify known low bits of the pointer.
504 ptr & (align - 1) == bitpos. */
505
506 if (bitpos != 0)
ac29ece2 507 align = least_bit_hwi (bitpos);
69fbc3aa 508
509 return align;
510}
511
c62d63d4 512/* Return the number of non-zero elements in the sequence
513 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
514 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
515
516static unsigned
517string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
518{
519 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
520
521 unsigned n;
522
523 if (eltsize == 1)
524 {
525 /* Optimize the common case of plain char. */
526 for (n = 0; n < maxelts; n++)
527 {
528 const char *elt = (const char*) ptr + n;
529 if (!*elt)
530 break;
531 }
532 }
533 else
534 {
535 for (n = 0; n < maxelts; n++)
536 {
537 const char *elt = (const char*) ptr + n * eltsize;
538 if (!memcmp (elt, "\0\0\0\0", eltsize))
539 break;
540 }
541 }
542 return n;
543}
544
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.  Returns NULL_TREE when the
   length cannot be determined.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  /* COND ? A : B has a known length when both arms agree (and the
     condition either has no side effects or we only want the value).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* (E1, E2): the string value is that of E2.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
665
e913b5cd 666/* Return a constant integer corresponding to target reading
8c85fcb7 667 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 668
6840589f 669static rtx
3754d046 670c_readstr (const char *str, machine_mode mode)
6840589f 671{
6840589f 672 HOST_WIDE_INT ch;
673 unsigned int i, j;
e913b5cd 674 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 675
676 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 677 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
678 / HOST_BITS_PER_WIDE_INT;
679
a12aa4cc 680 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 681 for (i = 0; i < len; i++)
682 tmp[i] = 0;
6840589f 683
6840589f 684 ch = 1;
685 for (i = 0; i < GET_MODE_SIZE (mode); i++)
686 {
687 j = i;
688 if (WORDS_BIG_ENDIAN)
689 j = GET_MODE_SIZE (mode) - i - 1;
690 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 691 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 692 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
693 j *= BITS_PER_UNIT;
7d3f6cc7 694
6840589f 695 if (ch)
696 ch = (unsigned char) str[i];
e913b5cd 697 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 698 }
ddb1be65 699
ab2c1de8 700 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 701 return immed_wide_int_const (c, mode);
6840589f 702}
703
ecc318ff 704/* Cast a target constant CST to target CHAR and if that value fits into
5206b159 705 host char type, return zero and put that value into variable pointed to by
ecc318ff 706 P. */
707
708static int
aecda0d6 709target_char_cast (tree cst, char *p)
ecc318ff 710{
711 unsigned HOST_WIDE_INT val, hostval;
712
c19686c5 713 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 714 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
715 return 1;
716
e913b5cd 717 /* Do not care if it fits or not right here. */
f9ae6f95 718 val = TREE_INT_CST_LOW (cst);
e913b5cd 719
ecc318ff 720 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
edc19fd0 721 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ecc318ff 722
723 hostval = val;
724 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
edc19fd0 725 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ecc318ff 726
727 if (val != hostval)
728 return 1;
729
730 *p = hostval;
731 return 0;
732}
733
4ee9c684 734/* Similar to save_expr, but assumes that arbitrary code is not executed
735 in between the multiple evaluations. In particular, we assume that a
736 non-addressable local variable will not be modified. */
737
738static tree
739builtin_save_expr (tree exp)
740{
f6c35aa4 741 if (TREE_CODE (exp) == SSA_NAME
742 || (TREE_ADDRESSABLE (exp) == 0
743 && (TREE_CODE (exp) == PARM_DECL
53e9c5c4 744 || (VAR_P (exp) && !TREE_STATIC (exp)))))
4ee9c684 745 return exp;
746
747 return save_expr (exp);
748}
749
53800dbe 750/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
751 times to get the address of either a higher stack frame, or a return
752 address located within it (depending on FNDECL_CODE). */
902de8ed 753
c626df3d 754static rtx
869d0ef0 755expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 756{
757 int i;
869d0ef0 758 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
3f840859 759 if (tem == NULL_RTX)
e3e15c50 760 {
3f840859 761 /* For a zero count with __builtin_return_address, we don't care what
762 frame address we return, because target-specific definitions will
763 override us. Therefore frame pointer elimination is OK, and using
764 the soft frame pointer is OK.
765
766 For a nonzero count, or a zero count with __builtin_frame_address,
767 we require a stable offset from the current frame pointer to the
768 previous one, so we must use the hard frame pointer, and
769 we must disable frame pointer elimination. */
770 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
771 tem = frame_pointer_rtx;
772 else
773 {
774 tem = hard_frame_pointer_rtx;
e3e15c50 775
3f840859 776 /* Tell reload not to eliminate the frame pointer. */
777 crtl->accesses_prior_frames = 1;
778 }
e3e15c50 779 }
869d0ef0 780
53800dbe 781 if (count > 0)
782 SETUP_FRAME_ADDRESSES ();
53800dbe 783
3a69c60c 784 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 785 register. There is no way to access it off of the current frame
786 pointer, but it can be accessed off the previous frame pointer by
787 reading the value from the register window save area. */
a26d6c60 788 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 789 count--;
53800dbe 790
791 /* Scan back COUNT frames to the specified frame. */
792 for (i = 0; i < count; i++)
793 {
794 /* Assume the dynamic chain pointer is in the word that the
795 frame address points to, unless otherwise specified. */
53800dbe 796 tem = DYNAMIC_CHAIN_ADDRESS (tem);
53800dbe 797 tem = memory_address (Pmode, tem);
00060fc2 798 tem = gen_frame_mem (Pmode, tem);
83fc1478 799 tem = copy_to_reg (tem);
53800dbe 800 }
801
3a69c60c 802 /* For __builtin_frame_address, return what we've got. But, on
803 the SPARC for example, we may have to add a bias. */
53800dbe 804 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 805 return FRAME_ADDR_RTX (tem);
53800dbe 806
3a69c60c 807 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 808#ifdef RETURN_ADDR_RTX
809 tem = RETURN_ADDR_RTX (count, tem);
810#else
811 tem = memory_address (Pmode,
29c05e22 812 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 813 tem = gen_frame_mem (Pmode, tem);
53800dbe 814#endif
815 return tem;
816}
817
f7c44134 818/* Alias set used for setjmp buffer. */
32c2fdea 819static alias_set_type setjmp_alias_set = -1;
f7c44134 820
6b7f6858 821/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 822 return to RECEIVER_LABEL. This is also called directly by the SJLJ
823 exception handling code. */
53800dbe 824
6b7f6858 825void
aecda0d6 826expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 827{
3754d046 828 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 829 rtx stack_save;
f7c44134 830 rtx mem;
53800dbe 831
f7c44134 832 if (setjmp_alias_set == -1)
833 setjmp_alias_set = new_alias_set ();
834
85d654dd 835 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 836
37ae8504 837 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 838
6b7f6858 839 /* We store the frame pointer and the address of receiver_label in
840 the buffer and use the rest of it for the stack save area, which
841 is machine-dependent. */
53800dbe 842
f7c44134 843 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 844 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 845 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 846
29c05e22 847 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
848 GET_MODE_SIZE (Pmode))),
ab6ab77e 849 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 850
851 emit_move_insn (validize_mem (mem),
6b7f6858 852 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 853
854 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 855 plus_constant (Pmode, buf_addr,
53800dbe 856 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 857 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 858 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 859
860 /* If there is further processing to do, do it. */
a3c81e61 861 if (targetm.have_builtin_setjmp_setup ())
862 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 863
29f09705 864 /* We have a nonlocal label. */
18d50ae6 865 cfun->has_nonlocal_label = 1;
6b7f6858 866}
53800dbe 867
2c8a1497 868/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 869 also called directly by the SJLJ exception handling code.
870 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
6b7f6858 871
void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  Targets with their own nonlocal_goto pattern
     are assumed to handle this themselves.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  /* Give the target a chance to emit receiver-specific code: first the
     setjmp-specific hook (only when this really is a setjmp receiver,
     i.e. RECEIVER_LABEL is non-null), then the generic nonlocal-goto
     receiver hook; otherwise emit nothing.  */
  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
53800dbe 948
53800dbe 949/* __builtin_longjmp is passed a pointer to an array of five words (not
950 all will be used on all machines). It operates similarly to the C
951 library function of the same name, but is more efficient. Much of
2c8a1497 952 the code below is copied from the handling of non-local gotos. */
53800dbe 953
static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember where expansion starts so the backward scan below can
     assert it finds the jump we emit.  */
  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      /* The buffer layout mirrors expand_builtin_setjmp_setup:
	 word 0 = frame pointer, word 1 = receiver label,
	 word 2 onward = nonlocal stack save area.  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore FP before SP: the stack restore may be relative to
	     the restored frame.  */
	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1033
0e80b01d 1034static inline bool
1035more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1036{
1037 return (iter->i < iter->n);
1038}
1039
1040/* This function validates the types of a function call argument list
1041 against a specified list of tree_codes. If the last specifier is a 0,
5cfa3fc8 1042 that represents an ellipsis, otherwise the last specifier must be a
0e80b01d 1043 VOID_TYPE. */
1044
1045static bool
1046validate_arglist (const_tree callexpr, ...)
1047{
1048 enum tree_code code;
1049 bool res = 0;
1050 va_list ap;
1051 const_call_expr_arg_iterator iter;
1052 const_tree arg;
1053
1054 va_start (ap, callexpr);
1055 init_const_call_expr_arg_iterator (callexpr, &iter);
1056
5cfa3fc8 1057 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
184fac50 1058 tree fn = CALL_EXPR_FN (callexpr);
1059 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
5cfa3fc8 1060
1061 for (unsigned argno = 1; ; ++argno)
0e80b01d 1062 {
1063 code = (enum tree_code) va_arg (ap, int);
5cfa3fc8 1064
0e80b01d 1065 switch (code)
1066 {
1067 case 0:
1068 /* This signifies an ellipses, any further arguments are all ok. */
1069 res = true;
1070 goto end;
1071 case VOID_TYPE:
1072 /* This signifies an endlink, if no arguments remain, return
1073 true, otherwise return false. */
1074 res = !more_const_call_expr_args_p (&iter);
1075 goto end;
5cfa3fc8 1076 case POINTER_TYPE:
1077 /* The actual argument must be nonnull when either the whole
1078 called function has been declared nonnull, or when the formal
1079 argument corresponding to the actual argument has been. */
184fac50 1080 if (argmap
1081 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1082 {
1083 arg = next_const_call_expr_arg (&iter);
1084 if (!validate_arg (arg, code) || integer_zerop (arg))
1085 goto end;
1086 break;
1087 }
5cfa3fc8 1088 /* FALLTHRU */
0e80b01d 1089 default:
1090 /* If no parameters remain or the parameter's code does not
1091 match the specified code, return false. Otherwise continue
1092 checking any remaining arguments. */
1093 arg = next_const_call_expr_arg (&iter);
184fac50 1094 if (!validate_arg (arg, code))
0e80b01d 1095 goto end;
1096 break;
1097 }
1098 }
0e80b01d 1099
1100 /* We need gotos here since we can only have one VA_CLOSE in a
1101 function. */
1102 end: ;
1103 va_end (ap);
1104
5cfa3fc8 1105 BITMAP_FREE (argmap);
1106
0e80b01d 1107 return res;
1108}
1109
4ee9c684 1110/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1111 and the address of the save area. */
1112
static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* The save area holds the frame pointer in its first word and the
     stack save data starting one pointer further in.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1190
843d08a9 1191/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1192 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1193 It updates the stack pointer in that block to the current value. This is
1194 also called directly by the SJLJ exception handling code. */
843d08a9 1195
97354ae4 1196void
843d08a9 1197expand_builtin_update_setjmp_buf (rtx buf_addr)
1198{
3754d046 1199 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
d1ff492e 1200 rtx stack_save
843d08a9 1201 = gen_rtx_MEM (sa_mode,
1202 memory_address
1203 (sa_mode,
29c05e22 1204 plus_constant (Pmode, buf_addr,
1205 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1206
e9c97615 1207 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1208}
1209
5e3608d8 1210/* Expand a call to __builtin_prefetch. For a target that does not support
1211 data prefetch, evaluate the memory address argument in case it has side
1212 effects. */
1213
static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  Expand it even if the prefetch itself is
     dropped, so any side effects of the address computation happen.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1287
f7c44134 1288/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1289 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1290 the maximum length of the block of memory that might be accessed or
1291 NULL if unknown. */
f7c44134 1292
static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF's address is acceptable, use it for MEM's attributes
     directly.  Otherwise, try to get the base object from the original
     address we got, and build an all-aliasing unknown-sized access to
     that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  /* Force conservative aliasing: builtin string operations may touch
     anything.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
1342\f
1343/* Built-in functions to perform an untyped call and return. */
1344
3b9c3a16 1345#define apply_args_mode \
1346 (this_target_builtins->x_apply_args_mode)
1347#define apply_result_mode \
1348 (this_target_builtins->x_apply_result_mode)
53800dbe 1349
53800dbe 1350/* Return the size required for the block returned by __builtin_apply_args,
1351 and initialize apply_args_mode. */
1352
1353static int
aecda0d6 1354apply_args_size (void)
53800dbe 1355{
1356 static int size = -1;
58e9ce8f 1357 int align;
1358 unsigned int regno;
3754d046 1359 machine_mode mode;
53800dbe 1360
1361 /* The values computed by this function never change. */
1362 if (size < 0)
1363 {
1364 /* The first value is the incoming arg-pointer. */
1365 size = GET_MODE_SIZE (Pmode);
1366
1367 /* The second value is the structure value address unless this is
1368 passed as an "invisible" first argument. */
6812c89e 1369 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1370 size += GET_MODE_SIZE (Pmode);
1371
1372 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1373 if (FUNCTION_ARG_REGNO_P (regno))
1374 {
4bac51c9 1375 mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1376
64db345d 1377 gcc_assert (mode != VOIDmode);
53800dbe 1378
1379 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1380 if (size % align != 0)
1381 size = CEIL (size, align) * align;
53800dbe 1382 size += GET_MODE_SIZE (mode);
1383 apply_args_mode[regno] = mode;
1384 }
1385 else
1386 {
1387 apply_args_mode[regno] = VOIDmode;
53800dbe 1388 }
1389 }
1390 return size;
1391}
1392
1393/* Return the size required for the block returned by __builtin_apply,
1394 and initialize apply_result_mode. */
1395
1396static int
aecda0d6 1397apply_result_size (void)
53800dbe 1398{
1399 static int size = -1;
1400 int align, regno;
3754d046 1401 machine_mode mode;
53800dbe 1402
1403 /* The values computed by this function never change. */
1404 if (size < 0)
1405 {
1406 size = 0;
1407
1408 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1409 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1410 {
4bac51c9 1411 mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1412
64db345d 1413 gcc_assert (mode != VOIDmode);
53800dbe 1414
1415 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1416 if (size % align != 0)
1417 size = CEIL (size, align) * align;
1418 size += GET_MODE_SIZE (mode);
1419 apply_result_mode[regno] = mode;
1420 }
1421 else
1422 apply_result_mode[regno] = VOIDmode;
1423
1424 /* Allow targets that use untyped_call and untyped_return to override
1425 the size so that machine-specific information can be stored here. */
1426#ifdef APPLY_RESULT_SIZE
1427 size = APPLY_RESULT_SIZE;
1428#endif
1429 }
1430 return size;
1431}
1432
53800dbe 1433/* Create a vector describing the result block RESULT. If SAVEP is true,
1434 the result block is used to save the values; otherwise it is used to
1435 restore the values. */
1436
1437static rtx
aecda0d6 1438result_vector (int savep, rtx result)
53800dbe 1439{
1440 int regno, size, align, nelts;
3754d046 1441 machine_mode mode;
53800dbe 1442 rtx reg, mem;
364c0c59 1443 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1444
53800dbe 1445 size = nelts = 0;
1446 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1447 if ((mode = apply_result_mode[regno]) != VOIDmode)
1448 {
1449 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1450 if (size % align != 0)
1451 size = CEIL (size, align) * align;
1452 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1453 mem = adjust_address (result, mode, size);
53800dbe 1454 savevec[nelts++] = (savep
d1f9b275 1455 ? gen_rtx_SET (mem, reg)
1456 : gen_rtx_SET (reg, mem));
53800dbe 1457 size += GET_MODE_SIZE (mode);
1458 }
1459 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1460}
53800dbe 1461
1462/* Save the state required to perform an untyped call with the same
1463 arguments as were passed to the current function. */
1464
1465static rtx
aecda0d6 1466expand_builtin_apply_args_1 (void)
53800dbe 1467{
1c7e61a7 1468 rtx registers, tem;
53800dbe 1469 int size, align, regno;
3754d046 1470 machine_mode mode;
6812c89e 1471 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1472
1473 /* Create a block where the arg-pointer, structure value address,
1474 and argument registers can be saved. */
1475 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1476
1477 /* Walk past the arg-pointer and structure value address. */
1478 size = GET_MODE_SIZE (Pmode);
6812c89e 1479 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1480 size += GET_MODE_SIZE (Pmode);
1481
1482 /* Save each register used in calling a function to the block. */
1483 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1484 if ((mode = apply_args_mode[regno]) != VOIDmode)
1485 {
53800dbe 1486 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1487 if (size % align != 0)
1488 size = CEIL (size, align) * align;
1489
1490 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1491
e513d163 1492 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1493 size += GET_MODE_SIZE (mode);
1494 }
1495
1496 /* Save the arg pointer to the block. */
27a7a23a 1497 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1498 /* We need the pointer as the caller actually passed them to us, not
9d4b544c 1499 as we might have pretended they were passed. Make sure it's a valid
1500 operand, as emit_move_insn isn't expected to handle a PLUS. */
3764c94e 1501 if (STACK_GROWS_DOWNWARD)
1502 tem
1503 = force_operand (plus_constant (Pmode, tem,
1504 crtl->args.pretend_args_size),
1505 NULL_RTX);
1c7e61a7 1506 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1507
53800dbe 1508 size = GET_MODE_SIZE (Pmode);
1509
1510 /* Save the structure value address unless this is passed as an
1511 "invisible" first argument. */
45550790 1512 if (struct_incoming_value)
53800dbe 1513 {
e513d163 1514 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1515 copy_to_reg (struct_incoming_value));
53800dbe 1516 size += GET_MODE_SIZE (Pmode);
1517 }
1518
1519 /* Return the address of the block. */
1520 return copy_addr_to_reg (XEXP (registers, 0));
1521}
1522
1523/* __builtin_apply_args returns block of memory allocated on
1524 the stack into which is stored the arg pointer, structure
1525 value address, static chain, and all the registers that might
1526 possibly be used in performing a function call. The code is
1527 moved to the start of the function so the incoming values are
1528 saved. */
27d0c333 1529
static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Expand the register-saving code in a detached sequence so it can
       be spliced in at the right place below.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1566
1567/* Perform an untyped call and save the state required to perform an
1568 untyped return of whatever value was returned by the given function. */
1569
1570static rtx
aecda0d6 1571expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1572{
1573 int size, align, regno;
3754d046 1574 machine_mode mode;
1e0c0b35 1575 rtx incoming_args, result, reg, dest, src;
1576 rtx_call_insn *call_insn;
53800dbe 1577 rtx old_stack_level = 0;
1578 rtx call_fusage = 0;
6812c89e 1579 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1580
85d654dd 1581 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1582
53800dbe 1583 /* Create a block where the return registers can be saved. */
1584 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1585
53800dbe 1586 /* Fetch the arg pointer from the ARGUMENTS block. */
1587 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1588 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1589 if (!STACK_GROWS_DOWNWARD)
1590 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1591 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1592
04a46d40 1593 /* Push a new argument block and copy the arguments. Do not allow
1594 the (potential) memcpy call below to interfere with our stack
1595 manipulations. */
53800dbe 1596 do_pending_stack_adjust ();
04a46d40 1597 NO_DEFER_POP;
53800dbe 1598
2358393e 1599 /* Save the stack with nonlocal if available. */
71512c05 1600 if (targetm.have_save_stack_nonlocal ())
e9c97615 1601 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1602 else
e9c97615 1603 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1604
59647703 1605 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1606 arguments to the outgoing arguments address. We can pass TRUE
1607 as the 4th argument because we just saved the stack pointer
1608 and will restore it right after the call. */
5be42b39 1609 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1610
1611 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1612 may have already set current_function_calls_alloca to true.
1613 current_function_calls_alloca won't be set if argsize is zero,
1614 so we have to guarantee need_drap is true here. */
1615 if (SUPPORTS_STACK_ALIGNMENT)
1616 crtl->need_drap = true;
1617
59647703 1618 dest = virtual_outgoing_args_rtx;
3764c94e 1619 if (!STACK_GROWS_DOWNWARD)
1620 {
1621 if (CONST_INT_P (argsize))
1622 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1623 else
1624 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1625 }
2a631e19 1626 dest = gen_rtx_MEM (BLKmode, dest);
1627 set_mem_align (dest, PARM_BOUNDARY);
1628 src = gen_rtx_MEM (BLKmode, incoming_args);
1629 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1630 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1631
1632 /* Refer to the argument block. */
1633 apply_args_size ();
1634 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1635 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1636
1637 /* Walk past the arg-pointer and structure value address. */
1638 size = GET_MODE_SIZE (Pmode);
45550790 1639 if (struct_value)
53800dbe 1640 size += GET_MODE_SIZE (Pmode);
1641
1642 /* Restore each of the registers previously saved. Make USE insns
1643 for each of these registers for use in making the call. */
1644 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1645 if ((mode = apply_args_mode[regno]) != VOIDmode)
1646 {
1647 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1648 if (size % align != 0)
1649 size = CEIL (size, align) * align;
1650 reg = gen_rtx_REG (mode, regno);
e513d163 1651 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1652 use_reg (&call_fusage, reg);
1653 size += GET_MODE_SIZE (mode);
1654 }
1655
1656 /* Restore the structure value address unless this is passed as an
1657 "invisible" first argument. */
1658 size = GET_MODE_SIZE (Pmode);
45550790 1659 if (struct_value)
53800dbe 1660 {
1661 rtx value = gen_reg_rtx (Pmode);
e513d163 1662 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1663 emit_move_insn (struct_value, value);
8ad4c111 1664 if (REG_P (struct_value))
45550790 1665 use_reg (&call_fusage, struct_value);
53800dbe 1666 size += GET_MODE_SIZE (Pmode);
1667 }
1668
1669 /* All arguments and registers used for the call are set up by now! */
82c7907c 1670 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1671
1672 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1673 and we don't want to load it into a register as an optimization,
1674 because prepare_call_address already did it if it should be done. */
1675 if (GET_CODE (function) != SYMBOL_REF)
1676 function = memory_address (FUNCTION_MODE, function);
1677
1678 /* Generate the actual call instruction and save the return value. */
1d99ab0a 1679 if (targetm.have_untyped_call ())
1680 {
1681 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1682 emit_call_insn (targetm.gen_untyped_call (mem, result,
1683 result_vector (1, result)));
1684 }
7f265a08 1685 else if (targetm.have_call_value ())
53800dbe 1686 {
1687 rtx valreg = 0;
1688
1689 /* Locate the unique return register. It is not possible to
1690 express a call that sets more than one return register using
1691 call_value; use untyped_call for that. In fact, untyped_call
1692 only needs to save the return registers in the given block. */
1693 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1694 if ((mode = apply_result_mode[regno]) != VOIDmode)
1695 {
7f265a08 1696 gcc_assert (!valreg); /* have_untyped_call required. */
7d3f6cc7 1697
53800dbe 1698 valreg = gen_rtx_REG (mode, regno);
1699 }
1700
7f265a08 1701 emit_insn (targetm.gen_call_value (valreg,
1702 gen_rtx_MEM (FUNCTION_MODE, function),
1703 const0_rtx, NULL_RTX, const0_rtx));
53800dbe 1704
e513d163 1705 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1706 }
1707 else
64db345d 1708 gcc_unreachable ();
53800dbe 1709
d5f9786f 1710 /* Find the CALL insn we just emitted, and attach the register usage
1711 information. */
1712 call_insn = last_call_insn ();
1713 add_function_usage_to (call_insn, call_fusage);
53800dbe 1714
1715 /* Restore the stack. */
71512c05 1716 if (targetm.have_save_stack_nonlocal ())
e9c97615 1717 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1718 else
e9c97615 1719 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1720 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1721
04a46d40 1722 OK_DEFER_POP;
1723
53800dbe 1724 /* Return the address of the result block. */
85d654dd 1725 result = copy_addr_to_reg (XEXP (result, 0));
1726 return convert_memory_address (ptr_mode, result);
53800dbe 1727}
1728
1729/* Perform an untyped return. */
1730
1731static void
aecda0d6 1732expand_builtin_return (rtx result)
53800dbe 1733{
1734 int size, align, regno;
3754d046 1735 machine_mode mode;
53800dbe 1736 rtx reg;
57c26b3a 1737 rtx_insn *call_fusage = 0;
53800dbe 1738
85d654dd 1739 result = convert_memory_address (Pmode, result);
726ec87c 1740
53800dbe 1741 apply_result_size ();
1742 result = gen_rtx_MEM (BLKmode, result);
1743
1d99ab0a 1744 if (targetm.have_untyped_return ())
53800dbe 1745 {
1d99ab0a 1746 rtx vector = result_vector (0, result);
1747 emit_jump_insn (targetm.gen_untyped_return (result, vector));
53800dbe 1748 emit_barrier ();
1749 return;
1750 }
53800dbe 1751
1752 /* Restore the return value and note that each value is used. */
1753 size = 0;
1754 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1755 if ((mode = apply_result_mode[regno]) != VOIDmode)
1756 {
1757 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1758 if (size % align != 0)
1759 size = CEIL (size, align) * align;
1760 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1761 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1762
1763 push_to_sequence (call_fusage);
18b42941 1764 emit_use (reg);
53800dbe 1765 call_fusage = get_insns ();
1766 end_sequence ();
1767 size += GET_MODE_SIZE (mode);
1768 }
1769
1770 /* Put the USE insns before the return. */
31d3e01c 1771 emit_insn (call_fusage);
53800dbe 1772
1773 /* Return whatever values was restored by jumping directly to the end
1774 of the function. */
62380d2d 1775 expand_naked_return ();
53800dbe 1776}
1777
539a3a92 1778/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1779
539a3a92 1780static enum type_class
aecda0d6 1781type_to_class (tree type)
539a3a92 1782{
1783 switch (TREE_CODE (type))
1784 {
1785 case VOID_TYPE: return void_type_class;
1786 case INTEGER_TYPE: return integer_type_class;
539a3a92 1787 case ENUMERAL_TYPE: return enumeral_type_class;
1788 case BOOLEAN_TYPE: return boolean_type_class;
1789 case POINTER_TYPE: return pointer_type_class;
1790 case REFERENCE_TYPE: return reference_type_class;
1791 case OFFSET_TYPE: return offset_type_class;
1792 case REAL_TYPE: return real_type_class;
1793 case COMPLEX_TYPE: return complex_type_class;
1794 case FUNCTION_TYPE: return function_type_class;
1795 case METHOD_TYPE: return method_type_class;
1796 case RECORD_TYPE: return record_type_class;
1797 case UNION_TYPE:
1798 case QUAL_UNION_TYPE: return union_type_class;
1799 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1800 ? string_type_class : array_type_class);
539a3a92 1801 case LANG_TYPE: return lang_type_class;
1802 default: return no_type_class;
1803 }
1804}
bf8e3599 1805
c2f47e15 1806/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1807
53800dbe 1808static rtx
c2f47e15 1809expand_builtin_classify_type (tree exp)
53800dbe 1810{
c2f47e15 1811 if (call_expr_nargs (exp))
1812 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1813 return GEN_INT (no_type_class);
1814}
1815
07976da7 1816/* This helper macro, meant to be used in mathfn_built_in below,
1817 determines which among a set of three builtin math functions is
1818 appropriate for a given type mode. The `F' and `L' cases are
1819 automatically generated from the `double' case. */
e3240774 1820#define CASE_MATHFN(MATHFN) \
1821 CASE_CFN_##MATHFN: \
1822 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1823 fcodel = BUILT_IN_##MATHFN##L ; break;
cd2656b0 1824/* Similar to above, but appends _R after any F/L suffix. */
e3240774 1825#define CASE_MATHFN_REENT(MATHFN) \
1826 case CFN_BUILT_IN_##MATHFN##_R: \
1827 case CFN_BUILT_IN_##MATHFN##F_R: \
1828 case CFN_BUILT_IN_##MATHFN##L_R: \
1829 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1830 fcodel = BUILT_IN_##MATHFN##L_R ; break;
07976da7 1831
6c21be92 1832/* Return a function equivalent to FN but operating on floating-point
1833 values of type TYPE, or END_BUILTINS if no such function exists.
e3240774 1834 This is purely an operation on function codes; it does not guarantee
1835 that the target actually has an implementation of the function. */
c319d56a 1836
6c21be92 1837static built_in_function
e3240774 1838mathfn_built_in_2 (tree type, combined_fn fn)
0a68165a 1839{
6c21be92 1840 built_in_function fcode, fcodef, fcodel;
07976da7 1841
1842 switch (fn)
1843 {
e3240774 1844 CASE_MATHFN (ACOS)
1845 CASE_MATHFN (ACOSH)
1846 CASE_MATHFN (ASIN)
1847 CASE_MATHFN (ASINH)
1848 CASE_MATHFN (ATAN)
1849 CASE_MATHFN (ATAN2)
1850 CASE_MATHFN (ATANH)
1851 CASE_MATHFN (CBRT)
1852 CASE_MATHFN (CEIL)
1853 CASE_MATHFN (CEXPI)
1854 CASE_MATHFN (COPYSIGN)
1855 CASE_MATHFN (COS)
1856 CASE_MATHFN (COSH)
1857 CASE_MATHFN (DREM)
1858 CASE_MATHFN (ERF)
1859 CASE_MATHFN (ERFC)
1860 CASE_MATHFN (EXP)
1861 CASE_MATHFN (EXP10)
1862 CASE_MATHFN (EXP2)
1863 CASE_MATHFN (EXPM1)
1864 CASE_MATHFN (FABS)
1865 CASE_MATHFN (FDIM)
1866 CASE_MATHFN (FLOOR)
1867 CASE_MATHFN (FMA)
1868 CASE_MATHFN (FMAX)
1869 CASE_MATHFN (FMIN)
1870 CASE_MATHFN (FMOD)
1871 CASE_MATHFN (FREXP)
1872 CASE_MATHFN (GAMMA)
1873 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1874 CASE_MATHFN (HUGE_VAL)
1875 CASE_MATHFN (HYPOT)
1876 CASE_MATHFN (ILOGB)
1877 CASE_MATHFN (ICEIL)
1878 CASE_MATHFN (IFLOOR)
1879 CASE_MATHFN (INF)
1880 CASE_MATHFN (IRINT)
1881 CASE_MATHFN (IROUND)
1882 CASE_MATHFN (ISINF)
1883 CASE_MATHFN (J0)
1884 CASE_MATHFN (J1)
1885 CASE_MATHFN (JN)
1886 CASE_MATHFN (LCEIL)
1887 CASE_MATHFN (LDEXP)
1888 CASE_MATHFN (LFLOOR)
1889 CASE_MATHFN (LGAMMA)
1890 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1891 CASE_MATHFN (LLCEIL)
1892 CASE_MATHFN (LLFLOOR)
1893 CASE_MATHFN (LLRINT)
1894 CASE_MATHFN (LLROUND)
1895 CASE_MATHFN (LOG)
1896 CASE_MATHFN (LOG10)
1897 CASE_MATHFN (LOG1P)
1898 CASE_MATHFN (LOG2)
1899 CASE_MATHFN (LOGB)
1900 CASE_MATHFN (LRINT)
1901 CASE_MATHFN (LROUND)
1902 CASE_MATHFN (MODF)
1903 CASE_MATHFN (NAN)
1904 CASE_MATHFN (NANS)
1905 CASE_MATHFN (NEARBYINT)
1906 CASE_MATHFN (NEXTAFTER)
1907 CASE_MATHFN (NEXTTOWARD)
1908 CASE_MATHFN (POW)
1909 CASE_MATHFN (POWI)
1910 CASE_MATHFN (POW10)
1911 CASE_MATHFN (REMAINDER)
1912 CASE_MATHFN (REMQUO)
1913 CASE_MATHFN (RINT)
1914 CASE_MATHFN (ROUND)
1915 CASE_MATHFN (SCALB)
1916 CASE_MATHFN (SCALBLN)
1917 CASE_MATHFN (SCALBN)
1918 CASE_MATHFN (SIGNBIT)
1919 CASE_MATHFN (SIGNIFICAND)
1920 CASE_MATHFN (SIN)
1921 CASE_MATHFN (SINCOS)
1922 CASE_MATHFN (SINH)
1923 CASE_MATHFN (SQRT)
1924 CASE_MATHFN (TAN)
1925 CASE_MATHFN (TANH)
1926 CASE_MATHFN (TGAMMA)
1927 CASE_MATHFN (TRUNC)
1928 CASE_MATHFN (Y0)
1929 CASE_MATHFN (Y1)
1930 CASE_MATHFN (YN)
07976da7 1931
e3240774 1932 default:
1933 return END_BUILTINS;
1934 }
07976da7 1935
96b9f485 1936 if (TYPE_MAIN_VARIANT (type) == double_type_node)
6c21be92 1937 return fcode;
96b9f485 1938 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
6c21be92 1939 return fcodef;
96b9f485 1940 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
6c21be92 1941 return fcodel;
07976da7 1942 else
6c21be92 1943 return END_BUILTINS;
1944}
1945
1946/* Return mathematic function equivalent to FN but operating directly on TYPE,
1947 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1948 otherwise use the explicit declaration. If we can't do the conversion,
1949 return null. */
1950
1951static tree
e3240774 1952mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 1953{
1954 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1955 if (fcode2 == END_BUILTINS)
c2f47e15 1956 return NULL_TREE;
b9a16870 1957
1958 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1959 return NULL_TREE;
1960
1961 return builtin_decl_explicit (fcode2);
0a68165a 1962}
1963
e3240774 1964/* Like mathfn_built_in_1, but always use the implicit array. */
c319d56a 1965
1966tree
e3240774 1967mathfn_built_in (tree type, combined_fn fn)
c319d56a 1968{
1969 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1970}
1971
e3240774 1972/* Like mathfn_built_in_1, but take a built_in_function and
1973 always use the implicit array. */
1974
1975tree
1976mathfn_built_in (tree type, enum built_in_function fn)
1977{
1978 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1979}
1980
1f24b8e9 1981/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1982 return its code, otherwise return IFN_LAST. Note that this function
1983 only tests whether the function is defined in internals.def, not whether
1984 it is actually available on the target. */
1985
1986internal_fn
1987associated_internal_fn (tree fndecl)
1988{
1989 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1990 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1991 switch (DECL_FUNCTION_CODE (fndecl))
1992 {
1993#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1994 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
c9452b7c 1995#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1996 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1f24b8e9 1997#include "internal-fn.def"
1998
1999 CASE_FLT_FN (BUILT_IN_POW10):
2000 return IFN_EXP10;
2001
2002 CASE_FLT_FN (BUILT_IN_DREM):
2003 return IFN_REMAINDER;
2004
2005 CASE_FLT_FN (BUILT_IN_SCALBN):
2006 CASE_FLT_FN (BUILT_IN_SCALBLN):
2007 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2008 return IFN_LDEXP;
2009 return IFN_LAST;
2010
2011 default:
2012 return IFN_LAST;
2013 }
2014}
2015
2016/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2017 on the current target by a call to an internal function, return the
2018 code of that internal function, otherwise return IFN_LAST. The caller
2019 is responsible for ensuring that any side-effects of the built-in
2020 call are dealt with correctly. E.g. if CALL sets errno, the caller
2021 must decide that the errno result isn't needed or make it available
2022 in some other way. */
2023
2024internal_fn
2025replacement_internal_fn (gcall *call)
2026{
2027 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2028 {
2029 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2030 if (ifn != IFN_LAST)
2031 {
2032 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 2033 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2034 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 2035 return ifn;
2036 }
2037 }
2038 return IFN_LAST;
2039}
2040
7e0713b1 2041/* Expand a call to the builtin trinary math functions (fma).
2042 Return NULL_RTX if a normal call should be emitted rather than expanding the
2043 function in-line. EXP is the expression that is a call to the builtin
2044 function; if convenient, the result should be placed in TARGET.
2045 SUBTARGET may be used as the target for computing one of EXP's
2046 operands. */
2047
2048static rtx
2049expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2050{
2051 optab builtin_optab;
1e0c0b35 2052 rtx op0, op1, op2, result;
2053 rtx_insn *insns;
7e0713b1 2054 tree fndecl = get_callee_fndecl (exp);
2055 tree arg0, arg1, arg2;
3754d046 2056 machine_mode mode;
7e0713b1 2057
2058 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2059 return NULL_RTX;
2060
2061 arg0 = CALL_EXPR_ARG (exp, 0);
2062 arg1 = CALL_EXPR_ARG (exp, 1);
2063 arg2 = CALL_EXPR_ARG (exp, 2);
2064
2065 switch (DECL_FUNCTION_CODE (fndecl))
2066 {
2067 CASE_FLT_FN (BUILT_IN_FMA):
2068 builtin_optab = fma_optab; break;
2069 default:
2070 gcc_unreachable ();
2071 }
2072
2073 /* Make a suitable register to place result in. */
2074 mode = TYPE_MODE (TREE_TYPE (exp));
2075
2076 /* Before working hard, check whether the instruction is available. */
2077 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2078 return NULL_RTX;
2079
de2e453e 2080 result = gen_reg_rtx (mode);
7e0713b1 2081
2082 /* Always stabilize the argument list. */
2083 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2084 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2085 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2086
2087 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2088 op1 = expand_normal (arg1);
2089 op2 = expand_normal (arg2);
2090
2091 start_sequence ();
2092
de2e453e 2093 /* Compute into RESULT.
2094 Set RESULT to wherever the result comes back. */
2095 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2096 result, 0);
7e0713b1 2097
2098 /* If we were unable to expand via the builtin, stop the sequence
2099 (without outputting the insns) and call to the library function
2100 with the stabilized argument list. */
de2e453e 2101 if (result == 0)
7e0713b1 2102 {
2103 end_sequence ();
2104 return expand_call (exp, target, target == const0_rtx);
2105 }
2106
2107 /* Output the entire sequence. */
2108 insns = get_insns ();
2109 end_sequence ();
2110 emit_insn (insns);
2111
de2e453e 2112 return result;
7e0713b1 2113}
2114
6b43bae4 2115/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2116 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2117 function in-line. EXP is the expression that is a call to the builtin
2118 function; if convenient, the result should be placed in TARGET.
2119 SUBTARGET may be used as the target for computing one of EXP's
2120 operands. */
2121
2122static rtx
2123expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2124{
2125 optab builtin_optab;
1e0c0b35 2126 rtx op0;
2127 rtx_insn *insns;
6b43bae4 2128 tree fndecl = get_callee_fndecl (exp);
3754d046 2129 machine_mode mode;
abfea505 2130 tree arg;
6b43bae4 2131
c2f47e15 2132 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2133 return NULL_RTX;
6b43bae4 2134
c2f47e15 2135 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2136
2137 switch (DECL_FUNCTION_CODE (fndecl))
2138 {
4f35b1fc 2139 CASE_FLT_FN (BUILT_IN_SIN):
2140 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2141 builtin_optab = sincos_optab; break;
2142 default:
64db345d 2143 gcc_unreachable ();
6b43bae4 2144 }
2145
2146 /* Make a suitable register to place result in. */
2147 mode = TYPE_MODE (TREE_TYPE (exp));
2148
6b43bae4 2149 /* Check if sincos insn is available, otherwise fallback
0bed3869 2150 to sin or cos insn. */
d6bf3b14 2151 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2152 switch (DECL_FUNCTION_CODE (fndecl))
2153 {
4f35b1fc 2154 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2155 builtin_optab = sin_optab; break;
4f35b1fc 2156 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2157 builtin_optab = cos_optab; break;
2158 default:
64db345d 2159 gcc_unreachable ();
6b43bae4 2160 }
6b43bae4 2161
2162 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2163 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2164 {
de2e453e 2165 rtx result = gen_reg_rtx (mode);
6b43bae4 2166
2167 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2168 need to expand the argument again. This way, we will not perform
2169 side-effects more the once. */
abfea505 2170 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2171
1db6d067 2172 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2173
6b43bae4 2174 start_sequence ();
2175
de2e453e 2176 /* Compute into RESULT.
2177 Set RESULT to wherever the result comes back. */
6b43bae4 2178 if (builtin_optab == sincos_optab)
2179 {
de2e453e 2180 int ok;
7d3f6cc7 2181
6b43bae4 2182 switch (DECL_FUNCTION_CODE (fndecl))
2183 {
4f35b1fc 2184 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2185 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2186 break;
4f35b1fc 2187 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2188 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2189 break;
2190 default:
64db345d 2191 gcc_unreachable ();
6b43bae4 2192 }
de2e453e 2193 gcc_assert (ok);
6b43bae4 2194 }
2195 else
de2e453e 2196 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2197
de2e453e 2198 if (result != 0)
6b43bae4 2199 {
6b43bae4 2200 /* Output the entire sequence. */
2201 insns = get_insns ();
2202 end_sequence ();
2203 emit_insn (insns);
de2e453e 2204 return result;
6b43bae4 2205 }
2206
2207 /* If we were unable to expand via the builtin, stop the sequence
2208 (without outputting the insns) and call to the library function
2209 with the stabilized argument list. */
2210 end_sequence ();
2211 }
2212
de2e453e 2213 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2214}
2215
a65c4d64 2216/* Given an interclass math builtin decl FNDECL and it's argument ARG
2217 return an RTL instruction code that implements the functionality.
2218 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2219
a65c4d64 2220static enum insn_code
2221interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2222{
a65c4d64 2223 bool errno_set = false;
6cdd383a 2224 optab builtin_optab = unknown_optab;
3754d046 2225 machine_mode mode;
a67a90e5 2226
2227 switch (DECL_FUNCTION_CODE (fndecl))
2228 {
2229 CASE_FLT_FN (BUILT_IN_ILOGB):
12f08300 2230 errno_set = true; builtin_optab = ilogb_optab; break;
2231 CASE_FLT_FN (BUILT_IN_ISINF):
2232 builtin_optab = isinf_optab; break;
2233 case BUILT_IN_ISNORMAL:
2234 case BUILT_IN_ISFINITE:
2235 CASE_FLT_FN (BUILT_IN_FINITE):
2236 case BUILT_IN_FINITED32:
2237 case BUILT_IN_FINITED64:
2238 case BUILT_IN_FINITED128:
2239 case BUILT_IN_ISINFD32:
2240 case BUILT_IN_ISINFD64:
2241 case BUILT_IN_ISINFD128:
2242 /* These builtins have no optabs (yet). */
cde061c1 2243 break;
a67a90e5 2244 default:
2245 gcc_unreachable ();
2246 }
2247
2248 /* There's no easy way to detect the case we need to set EDOM. */
2249 if (flag_errno_math && errno_set)
a65c4d64 2250 return CODE_FOR_nothing;
a67a90e5 2251
2252 /* Optab mode depends on the mode of the input argument. */
2253 mode = TYPE_MODE (TREE_TYPE (arg));
2254
cde061c1 2255 if (builtin_optab)
d6bf3b14 2256 return optab_handler (builtin_optab, mode);
a65c4d64 2257 return CODE_FOR_nothing;
2258}
2259
2260/* Expand a call to one of the builtin math functions that operate on
12f08300 2261 floating point argument and output an integer result (ilogb, isinf,
2262 isnan, etc).
a65c4d64 2263 Return 0 if a normal call should be emitted rather than expanding the
2264 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2265 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2266
2267static rtx
f97eea22 2268expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2269{
2270 enum insn_code icode = CODE_FOR_nothing;
2271 rtx op0;
2272 tree fndecl = get_callee_fndecl (exp);
3754d046 2273 machine_mode mode;
a65c4d64 2274 tree arg;
2275
2276 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2277 return NULL_RTX;
2278
2279 arg = CALL_EXPR_ARG (exp, 0);
2280 icode = interclass_mathfn_icode (arg, fndecl);
2281 mode = TYPE_MODE (TREE_TYPE (arg));
2282
a67a90e5 2283 if (icode != CODE_FOR_nothing)
2284 {
8786db1e 2285 struct expand_operand ops[1];
1e0c0b35 2286 rtx_insn *last = get_last_insn ();
4e2a2fb4 2287 tree orig_arg = arg;
a67a90e5 2288
2289 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2290 need to expand the argument again. This way, we will not perform
2291 side-effects more the once. */
abfea505 2292 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2293
f97eea22 2294 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2295
2296 if (mode != GET_MODE (op0))
2297 op0 = convert_to_mode (mode, op0, 0);
2298
8786db1e 2299 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2300 if (maybe_legitimize_operands (icode, 0, 1, ops)
2301 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2302 return ops[0].value;
2303
4e2a2fb4 2304 delete_insns_since (last);
2305 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2306 }
2307
a65c4d64 2308 return NULL_RTX;
a67a90e5 2309}
2310
c3147c1a 2311/* Expand a call to the builtin sincos math function.
c2f47e15 2312 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2313 function in-line. EXP is the expression that is a call to the builtin
2314 function. */
2315
2316static rtx
2317expand_builtin_sincos (tree exp)
2318{
2319 rtx op0, op1, op2, target1, target2;
3754d046 2320 machine_mode mode;
c3147c1a 2321 tree arg, sinp, cosp;
2322 int result;
389dd41b 2323 location_t loc = EXPR_LOCATION (exp);
be5575b2 2324 tree alias_type, alias_off;
c3147c1a 2325
c2f47e15 2326 if (!validate_arglist (exp, REAL_TYPE,
2327 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2328 return NULL_RTX;
c3147c1a 2329
c2f47e15 2330 arg = CALL_EXPR_ARG (exp, 0);
2331 sinp = CALL_EXPR_ARG (exp, 1);
2332 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2333
2334 /* Make a suitable register to place result in. */
2335 mode = TYPE_MODE (TREE_TYPE (arg));
2336
2337 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2338 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2339 return NULL_RTX;
2340
2341 target1 = gen_reg_rtx (mode);
2342 target2 = gen_reg_rtx (mode);
2343
8ec3c5c2 2344 op0 = expand_normal (arg);
be5575b2 2345 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2346 alias_off = build_int_cst (alias_type, 0);
2347 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2348 sinp, alias_off));
2349 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2350 cosp, alias_off));
c3147c1a 2351
2352 /* Compute into target1 and target2.
2353 Set TARGET to wherever the result comes back. */
2354 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2355 gcc_assert (result);
2356
2357 /* Move target1 and target2 to the memory locations indicated
2358 by op1 and op2. */
2359 emit_move_insn (op1, target1);
2360 emit_move_insn (op2, target2);
2361
2362 return const0_rtx;
2363}
2364
d735c391 2365/* Expand a call to the internal cexpi builtin to the sincos math function.
2366 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2367 the result should be placed in TARGET. */
d735c391 2368
2369static rtx
f97eea22 2370expand_builtin_cexpi (tree exp, rtx target)
d735c391 2371{
2372 tree fndecl = get_callee_fndecl (exp);
d735c391 2373 tree arg, type;
3754d046 2374 machine_mode mode;
d735c391 2375 rtx op0, op1, op2;
389dd41b 2376 location_t loc = EXPR_LOCATION (exp);
d735c391 2377
c2f47e15 2378 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2379 return NULL_RTX;
d735c391 2380
c2f47e15 2381 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2382 type = TREE_TYPE (arg);
2383 mode = TYPE_MODE (TREE_TYPE (arg));
2384
2385 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2386 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2387 is only generated from sincos, cexp or if we have either of them. */
d6bf3b14 2388 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2389 {
2390 op1 = gen_reg_rtx (mode);
2391 op2 = gen_reg_rtx (mode);
2392
f97eea22 2393 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2394
2395 /* Compute into op1 and op2. */
2396 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2397 }
30f690e0 2398 else if (targetm.libc_has_function (function_sincos))
d735c391 2399 {
c2f47e15 2400 tree call, fn = NULL_TREE;
d735c391 2401 tree top1, top2;
2402 rtx op1a, op2a;
2403
2404 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2405 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2406 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2407 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2408 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2409 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2410 else
2411 gcc_unreachable ();
48e1416a 2412
0ab48139 2413 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2414 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2415 op1a = copy_addr_to_reg (XEXP (op1, 0));
2416 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2417 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2418 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2419
d735c391 2420 /* Make sure not to fold the sincos call again. */
2421 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2422 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2423 call, 3, arg, top1, top2));
d735c391 2424 }
18b8d8ae 2425 else
2426 {
0ecbc158 2427 tree call, fn = NULL_TREE, narg;
18b8d8ae 2428 tree ctype = build_complex_type (type);
2429
0ecbc158 2430 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2431 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2432 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2433 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2434 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2435 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2436 else
2437 gcc_unreachable ();
fc0dfa6e 2438
2439 /* If we don't have a decl for cexp create one. This is the
2440 friendliest fallback if the user calls __builtin_cexpi
2441 without full target C99 function support. */
2442 if (fn == NULL_TREE)
2443 {
2444 tree fntype;
2445 const char *name = NULL;
2446
2447 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2448 name = "cexpf";
2449 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2450 name = "cexp";
2451 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2452 name = "cexpl";
2453
2454 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2455 fn = build_fn_decl (name, fntype);
2456 }
2457
389dd41b 2458 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2459 build_real (type, dconst0), arg);
2460
2461 /* Make sure not to fold the cexp call again. */
2462 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2463 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2464 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2465 }
d735c391 2466
2467 /* Now build the proper return type. */
2468 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2469 make_tree (TREE_TYPE (arg), op2),
2470 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2471 target, VOIDmode, EXPAND_NORMAL);
d735c391 2472}
2473
a65c4d64 2474/* Conveniently construct a function call expression. FNDECL names the
2475 function to be called, N is the number of arguments, and the "..."
2476 parameters are the argument expressions. Unlike build_call_exr
2477 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2478
2479static tree
2480build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2481{
2482 va_list ap;
2483 tree fntype = TREE_TYPE (fndecl);
2484 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2485
2486 va_start (ap, n);
2487 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2488 va_end (ap);
2489 SET_EXPR_LOCATION (fn, loc);
2490 return fn;
2491}
a65c4d64 2492
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to the optab to try first and the math builtin to
     fall back on if the optab has no handler.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      /* Pick the libm name matching the argument's precision.  */
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* ARG was stabilized above, so expanding it here again is safe.  */
  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2630
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* The I* variants deliberately fall through to the L* cases: they
     share the optab and additionally record a fallback builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns long; convert to the requested mode.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2732
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Coerce both operands into the modes the libcall expects before
     emitting it.  */
  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  /* LCT_CONST: __powi* has no side effects and reads no global memory.  */
  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
2774
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed and the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider modes until one has a strlen pattern.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in ahead of the strlen
	 insns recorded above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
2879
6840589f 2880/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2881 bytes from constant string DATA + OFFSET and return it as target
2882 constant. */
2883
2884static rtx
aecda0d6 2885builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 2886 machine_mode mode)
6840589f 2887{
2888 const char *str = (const char *) data;
2889
64db345d 2890 gcc_assert (offset >= 0
2891 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2892 <= strlen (str) + 1));
6840589f 2893
2894 return c_readstr (str + offset, mode);
2895}
2896
/* LEN specifies the length of the block of the memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      /* Constant length: all three bounds are exact.  */
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA range info if present.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine minimal size to N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
2963
/* Helper function to do the actual work for expand_builtin_memcpy.
   DEST and SRC are the pointer arguments, LEN the byte count, TARGET
   the preferred result location and EXP the original call expression.
   Return the destination address as an rtx, or NULL_RTX to make the
   caller emit a library call instead.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Profile feedback may supply a likely alignment/size for the block.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				     CALL_EXPR_TAILCALL (exp)
				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
3038
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow.  Arguments other than
   EXP may be null.  When non-null, the arguments have the following
   meaning:
   SIZE is the user-supplied size argument to the function (such as in
   memcpy(d, s, SIZE) or strncpy(d, s, SIZE).  It specifies the exact
   number of bytes to write.
   MAXLEN is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N).  It specifies the upper limit on the number
   of bytes to write.
   SRC is the source string (such as in strcpy(d, s)) when the expression
   EXP is a string function call (as opposed to a memory call like memcpy).
   As an exception, SRC can also be an integer denoting the precomputed
   size of the source string or object (for functions like memcpy).
   OBJSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
   OBJSIZE).

   When SIZE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   If the call is successfully verified as safe from buffer overflow
   the function returns true, otherwise false.  */

static bool
check_sizes (int opt, tree exp, tree size, tree maxlen, tree src, tree objsize)
{
  /* The size of the largest object is half the address space, or
     SSIZE_MAX.  (This is way too permissive.)  */
  tree maxobjsize = TYPE_MAX_VALUE (ssizetype);

  /* Length of the source sequence, when determinable.  */
  tree slen = NULL_TREE;

  /* Scratch range; reused for source length, SIZE, and MAXLEN below.  */
  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (src)
    {
      /* SRC is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (src)))
	{
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXLEN add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXLEN as appropriate.  */
	  get_range_strlen (src, range);
	  if (range[0] && (!maxlen || TREE_CODE (maxlen) == INTEGER_CST))
	    {
	      if (maxlen && tree_int_cst_le (maxlen, range[0]))
		range[0] = range[1] = maxlen;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxlen && tree_int_cst_le (maxlen, range[1]))
		range[1] = maxlen;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = src;
    }

  if (!size && !maxlen)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set SIZE to it.  */
      if (!range[0])
	size = slen;
    }

  if (!objsize)
    objsize = maxobjsize;

  /* The SIZE is exact if it's non-null, constant, and in range of
     unsigned HOST_WIDE_INT.  */
  bool exactsize = size && tree_fits_uhwi_p (size);

  if (size)
    get_size_range (size, range);

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      if (range[0] == range[1])
	warning_at (loc, opt,
		    "%K%qD specified size %E "
		    "exceeds maximum object size %E",
		    exp, get_callee_fndecl (exp), range[0], maxobjsize);
      else
	warning_at (loc, opt,
		    "%K%qD specified size between %E and %E "
		    "exceeds maximum object size %E",
		    exp, get_callee_fndecl (exp),
		    range[0], range[1], maxobjsize);
      return false;
    }

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactsize || integer_all_onesp (size))
    {
      if (range[0]
	  && ((tree_fits_uhwi_p (objsize)
	       && tree_int_cst_lt (objsize, range[0]))
	      || (tree_fits_uhwi_p (size)
		  && tree_int_cst_lt (size, range[0]))))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (size == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, get_callee_fndecl (exp), range[0], objsize);
	    }
	  else if (tree_int_cst_equal (range[0], range[1]))
	    warning_at (loc, opt,
			(integer_onep (range[0])
			 ? G_("%K%qD writing %E byte into a region "
			      "of size %E overflows the destination")
			 : G_("%K%qD writing %E bytes into a region "
			      "of size %E overflows the destination")),
			exp, get_callee_fndecl (exp), range[0], objsize);
	  else if (tree_int_cst_sign_bit (range[1]))
	    {
	      /* Avoid printing the upper bound if it's invalid.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, get_callee_fndecl (exp), range[0], objsize);
	    }
	  else
	    warning_at (loc, opt,
			"%K%qD writing between %E and %E bytes into "
			"a region of size %E overflows the destination",
			exp, get_callee_fndecl (exp), range[0], range[1],
			objsize);

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxlen)
    {
      get_size_range (maxlen, range);

      if (range[0] && objsize && tree_fits_uhwi_p (objsize))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      /* Warn about crazy big sizes first since that's more
		 likely to be meaningful than saying that the bound
		 is greater than the object size if both are big.  */
	      if (range[0] == range[1])
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds maximum object size %E",
			    exp, get_callee_fndecl (exp),
			    range[0], maxobjsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds maximum object size %E",
			    exp, get_callee_fndecl (exp),
			    range[0], range[1], maxobjsize);

	      return false;
	    }

	  if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
	    {
	      if (tree_int_cst_equal (range[0], range[1]))
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds destination size %E",
			    exp, get_callee_fndecl (exp),
			    range[0], objsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds destination size %E",
			    exp, get_callee_fndecl (exp),
			    range[0], range[1], objsize);
	      return false;
	    }
	}
    }

  /* Finally, diagnose reads past the end of the source object.  SLEN ==
     SRC means the caller passed a precomputed source size (not a string
     pointer), so it can be compared against RANGE directly.  */
  if (slen
      && slen == src
      && size && range[0]
      && tree_int_cst_lt (slen, range[0]))
    {
      location_t loc = tree_nonartificial_location (exp);

      if (tree_int_cst_equal (range[0], range[1]))
	warning_at (loc, opt,
		    (tree_int_cst_equal (range[0], integer_one_node)
		     ? G_("%K%qD reading %E byte from a region of size %E")
		     : G_("%K%qD reading %E bytes from a region of size %E")),
		    exp, get_callee_fndecl (exp), range[0], slen);
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warning_at (loc, opt,
		      "%K%qD reading %E or more bytes from a region "
		      "of size %E",
		      exp, get_callee_fndecl (exp), range[0], slen);
	}
      else
	warning_at (loc, opt,
		    "%K%qD reading between %E and %E bytes from a region "
		    "of size %E",
		    exp, get_callee_fndecl (exp), range[0], range[1], slen);
      return false;
    }

  return true;
}
3296
3297/* Helper to compute the size of the object referenced by the DEST
3298 expression which must of of pointer type, using Object Size type
3299 OSTYPE (only the least significant 2 bits are used). Return
3300 the size of the object if successful or NULL when the size cannot
3301 be determined. */
3302
3303static inline tree
8d6c6ef5 3304compute_objsize (tree dest, int ostype)
5aef8938 3305{
3306 unsigned HOST_WIDE_INT size;
3307 if (compute_builtin_object_size (dest, ostype & 3, &size))
3308 return build_int_cst (sizetype, size);
3309
3310 return NULL_TREE;
3311}
3312
3313/* Helper to determine and check the sizes of the source and the destination
8d6c6ef5 3314 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3315 call expression, DEST is the destination argument, SRC is the source
3316 argument or null, and LEN is the number of bytes. Use Object Size type-0
3317 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5aef8938 3318 (no overflow or invalid sizes), false otherwise. */
3319
3320static bool
8d6c6ef5 3321check_memop_sizes (tree exp, tree dest, tree src, tree size)
5aef8938 3322{
3323 if (!warn_stringop_overflow)
3324 return true;
3325
3326 /* For functions like memset and memcpy that operate on raw memory
8d6c6ef5 3327 try to determine the size of the largest source and destination
3328 object using type-0 Object Size regardless of the object size
3329 type specified by the option. */
3330 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3331 tree dstsize = compute_objsize (dest, 0);
5aef8938 3332
3333 return check_sizes (OPT_Wstringop_overflow_, exp,
8d6c6ef5 3334 size, /*maxlen=*/NULL_TREE, srcsize, dstsize);
3335}
3336
3337/* Validate memchr arguments without performing any expansion.
3338 Return NULL_RTX. */
3339
3340static rtx
3341expand_builtin_memchr (tree exp, rtx)
3342{
3343 if (!validate_arglist (exp,
3344 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3345 return NULL_RTX;
3346
3347 tree arg1 = CALL_EXPR_ARG (exp, 0);
3348 tree len = CALL_EXPR_ARG (exp, 2);
3349
3350 /* Diagnose calls where the specified length exceeds the size
3351 of the object. */
3352 if (warn_stringop_overflow)
3353 {
3354 tree size = compute_objsize (arg1, 0);
3355 check_sizes (OPT_Wstringop_overflow_,
3356 exp, len, /*maxlen=*/NULL_TREE,
3357 size, /*objsize=*/NULL_TREE);
3358 }
3359
3360 return NULL_RTX;
5aef8938 3361}
3362
c2f47e15 3363/* Expand a call EXP to the memcpy builtin.
3364 Return NULL_RTX if we failed, the caller should emit a normal call,
3b824fa6 3365 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3366 mode MODE if that's convenient). */
c2f47e15 3367
53800dbe 3368static rtx
a65c4d64 3369expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3370{
c2f47e15 3371 if (!validate_arglist (exp,
3372 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3373 return NULL_RTX;
5aef8938 3374
3375 tree dest = CALL_EXPR_ARG (exp, 0);
3376 tree src = CALL_EXPR_ARG (exp, 1);
3377 tree len = CALL_EXPR_ARG (exp, 2);
3378
8d6c6ef5 3379 check_memop_sizes (exp, dest, src, len);
5aef8938 3380
3381 return expand_builtin_memcpy_args (dest, src, len, target, exp);
f21337ef 3382}
6840589f 3383
4d317237 3384/* Check a call EXP to the memmove built-in for validity.
3385 Return NULL_RTX on both success and failure. */
3386
3387static rtx
3388expand_builtin_memmove (tree exp, rtx)
3389{
3390 if (!validate_arglist (exp,
3391 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3392 return NULL_RTX;
3393
3394 tree dest = CALL_EXPR_ARG (exp, 0);
8d6c6ef5 3395 tree src = CALL_EXPR_ARG (exp, 1);
4d317237 3396 tree len = CALL_EXPR_ARG (exp, 2);
3397
8d6c6ef5 3398 check_memop_sizes (exp, dest, src, len);
4d317237 3399
3400 return NULL_RTX;
3401}
3402
f21337ef 3403/* Expand an instrumented call EXP to the memcpy builtin.
3404 Return NULL_RTX if we failed, the caller should emit a normal call,
3405 otherwise try to get the result in TARGET, if convenient (and in
3406 mode MODE if that's convenient). */
53800dbe 3407
f21337ef 3408static rtx
3409expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3410{
3411 if (!validate_arglist (exp,
3412 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3413 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3414 INTEGER_TYPE, VOID_TYPE))
3415 return NULL_RTX;
3416 else
3417 {
3418 tree dest = CALL_EXPR_ARG (exp, 0);
3419 tree src = CALL_EXPR_ARG (exp, 2);
3420 tree len = CALL_EXPR_ARG (exp, 4);
3421 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
53800dbe 3422
f21337ef 3423 /* Return src bounds with the result. */
3424 if (res)
e5716f7e 3425 {
17d388d8 3426 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3427 expand_normal (CALL_EXPR_ARG (exp, 1)));
3428 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3429 }
f21337ef 3430 return res;
53800dbe 3431 }
3432}
3433
c2f47e15 3434/* Expand a call EXP to the mempcpy builtin.
3435 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3436 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3437 mode MODE if that's convenient). If ENDP is 0 return the
3438 destination pointer, if ENDP is 1 return the end pointer ala
3439 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3440 stpcpy. */
647661c6 3441
3442static rtx
3754d046 3443expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3444{
c2f47e15 3445 if (!validate_arglist (exp,
3446 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3447 return NULL_RTX;
5aef8938 3448
3449 tree dest = CALL_EXPR_ARG (exp, 0);
3450 tree src = CALL_EXPR_ARG (exp, 1);
3451 tree len = CALL_EXPR_ARG (exp, 2);
3452
3453 /* Avoid expanding mempcpy into memcpy when the call is determined
3454 to overflow the buffer. This also prevents the same overflow
3455 from being diagnosed again when expanding memcpy. */
8d6c6ef5 3456 if (!check_memop_sizes (exp, dest, src, len))
5aef8938 3457 return NULL_RTX;
3458
3459 return expand_builtin_mempcpy_args (dest, src, len,
3460 target, mode, /*endp=*/ 1,
3461 exp);
f21337ef 3462}
3463
3464/* Expand an instrumented call EXP to the mempcpy builtin.
3465 Return NULL_RTX if we failed, the caller should emit a normal call,
3466 otherwise try to get the result in TARGET, if convenient (and in
3467 mode MODE if that's convenient). */
3468
3469static rtx
3470expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3471{
3472 if (!validate_arglist (exp,
3473 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3474 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3475 INTEGER_TYPE, VOID_TYPE))
3476 return NULL_RTX;
3477 else
3478 {
3479 tree dest = CALL_EXPR_ARG (exp, 0);
3480 tree src = CALL_EXPR_ARG (exp, 2);
3481 tree len = CALL_EXPR_ARG (exp, 4);
3482 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3483 mode, 1, exp);
3484
3485 /* Return src bounds with the result. */
3486 if (res)
3487 {
17d388d8 3488 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3489 expand_normal (CALL_EXPR_ARG (exp, 1)));
3490 res = chkp_join_splitted_slot (res, bnd);
3491 }
3492 return res;
c2f47e15 3493 }
3494}
3495
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   ORIG_EXP is the original call whose callee decides whether the
   return-value-ignored case lowers to the checked or unchecked memcpy.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, machine_mode mode, int endp,
			     tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.
     The first branch handles the Pointer Bounds Checker variant,
     the second the plain built-in.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and need only store the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects which pointer store_by_pieces returns:
	     0 = DEST, 1 = end (mempcpy), 2 = end minus one (stpcpy).  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise fall back to an inline move-by-pieces when the
	 constant length and alignment allow it.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      return NULL_RTX;
    }
}
3585
/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  /* Punt unless the target provides a movstr insn pattern.  */
  if (!targetm.have_movstr ())
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* The caller wants the destination pointer back, so keep the
	 original address of DEST_MEM in a register.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  /* Operand 0 (the returned end pointer) is only of interest when
     ENDP is nonzero.  */
  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3632
/* Do some very basic size validation of a call to the strcat builtin
   given by EXP.  Return NULL_RTX to have the built-in expand to a call
   to the library function.  */

static rtx
expand_builtin_strcat (tree exp, rtx)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);

  /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended so
     just diagnose cases when the source string is longer than
     the destination object.  */

  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  check_sizes (OPT_Wstringop_overflow_,
	       exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);

  return NULL_RTX;
}
3659
48e1416a 3660/* Expand expression EXP, which is a call to the strcpy builtin. Return
3661 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 3662 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3663 convenient). */
902de8ed 3664
53800dbe 3665static rtx
a65c4d64 3666expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3667{
5aef8938 3668 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3669 return NULL_RTX;
3670
3671 tree dest = CALL_EXPR_ARG (exp, 0);
3672 tree src = CALL_EXPR_ARG (exp, 1);
3673
3674 if (warn_stringop_overflow)
3675 {
8d6c6ef5 3676 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3677 check_sizes (OPT_Wstringop_overflow_,
3678 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3679 }
3680
3681 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3682}
3683
3684/* Helper function to do the actual work for expand_builtin_strcpy. The
3685 arguments to the builtin_strcpy call DEST and SRC are broken out
3686 so that this can also be called without constructing an actual CALL_EXPR.
3687 The other arguments and return value are the same as for
3688 expand_builtin_strcpy. */
3689
3690static rtx
a65c4d64 3691expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3692{
c2f47e15 3693 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3694}
3695
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Diagnose copies of a source longer than the destination object.  */
  if (warn_stringop_overflow)
    {
      tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
      check_sizes (OPT_Wstringop_overflow_,
		   exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
    }

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* LENP1 counts the terminating NUL as well; expanding as
	 mempcpy with endp == 2 yields the stpcpy return value
	 (end pointer minus one).  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2,
					 exp);

      if (ret)
	return ret;

      /* When the length is a compile-time constant, fall back to
	 expanding as strcpy and computing the end pointer by adding
	 the known length to the returned destination pointer.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: try the target's movstr pattern directly.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3779
4d317237 3780/* Check a call EXP to the stpncpy built-in for validity.
3781 Return NULL_RTX on both success and failure. */
3782
3783static rtx
3784expand_builtin_stpncpy (tree exp, rtx)
3785{
3786 if (!validate_arglist (exp,
3787 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3788 || !warn_stringop_overflow)
3789 return NULL_RTX;
3790
aca1a787 3791 /* The source and destination of the call. */
4d317237 3792 tree dest = CALL_EXPR_ARG (exp, 0);
3793 tree src = CALL_EXPR_ARG (exp, 1);
3794
aca1a787 3795 /* The exact number of bytes to write (not the maximum). */
4d317237 3796 tree len = CALL_EXPR_ARG (exp, 2);
4d317237 3797
aca1a787 3798 /* The size of the destination object. */
8d6c6ef5 3799 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4d317237 3800
4d317237 3801 check_sizes (OPT_Wstringop_overflow_,
aca1a787 3802 exp, len, /*maxlen=*/NULL_TREE, src, destsize);
4d317237 3803
3804 return NULL_RTX;
3805}
3806
6840589f 3807/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3808 bytes from constant string DATA + OFFSET and return it as target
3809 constant. */
3810
09879952 3811rtx
aecda0d6 3812builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3813 machine_mode mode)
6840589f 3814{
3815 const char *str = (const char *) data;
3816
3817 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3818 return const0_rtx;
3819
3820 return c_readstr (str + offset, mode);
3821}
3822
/* Helper to check the sizes of sequences and the destination of calls
   to __builtin_strncat and __builtin___strncat_chk.  EXP is the call
   and OBJSIZE the destination size supplied by __strncat_chk, or
   NULL_TREE when it must be computed here.  Returns true on success
   (no overflow or invalid sizes), false otherwise.  */

static bool
check_strncat_sizes (tree exp, tree objsize)
{
  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  tree maxlen = CALL_EXPR_ARG (exp, 2);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  */

  if (!objsize && warn_stringop_overflow)
    {
      /* If it hasn't been provided by __strncat_chk, try to determine
	 the size of the destination object into which the source is
	 being copied.  */
      objsize = compute_objsize (dest, warn_stringop_overflow - 1);
    }

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* Strncat copies at most MAXLEN bytes and always appends the terminating
     nul so the specified upper bound should never be equal to (or greater
     than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
      && tree_int_cst_equal (objsize, maxlen))
    {
      /* Report the warning at the call site, skipping past any
	 macro expansions inside system headers.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxlen);

      return false;
    }

  /* Use the bound as the source length when the latter is unknown
     or exceeds the bound.  */
  if (!srclen
      || (maxlen && tree_fits_uhwi_p (maxlen)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxlen, srclen)))
    srclen = maxlen;

  /* The number of bytes to write is LEN but check_sizes will also
     check SRCLEN if LEN's value isn't known.  */
  return check_sizes (OPT_Wstringop_overflow_,
		      exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
}
3883
/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
   the built-in expand to a call to the library function.  */

static rtx
expand_builtin_strncat (tree exp, rtx)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
      || !warn_stringop_overflow)
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree src = CALL_EXPR_ARG (exp, 1);
  /* The upper bound on the number of bytes to write.  */
  tree maxlen = CALL_EXPR_ARG (exp, 2);
  /* The length of the source sequence.  */
  tree slen = c_strlen (src, 1);

  /* Try to determine the range of lengths that the source expression
     refers to.  */
  tree lenrange[2];
  if (slen)
    lenrange[0] = lenrange[1] = slen;
  else
    get_range_strlen (src, lenrange);

  /* Try to verify that the destination is big enough for the shortest
     string.  First try to determine the size of the destination object
     into which the source is being copied.  */
  tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);

  /* Add one for the terminating nul.  */
  tree srclen = (lenrange[0]
		 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
				size_one_node)
		 : NULL_TREE);

  /* Strncat copies at most MAXLEN bytes and always appends the terminating
     nul so the specified upper bound should never be equal to (or greater
     than) the size of the destination.  */
  if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
      && tree_int_cst_equal (destsize, maxlen))
    {
      /* Report the warning at the call site, skipping past any
	 macro expansions inside system headers.  */
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      warning_at (loc, OPT_Wstringop_overflow_,
		  "%K%qD specified bound %E equals destination size",
		  exp, get_callee_fndecl (exp), maxlen);

      return NULL_RTX;
    }

  /* Use the bound as the source length when the latter is unknown
     or exceeds the bound.  */
  if (!srclen
      || (maxlen && tree_fits_uhwi_p (maxlen)
	  && tree_fits_uhwi_p (srclen)
	  && tree_int_cst_lt (maxlen, srclen)))
    srclen = maxlen;

  /* The number of bytes to write is LEN but check_sizes will also
     check SRCLEN if LEN's value isn't known.  */
  check_sizes (OPT_Wstringop_overflow_,
	       exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);

  return NULL_RTX;
}
3951
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      /* The number of bytes to write (not the maximum).  */
      tree len = CALL_EXPR_ARG (exp, 2);
      /* The length of the source sequence.  */
      tree slen = c_strlen (src, 1);

      /* Diagnose writes that exceed the size of the destination
	 object.  */
      if (warn_stringop_overflow)
	{
	  tree destsize = compute_objsize (dest,
					   warn_stringop_overflow - 1);

	  /* The number of bytes to write is LEN but check_sizes will also
	     check SLEN if LEN's value isn't known.  */
	  check_sizes (OPT_Wstringop_overflow_,
		       exp, len, /*maxlen=*/NULL_TREE, src, destsize);
	}

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      /* Count the terminating NUL as part of the source length.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  /* Inline expansion needs a string constant, a known
	     destination alignment and a store_by_pieces-able size.  */
	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
    }
  return NULL_RTX;
}
4014
ecc318ff 4015/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4016 bytes from constant string DATA + OFFSET and return it as target
4017 constant. */
4018
f656b751 4019rtx
aecda0d6 4020builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 4021 machine_mode mode)
ecc318ff 4022{
4023 const char *c = (const char *) data;
364c0c59 4024 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 4025
4026 memset (p, *c, GET_MODE_SIZE (mode));
4027
4028 return c_readstr (p, mode);
4029}
4030
a7ec6974 4031/* Callback routine for store_by_pieces. Return the RTL of a register
4032 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4033 char value given in the RTL register data. For example, if mode is
4034 4 bytes wide, return the RTL for 0x01010101*data. */
4035
4036static rtx
aecda0d6 4037builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 4038 machine_mode mode)
a7ec6974 4039{
4040 rtx target, coeff;
4041 size_t size;
4042 char *p;
4043
4044 size = GET_MODE_SIZE (mode);
f0ce3b1f 4045 if (size == 1)
4046 return (rtx) data;
a7ec6974 4047
364c0c59 4048 p = XALLOCAVEC (char, size);
a7ec6974 4049 memset (p, 1, size);
4050 coeff = c_readstr (p, mode);
4051
f0ce3b1f 4052 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 4053 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4054 return force_reg (mode, target);
4055}
4056
48e1416a 4057/* Expand expression EXP, which is a call to the memset builtin. Return
4058 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 4059 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 4060 convenient). */
902de8ed 4061
53800dbe 4062static rtx
3754d046 4063expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 4064{
c2f47e15 4065 if (!validate_arglist (exp,
4066 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4067 return NULL_RTX;
5aef8938 4068
4069 tree dest = CALL_EXPR_ARG (exp, 0);
4070 tree val = CALL_EXPR_ARG (exp, 1);
4071 tree len = CALL_EXPR_ARG (exp, 2);
4072
8d6c6ef5 4073 check_memop_sizes (exp, dest, NULL_TREE, len);
5aef8938 4074
4075 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
c2f47e15 4076}
53800dbe 4077
f21337ef 4078/* Expand expression EXP, which is an instrumented call to the memset builtin.
4079 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
4080 try to get the result in TARGET, if convenient (and in mode MODE if that's
4081 convenient). */
4082
4083static rtx
4084expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4085{
4086 if (!validate_arglist (exp,
4087 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4088 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4089 return NULL_RTX;
4090 else
4091 {
4092 tree dest = CALL_EXPR_ARG (exp, 0);
4093 tree val = CALL_EXPR_ARG (exp, 2);
4094 tree len = CALL_EXPR_ARG (exp, 3);
4095 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4096
4097 /* Return src bounds with the result. */
4098 if (res)
4099 {
17d388d8 4100 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 4101 expand_normal (CALL_EXPR_ARG (exp, 1)));
4102 res = chkp_join_splitted_slot (res, bnd);
4103 }
4104 return res;
4105 }
4106}
4107
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   ORIG_EXP is the original call, used for its location, tail-call flag
   and callee when falling back to a library call.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to guide the expansion.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Non-constant fill value: replicate it at run time.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Constant fill value: reduce it to a host char, or punt.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Nonzero constant byte: store by pieces or via the setmem insn.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Zero fill: use the block-clearing machinery.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

 do_libcall:
  /* Fall back to a library call to whichever built-in (memset, its
     instrumented variant, or bzero) the original call named.  */
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4249
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.  */

static rtx
expand_builtin_bzero (tree exp)
{
  /* bzero takes (pointer, length); punt to a library call otherwise.  */
  if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree dest = CALL_EXPR_ARG (exp, 0);
  tree size = CALL_EXPR_ARG (exp, 1);

  /* Diagnose writes that exceed the size of the destination object.  */
  check_memop_sizes (exp, dest, NULL_TREE, size);

  /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
     calling bzero instead of memset.  */

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}
4276
d6f01a40 4277/* Try to expand cmpstr operation ICODE with the given operands.
4278 Return the result rtx on success, otherwise return null. */
4279
4280static rtx
4281expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4282 HOST_WIDE_INT align)
4283{
4284 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4285
4286 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4287 target = NULL_RTX;
4288
4289 struct expand_operand ops[4];
4290 create_output_operand (&ops[0], target, insn_mode);
4291 create_fixed_operand (&ops[1], arg1_rtx);
4292 create_fixed_operand (&ops[2], arg2_rtx);
4293 create_integer_operand (&ops[3], align);
4294 if (maybe_expand_insn (icode, 4, ops))
4295 return ops[0].value;
4296 return NULL_RTX;
4297}
4298
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      if (check_sizes (OPT_Wstringop_overflow_,
		       exp, len, /*maxlen=*/NULL_TREE,
		       size, /*objsize=*/NULL_TREE))
	{
	  size = compute_objsize (arg2, 0);
	  check_sizes (OPT_Wstringop_overflow_,
		       exp, len, /*maxlen=*/NULL_TREE,
		       size, /*objsize=*/NULL_TREE);
	}
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* For an equality test we may put the constant-string operand second,
     so that the by-pieces machinery reads it via CONSTFN.  Swapping the
     operands is only valid when the sign of the result is irrelevant.  */
  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				     TREE_TYPE (len), target,
				     result_eq, constfn,
				     CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4393
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Only expand inline when the target provides a cmpstr or cmpstrn
     pattern; otherwise fall back to the library call.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      rtx result = NULL_RTX;

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

      /* Try to call cmpstrsi.  */
      if (cmpstr_icode != CODE_FOR_nothing)
	result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
				MIN (arg1_align, arg2_align));

      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!result && cmpstrn_icode != CODE_FOR_nothing)
	{
	  tree len;
	  rtx arg3_rtx;

	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the terminating nul in the compared lengths.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (len && !TREE_SIDE_EFFECTS (len))
	    {
	      arg3_rtx = expand_normal (len);
	      result = expand_cmpstrn_or_cmpmem
		(cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
		 arg3_rtx, MIN (arg1_align, arg2_align));
	    }
	}

      if (result)
	{
	  /* Return the value in the proper mode for this function.  */
	  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
53800dbe 4503
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len, len1, len2, len3;
      rtx arg1_rtx, arg2_rtx, arg3_rtx;
      rtx result;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);
      tree arg3 = CALL_EXPR_ARG (exp, 2);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      len1 = c_strlen (arg1, 1);
      len2 = c_strlen (arg2, 1);

      /* Include the terminating nul in the compared lengths.  */
      if (len1)
	len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

      len3 = fold_convert_loc (loc, sizetype, arg3);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  If neither string is constant length,
	 use the given length argument.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1 && !len2)
	len = len3;
      else if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If we are not using the given length, we must incorporate it here.
	 The actual new length parameter will be MIN(len,arg3) in this case.  */
      if (len != len3)
	len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
      arg1_rtx = get_memory_rtx (arg1, len);
      arg2_rtx = get_memory_rtx (arg2, len);
      arg3_rtx = expand_normal (len);
      result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
					 arg2_rtx, TREE_TYPE (len), arg3_rtx,
					 MIN (arg1_align, arg2_align));
      if (result)
	{
	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
				  arg1, arg2, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
  return NULL_RTX;
}
4605
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The generated insns are moved to the start of
   the current function so the registers are saved on entry.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4643
/* Expand a call to __builtin_next_arg.  Returns the address just past
   the last named argument: the incoming argument pointer plus the
   offset of the anonymous arguments.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg
     that must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}
4656
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  If NEEDS_LVALUE is nonzero, the result
   must be usable as an lvalue.  Returns the possibly rewritten VALIST
   tree, stabilized with save_expr where it has side effects.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      /* Rebuild as a dereference of the stabilized address so later
	 uses evaluate the address only once.  */
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4706
/* The "standard" definition of va_list is void*.  Default for the
   TARGET_BUILD_BUILTIN_VA_LIST hook.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4714
/* The "standard" abi va_list is va_list_type_node.  Default for the
   TARGET_FN_ABI_VA_LIST hook; FNDECL is ignored.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4722
4723/* The "standard" type of va_list is va_list_type_node. */
4724
4725tree
4726std_canonical_va_list_type (tree type)
4727{
4728 tree wtype, htype;
4729
5f57a8b1 4730 wtype = va_list_type_node;
4731 htype = type;
b6da2e41 4732
4733 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4734 {
4735 /* If va_list is an array type, the argument may have decayed
4736 to a pointer type, e.g. by being passed to another function.
4737 In that case, unwrap both types so that we can compare the
4738 underlying records. */
4739 if (TREE_CODE (htype) == ARRAY_TYPE
4740 || POINTER_TYPE_P (htype))
4741 {
4742 wtype = TREE_TYPE (wtype);
4743 htype = TREE_TYPE (htype);
4744 }
4745 }
4746 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4747 return va_list_type_node;
4748
4749 return NULL_TREE;
4750}
4751
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  VALIST is the va_list lvalue, NEXTARG the address of
   the first anonymous argument.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}
4768
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   diagnoses a missing second argument.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Let the target override the expansion; fall back to the standard
     "assign nextarg" semantics otherwise.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4797
c2f47e15 4798/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4799
a66c9326 4800static rtx
c2f47e15 4801expand_builtin_va_end (tree exp)
a66c9326 4802{
c2f47e15 4803 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4804
8a15c04a 4805 /* Evaluate for side effects, if needed. I hate macros that don't
4806 do that. */
4807 if (TREE_SIDE_EFFECTS (valist))
4808 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4809
4810 return const0_rtx;
4811}
4812
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Destination needs to be an lvalue; source does not.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4864
/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  FNDECL distinguishes the two; EXP is the
   CALL_EXPR.  Diagnoses invalid or unsupported arguments and returns
   const0_rtx in those cases.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      /* __builtin_return_address: force the address into a register
	 if it is not already a register or constant.  */
      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}
4915
/* Expand EXP, a call to the alloca builtin (plain alloca or
   __builtin_alloca_with_align).  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
			    == BUILT_IN_ALLOCA_WITH_ALIGN);
  /* True when this allocation implements a variable-sized object
     declaration rather than an explicit alloca call.  */
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (alloca_with_align
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_with_align && !warn_vla_limit)
      || (!alloca_with_align && !warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings override
	 the more general -Walloc-size-larger-than so unless either of
	 the former options is specified check the alloca arguments for
	 overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (alloca_with_align
	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
	   : BIGGEST_ALIGNMENT);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result = allocate_dynamic_stack_space (op0, 0, align, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
4964
74bdbe96 4965/* Expand a call to bswap builtin in EXP.
4966 Return NULL_RTX if a normal call should be emitted rather than expanding the
4967 function in-line. If convenient, the result should be placed in TARGET.
4968 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4969
4970static rtx
3754d046 4971expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4972 rtx subtarget)
42791117 4973{
42791117 4974 tree arg;
4975 rtx op0;
4976
c2f47e15 4977 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4978 return NULL_RTX;
42791117 4979
c2f47e15 4980 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4981 op0 = expand_expr (arg,
4982 subtarget && GET_MODE (subtarget) == target_mode
4983 ? subtarget : NULL_RTX,
4984 target_mode, EXPAND_NORMAL);
4985 if (GET_MODE (op0) != target_mode)
4986 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4987
74bdbe96 4988 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4989
4990 gcc_assert (target);
4991
74bdbe96 4992 return convert_to_mode (target_mode, target, 1);
42791117 4993}
4994
/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.
   OP_OPTAB is the optab implementing the operation.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  Note clrsb passes
     unsignedp == 0 here; all other unops pass 1.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}
89cfe6e5 5023
48e1416a 5024/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 5025 as the builtin_expect semantic should've been already executed by
5026 tree branch prediction pass. */
89cfe6e5 5027
5028static rtx
c2f47e15 5029expand_builtin_expect (tree exp, rtx target)
89cfe6e5 5030{
1e4adcfc 5031 tree arg;
89cfe6e5 5032
c2f47e15 5033 if (call_expr_nargs (exp) < 2)
89cfe6e5 5034 return const0_rtx;
c2f47e15 5035 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 5036
c2f47e15 5037 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 5038 /* When guessing was done, the hints should be already stripped away. */
07311427 5039 gcc_assert (!flag_guess_branch_prob
852f689e 5040 || optimize == 0 || seen_error ());
89cfe6e5 5041 return target;
5042}
689df48e 5043
/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantic should've been already
   executed by CCP.  The remaining (alignment) arguments must be free of
   side effects by this point.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}
5060
/* Expand a trap: emit the target's trap insn if it has one, otherwise
   expand a call to abort.  Either way, end the block with a barrier
   since control does not continue past this point.  */
void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}
78a74442 5082
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5093
c2f47e15 5094/* Expand EXP, a call to fabs, fabsf or fabsl.
5095 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 5096 the function inline. If convenient, the result should be placed
5097 in TARGET. SUBTARGET may be used as the target for computing
5098 the operand. */
5099
5100static rtx
c2f47e15 5101expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 5102{
3754d046 5103 machine_mode mode;
78a74442 5104 tree arg;
5105 rtx op0;
5106
c2f47e15 5107 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5108 return NULL_RTX;
78a74442 5109
c2f47e15 5110 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 5111 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 5112 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 5113 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 5114 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5115}
5116
c2f47e15 5117/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 5118 Return NULL is a normal call should be emitted rather than expanding the
5119 function inline. If convenient, the result should be placed in TARGET.
5120 SUBTARGET may be used as the target for computing the operand. */
5121
5122static rtx
c2f47e15 5123expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 5124{
5125 rtx op0, op1;
5126 tree arg;
5127
c2f47e15 5128 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5129 return NULL_RTX;
270436f3 5130
c2f47e15 5131 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 5132 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 5133
c2f47e15 5134 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 5135 op1 = expand_normal (arg);
270436f3 5136
5137 return expand_copysign (op0, op1, target);
5138}
5139
/* Expand a call to __builtin___clear_cache (BEGIN, END), which flushes
   the instruction cache over [BEGIN, END).  Returns NULL_RTX when the
   default expansion (a library call) should be used instead, otherwise
   const0_rtx since the builtin produces no useful value.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      /* If the insn's predicates reject the operands, fall through and
	 silently return const0_rtx, matching the insn-less case.  */
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}
5189
4ee9c684 5190/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5191
5192static rtx
5193round_trampoline_addr (rtx tramp)
5194{
5195 rtx temp, addend, mask;
5196
5197 /* If we don't need too much alignment, we'll have been guaranteed
5198 proper alignment by get_trampoline_type. */
5199 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5200 return tramp;
5201
5202 /* Round address up to desired boundary. */
5203 temp = gen_reg_rtx (Pmode);
0359f9f5 5204 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5205 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 5206
5207 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5208 temp, 0, OPTAB_LIB_WIDEN);
5209 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5210 temp, 0, OPTAB_LIB_WIDEN);
5211
5212 return tramp;
5213}
5214
/* Expand EXP, a call to __builtin_init_trampoline (TRAMP, FUNC, CHAIN):
   initialize the trampoline memory at TRAMP so that calling it invokes
   the nested function FUNC with static chain CHAIN.  ONSTACK is true for
   the classic stack trampoline variant (and triggers -Wtrampolines).
   Returns const0_rtx, or NULL_RTX if the argument list is malformed.  */
static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      /* Only warn when the target supports function descriptors as the
	 alternative (custom_function_descriptors != 0).  */
      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}
5272
5273static rtx
c2f47e15 5274expand_builtin_adjust_trampoline (tree exp)
4ee9c684 5275{
5276 rtx tramp;
5277
c2f47e15 5278 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 5279 return NULL_RTX;
5280
c2f47e15 5281 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 5282 tramp = round_trampoline_addr (tramp);
82c7907c 5283 if (targetm.calls.trampoline_adjust_address)
5284 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 5285
5286 return tramp;
5287}
5288
/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a couple of pointers to the static
   chain and the code entry in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor: the static chain is
     stored in the first pointer slot, the code entry in the second.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}
5321
5322/* Expand a call to the builtin descriptor adjustment routine. */
5323
5324static rtx
5325expand_builtin_adjust_descriptor (tree exp)
5326{
5327 rtx tramp;
5328
5329 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5330 return NULL_RTX;
5331
5332 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5333
5334 /* Unalign the descriptor to allow runtime identification. */
5335 tramp = plus_constant (ptr_mode, tramp,
5336 targetm.calls.custom_function_descriptors);
5337
5338 return force_operand (tramp, NULL_RTX);
5339}
5340
/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  machine_mode fmode, imode, rmode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = TYPE_MODE (TREE_TYPE (arg));
  rmode = TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding a RTX expression. */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode. */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      /* The insn failed to match; discard any insns it emitted and fall
	 back to the generic bit-extraction path below.  */
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
    {
      /* But we can't do this if the format supports signed zero.  */
      gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

      arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
			     build_real (TREE_TYPE (arg), dconst0));
      return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
    }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      /* The value fits in one word: view it as the integer mode of the
	 same size and operate on that.  */
      imode = int_mode_for_mode (fmode);
      gcc_assert (imode != BLKmode);
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}
73673831 5447
/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call. FN is the
   identificator of the actual function.  IGNORE is nonzero if the
   value is to be ignored.  Returns NULL_RTX when not profiling (so a
   normal call is emitted), otherwise the expansion of a call to the
   matching __gcov_* wrapper.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  /* Build an extern decl for the wrapper with the same type as the
     original function, then redirect the call to it.  */
  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
 }
48e1416a 5512
b6a5fc45 5513
5514\f
/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.
     (BITS_PER_UNIT << fcode_diff yields 8, 16, 32, ... bits for the
     _1, _2, _4, ... builtin variants.)  */
  return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
}
5530
/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  The returned MEM is volatile and
   carries the memory-barrier alias set.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;

  addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
  addr = convert_memory_address (Pmode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = validize_mem (gen_rtx_MEM (mode, addr));

  /* The alignment needs to be at least according to that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}
5555
1cd6e20d 5556/* Make sure an argument is in the right mode.
5557 EXP is the tree argument.
5558 MODE is the mode it should be in. */
5559
5560static rtx
3754d046 5561expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5562{
5563 rtx val;
3754d046 5564 machine_mode old_mode;
1cd6e20d 5565
5566 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5567 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5568 of CONST_INTs, where we know the old_mode only from the call argument. */
5569
5570 old_mode = GET_MODE (val);
5571 if (old_mode == VOIDmode)
5572 old_mode = TYPE_MODE (TREE_TYPE (exp));
5573 val = convert_modes (mode, old_mode, val, 1);
5574 return val;
5575}
5576
5577
/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */

static rtx
expand_builtin_sync_operation (machine_mode mode, tree exp,
			       enum rtx_code code, bool after,
			       rtx target)
{
  rtx val, mem;
  location_t loc = EXPR_LOCATION (exp);

  /* __sync_fetch_and_nand changed meaning in GCC 4.4; remind users once
     per translation unit and per builtin family.  */
  if (code == NOT && warn_sync_nand)
    {
      tree fndecl = get_callee_fndecl (exp);
      enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

      static bool warned_f_a_n, warned_n_a_f;

      switch (fcode)
	{
	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
	  if (warned_f_a_n)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_f_a_n = true;
	  break;

	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
	  if (warned_n_a_f)
	    break;

	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
	  warned_n_a_f = true;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* __sync operations always have full (SYNC) sequential consistency.  */
  return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
				 after);
}
5640
/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
   intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
   true if this is the boolean form.  TARGET is a place for us to store the
   results; this is NOT optional if IS_BOOL is true.  */

static rtx
expand_builtin_compare_and_swap (machine_mode mode, tree exp,
				 bool is_bool, rtx target)
{
  rtx old_val, new_val, mem;
  rtx *pbool, *poval;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
  new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  /* Request from the expander only the result the caller wants: the
     boolean success flag, the old value, or (if TARGET is const0_rtx,
     i.e. the result is unused) neither.  */
  pbool = poval = NULL;
  if (target != const0_rtx)
    {
      if (is_bool)
	pbool = &target;
      else
	poval = &target;
    }
  if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
				       false, MEMMODEL_SYNC_SEQ_CST,
				       MEMMODEL_SYNC_SEQ_CST))
    return NULL_RTX;

  return target;
}
5673
5674/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5675 general form is actually an atomic exchange, and some targets only
5676 support a reduced form with the second argument being a constant 1.
48e1416a 5677 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5678 the results. */
b6a5fc45 5679
5680static rtx
3754d046 5681expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5682 rtx target)
b6a5fc45 5683{
041e0215 5684 rtx val, mem;
b6a5fc45 5685
5686 /* Expand the operands. */
c2f47e15 5687 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5688 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5689
7821cde1 5690 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5691}
5692
5693/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5694
5695static void
3754d046 5696expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5697{
5698 rtx mem;
5699
5700 /* Expand the operands. */
5701 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5702
a372f7ca 5703 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5704}
5705
/* Given an integer representing an ``enum memmodel'', verify its
   correctness and return the memory model enum.  Invalid or
   non-constant arguments degrade to MEMMODEL_SEQ_CST (possibly with a
   -Winvalid-memory-model warning).  */

static enum memmodel
get_memmodel (tree exp)
{
  rtx op;
  unsigned HOST_WIDE_INT val;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  /* If the parameter is not a constant, it's a run time value so we'll just
     convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
  if (TREE_CODE (exp) != INTEGER_CST)
    return MEMMODEL_SEQ_CST;

  op = expand_normal (exp);

  val = INTVAL (op);
  /* Bits above MEMMODEL_MASK are target-specific; let the target vet
     them if it has a hook, otherwise reject them.  */
  if (targetm.memmodel_check)
    val = targetm.memmodel_check (val);
  else if (val & ~MEMMODEL_MASK)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "unknown architecture specifier in memory model to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Should never see a user explicit SYNC memmodel, so >= LAST works.  */
  if (memmodel_base (val) >= MEMMODEL_LAST)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model argument to builtin");
      return MEMMODEL_SEQ_CST;
    }

  /* Workaround for Bugzilla 59448.  GCC doesn't track consume properly, so
     be conservative and promote consume to acquire.  */
  if (val == MEMMODEL_CONSUME)
    val = MEMMODEL_ACQUIRE;

  return (enum memmodel) val;
}
5749
5750/* Expand the __atomic_exchange intrinsic:
5751 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5752 EXP is the CALL_EXPR.
5753 TARGET is an optional place for us to store the results. */
5754
5755static rtx
3754d046 5756expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5757{
5758 rtx val, mem;
5759 enum memmodel model;
5760
5761 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5762
5763 if (!flag_inline_atomics)
5764 return NULL_RTX;
5765
5766 /* Expand the operands. */
5767 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5768 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5769
7821cde1 5770 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5771}
5772
/* Expand the __atomic_compare_exchange intrinsic:
   	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
					TYPE desired, BOOL weak,
					enum memmodel success,
					enum memmodel failure)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.  */

static rtx
expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
					rtx target)
{
  rtx expect, desired, mem, oldval;
  rtx_code_label *label;
  enum memmodel success, failure;
  tree weak;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (input_location);

  success = get_memmodel (CALL_EXPR_ARG (exp, 4));
  failure = get_memmodel (CALL_EXPR_ARG (exp, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);

  /* EXPECT is a pointer argument; form a MEM for the slot it points at.  */
  expect = expand_normal (CALL_EXPR_ARG (exp, 1));
  expect = convert_memory_address (Pmode, expect);
  expect = gen_rtx_MEM (mode, expect);
  desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);

  weak = CALL_EXPR_ARG (exp, 3);
  is_weak = false;
  if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
    is_weak = true;

  if (target == const0_rtx)
    target = NULL;

  /* Lest the rtl backend create a race condition with an improper store
     to memory, always create a new pseudo for OLDVAL.  */
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    return NULL_RTX;

  /* Conditionally store back to EXPECT, lest we create a race condition
     with an improper store to memory.  */
  /* ??? With a rearrangement of atomics at the gimple level, we can handle
     the normal case where EXPECT is totally private, i.e. a register.  At
     which point the store can be unconditional.  */
  label = gen_label_rtx ();
  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
			   GET_MODE (target), 1, label);
  emit_move_insn (expect, oldval);
  emit_label (label);

  return target;
}
5854
/* Helper function for expand_ifn_atomic_compare_exchange - expand
   internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
   call.  The weak parameter must be dropped to match the expected parameter
   list and the expected argument changed from value to pointer to memory
   slot.  */

static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Spill the expected value to a stack slot so its address can be
     passed to the library routine.  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* Pick the __atomic_compare_exchange_N variant for this byte size.  */
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + exact_log2 (GET_MODE_SIZE (mode)));
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      /* The lhs is a complex pair: <success flag, old value>.  */
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
5901
/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  Argument 3
   packs the access size in its low 8 bits and the weak flag in bit 8.
   Falls back to a library __atomic_compare_exchange_N call when inlining
   is disabled or no suitable insn exists.  */

void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      /* No inline sequence was possible; emit the library call instead.  */
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      /* The lhs is a complex pair: <success flag, old value>.  */
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
5971
1cd6e20d 5972/* Expand the __atomic_load intrinsic:
5973 TYPE __atomic_load (TYPE *object, enum memmodel)
5974 EXP is the CALL_EXPR.
5975 TARGET is an optional place for us to store the results. */
5976
5977static rtx
3754d046 5978expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5979{
5980 rtx mem;
5981 enum memmodel model;
5982
5983 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5984 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5985 {
2cb724f9 5986 source_location loc
5987 = expansion_point_location_if_in_system_header (input_location);
5988 warning_at (loc, OPT_Winvalid_memory_model,
5989 "invalid memory model for %<__atomic_load%>");
086f4e33 5990 model = MEMMODEL_SEQ_CST;
1cd6e20d 5991 }
5992
5993 if (!flag_inline_atomics)
5994 return NULL_RTX;
5995
5996 /* Expand the operand. */
5997 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5998
5999 return expand_atomic_load (target, mem, model);
6000}
6001
6002
/* Expand the __atomic_store intrinsic:
   	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
   EXP is the CALL_EXPR.
   Returns NULL_RTX when a library call should be emitted instead;
   otherwise the (void) result of the expanded store.  */

static rtx
expand_builtin_atomic_store (machine_mode mode, tree exp)
{
  rtx mem, val;
  enum memmodel model;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));
  /* Only relaxed, release and seq_cst orderings are valid on a store.  */
  if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
	|| is_mm_release (model)))
    {
      source_location loc
	= expansion_point_location_if_in_system_header (input_location);
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid memory model for %<__atomic_store%>");
      model = MEMMODEL_SEQ_CST;
    }

  if (!flag_inline_atomics)
    return NULL_RTX;

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  return expand_atomic_store (mem, val, model, false);
}
6034
/* Expand the __atomic_fetch_XXX intrinsic:
   	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
   EXP is the CALL_EXPR.
   TARGET is an optional place for us to store the results.
   CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
   FETCH_AFTER is true if returning the result of the operation.
   FETCH_AFTER is false if returning the value before the operation.
   IGNORE is true if the result is not used.
   EXT_CALL is the correct builtin for an external call if this cannot be
   resolved to an instruction sequence.  */

static rtx
expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
				enum rtx_code code, bool fetch_after,
				bool ignore, enum built_in_function ext_call)
{
  rtx val, mem, ret;
  enum memmodel model;
  tree fndecl;
  tree addr;

  model = get_memmodel (CALL_EXPR_ARG (exp, 2));

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
  val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);

  /* Only try generating instructions if inlining is turned on.  */
  if (flag_inline_atomics)
    {
      ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
      if (ret)
	return ret;
    }

  /* Return if a different routine isn't needed for the library call.  */
  if (ext_call == BUILT_IN_NONE)
    return NULL_RTX;

  /* Change the call to the specified function.  The CALL_EXPR's function
     operand is patched in place to point at EXT_CALL's decl, and restored
     below once the call has been expanded.  */
  fndecl = get_callee_fndecl (exp);
  addr = CALL_EXPR_FN (exp);
  STRIP_NOPS (addr);

  gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
  TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);

  /* Expand the call here so we can emit trailing code.  */
  ret = expand_call (exp, target, ignore);

  /* Replace the original function just in case it matters.  */
  TREE_OPERAND (addr, 0) = fndecl;

  /* Then issue the arithmetic correction to return the right result.
     The external routine returns the pre-operation value, so when the
     post-operation value is wanted, redo the operation on that result.
     For NOT (i.e. NAND), the correction is ~(ret & val).  */
  if (!ignore)
    {
      if (code == NOT)
	{
	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
				     OPTAB_LIB_WIDEN);
	  ret = expand_simple_unop (mode, NOT, ret, target, true);
	}
      else
	ret = expand_simple_binop (mode, code, ret, val, target, true,
				   OPTAB_LIB_WIDEN);
    }
  return ret;
}
6103
/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.
   Arguments: pointer to the atomic object, the bit number, and a flag
   selecting whether the result is the tested bit (nonzero flag) or the
   whole masked word; an optional fourth argument carries the memory
   model (defaults to __ATOMIC_SEQ_CST).  */

void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* An explicit memory model, if supplied, overrides the default.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function to the RTL operation and its optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  /* No result wanted: just perform the atomic RMW on the mask
     (1 << bit, inverted for the AND/reset case) and discard the value.  */
  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* First try the target's dedicated bit-test-and-* pattern.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fallback: do a generic atomic fetch-op on the mask and extract the
     tested bit from the returned pre-operation value by hand.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  if (integer_onep (flag))
    {
      /* Result is the single tested bit: shift it down and mask to 0/1.  */
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    /* Result is the old value masked to the tested bit's position.  */
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6187
10b744a3 6188/* Expand an atomic clear operation.
6189 void _atomic_clear (BOOL *obj, enum memmodel)
6190 EXP is the call expression. */
6191
6192static rtx
6193expand_builtin_atomic_clear (tree exp)
6194{
3754d046 6195 machine_mode mode;
10b744a3 6196 rtx mem, ret;
6197 enum memmodel model;
6198
6199 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6200 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6201 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6202
a372f7ca 6203 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 6204 {
2cb724f9 6205 source_location loc
6206 = expansion_point_location_if_in_system_header (input_location);
6207 warning_at (loc, OPT_Winvalid_memory_model,
6208 "invalid memory model for %<__atomic_store%>");
086f4e33 6209 model = MEMMODEL_SEQ_CST;
10b744a3 6210 }
6211
6212 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6213 Failing that, a store is issued by __atomic_store. The only way this can
6214 fail is if the bool type is larger than a word size. Unlikely, but
6215 handle it anyway for completeness. Assume a single threaded model since
6216 there is no atomic support in this case, and no barriers are required. */
6217 ret = expand_atomic_store (mem, const0_rtx, model, true);
6218 if (!ret)
6219 emit_move_insn (mem, const0_rtx);
6220 return const0_rtx;
6221}
6222
6223/* Expand an atomic test_and_set operation.
6224 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6225 EXP is the call expression. */
6226
6227static rtx
7821cde1 6228expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 6229{
7821cde1 6230 rtx mem;
10b744a3 6231 enum memmodel model;
3754d046 6232 machine_mode mode;
10b744a3 6233
6234 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6235 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6236 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6237
7821cde1 6238 return expand_atomic_test_and_set (target, mem, model);
10b744a3 6239}
6240
6241
/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
   this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.
   Returns boolean_true_node / boolean_false_node when decidable, or
   NULL_TREE when the size is not a compile-time constant.  */

static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  /* The size must be a compile-time constant for any folding at all.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  mode = mode_for_size (size, MODE_INT, 0);
  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
         the alignment of the object.  The lowest set bit of the value
         bounds the guaranteed alignment.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
6309
6310/* Return true if the parameters to call EXP represent an object which will
6311 always generate lock free instructions. The first argument represents the
6312 size of the object, and the second parameter is a pointer to the object
6313 itself. If NULL is passed for the object, then the result is based on
6314 typical alignment for an object of the specified size. Otherwise return
6315 false. */
6316
6317static rtx
6318expand_builtin_atomic_always_lock_free (tree exp)
6319{
6320 tree size;
6321 tree arg0 = CALL_EXPR_ARG (exp, 0);
6322 tree arg1 = CALL_EXPR_ARG (exp, 1);
6323
6324 if (TREE_CODE (arg0) != INTEGER_CST)
6325 {
6326 error ("non-constant argument 1 to __atomic_always_lock_free");
6327 return const0_rtx;
6328 }
6329
6330 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 6331 if (size == boolean_true_node)
1cd6e20d 6332 return const1_rtx;
6333 return const0_rtx;
6334}
6335
6336/* Return a one or zero if it can be determined that object ARG1 of size ARG
6337 is lock free on this architecture. */
6338
6339static tree
6340fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6341{
6342 if (!flag_inline_atomics)
6343 return NULL_TREE;
6344
6345 /* If it isn't always lock free, don't generate a result. */
06308d2a 6346 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6347 return boolean_true_node;
1cd6e20d 6348
6349 return NULL_TREE;
6350}
6351
6352/* Return true if the parameters to call EXP represent an object which will
6353 always generate lock free instructions. The first argument represents the
6354 size of the object, and the second parameter is a pointer to the object
6355 itself. If NULL is passed for the object, then the result is based on
6356 typical alignment for an object of the specified size. Otherwise return
6357 NULL*/
6358
6359static rtx
6360expand_builtin_atomic_is_lock_free (tree exp)
6361{
6362 tree size;
6363 tree arg0 = CALL_EXPR_ARG (exp, 0);
6364 tree arg1 = CALL_EXPR_ARG (exp, 1);
6365
6366 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6367 {
6368 error ("non-integer argument 1 to __atomic_is_lock_free");
6369 return NULL_RTX;
6370 }
6371
6372 if (!flag_inline_atomics)
6373 return NULL_RTX;
6374
6375 /* If the value is known at compile time, return the RTX for it. */
6376 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 6377 if (size == boolean_true_node)
1cd6e20d 6378 return const1_rtx;
6379
6380 return NULL_RTX;
6381}
6382
1cd6e20d 6383/* Expand the __atomic_thread_fence intrinsic:
6384 void __atomic_thread_fence (enum memmodel)
6385 EXP is the CALL_EXPR. */
6386
6387static void
6388expand_builtin_atomic_thread_fence (tree exp)
6389{
fe54c06b 6390 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6391 expand_mem_thread_fence (model);
1cd6e20d 6392}
6393
6394/* Expand the __atomic_signal_fence intrinsic:
6395 void __atomic_signal_fence (enum memmodel)
6396 EXP is the CALL_EXPR. */
6397
6398static void
6399expand_builtin_atomic_signal_fence (tree exp)
6400{
fe54c06b 6401 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6402 expand_mem_signal_fence (model);
b6a5fc45 6403}
6404
/* Expand the __sync_synchronize intrinsic: emit a full thread fence
   with the legacy __sync (sequentially consistent) memory model.  */

static void
expand_builtin_sync_synchronize (void)
{
  expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
}
6412
/* Expand a call to __builtin_thread_pointer: produce the thread pointer
   in a Pmode register via the target's get_thread_pointer optab.
   EXP is the CALL_EXPR; TARGET is an optional place for the result.
   Errors (and returns const0_rtx) if the target has no such pattern.  */

static rtx
expand_builtin_thread_pointer (tree exp, rtx target)
{
  enum insn_code icode;
  if (!validate_arglist (exp, VOID_TYPE))
    return const0_rtx;
  icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand op;
      /* If the target is not suitable then create a new target.  */
      if (target == NULL_RTX
	  || !REG_P (target)
	  || GET_MODE (target) != Pmode)
	target = gen_reg_rtx (Pmode);
      create_output_operand (&op, target, Pmode);
      expand_insn (icode, 1, &op);
      return target;
    }
  error ("__builtin_thread_pointer is not supported on this target");
  return const0_rtx;
}
6435
6436static void
6437expand_builtin_set_thread_pointer (tree exp)
6438{
6439 enum insn_code icode;
6440 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6441 return;
6442 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6443 if (icode != CODE_FOR_nothing)
6444 {
6445 struct expand_operand op;
6446 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6447 Pmode, EXPAND_NORMAL);
6f343c10 6448 create_input_operand (&op, val, Pmode);
badaa04c 6449 expand_insn (icode, 1, &op);
6450 return;
6451 }
6452 error ("__builtin_set_thread_pointer is not supported on this target");
6453}
6454
53800dbe 6455\f
/* Emit code to restore the current value of stack.
   VAR is the saved stack pointer produced by a prior stack save.  */

static void
expand_stack_restore (tree var)
{
  rtx_insn *prev;
  rtx sa = expand_normal (var);

  sa = convert_memory_address (Pmode, sa);

  /* Remember the last insn before the restore so that the args-size
     notes on everything emitted by emit_stack_restore can be fixed up
     afterwards.  */
  prev = get_last_insn ();
  emit_stack_restore (SAVE_BLOCK, sa);

  record_new_stack_level ();

  fixup_args_size_notes (prev, get_last_insn (), 0);
}
6473
0e80b01d 6474/* Emit code to save the current value of stack. */
6475
6476static rtx
6477expand_stack_save (void)
6478{
6479 rtx ret = NULL_RTX;
6480
0e80b01d 6481 emit_stack_save (SAVE_BLOCK, &ret);
6482 return ret;
6483}
6484
ca4c3545 6485
53800dbe 6486/* Expand an expression EXP that calls a built-in function,
6487 with result going to TARGET if that's convenient
6488 (and in mode MODE if that's convenient).
6489 SUBTARGET may be used as the target for computing one of EXP's operands.
6490 IGNORE is nonzero if the value is to be ignored. */
6491
6492rtx
3754d046 6493expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 6494 int ignore)
53800dbe 6495{
c6e6ecb1 6496 tree fndecl = get_callee_fndecl (exp);
53800dbe 6497 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 6498 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 6499 int flags;
53800dbe 6500
4e2f4ed5 6501 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6502 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6503
f9acf11a 6504 /* When ASan is enabled, we don't want to expand some memory/string
6505 builtins and rely on libsanitizer's hooks. This allows us to avoid
6506 redundant checks and be sure, that possible overflow will be detected
6507 by ASan. */
6508
6509 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6510 return expand_call (exp, target, ignore);
6511
53800dbe 6512 /* When not optimizing, generate calls to library functions for a certain
6513 set of builtins. */
cd9ff771 6514 if (!optimize
b6a5fc45 6515 && !called_as_built_in (fndecl)
73037a1e 6516 && fcode != BUILT_IN_FORK
6517 && fcode != BUILT_IN_EXECL
6518 && fcode != BUILT_IN_EXECV
6519 && fcode != BUILT_IN_EXECLP
6520 && fcode != BUILT_IN_EXECLE
6521 && fcode != BUILT_IN_EXECVP
6522 && fcode != BUILT_IN_EXECVE
2c281b15 6523 && fcode != BUILT_IN_ALLOCA
581bf1c2 6524 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 6525 && fcode != BUILT_IN_FREE
6526 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6527 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6528 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6529 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6530 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6531 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6532 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6533 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6534 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6535 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6536 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6537 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 6538 return expand_call (exp, target, ignore);
53800dbe 6539
8d6d7930 6540 /* The built-in function expanders test for target == const0_rtx
6541 to determine whether the function's result will be ignored. */
6542 if (ignore)
6543 target = const0_rtx;
6544
6545 /* If the result of a pure or const built-in function is ignored, and
6546 none of its arguments are volatile, we can avoid expanding the
6547 built-in call and just evaluate the arguments for side-effects. */
6548 if (target == const0_rtx
67fa4078 6549 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6550 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 6551 {
6552 bool volatilep = false;
6553 tree arg;
c2f47e15 6554 call_expr_arg_iterator iter;
8d6d7930 6555
c2f47e15 6556 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6557 if (TREE_THIS_VOLATILE (arg))
8d6d7930 6558 {
6559 volatilep = true;
6560 break;
6561 }
6562
6563 if (! volatilep)
6564 {
c2f47e15 6565 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6566 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 6567 return const0_rtx;
6568 }
6569 }
6570
f21337ef 6571 /* expand_builtin_with_bounds is supposed to be used for
6572 instrumented builtin calls. */
058a1b7a 6573 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6574
53800dbe 6575 switch (fcode)
6576 {
4f35b1fc 6577 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 6578 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 6579 case BUILT_IN_FABSD32:
6580 case BUILT_IN_FABSD64:
6581 case BUILT_IN_FABSD128:
c2f47e15 6582 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6583 if (target)
a0c938f0 6584 return target;
78a74442 6585 break;
6586
4f35b1fc 6587 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 6588 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 6589 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6590 if (target)
6591 return target;
6592 break;
6593
7d3f6cc7 6594 /* Just do a normal library call if we were unable to fold
6595 the values. */
4f35b1fc 6596 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6597 break;
53800dbe 6598
7e0713b1 6599 CASE_FLT_FN (BUILT_IN_FMA):
6600 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6601 if (target)
6602 return target;
6603 break;
6604
a67a90e5 6605 CASE_FLT_FN (BUILT_IN_ILOGB):
6606 if (! flag_unsafe_math_optimizations)
6607 break;
12f08300 6608 gcc_fallthrough ();
6609 CASE_FLT_FN (BUILT_IN_ISINF):
6610 CASE_FLT_FN (BUILT_IN_FINITE):
6611 case BUILT_IN_ISFINITE:
6612 case BUILT_IN_ISNORMAL:
f97eea22 6613 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6614 if (target)
6615 return target;
6616 break;
6617
80ff6494 6618 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6619 CASE_FLT_FN (BUILT_IN_LCEIL):
6620 CASE_FLT_FN (BUILT_IN_LLCEIL):
6621 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6622 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6623 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6624 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6625 if (target)
6626 return target;
6627 break;
6628
80ff6494 6629 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6630 CASE_FLT_FN (BUILT_IN_LRINT):
6631 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6632 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6633 CASE_FLT_FN (BUILT_IN_LROUND):
6634 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6635 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6636 if (target)
6637 return target;
6638 break;
6639
4f35b1fc 6640 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6641 target = expand_builtin_powi (exp, target);
757c219d 6642 if (target)
6643 return target;
6644 break;
6645
d735c391 6646 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6647 target = expand_builtin_cexpi (exp, target);
d735c391 6648 gcc_assert (target);
6649 return target;
6650
4f35b1fc 6651 CASE_FLT_FN (BUILT_IN_SIN):
6652 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6653 if (! flag_unsafe_math_optimizations)
6654 break;
6655 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6656 if (target)
6657 return target;
6658 break;
6659
c3147c1a 6660 CASE_FLT_FN (BUILT_IN_SINCOS):
6661 if (! flag_unsafe_math_optimizations)
6662 break;
6663 target = expand_builtin_sincos (exp);
6664 if (target)
6665 return target;
6666 break;
6667
53800dbe 6668 case BUILT_IN_APPLY_ARGS:
6669 return expand_builtin_apply_args ();
6670
6671 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6672 FUNCTION with a copy of the parameters described by
6673 ARGUMENTS, and ARGSIZE. It returns a block of memory
6674 allocated on the stack into which is stored all the registers
6675 that might possibly be used for returning the result of a
6676 function. ARGUMENTS is the value returned by
6677 __builtin_apply_args. ARGSIZE is the number of bytes of
6678 arguments that must be copied. ??? How should this value be
6679 computed? We'll also need a safe worst case value for varargs
6680 functions. */
6681 case BUILT_IN_APPLY:
c2f47e15 6682 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6683 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6684 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6685 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6686 return const0_rtx;
6687 else
6688 {
53800dbe 6689 rtx ops[3];
6690
c2f47e15 6691 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6692 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6693 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6694
6695 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6696 }
6697
6698 /* __builtin_return (RESULT) causes the function to return the
6699 value described by RESULT. RESULT is address of the block of
6700 memory returned by __builtin_apply. */
6701 case BUILT_IN_RETURN:
c2f47e15 6702 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6703 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6704 return const0_rtx;
6705
6706 case BUILT_IN_SAVEREGS:
a66c9326 6707 return expand_builtin_saveregs ();
53800dbe 6708
48dc2227 6709 case BUILT_IN_VA_ARG_PACK:
6710 /* All valid uses of __builtin_va_arg_pack () are removed during
6711 inlining. */
b8c23db3 6712 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6713 return const0_rtx;
6714
4e1d7ea4 6715 case BUILT_IN_VA_ARG_PACK_LEN:
6716 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6717 inlining. */
b8c23db3 6718 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6719 return const0_rtx;
6720
53800dbe 6721 /* Return the address of the first anonymous stack arg. */
6722 case BUILT_IN_NEXT_ARG:
c2f47e15 6723 if (fold_builtin_next_arg (exp, false))
a0c938f0 6724 return const0_rtx;
79012a9d 6725 return expand_builtin_next_arg ();
53800dbe 6726
ac8fb6db 6727 case BUILT_IN_CLEAR_CACHE:
6728 target = expand_builtin___clear_cache (exp);
6729 if (target)
6730 return target;
6731 break;
6732
53800dbe 6733 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6734 return expand_builtin_classify_type (exp);
53800dbe 6735
6736 case BUILT_IN_CONSTANT_P:
4ee9c684 6737 return const0_rtx;
53800dbe 6738
6739 case BUILT_IN_FRAME_ADDRESS:
6740 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6741 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6742
6743 /* Returns the address of the area where the structure is returned.
6744 0 otherwise. */
6745 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6746 if (call_expr_nargs (exp) != 0
9342ee68 6747 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6748 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6749 return const0_rtx;
53800dbe 6750 else
9342ee68 6751 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6752
6753 case BUILT_IN_ALLOCA:
581bf1c2 6754 case BUILT_IN_ALLOCA_WITH_ALIGN:
2b29cc6a 6755 target = expand_builtin_alloca (exp);
53800dbe 6756 if (target)
6757 return target;
6758 break;
6759
4ee9c684 6760 case BUILT_IN_STACK_SAVE:
6761 return expand_stack_save ();
6762
6763 case BUILT_IN_STACK_RESTORE:
c2f47e15 6764 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6765 return const0_rtx;
6766
74bdbe96 6767 case BUILT_IN_BSWAP16:
42791117 6768 case BUILT_IN_BSWAP32:
6769 case BUILT_IN_BSWAP64:
74bdbe96 6770 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6771 if (target)
6772 return target;
6773 break;
6774
4f35b1fc 6775 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6776 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6777 subtarget, ffs_optab);
6a08d0ab 6778 if (target)
6779 return target;
6780 break;
6781
4f35b1fc 6782 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6783 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6784 subtarget, clz_optab);
6a08d0ab 6785 if (target)
6786 return target;
6787 break;
6788
4f35b1fc 6789 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6790 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6791 subtarget, ctz_optab);
6a08d0ab 6792 if (target)
6793 return target;
6794 break;
6795
d8492bd3 6796 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6797 target = expand_builtin_unop (target_mode, exp, target,
6798 subtarget, clrsb_optab);
6799 if (target)
6800 return target;
6801 break;
6802
4f35b1fc 6803 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6804 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6805 subtarget, popcount_optab);
6a08d0ab 6806 if (target)
6807 return target;
6808 break;
6809
4f35b1fc 6810 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6811 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6812 subtarget, parity_optab);
53800dbe 6813 if (target)
6814 return target;
6815 break;
6816
6817 case BUILT_IN_STRLEN:
c2f47e15 6818 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6819 if (target)
6820 return target;
6821 break;
6822
5aef8938 6823 case BUILT_IN_STRCAT:
6824 target = expand_builtin_strcat (exp, target);
6825 if (target)
6826 return target;
6827 break;
6828
53800dbe 6829 case BUILT_IN_STRCPY:
a65c4d64 6830 target = expand_builtin_strcpy (exp, target);
53800dbe 6831 if (target)
6832 return target;
6833 break;
bf8e3599 6834
5aef8938 6835 case BUILT_IN_STRNCAT:
6836 target = expand_builtin_strncat (exp, target);
6837 if (target)
6838 return target;
6839 break;
6840
ed09096d 6841 case BUILT_IN_STRNCPY:
a65c4d64 6842 target = expand_builtin_strncpy (exp, target);
ed09096d 6843 if (target)
6844 return target;
6845 break;
bf8e3599 6846
3b824fa6 6847 case BUILT_IN_STPCPY:
dc369150 6848 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6849 if (target)
6850 return target;
6851 break;
6852
4d317237 6853 case BUILT_IN_STPNCPY:
6854 target = expand_builtin_stpncpy (exp, target);
6855 if (target)
6856 return target;
6857 break;
6858
8d6c6ef5 6859 case BUILT_IN_MEMCHR:
6860 target = expand_builtin_memchr (exp, target);
6861 if (target)
6862 return target;
6863 break;
6864
53800dbe 6865 case BUILT_IN_MEMCPY:
a65c4d64 6866 target = expand_builtin_memcpy (exp, target);
3b824fa6 6867 if (target)
6868 return target;
6869 break;
6870
4d317237 6871 case BUILT_IN_MEMMOVE:
6872 target = expand_builtin_memmove (exp, target);
6873 if (target)
6874 return target;
6875 break;
6876
3b824fa6 6877 case BUILT_IN_MEMPCPY:
c2f47e15 6878 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6879 if (target)
6880 return target;
6881 break;
6882
6883 case BUILT_IN_MEMSET:
c2f47e15 6884 target = expand_builtin_memset (exp, target, mode);
53800dbe 6885 if (target)
6886 return target;
6887 break;
6888
ffc83088 6889 case BUILT_IN_BZERO:
0b25db21 6890 target = expand_builtin_bzero (exp);
ffc83088 6891 if (target)
6892 return target;
6893 break;
6894
53800dbe 6895 case BUILT_IN_STRCMP:
a65c4d64 6896 target = expand_builtin_strcmp (exp, target);
53800dbe 6897 if (target)
6898 return target;
6899 break;
6900
ed09096d 6901 case BUILT_IN_STRNCMP:
6902 target = expand_builtin_strncmp (exp, target, mode);
6903 if (target)
6904 return target;
6905 break;
6906
071f1696 6907 case BUILT_IN_BCMP:
53800dbe 6908 case BUILT_IN_MEMCMP:
3e346f54 6909 case BUILT_IN_MEMCMP_EQ:
6910 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 6911 if (target)
6912 return target;
3e346f54 6913 if (fcode == BUILT_IN_MEMCMP_EQ)
6914 {
6915 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6916 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6917 }
53800dbe 6918 break;
53800dbe 6919
6920 case BUILT_IN_SETJMP:
12f08300 6921 /* This should have been lowered to the builtins below. */
2c8a1497 6922 gcc_unreachable ();
6923
6924 case BUILT_IN_SETJMP_SETUP:
6925 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6926 and the receiver label. */
c2f47e15 6927 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6928 {
c2f47e15 6929 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6930 VOIDmode, EXPAND_NORMAL);
c2f47e15 6931 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6932 rtx_insn *label_r = label_rtx (label);
2c8a1497 6933
6934 /* This is copied from the handling of non-local gotos. */
6935 expand_builtin_setjmp_setup (buf_addr, label_r);
6936 nonlocal_goto_handler_labels
a4de1c23 6937 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6938 nonlocal_goto_handler_labels);
6939 /* ??? Do not let expand_label treat us as such since we would
6940 not want to be both on the list of non-local labels and on
6941 the list of forced labels. */
6942 FORCED_LABEL (label) = 0;
6943 return const0_rtx;
6944 }
6945 break;
6946
2c8a1497 6947 case BUILT_IN_SETJMP_RECEIVER:
6948 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6949 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6950 {
c2f47e15 6951 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6952 rtx_insn *label_r = label_rtx (label);
2c8a1497 6953
6954 expand_builtin_setjmp_receiver (label_r);
6955 return const0_rtx;
6956 }
6b7f6858 6957 break;
53800dbe 6958
6959 /* __builtin_longjmp is passed a pointer to an array of five words.
6960 It's similar to the C library longjmp function but works with
6961 __builtin_setjmp above. */
6962 case BUILT_IN_LONGJMP:
c2f47e15 6963 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6964 {
c2f47e15 6965 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6966 VOIDmode, EXPAND_NORMAL);
c2f47e15 6967 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6968
6969 if (value != const1_rtx)
6970 {
1e5fcbe2 6971 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6972 return const0_rtx;
6973 }
6974
6975 expand_builtin_longjmp (buf_addr, value);
6976 return const0_rtx;
6977 }
2c8a1497 6978 break;
53800dbe 6979
4ee9c684 6980 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6981 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6982 if (target)
6983 return target;
6984 break;
6985
843d08a9 6986 /* This updates the setjmp buffer that is its argument with the value
6987 of the current stack pointer. */
6988 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6989 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6990 {
6991 rtx buf_addr
c2f47e15 6992 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6993
6994 expand_builtin_update_setjmp_buf (buf_addr);
6995 return const0_rtx;
6996 }
6997 break;
6998
53800dbe 6999 case BUILT_IN_TRAP:
a0ef1725 7000 expand_builtin_trap ();
53800dbe 7001 return const0_rtx;
7002
d2b48f0c 7003 case BUILT_IN_UNREACHABLE:
7004 expand_builtin_unreachable ();
7005 return const0_rtx;
7006
4f35b1fc 7007 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 7008 case BUILT_IN_SIGNBITD32:
7009 case BUILT_IN_SIGNBITD64:
7010 case BUILT_IN_SIGNBITD128:
27f261ef 7011 target = expand_builtin_signbit (exp, target);
7012 if (target)
7013 return target;
7014 break;
7015
53800dbe 7016 /* Various hooks for the DWARF 2 __throw routine. */
7017 case BUILT_IN_UNWIND_INIT:
7018 expand_builtin_unwind_init ();
7019 return const0_rtx;
7020 case BUILT_IN_DWARF_CFA:
7021 return virtual_cfa_rtx;
7022#ifdef DWARF2_UNWIND_INFO
f8f023a5 7023 case BUILT_IN_DWARF_SP_COLUMN:
7024 return expand_builtin_dwarf_sp_column ();
695e919b 7025 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 7026 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 7027 return const0_rtx;
53800dbe 7028#endif
7029 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 7030 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7031 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 7032 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7033 case BUILT_IN_EH_RETURN:
c2f47e15 7034 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7035 CALL_EXPR_ARG (exp, 1));
53800dbe 7036 return const0_rtx;
df4b504c 7037 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 7038 return expand_builtin_eh_return_data_regno (exp);
26093bf4 7039 case BUILT_IN_EXTEND_POINTER:
c2f47e15 7040 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 7041 case BUILT_IN_EH_POINTER:
7042 return expand_builtin_eh_pointer (exp);
7043 case BUILT_IN_EH_FILTER:
7044 return expand_builtin_eh_filter (exp);
7045 case BUILT_IN_EH_COPY_VALUES:
7046 return expand_builtin_eh_copy_values (exp);
26093bf4 7047
7ccc713a 7048 case BUILT_IN_VA_START:
c2f47e15 7049 return expand_builtin_va_start (exp);
a66c9326 7050 case BUILT_IN_VA_END:
c2f47e15 7051 return expand_builtin_va_end (exp);
a66c9326 7052 case BUILT_IN_VA_COPY:
c2f47e15 7053 return expand_builtin_va_copy (exp);
89cfe6e5 7054 case BUILT_IN_EXPECT:
c2f47e15 7055 return expand_builtin_expect (exp, target);
fca0886c 7056 case BUILT_IN_ASSUME_ALIGNED:
7057 return expand_builtin_assume_aligned (exp, target);
5e3608d8 7058 case BUILT_IN_PREFETCH:
c2f47e15 7059 expand_builtin_prefetch (exp);
5e3608d8 7060 return const0_rtx;
7061
4ee9c684 7062 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 7063 return expand_builtin_init_trampoline (exp, true);
7064 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7065 return expand_builtin_init_trampoline (exp, false);
4ee9c684 7066 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 7067 return expand_builtin_adjust_trampoline (exp);
4ee9c684 7068
a27e3913 7069 case BUILT_IN_INIT_DESCRIPTOR:
7070 return expand_builtin_init_descriptor (exp);
7071 case BUILT_IN_ADJUST_DESCRIPTOR:
7072 return expand_builtin_adjust_descriptor (exp);
7073
73673831 7074 case BUILT_IN_FORK:
7075 case BUILT_IN_EXECL:
7076 case BUILT_IN_EXECV:
7077 case BUILT_IN_EXECLP:
7078 case BUILT_IN_EXECLE:
7079 case BUILT_IN_EXECVP:
7080 case BUILT_IN_EXECVE:
c2f47e15 7081 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 7082 if (target)
7083 return target;
7084 break;
53800dbe 7085
2797f13a 7086 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7087 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7088 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7089 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7090 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7091 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 7092 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 7093 if (target)
7094 return target;
7095 break;
7096
2797f13a 7097 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7098 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7099 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7100 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7101 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7102 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 7103 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 7104 if (target)
7105 return target;
7106 break;
7107
2797f13a 7108 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7109 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7110 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7111 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7112 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7113 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 7114 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 7115 if (target)
7116 return target;
7117 break;
7118
2797f13a 7119 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7120 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7121 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7122 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7123 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7124 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 7125 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 7126 if (target)
7127 return target;
7128 break;
7129
2797f13a 7130 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7131 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7132 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7133 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7134 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7135 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 7136 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 7137 if (target)
7138 return target;
7139 break;
7140
2797f13a 7141 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7142 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7143 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7144 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7145 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7146 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 7147 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 7148 if (target)
7149 return target;
7150 break;
7151
2797f13a 7152 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7153 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7154 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7155 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7156 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7157 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 7158 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 7159 if (target)
7160 return target;
7161 break;
7162
2797f13a 7163 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7164 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7165 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7166 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7167 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7168 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 7169 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 7170 if (target)
7171 return target;
7172 break;
7173
2797f13a 7174 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7175 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7176 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7177 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7178 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7179 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 7180 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 7181 if (target)
7182 return target;
7183 break;
7184
2797f13a 7185 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7186 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7187 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7188 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7189 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7190 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 7191 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 7192 if (target)
7193 return target;
7194 break;
7195
2797f13a 7196 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7197 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7198 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7199 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7200 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7201 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 7202 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 7203 if (target)
7204 return target;
7205 break;
7206
2797f13a 7207 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7208 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7209 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7210 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7211 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7212 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 7213 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 7214 if (target)
7215 return target;
7216 break;
7217
2797f13a 7218 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7219 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7220 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7221 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7222 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 7223 if (mode == VOIDmode)
7224 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 7225 if (!target || !register_operand (target, mode))
7226 target = gen_reg_rtx (mode);
3e272de8 7227
2797f13a 7228 mode = get_builtin_sync_mode
7229 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 7230 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 7231 if (target)
7232 return target;
7233 break;
7234
2797f13a 7235 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7236 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7237 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7238 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7239 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7240 mode = get_builtin_sync_mode
7241 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 7242 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 7243 if (target)
7244 return target;
7245 break;
7246
2797f13a 7247 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7248 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7249 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7250 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7251 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7252 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7253 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 7254 if (target)
7255 return target;
7256 break;
7257
2797f13a 7258 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7259 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7260 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7261 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7262 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7263 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7264 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 7265 return const0_rtx;
7266
2797f13a 7267 case BUILT_IN_SYNC_SYNCHRONIZE:
7268 expand_builtin_sync_synchronize ();
b6a5fc45 7269 return const0_rtx;
7270
1cd6e20d 7271 case BUILT_IN_ATOMIC_EXCHANGE_1:
7272 case BUILT_IN_ATOMIC_EXCHANGE_2:
7273 case BUILT_IN_ATOMIC_EXCHANGE_4:
7274 case BUILT_IN_ATOMIC_EXCHANGE_8:
7275 case BUILT_IN_ATOMIC_EXCHANGE_16:
7276 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7277 target = expand_builtin_atomic_exchange (mode, exp, target);
7278 if (target)
7279 return target;
7280 break;
7281
7282 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7283 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7284 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7285 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7286 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 7287 {
7288 unsigned int nargs, z;
f1f41a6c 7289 vec<tree, va_gc> *vec;
2c201ad1 7290
7291 mode =
7292 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7293 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7294 if (target)
7295 return target;
7296
7297 /* If this is turned into an external library call, the weak parameter
7298 must be dropped to match the expected parameter list. */
7299 nargs = call_expr_nargs (exp);
f1f41a6c 7300 vec_alloc (vec, nargs - 1);
2c201ad1 7301 for (z = 0; z < 3; z++)
f1f41a6c 7302 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7303 /* Skip the boolean weak parameter. */
7304 for (z = 4; z < 6; z++)
f1f41a6c 7305 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7306 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7307 break;
7308 }
1cd6e20d 7309
7310 case BUILT_IN_ATOMIC_LOAD_1:
7311 case BUILT_IN_ATOMIC_LOAD_2:
7312 case BUILT_IN_ATOMIC_LOAD_4:
7313 case BUILT_IN_ATOMIC_LOAD_8:
7314 case BUILT_IN_ATOMIC_LOAD_16:
7315 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7316 target = expand_builtin_atomic_load (mode, exp, target);
7317 if (target)
7318 return target;
7319 break;
7320
7321 case BUILT_IN_ATOMIC_STORE_1:
7322 case BUILT_IN_ATOMIC_STORE_2:
7323 case BUILT_IN_ATOMIC_STORE_4:
7324 case BUILT_IN_ATOMIC_STORE_8:
7325 case BUILT_IN_ATOMIC_STORE_16:
7326 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7327 target = expand_builtin_atomic_store (mode, exp);
7328 if (target)
7329 return const0_rtx;
7330 break;
7331
7332 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7333 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7334 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7335 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7336 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7337 {
7338 enum built_in_function lib;
7339 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7340 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7341 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7342 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7343 ignore, lib);
7344 if (target)
7345 return target;
7346 break;
7347 }
7348 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7349 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7350 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7351 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7352 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7353 {
7354 enum built_in_function lib;
7355 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7356 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7357 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7358 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7359 ignore, lib);
7360 if (target)
7361 return target;
7362 break;
7363 }
7364 case BUILT_IN_ATOMIC_AND_FETCH_1:
7365 case BUILT_IN_ATOMIC_AND_FETCH_2:
7366 case BUILT_IN_ATOMIC_AND_FETCH_4:
7367 case BUILT_IN_ATOMIC_AND_FETCH_8:
7368 case BUILT_IN_ATOMIC_AND_FETCH_16:
7369 {
7370 enum built_in_function lib;
7371 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7372 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7373 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7374 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7375 ignore, lib);
7376 if (target)
7377 return target;
7378 break;
7379 }
7380 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7381 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7382 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7383 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7384 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7385 {
7386 enum built_in_function lib;
7387 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7388 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7389 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7390 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7391 ignore, lib);
7392 if (target)
7393 return target;
7394 break;
7395 }
7396 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7397 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7398 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7399 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7400 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7401 {
7402 enum built_in_function lib;
7403 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7404 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7405 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7406 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7407 ignore, lib);
7408 if (target)
7409 return target;
7410 break;
7411 }
7412 case BUILT_IN_ATOMIC_OR_FETCH_1:
7413 case BUILT_IN_ATOMIC_OR_FETCH_2:
7414 case BUILT_IN_ATOMIC_OR_FETCH_4:
7415 case BUILT_IN_ATOMIC_OR_FETCH_8:
7416 case BUILT_IN_ATOMIC_OR_FETCH_16:
7417 {
7418 enum built_in_function lib;
7419 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7420 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7421 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7422 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7423 ignore, lib);
7424 if (target)
7425 return target;
7426 break;
7427 }
7428 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7429 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7430 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7431 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7432 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7433 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7434 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7435 ignore, BUILT_IN_NONE);
7436 if (target)
7437 return target;
7438 break;
7439
7440 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7441 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7442 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7443 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7444 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7445 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7446 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7447 ignore, BUILT_IN_NONE);
7448 if (target)
7449 return target;
7450 break;
7451
7452 case BUILT_IN_ATOMIC_FETCH_AND_1:
7453 case BUILT_IN_ATOMIC_FETCH_AND_2:
7454 case BUILT_IN_ATOMIC_FETCH_AND_4:
7455 case BUILT_IN_ATOMIC_FETCH_AND_8:
7456 case BUILT_IN_ATOMIC_FETCH_AND_16:
7457 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7458 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7459 ignore, BUILT_IN_NONE);
7460 if (target)
7461 return target;
7462 break;
7463
7464 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7465 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7466 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7467 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7468 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7469 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7470 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7471 ignore, BUILT_IN_NONE);
7472 if (target)
7473 return target;
7474 break;
7475
7476 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7477 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7478 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7479 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7480 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7481 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7482 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7483 ignore, BUILT_IN_NONE);
7484 if (target)
7485 return target;
7486 break;
7487
7488 case BUILT_IN_ATOMIC_FETCH_OR_1:
7489 case BUILT_IN_ATOMIC_FETCH_OR_2:
7490 case BUILT_IN_ATOMIC_FETCH_OR_4:
7491 case BUILT_IN_ATOMIC_FETCH_OR_8:
7492 case BUILT_IN_ATOMIC_FETCH_OR_16:
7493 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7494 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7495 ignore, BUILT_IN_NONE);
7496 if (target)
7497 return target;
7498 break;
10b744a3 7499
7500 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 7501 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 7502
7503 case BUILT_IN_ATOMIC_CLEAR:
7504 return expand_builtin_atomic_clear (exp);
1cd6e20d 7505
7506 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7507 return expand_builtin_atomic_always_lock_free (exp);
7508
7509 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7510 target = expand_builtin_atomic_is_lock_free (exp);
7511 if (target)
7512 return target;
7513 break;
7514
7515 case BUILT_IN_ATOMIC_THREAD_FENCE:
7516 expand_builtin_atomic_thread_fence (exp);
7517 return const0_rtx;
7518
7519 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7520 expand_builtin_atomic_signal_fence (exp);
7521 return const0_rtx;
7522
0a39fd54 7523 case BUILT_IN_OBJECT_SIZE:
7524 return expand_builtin_object_size (exp);
7525
7526 case BUILT_IN_MEMCPY_CHK:
7527 case BUILT_IN_MEMPCPY_CHK:
7528 case BUILT_IN_MEMMOVE_CHK:
7529 case BUILT_IN_MEMSET_CHK:
7530 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7531 if (target)
7532 return target;
7533 break;
7534
7535 case BUILT_IN_STRCPY_CHK:
7536 case BUILT_IN_STPCPY_CHK:
7537 case BUILT_IN_STRNCPY_CHK:
1063acde 7538 case BUILT_IN_STPNCPY_CHK:
0a39fd54 7539 case BUILT_IN_STRCAT_CHK:
b356dfef 7540 case BUILT_IN_STRNCAT_CHK:
0a39fd54 7541 case BUILT_IN_SNPRINTF_CHK:
7542 case BUILT_IN_VSNPRINTF_CHK:
7543 maybe_emit_chk_warning (exp, fcode);
7544 break;
7545
7546 case BUILT_IN_SPRINTF_CHK:
7547 case BUILT_IN_VSPRINTF_CHK:
7548 maybe_emit_sprintf_chk_warning (exp, fcode);
7549 break;
7550
2c281b15 7551 case BUILT_IN_FREE:
f74ea1c2 7552 if (warn_free_nonheap_object)
7553 maybe_emit_free_warning (exp);
2c281b15 7554 break;
7555
badaa04c 7556 case BUILT_IN_THREAD_POINTER:
7557 return expand_builtin_thread_pointer (exp, target);
7558
7559 case BUILT_IN_SET_THREAD_POINTER:
7560 expand_builtin_set_thread_pointer (exp);
7561 return const0_rtx;
7562
d037099f 7563 case BUILT_IN_CILK_DETACH:
7564 expand_builtin_cilk_detach (exp);
7565 return const0_rtx;
7566
7567 case BUILT_IN_CILK_POP_FRAME:
7568 expand_builtin_cilk_pop_frame (exp);
7569 return const0_rtx;
7570
058a1b7a 7571 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7572 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7573 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7574 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7575 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7576 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7577 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7578 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7579 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7580 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7581 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7582 /* We allow user CHKP builtins if Pointer Bounds
7583 Checker is off. */
7584 if (!chkp_function_instrumented_p (current_function_decl))
7585 {
7586 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7587 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7588 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7589 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7590 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7591 return expand_normal (CALL_EXPR_ARG (exp, 0));
7592 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7593 return expand_normal (size_zero_node);
7594 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7595 return expand_normal (size_int (-1));
7596 else
7597 return const0_rtx;
7598 }
7599 /* FALLTHROUGH */
7600
7601 case BUILT_IN_CHKP_BNDMK:
7602 case BUILT_IN_CHKP_BNDSTX:
7603 case BUILT_IN_CHKP_BNDCL:
7604 case BUILT_IN_CHKP_BNDCU:
7605 case BUILT_IN_CHKP_BNDLDX:
7606 case BUILT_IN_CHKP_BNDRET:
7607 case BUILT_IN_CHKP_INTERSECT:
7608 case BUILT_IN_CHKP_NARROW:
7609 case BUILT_IN_CHKP_EXTRACT_LOWER:
7610 case BUILT_IN_CHKP_EXTRACT_UPPER:
7611 /* Software implementation of Pointer Bounds Checker is NYI.
7612 Target support is required. */
7613 error ("Your target platform does not support -fcheck-pointer-bounds");
7614 break;
7615
ca4c3545 7616 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 7617 /* Do library call, if we failed to expand the builtin when
7618 folding. */
ca4c3545 7619 break;
7620
92482ee0 7621 default: /* just do library call, if unknown builtin */
146c1b4f 7622 break;
53800dbe 7623 }
7624
7625 /* The switch statement above can drop through to cause the function
7626 to be called normally. */
7627 return expand_call (exp, target, ignore);
7628}
650e4c94 7629
f21337ef 7630/* Similar to expand_builtin but is used for instrumented calls. */
7631
7632rtx
7633expand_builtin_with_bounds (tree exp, rtx target,
7634 rtx subtarget ATTRIBUTE_UNUSED,
7635 machine_mode mode, int ignore)
7636{
7637 tree fndecl = get_callee_fndecl (exp);
7638 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7639
7640 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7641
7642 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7643 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7644
7645 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7646 && fcode < END_CHKP_BUILTINS);
7647
7648 switch (fcode)
7649 {
7650 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7651 target = expand_builtin_memcpy_with_bounds (exp, target);
7652 if (target)
7653 return target;
7654 break;
7655
7656 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7657 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7658 if (target)
7659 return target;
7660 break;
7661
7662 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7663 target = expand_builtin_memset_with_bounds (exp, target, mode);
7664 if (target)
7665 return target;
7666 break;
7667
7668 default:
7669 break;
7670 }
7671
7672 /* The switch statement above can drop through to cause the function
7673 to be called normally. */
7674 return expand_call (exp, target, ignore);
7675 }
7676
805e22b2 7677/* Determine whether a tree node represents a call to a built-in
52203a9d 7678 function. If the tree T is a call to a built-in function with
7679 the right number of arguments of the appropriate types, return
7680 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7681 Otherwise the return value is END_BUILTINS. */
aecda0d6 7682
805e22b2 7683enum built_in_function
b7bf20db 7684builtin_mathfn_code (const_tree t)
805e22b2 7685{
b7bf20db 7686 const_tree fndecl, arg, parmlist;
7687 const_tree argtype, parmtype;
7688 const_call_expr_arg_iterator iter;
805e22b2 7689
7690 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 7691 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 7692 return END_BUILTINS;
7693
c6e6ecb1 7694 fndecl = get_callee_fndecl (t);
7695 if (fndecl == NULL_TREE
52203a9d 7696 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7697 || ! DECL_BUILT_IN (fndecl)
7698 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7699 return END_BUILTINS;
7700
52203a9d 7701 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7702 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7703 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7704 {
52203a9d 7705 /* If a function doesn't take a variable number of arguments,
7706 the last element in the list will have type `void'. */
7707 parmtype = TREE_VALUE (parmlist);
7708 if (VOID_TYPE_P (parmtype))
7709 {
b7bf20db 7710 if (more_const_call_expr_args_p (&iter))
52203a9d 7711 return END_BUILTINS;
7712 return DECL_FUNCTION_CODE (fndecl);
7713 }
7714
b7bf20db 7715 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7716 return END_BUILTINS;
48e1416a 7717
b7bf20db 7718 arg = next_const_call_expr_arg (&iter);
c2f47e15 7719 argtype = TREE_TYPE (arg);
52203a9d 7720
7721 if (SCALAR_FLOAT_TYPE_P (parmtype))
7722 {
7723 if (! SCALAR_FLOAT_TYPE_P (argtype))
7724 return END_BUILTINS;
7725 }
7726 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7727 {
7728 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7729 return END_BUILTINS;
7730 }
7731 else if (POINTER_TYPE_P (parmtype))
7732 {
7733 if (! POINTER_TYPE_P (argtype))
7734 return END_BUILTINS;
7735 }
7736 else if (INTEGRAL_TYPE_P (parmtype))
7737 {
7738 if (! INTEGRAL_TYPE_P (argtype))
7739 return END_BUILTINS;
7740 }
7741 else
e9f80ff5 7742 return END_BUILTINS;
e9f80ff5 7743 }
7744
52203a9d 7745 /* Variable-length argument list. */
805e22b2 7746 return DECL_FUNCTION_CODE (fndecl);
7747}
7748
c2f47e15 7749/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7750 evaluate to a constant. */
650e4c94 7751
7752static tree
c2f47e15 7753fold_builtin_constant_p (tree arg)
650e4c94 7754{
650e4c94 7755 /* We return 1 for a numeric type that's known to be a constant
7756 value at compile-time or for an aggregate type that's a
7757 literal constant. */
c2f47e15 7758 STRIP_NOPS (arg);
650e4c94 7759
7760 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7761 if (CONSTANT_CLASS_P (arg)
7762 || (TREE_CODE (arg) == CONSTRUCTOR
7763 && TREE_CONSTANT (arg)))
650e4c94 7764 return integer_one_node;
c2f47e15 7765 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7766 {
c2f47e15 7767 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7768 if (TREE_CODE (op) == STRING_CST
7769 || (TREE_CODE (op) == ARRAY_REF
7770 && integer_zerop (TREE_OPERAND (op, 1))
7771 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7772 return integer_one_node;
7773 }
650e4c94 7774
1fb4300c 7775 /* If this expression has side effects, show we don't know it to be a
7776 constant. Likewise if it's a pointer or aggregate type since in
7777 those case we only want literals, since those are only optimized
f97c71a1 7778 when generating RTL, not later.
7779 And finally, if we are compiling an initializer, not code, we
7780 need to return a definite result now; there's not going to be any
7781 more optimization done. */
c2f47e15 7782 if (TREE_SIDE_EFFECTS (arg)
7783 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7784 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7785 || cfun == 0
0b049e15 7786 || folding_initializer
7787 || force_folding_builtin_constant_p)
650e4c94 7788 return integer_zero_node;
7789
c2f47e15 7790 return NULL_TREE;
650e4c94 7791}
7792
76f5a783 7793/* Create builtin_expect with PRED and EXPECTED as its arguments and
7794 return it as a truthvalue. */
4ee9c684 7795
7796static tree
c83059be 7797build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7798 tree predictor)
4ee9c684 7799{
76f5a783 7800 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7801
b9a16870 7802 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7803 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7804 ret_type = TREE_TYPE (TREE_TYPE (fn));
7805 pred_type = TREE_VALUE (arg_types);
7806 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7807
389dd41b 7808 pred = fold_convert_loc (loc, pred_type, pred);
7809 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7810 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7811 predictor);
76f5a783 7812
7813 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7814 build_int_cst (ret_type, 0));
7815}
7816
/* Fold a call to builtin_expect with arguments ARG0 (the predicate),
   ARG1 (the expected value) and ARG2 (the optional predictor, which
   may be NULL_TREE).  Return NULL_TREE if no simplification is
   possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  /* A nested __builtin_expect: the outer wrapper adds nothing, so
     return the (possibly converted) inner call unchanged.  */
  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      /* Push the expectation into both operands:
	 __builtin_expect (a && b, x) becomes
	 __builtin_expect (a, x) && __builtin_expect (b, x).  */
      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      /* Strip COMPONENT_REFs/ARRAY_REFs to reach the underlying decl;
	 the address is only usable as a constant when that decl is
	 not weak, since a weak symbol's address is not known until
	 link time.  */
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7887
c2f47e15 7888/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7889
539a3a92 7890static tree
c2f47e15 7891fold_builtin_classify_type (tree arg)
539a3a92 7892{
c2f47e15 7893 if (arg == 0)
7002a1c8 7894 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7895
7002a1c8 7896 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7897}
7898
c2f47e15 7899/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7900
7901static tree
c7cbde74 7902fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7903{
c2f47e15 7904 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7905 return NULL_TREE;
7906 else
7907 {
c2f47e15 7908 tree len = c_strlen (arg, 0);
e6e27594 7909
7910 if (len)
c7cbde74 7911 return fold_convert_loc (loc, type, len);
e6e27594 7912
7913 return NULL_TREE;
7914 }
7915}
7916
92c43e3c 7917/* Fold a call to __builtin_inf or __builtin_huge_val. */
7918
7919static tree
389dd41b 7920fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7921{
aa870c1b 7922 REAL_VALUE_TYPE real;
7923
40f4dbd5 7924 /* __builtin_inff is intended to be usable to define INFINITY on all
7925 targets. If an infinity is not available, INFINITY expands "to a
7926 positive constant of type float that overflows at translation
7927 time", footnote "In this case, using INFINITY will violate the
7928 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7929 Thus we pedwarn to ensure this constraint violation is
7930 diagnosed. */
92c43e3c 7931 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7932 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7933
aa870c1b 7934 real_inf (&real);
7935 return build_real (type, real);
92c43e3c 7936}
7937
d735c391 7938/* Fold function call to builtin sincos, sincosf, or sincosl. Return
7939 NULL_TREE if no simplification can be made. */
7940
7941static tree
389dd41b 7942fold_builtin_sincos (location_t loc,
7943 tree arg0, tree arg1, tree arg2)
d735c391 7944{
c2f47e15 7945 tree type;
6c21be92 7946 tree fndecl, call = NULL_TREE;
d735c391 7947
c2f47e15 7948 if (!validate_arg (arg0, REAL_TYPE)
7949 || !validate_arg (arg1, POINTER_TYPE)
7950 || !validate_arg (arg2, POINTER_TYPE))
d735c391 7951 return NULL_TREE;
7952
d735c391 7953 type = TREE_TYPE (arg0);
d735c391 7954
7955 /* Calculate the result when the argument is a constant. */
e3240774 7956 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
6c21be92 7957 if (fn == END_BUILTINS)
d735c391 7958 return NULL_TREE;
7959
6c21be92 7960 /* Canonicalize sincos to cexpi. */
7961 if (TREE_CODE (arg0) == REAL_CST)
7962 {
7963 tree complex_type = build_complex_type (type);
744fe358 7964 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
6c21be92 7965 }
7966 if (!call)
7967 {
7968 if (!targetm.libc_has_function (function_c99_math_complex)
7969 || !builtin_decl_implicit_p (fn))
7970 return NULL_TREE;
7971 fndecl = builtin_decl_explicit (fn);
7972 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7973 call = builtin_save_expr (call);
7974 }
d735c391 7975
a75b1c71 7976 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 7977 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7978 build_fold_indirect_ref_loc (loc, arg1),
6c21be92 7979 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
d735c391 7980 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7981 build_fold_indirect_ref_loc (loc, arg2),
6c21be92 7982 fold_build1_loc (loc, REALPART_EXPR, type, call)));
d735c391 7983}
7984
c2f47e15 7985/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7986 Return NULL_TREE if no simplification can be made. */
9c8a1629 7987
7988static tree
389dd41b 7989fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 7990{
c2f47e15 7991 if (!validate_arg (arg1, POINTER_TYPE)
7992 || !validate_arg (arg2, POINTER_TYPE)
7993 || !validate_arg (len, INTEGER_TYPE))
7994 return NULL_TREE;
9c8a1629 7995
7996 /* If the LEN parameter is zero, return zero. */
7997 if (integer_zerop (len))
389dd41b 7998 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 7999 arg1, arg2);
9c8a1629 8000
8001 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8002 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8003 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 8004
c4fef134 8005 /* If len parameter is one, return an expression corresponding to
8006 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
e913b5cd 8007 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8008 {
8009 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8010 tree cst_uchar_ptr_node
8011 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8012
389dd41b 8013 tree ind1
8014 = fold_convert_loc (loc, integer_type_node,
8015 build1 (INDIRECT_REF, cst_uchar_node,
8016 fold_convert_loc (loc,
8017 cst_uchar_ptr_node,
c4fef134 8018 arg1)));
389dd41b 8019 tree ind2
8020 = fold_convert_loc (loc, integer_type_node,
8021 build1 (INDIRECT_REF, cst_uchar_node,
8022 fold_convert_loc (loc,
8023 cst_uchar_ptr_node,
c4fef134 8024 arg2)));
389dd41b 8025 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 8026 }
9c8a1629 8027
c2f47e15 8028 return NULL_TREE;
9c8a1629 8029}
8030
c2f47e15 8031/* Fold a call to builtin isascii with argument ARG. */
d49367d4 8032
8033static tree
389dd41b 8034fold_builtin_isascii (location_t loc, tree arg)
d49367d4 8035{
c2f47e15 8036 if (!validate_arg (arg, INTEGER_TYPE))
8037 return NULL_TREE;
d49367d4 8038 else
8039 {
8040 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 8041 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8042 build_int_cst (integer_type_node,
c90b5d40 8043 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 8044 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 8045 arg, integer_zero_node);
d49367d4 8046 }
8047}
8048
c2f47e15 8049/* Fold a call to builtin toascii with argument ARG. */
d49367d4 8050
8051static tree
389dd41b 8052fold_builtin_toascii (location_t loc, tree arg)
d49367d4 8053{
c2f47e15 8054 if (!validate_arg (arg, INTEGER_TYPE))
8055 return NULL_TREE;
48e1416a 8056
c2f47e15 8057 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 8058 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8059 build_int_cst (integer_type_node, 0x7f));
d49367d4 8060}
8061
c2f47e15 8062/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 8063
8064static tree
389dd41b 8065fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 8066{
c2f47e15 8067 if (!validate_arg (arg, INTEGER_TYPE))
8068 return NULL_TREE;
df1cf42e 8069 else
8070 {
8071 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 8072 /* According to the C standard, isdigit is unaffected by locale.
8073 However, it definitely is affected by the target character set. */
624d37a6 8074 unsigned HOST_WIDE_INT target_digit0
8075 = lang_hooks.to_target_charset ('0');
8076
8077 if (target_digit0 == 0)
8078 return NULL_TREE;
8079
389dd41b 8080 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 8081 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8082 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 8083 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 8084 build_int_cst (unsigned_type_node, 9));
df1cf42e 8085 }
8086}
27f261ef 8087
c2f47e15 8088/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 8089
8090static tree
389dd41b 8091fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 8092{
c2f47e15 8093 if (!validate_arg (arg, REAL_TYPE))
8094 return NULL_TREE;
d1aade50 8095
389dd41b 8096 arg = fold_convert_loc (loc, type, arg);
389dd41b 8097 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8098}
8099
c2f47e15 8100/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 8101
8102static tree
389dd41b 8103fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 8104{
c2f47e15 8105 if (!validate_arg (arg, INTEGER_TYPE))
8106 return NULL_TREE;
d1aade50 8107
389dd41b 8108 arg = fold_convert_loc (loc, type, arg);
389dd41b 8109 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8110}
8111
b9be572e 8112/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8113
8114static tree
8115fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8116{
866b3d58 8117 /* ??? Only expand to FMA_EXPR if it's directly supported. */
b9be572e 8118 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8119 && validate_arg (arg1, REAL_TYPE)
866b3d58 8120 && validate_arg (arg2, REAL_TYPE)
8121 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8122 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
b9be572e 8123
b9be572e 8124 return NULL_TREE;
8125}
8126
abe4dcf6 8127/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8128
8129static tree
389dd41b 8130fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 8131{
239d491a 8132 if (validate_arg (arg, COMPLEX_TYPE)
8133 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 8134 {
8135 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 8136
abe4dcf6 8137 if (atan2_fn)
8138 {
c2f47e15 8139 tree new_arg = builtin_save_expr (arg);
389dd41b 8140 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8141 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8142 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 8143 }
8144 }
48e1416a 8145
abe4dcf6 8146 return NULL_TREE;
8147}
8148
3838b9ae 8149/* Fold a call to builtin frexp, we can assume the base is 2. */
8150
8151static tree
389dd41b 8152fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 8153{
8154 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8155 return NULL_TREE;
48e1416a 8156
3838b9ae 8157 STRIP_NOPS (arg0);
48e1416a 8158
3838b9ae 8159 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8160 return NULL_TREE;
48e1416a 8161
389dd41b 8162 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 8163
8164 /* Proceed if a valid pointer type was passed in. */
8165 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8166 {
8167 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8168 tree frac, exp;
48e1416a 8169
3838b9ae 8170 switch (value->cl)
8171 {
8172 case rvc_zero:
8173 /* For +-0, return (*exp = 0, +-0). */
8174 exp = integer_zero_node;
8175 frac = arg0;
8176 break;
8177 case rvc_nan:
8178 case rvc_inf:
8179 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 8180 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 8181 case rvc_normal:
8182 {
8183 /* Since the frexp function always expects base 2, and in
8184 GCC normalized significands are already in the range
8185 [0.5, 1.0), we have exactly what frexp wants. */
8186 REAL_VALUE_TYPE frac_rvt = *value;
8187 SET_REAL_EXP (&frac_rvt, 0);
8188 frac = build_real (rettype, frac_rvt);
7002a1c8 8189 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 8190 }
8191 break;
8192 default:
8193 gcc_unreachable ();
8194 }
48e1416a 8195
3838b9ae 8196 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 8197 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 8198 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8199 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 8200 }
8201
8202 return NULL_TREE;
8203}
8204
ebf8b4f5 8205/* Fold a call to builtin modf. */
8206
8207static tree
389dd41b 8208fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 8209{
8210 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8211 return NULL_TREE;
48e1416a 8212
ebf8b4f5 8213 STRIP_NOPS (arg0);
48e1416a 8214
ebf8b4f5 8215 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8216 return NULL_TREE;
48e1416a 8217
389dd41b 8218 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 8219
8220 /* Proceed if a valid pointer type was passed in. */
8221 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8222 {
8223 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8224 REAL_VALUE_TYPE trunc, frac;
8225
8226 switch (value->cl)
8227 {
8228 case rvc_nan:
8229 case rvc_zero:
8230 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8231 trunc = frac = *value;
8232 break;
8233 case rvc_inf:
8234 /* For +-Inf, return (*arg1 = arg0, +-0). */
8235 frac = dconst0;
8236 frac.sign = value->sign;
8237 trunc = *value;
8238 break;
8239 case rvc_normal:
8240 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8241 real_trunc (&trunc, VOIDmode, value);
8242 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8243 /* If the original number was negative and already
8244 integral, then the fractional part is -0.0. */
8245 if (value->sign && frac.cl == rvc_zero)
8246 frac.sign = value->sign;
8247 break;
8248 }
48e1416a 8249
ebf8b4f5 8250 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 8251 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 8252 build_real (rettype, trunc));
8253 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8254 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 8255 build_real (rettype, frac));
8256 }
48e1416a 8257
ebf8b4f5 8258 return NULL_TREE;
8259}
8260
12f08300 8261/* Given a location LOC, an interclass builtin function decl FNDECL
8262 and its single argument ARG, return an folded expression computing
8263 the same, or NULL_TREE if we either couldn't or didn't want to fold
8264 (the latter happen if there's an RTL instruction available). */
8265
8266static tree
8267fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8268{
8269 machine_mode mode;
8270
8271 if (!validate_arg (arg, REAL_TYPE))
8272 return NULL_TREE;
8273
8274 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8275 return NULL_TREE;
8276
8277 mode = TYPE_MODE (TREE_TYPE (arg));
8278
8279 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7f38718f 8280
12f08300 8281 /* If there is no optab, try generic code. */
8282 switch (DECL_FUNCTION_CODE (fndecl))
8283 {
8284 tree result;
a65c4d64 8285
12f08300 8286 CASE_FLT_FN (BUILT_IN_ISINF):
8287 {
8288 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8289 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8290 tree type = TREE_TYPE (arg);
8291 REAL_VALUE_TYPE r;
8292 char buf[128];
8293
8294 if (is_ibm_extended)
8295 {
8296 /* NaN and Inf are encoded in the high-order double value
8297 only. The low-order value is not significant. */
8298 type = double_type_node;
8299 mode = DFmode;
8300 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8301 }
8302 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8303 real_from_string (&r, buf);
8304 result = build_call_expr (isgr_fn, 2,
8305 fold_build1_loc (loc, ABS_EXPR, type, arg),
8306 build_real (type, r));
8307 return result;
8308 }
8309 CASE_FLT_FN (BUILT_IN_FINITE):
8310 case BUILT_IN_ISFINITE:
8311 {
8312 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8313 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8314 tree type = TREE_TYPE (arg);
8315 REAL_VALUE_TYPE r;
8316 char buf[128];
8317
8318 if (is_ibm_extended)
8319 {
8320 /* NaN and Inf are encoded in the high-order double value
8321 only. The low-order value is not significant. */
8322 type = double_type_node;
8323 mode = DFmode;
8324 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8325 }
8326 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8327 real_from_string (&r, buf);
8328 result = build_call_expr (isle_fn, 2,
8329 fold_build1_loc (loc, ABS_EXPR, type, arg),
8330 build_real (type, r));
8331 /*result = fold_build2_loc (loc, UNGT_EXPR,
8332 TREE_TYPE (TREE_TYPE (fndecl)),
8333 fold_build1_loc (loc, ABS_EXPR, type, arg),
8334 build_real (type, r));
8335 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8336 TREE_TYPE (TREE_TYPE (fndecl)),
8337 result);*/
8338 return result;
8339 }
8340 case BUILT_IN_ISNORMAL:
8341 {
8342 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8343 islessequal(fabs(x),DBL_MAX). */
8344 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8345 tree type = TREE_TYPE (arg);
8346 tree orig_arg, max_exp, min_exp;
8347 machine_mode orig_mode = mode;
8348 REAL_VALUE_TYPE rmax, rmin;
8349 char buf[128];
8350
8351 orig_arg = arg = builtin_save_expr (arg);
8352 if (is_ibm_extended)
8353 {
8354 /* Use double to test the normal range of IBM extended
8355 precision. Emin for IBM extended precision is
8356 different to emin for IEEE double, being 53 higher
8357 since the low double exponent is at least 53 lower
8358 than the high double exponent. */
8359 type = double_type_node;
8360 mode = DFmode;
8361 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8362 }
8363 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8364
8365 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8366 real_from_string (&rmax, buf);
8367 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8368 real_from_string (&rmin, buf);
8369 max_exp = build_real (type, rmax);
8370 min_exp = build_real (type, rmin);
8371
8372 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8373 if (is_ibm_extended)
8374 {
8375 /* Testing the high end of the range is done just using
8376 the high double, using the same test as isfinite().
8377 For the subnormal end of the range we first test the
8378 high double, then if its magnitude is equal to the
8379 limit of 0x1p-969, we test whether the low double is
8380 non-zero and opposite sign to the high double. */
8381 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8382 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8383 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8384 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8385 arg, min_exp);
8386 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8387 complex_double_type_node, orig_arg);
8388 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8389 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8390 tree zero = build_real (type, dconst0);
8391 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8392 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8393 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8394 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8395 fold_build3 (COND_EXPR,
8396 integer_type_node,
8397 hilt, logt, lolt));
8398 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8399 eq_min, ok_lo);
8400 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8401 gt_min, eq_min);
8402 }
8403 else
8404 {
8405 tree const isge_fn
8406 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8407 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8408 }
8409 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8410 max_exp, min_exp);
8411 return result;
8412 }
8413 default:
8414 break;
8415 }
8416
8417 return NULL_TREE;
8418}
8419
8420/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
c2f47e15 8421 ARG is the argument for the call. */
726069ba 8422
8423static tree
12f08300 8424fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 8425{
12f08300 8426 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8427
c2f47e15 8428 if (!validate_arg (arg, REAL_TYPE))
d43cee80 8429 return NULL_TREE;
726069ba 8430
726069ba 8431 switch (builtin_index)
8432 {
12f08300 8433 case BUILT_IN_ISINF:
8434 if (!HONOR_INFINITIES (arg))
8435 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8436
8437 return NULL_TREE;
8438
c319d56a 8439 case BUILT_IN_ISINF_SIGN:
8440 {
8441 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8442 /* In a boolean context, GCC will fold the inner COND_EXPR to
8443 1. So e.g. "if (isinf_sign(x))" would be folded to just
8444 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
6cfc7001 8445 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
b9a16870 8446 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 8447 tree tmp = NULL_TREE;
8448
8449 arg = builtin_save_expr (arg);
8450
8451 if (signbit_fn && isinf_fn)
8452 {
389dd41b 8453 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8454 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 8455
389dd41b 8456 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 8457 signbit_call, integer_zero_node);
389dd41b 8458 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 8459 isinf_call, integer_zero_node);
48e1416a 8460
389dd41b 8461 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 8462 integer_minus_one_node, integer_one_node);
389dd41b 8463 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8464 isinf_call, tmp,
c319d56a 8465 integer_zero_node);
8466 }
8467
8468 return tmp;
8469 }
8470
12f08300 8471 case BUILT_IN_ISFINITE:
8472 if (!HONOR_NANS (arg)
8473 && !HONOR_INFINITIES (arg))
8474 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8475
8476 return NULL_TREE;
8477
8478 case BUILT_IN_ISNAN:
8479 if (!HONOR_NANS (arg))
8480 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8481
8482 {
8483 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8484 if (is_ibm_extended)
8485 {
8486 /* NaN and Inf are encoded in the high-order double value
8487 only. The low-order value is not significant. */
8488 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8489 }
8490 }
8491 arg = builtin_save_expr (arg);
8492 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8493
726069ba 8494 default:
64db345d 8495 gcc_unreachable ();
726069ba 8496 }
8497}
8498
12f08300 8499/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8500 This builtin will generate code to return the appropriate floating
8501 point classification depending on the value of the floating point
8502 number passed in. The possible return values must be supplied as
8503 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8504 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
8505 one floating point argument which is "type generic". */
8506
8507static tree
8508fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8509{
8510 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8511 arg, type, res, tmp;
8512 machine_mode mode;
8513 REAL_VALUE_TYPE r;
8514 char buf[128];
8515
8516 /* Verify the required arguments in the original call. */
8517 if (nargs != 6
8518 || !validate_arg (args[0], INTEGER_TYPE)
8519 || !validate_arg (args[1], INTEGER_TYPE)
8520 || !validate_arg (args[2], INTEGER_TYPE)
8521 || !validate_arg (args[3], INTEGER_TYPE)
8522 || !validate_arg (args[4], INTEGER_TYPE)
8523 || !validate_arg (args[5], REAL_TYPE))
8524 return NULL_TREE;
8525
8526 fp_nan = args[0];
8527 fp_infinite = args[1];
8528 fp_normal = args[2];
8529 fp_subnormal = args[3];
8530 fp_zero = args[4];
8531 arg = args[5];
8532 type = TREE_TYPE (arg);
8533 mode = TYPE_MODE (type);
8534 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8535
8536 /* fpclassify(x) ->
8537 isnan(x) ? FP_NAN :
8538 (fabs(x) == Inf ? FP_INFINITE :
8539 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8540 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8541
8542 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8543 build_real (type, dconst0));
8544 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8545 tmp, fp_zero, fp_subnormal);
8546
8547 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8548 real_from_string (&r, buf);
8549 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8550 arg, build_real (type, r));
8551 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8552
8553 if (HONOR_INFINITIES (mode))
8554 {
8555 real_inf (&r);
8556 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8557 build_real (type, r));
8558 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8559 fp_infinite, res);
8560 }
8561
8562 if (HONOR_NANS (mode))
8563 {
8564 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8565 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8566 }
8567
8568 return res;
8569}
8570
9bc9f15f 8571/* Fold a call to an unordered comparison function such as
d5019fe8 8572 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 8573 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 8574 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8575 the opposite of the desired result. UNORDERED_CODE is used
8576 for modes that can hold NaNs and ORDERED_CODE is used for
8577 the rest. */
9bc9f15f 8578
8579static tree
389dd41b 8580fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 8581 enum tree_code unordered_code,
8582 enum tree_code ordered_code)
8583{
859f903a 8584 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 8585 enum tree_code code;
6978db0d 8586 tree type0, type1;
8587 enum tree_code code0, code1;
8588 tree cmp_type = NULL_TREE;
9bc9f15f 8589
6978db0d 8590 type0 = TREE_TYPE (arg0);
8591 type1 = TREE_TYPE (arg1);
a0c938f0 8592
6978db0d 8593 code0 = TREE_CODE (type0);
8594 code1 = TREE_CODE (type1);
a0c938f0 8595
6978db0d 8596 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8597 /* Choose the wider of two real types. */
8598 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8599 ? type0 : type1;
8600 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8601 cmp_type = type0;
8602 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8603 cmp_type = type1;
a0c938f0 8604
389dd41b 8605 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8606 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 8607
8608 if (unordered_code == UNORDERED_EXPR)
8609 {
93633022 8610 if (!HONOR_NANS (arg0))
389dd41b 8611 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8612 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 8613 }
9bc9f15f 8614
93633022 8615 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 8616 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8617 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 8618}
8619
0c93c8a9 8620/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8621 arithmetics if it can never overflow, or into internal functions that
8622 return both result of arithmetics and overflowed boolean flag in
732905bb 8623 a complex integer result, or some other check for overflow.
8624 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8625 checking part of that. */
0c93c8a9 8626
8627static tree
8628fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8629 tree arg0, tree arg1, tree arg2)
8630{
8631 enum internal_fn ifn = IFN_LAST;
732905bb 8632 /* The code of the expression corresponding to the type-generic
8633 built-in, or ERROR_MARK for the type-specific ones. */
8634 enum tree_code opcode = ERROR_MARK;
8635 bool ovf_only = false;
8636
0c93c8a9 8637 switch (fcode)
8638 {
732905bb 8639 case BUILT_IN_ADD_OVERFLOW_P:
8640 ovf_only = true;
8641 /* FALLTHRU */
0c93c8a9 8642 case BUILT_IN_ADD_OVERFLOW:
732905bb 8643 opcode = PLUS_EXPR;
8644 /* FALLTHRU */
0c93c8a9 8645 case BUILT_IN_SADD_OVERFLOW:
8646 case BUILT_IN_SADDL_OVERFLOW:
8647 case BUILT_IN_SADDLL_OVERFLOW:
8648 case BUILT_IN_UADD_OVERFLOW:
8649 case BUILT_IN_UADDL_OVERFLOW:
8650 case BUILT_IN_UADDLL_OVERFLOW:
8651 ifn = IFN_ADD_OVERFLOW;
8652 break;
732905bb 8653 case BUILT_IN_SUB_OVERFLOW_P:
8654 ovf_only = true;
8655 /* FALLTHRU */
0c93c8a9 8656 case BUILT_IN_SUB_OVERFLOW:
732905bb 8657 opcode = MINUS_EXPR;
8658 /* FALLTHRU */
0c93c8a9 8659 case BUILT_IN_SSUB_OVERFLOW:
8660 case BUILT_IN_SSUBL_OVERFLOW:
8661 case BUILT_IN_SSUBLL_OVERFLOW:
8662 case BUILT_IN_USUB_OVERFLOW:
8663 case BUILT_IN_USUBL_OVERFLOW:
8664 case BUILT_IN_USUBLL_OVERFLOW:
8665 ifn = IFN_SUB_OVERFLOW;
8666 break;
732905bb 8667 case BUILT_IN_MUL_OVERFLOW_P:
8668 ovf_only = true;
8669 /* FALLTHRU */
0c93c8a9 8670 case BUILT_IN_MUL_OVERFLOW:
732905bb 8671 opcode = MULT_EXPR;
8672 /* FALLTHRU */
0c93c8a9 8673 case BUILT_IN_SMUL_OVERFLOW:
8674 case BUILT_IN_SMULL_OVERFLOW:
8675 case BUILT_IN_SMULLL_OVERFLOW:
8676 case BUILT_IN_UMUL_OVERFLOW:
8677 case BUILT_IN_UMULL_OVERFLOW:
8678 case BUILT_IN_UMULLL_OVERFLOW:
8679 ifn = IFN_MUL_OVERFLOW;
8680 break;
8681 default:
8682 gcc_unreachable ();
8683 }
732905bb 8684
8685 /* For the "generic" overloads, the first two arguments can have different
8686 types and the last argument determines the target type to use to check
8687 for overflow. The arguments of the other overloads all have the same
8688 type. */
8689 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8690
8691 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8692 arguments are constant, attempt to fold the built-in call into a constant
8693 expression indicating whether or not it detected an overflow. */
8694 if (ovf_only
8695 && TREE_CODE (arg0) == INTEGER_CST
8696 && TREE_CODE (arg1) == INTEGER_CST)
8697 /* Perform the computation in the target type and check for overflow. */
8698 return omit_one_operand_loc (loc, boolean_type_node,
8699 arith_overflowed_p (opcode, type, arg0, arg1)
8700 ? boolean_true_node : boolean_false_node,
8701 arg2);
8702
0c93c8a9 8703 tree ctype = build_complex_type (type);
8704 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8705 2, arg0, arg1);
8706 tree tgt = save_expr (call);
8707 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8708 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8709 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
732905bb 8710
8711 if (ovf_only)
8712 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8713
8714 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
0c93c8a9 8715 tree store
8716 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8717 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8718}
8719
c388a0cf 8720/* Fold a call to __builtin_FILE to a constant string. */
8721
8722static inline tree
8723fold_builtin_FILE (location_t loc)
8724{
8725 if (const char *fname = LOCATION_FILE (loc))
8726 return build_string_literal (strlen (fname) + 1, fname);
8727
8728 return build_string_literal (1, "");
8729}
8730
8731/* Fold a call to __builtin_FUNCTION to a constant string. */
8732
8733static inline tree
8734fold_builtin_FUNCTION ()
8735{
8736 if (current_function_decl)
8737 {
8738 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8739 return build_string_literal (strlen (name) + 1, name);
8740 }
8741
8742 return build_string_literal (1, "");
8743}
8744
8745/* Fold a call to __builtin_LINE to an integer constant. */
8746
8747static inline tree
8748fold_builtin_LINE (location_t loc, tree type)
8749{
8750 return build_int_cst (type, LOCATION_LINE (loc));
8751}
8752
/* Fold a call to built-in function FNDECL with 0 arguments.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_0 (location_t loc, tree fndecl)
{
  /* TYPE is the return type of the builtin being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  switch (fcode)
    {
    /* __builtin_FILE/FUNCTION/LINE fold to constants describing LOC.  */
    case BUILT_IN_FILE:
      return fold_builtin_FILE (loc);

    case BUILT_IN_FUNCTION:
      return fold_builtin_FUNCTION ();

    case BUILT_IN_LINE:
      return fold_builtin_LINE (loc, type);

    /* inf () raises an overflow warning (third argument true),
       huge_val () does not.  */
    CASE_FLT_FN (BUILT_IN_INF):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
    case BUILT_IN_INFD32:
    case BUILT_IN_INFD64:
    case BUILT_IN_INFD128:
      return fold_builtin_inf (loc, type, true);

    CASE_FLT_FN (BUILT_IN_HUGE_VAL):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
      return fold_builtin_inf (loc, type, false);

    case BUILT_IN_CLASSIFY_TYPE:
      /* With no argument, classify_type folds to the "no type" code.  */
      return fold_builtin_classify_type (NULL_TREE);

    default:
      break;
    }
  return NULL_TREE;
}
7c2f0500 8791
/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
{
  /* TYPE is the return type of the builtin being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't try to fold erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
    return ret;

  switch (fcode)
    {
    case BUILT_IN_CONSTANT_P:
      {
	tree val = fold_builtin_constant_p (arg0);

	/* Gimplification will pull the CALL_EXPR for the builtin out of
	   an if condition.  When not optimizing, we'll not CSE it back.
	   To avoid link error types of regressions, return false now.  */
	if (!val && !optimize)
	  val = integer_zero_node;

	return val;
      }

    case BUILT_IN_CLASSIFY_TYPE:
      return fold_builtin_classify_type (arg0);

    case BUILT_IN_STRLEN:
      return fold_builtin_strlen (loc, type, arg0);

    CASE_FLT_FN (BUILT_IN_FABS):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
    case BUILT_IN_FABSD32:
    case BUILT_IN_FABSD64:
    case BUILT_IN_FABSD128:
      return fold_builtin_fabs (loc, arg0, type);

    case BUILT_IN_ABS:
    case BUILT_IN_LABS:
    case BUILT_IN_LLABS:
    case BUILT_IN_IMAXABS:
      return fold_builtin_abs (loc, arg0, type);

    /* conj/creal/cimag only fold when the argument really is a
       complex-of-real value; otherwise fall through to no folding.  */
    CASE_FLT_FN (BUILT_IN_CONJ):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
      break;

    CASE_FLT_FN (BUILT_IN_CREAL):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CIMAG):
      if (validate_arg (arg0, COMPLEX_TYPE)
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
      break;

    CASE_FLT_FN (BUILT_IN_CARG):
      return fold_builtin_carg (loc, arg0, type);

    case BUILT_IN_ISASCII:
      return fold_builtin_isascii (loc, arg0);

    case BUILT_IN_TOASCII:
      return fold_builtin_toascii (loc, arg0);

    case BUILT_IN_ISDIGIT:
      return fold_builtin_isdigit (loc, arg0);

    /* For the FP classification builtins, first try the generic
       classification folding and fall back to the interclass
       (integer-punning) variant.  */
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISFINITE:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    CASE_FLT_FN (BUILT_IN_ISINF):
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      {
	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
	if (ret)
	  return ret;
	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
      }

    case BUILT_IN_ISNORMAL:
      return fold_builtin_interclass_mathfn (loc, fndecl, arg0);

    case BUILT_IN_ISINF_SIGN:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);

    CASE_FLT_FN (BUILT_IN_ISNAN):
    case BUILT_IN_ISNAND32:
    case BUILT_IN_ISNAND64:
    case BUILT_IN_ISNAND128:
      return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);

    case BUILT_IN_FREE:
      /* free (NULL) is a no-op; fold it away entirely.  */
      if (integer_zerop (arg0))
	return build_empty_stmt (loc);
      break;

    default:
      break;
    }

  return NULL_TREE;

}
728bac60 8918
/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
   This function returns NULL_TREE if no simplification was possible.  */

static tree
fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
{
  /* TYPE is the return type of the builtin being folded.  */
  tree type = TREE_TYPE (TREE_TYPE (fndecl));
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Don't try to fold erroneous arguments.  */
  if (TREE_CODE (arg0) == ERROR_MARK
      || TREE_CODE (arg1) == ERROR_MARK)
    return NULL_TREE;

  /* First try pure constant folding of the call.  */
  if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
    return ret;

  switch (fcode)
    {
    CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
    CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
      if (validate_arg (arg0, REAL_TYPE)
	  && validate_arg (arg1, POINTER_TYPE))
	return do_mpfr_lgamma_r (arg0, arg1, type);
      break;

    CASE_FLT_FN (BUILT_IN_FREXP):
      return fold_builtin_frexp (loc, arg0, arg1, type);

    CASE_FLT_FN (BUILT_IN_MODF):
      return fold_builtin_modf (loc, arg0, arg1, type);

    case BUILT_IN_STRSPN:
      return fold_builtin_strspn (loc, arg0, arg1);

    case BUILT_IN_STRCSPN:
      return fold_builtin_strcspn (loc, arg0, arg1);

    case BUILT_IN_STRPBRK:
      return fold_builtin_strpbrk (loc, arg0, arg1, type);

    case BUILT_IN_EXPECT:
      /* Two-argument form: no explicit probability argument.  */
      return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);

    /* The is{greater,less,...} builtins fold to the inverse of the
       given unordered comparison, or to the ordered comparison when
       neither operand can be a NaN.  */
    case BUILT_IN_ISGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLE_EXPR, LE_EXPR);
    case BUILT_IN_ISGREATEREQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNLT_EXPR, LT_EXPR);
    case BUILT_IN_ISLESS:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGE_EXPR, GE_EXPR);
    case BUILT_IN_ISLESSEQUAL:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNGT_EXPR, GT_EXPR);
    case BUILT_IN_ISLESSGREATER:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
    case BUILT_IN_ISUNORDERED:
      return fold_builtin_unordered_cmp (loc, fndecl,
					 arg0, arg1, UNORDERED_EXPR,
					 NOP_EXPR);

    /* We do the folding for va_start in the expander.  */
    case BUILT_IN_VA_START:
      break;

    case BUILT_IN_OBJECT_SIZE:
      return fold_builtin_object_size (arg0, arg1);

    case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
      return fold_builtin_atomic_always_lock_free (arg0, arg1);

    case BUILT_IN_ATOMIC_IS_LOCK_FREE:
      return fold_builtin_atomic_is_lock_free (arg0, arg1);

    default:
      break;
    }
  return NULL_TREE;
}
9000
9001/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 9002 and ARG2.
c2f47e15 9003 This function returns NULL_TREE if no simplification was possible. */
9004
9005static tree
389dd41b 9006fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 9007 tree arg0, tree arg1, tree arg2)
c2f47e15 9008{
9009 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9010 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 9011
9012 if (TREE_CODE (arg0) == ERROR_MARK
9013 || TREE_CODE (arg1) == ERROR_MARK
9014 || TREE_CODE (arg2) == ERROR_MARK)
9015 return NULL_TREE;
9016
744fe358 9017 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9018 arg0, arg1, arg2))
6c21be92 9019 return ret;
9020
c2f47e15 9021 switch (fcode)
9022 {
9023
9024 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 9025 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 9026
9027 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 9028 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 9029
e5407ca6 9030 CASE_FLT_FN (BUILT_IN_REMQUO):
9031 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9032 && validate_arg (arg1, REAL_TYPE)
9033 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 9034 return do_mpfr_remquo (arg0, arg1, arg2);
9035 break;
e5407ca6 9036
c2f47e15 9037 case BUILT_IN_MEMCMP:
389dd41b 9038 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
c2f47e15 9039
c83059be 9040 case BUILT_IN_EXPECT:
9041 return fold_builtin_expect (loc, arg0, arg1, arg2);
9042
0c93c8a9 9043 case BUILT_IN_ADD_OVERFLOW:
9044 case BUILT_IN_SUB_OVERFLOW:
9045 case BUILT_IN_MUL_OVERFLOW:
732905bb 9046 case BUILT_IN_ADD_OVERFLOW_P:
9047 case BUILT_IN_SUB_OVERFLOW_P:
9048 case BUILT_IN_MUL_OVERFLOW_P:
0c93c8a9 9049 case BUILT_IN_SADD_OVERFLOW:
9050 case BUILT_IN_SADDL_OVERFLOW:
9051 case BUILT_IN_SADDLL_OVERFLOW:
9052 case BUILT_IN_SSUB_OVERFLOW:
9053 case BUILT_IN_SSUBL_OVERFLOW:
9054 case BUILT_IN_SSUBLL_OVERFLOW:
9055 case BUILT_IN_SMUL_OVERFLOW:
9056 case BUILT_IN_SMULL_OVERFLOW:
9057 case BUILT_IN_SMULLL_OVERFLOW:
9058 case BUILT_IN_UADD_OVERFLOW:
9059 case BUILT_IN_UADDL_OVERFLOW:
9060 case BUILT_IN_UADDLL_OVERFLOW:
9061 case BUILT_IN_USUB_OVERFLOW:
9062 case BUILT_IN_USUBL_OVERFLOW:
9063 case BUILT_IN_USUBLL_OVERFLOW:
9064 case BUILT_IN_UMUL_OVERFLOW:
9065 case BUILT_IN_UMULL_OVERFLOW:
9066 case BUILT_IN_UMULLL_OVERFLOW:
9067 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9068
650e4c94 9069 default:
9070 break;
9071 }
c2f47e15 9072 return NULL_TREE;
9073}
650e4c94 9074
c2f47e15 9075/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 9076 arguments. IGNORE is true if the result of the
9077 function call is ignored. This function returns NULL_TREE if no
9078 simplification was possible. */
48e1416a 9079
2165588a 9080tree
e80cc485 9081fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 9082{
9083 tree ret = NULL_TREE;
a7f5bb2d 9084
c2f47e15 9085 switch (nargs)
9086 {
9087 case 0:
e80cc485 9088 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 9089 break;
9090 case 1:
e80cc485 9091 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 9092 break;
9093 case 2:
e80cc485 9094 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 9095 break;
9096 case 3:
e80cc485 9097 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 9098 break;
c2f47e15 9099 default:
12f08300 9100 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 9101 break;
9102 }
9103 if (ret)
9104 {
75a70cf9 9105 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 9106 SET_EXPR_LOCATION (ret, loc);
c2f47e15 9107 TREE_NO_WARNING (ret) = 1;
9108 return ret;
9109 }
9110 return NULL_TREE;
9111}
9112
0e80b01d 9113/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9114 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9115 of arguments in ARGS to be omitted. OLDNARGS is the number of
9116 elements in ARGS. */
c2f47e15 9117
9118static tree
0e80b01d 9119rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9120 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 9121{
0e80b01d 9122 int nargs = oldnargs - skip + n;
9123 tree *buffer;
c2f47e15 9124
0e80b01d 9125 if (n > 0)
c2f47e15 9126 {
0e80b01d 9127 int i, j;
c2f47e15 9128
0e80b01d 9129 buffer = XALLOCAVEC (tree, nargs);
9130 for (i = 0; i < n; i++)
9131 buffer[i] = va_arg (newargs, tree);
9132 for (j = skip; j < oldnargs; j++, i++)
9133 buffer[i] = args[j];
9134 }
9135 else
9136 buffer = args + skip;
19fbe3a4 9137
0e80b01d 9138 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9139}
c2f47e15 9140
198622c0 9141/* Return true if FNDECL shouldn't be folded right now.
9142 If a built-in function has an inline attribute always_inline
9143 wrapper, defer folding it after always_inline functions have
9144 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9145 might not be performed. */
9146
51d2c51e 9147bool
198622c0 9148avoid_folding_inline_builtin (tree fndecl)
9149{
9150 return (DECL_DECLARED_INLINE_P (fndecl)
9151 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9152 && cfun
9153 && !cfun->always_inline_functions_inlined
9154 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9155}
9156
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  IGNORE is true
   if the value of the call is not used.  Returns the folded tree or
   NULL_TREE if no folding was done.  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Defer folding always_inline wrappers so -D_FORTIFY_SOURCE
	 checking is not bypassed (see avoid_folding_inline_builtin).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-dependent builtins are folded by the target hook.  */
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
48e1416a 9205
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  /* Only direct calls through an ADDR_EXPR can be folded here.  */
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      /* Defer folding always_inline wrappers (fortify checking).  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-dependent builtins go through the target hook.  */
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
9244
/* Construct a new CALL_EXPR using the tail of the argument list of EXP
   along with N new arguments specified as the "..." parameters.  SKIP
   is the number of arguments in EXP to be omitted.  This function is used
   to do varargs-to-varargs transformations.  */

static tree
rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
{
  va_list ap;
  tree t;

  /* Hand the variadic arguments to the va_list worker; va_start and
     va_end must bracket that single call exactly once.  */
  va_start (ap, n);
  t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
  va_end (ap);

  return t;
}
9263
9264/* Validate a single argument ARG against a tree code CODE representing
184fac50 9265 a type. Return true when argument is valid. */
48e1416a 9266
c2f47e15 9267static bool
184fac50 9268validate_arg (const_tree arg, enum tree_code code)
c2f47e15 9269{
9270 if (!arg)
9271 return false;
9272 else if (code == POINTER_TYPE)
184fac50 9273 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 9274 else if (code == INTEGER_TYPE)
9275 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 9276 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 9277}
0eb671f7 9278
/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipses, otherwise the last specifier must be a
   VOID_TYPE.

   This is the GIMPLE version of validate_arglist.  Eventually we want to
   completely convert builtins.c to work from GIMPLEs and the tree based
   validate_arglist will then be removed.  */

bool
validate_gimple_arglist (const gcall *call, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_tree arg;
  size_t i;

  va_start (ap, call);
  i = 0;

  do
    {
      /* va_arg promotes enums to int; convert back explicitly.  */
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipses, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = (i == gimple_call_num_args (call));
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = gimple_call_arg (call, i++);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
9333
/* Default target-specific builtin expander that does nothing.  Targets
   without machine-dependent builtins use this as their expand_builtin
   hook; returning NULL_RTX tells the caller to emit a normal call.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
c7926a82 9345
01537105 9346/* Returns true is EXP represents data that would potentially reside
9347 in a readonly section. */
9348
b9ea678c 9349bool
01537105 9350readonly_data_expr (tree exp)
9351{
9352 STRIP_NOPS (exp);
9353
9ff0637e 9354 if (TREE_CODE (exp) != ADDR_EXPR)
9355 return false;
9356
9357 exp = get_base_address (TREE_OPERAND (exp, 0));
9358 if (!exp)
9359 return false;
9360
9361 /* Make sure we call decl_readonly_section only for trees it
9362 can handle (since it returns true for everything it doesn't
9363 understand). */
491e04ef 9364 if (TREE_CODE (exp) == STRING_CST
9ff0637e 9365 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 9366 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 9367 return decl_readonly_section (exp, 0);
01537105 9368 else
9369 return false;
9370}
4ee9c684 9371
c2f47e15 9372/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9373 to the call, and TYPE is its return type.
4ee9c684 9374
c2f47e15 9375 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9376 simplified form of the call as a tree.
9377
9378 The simplified form may be a constant or other expression which
9379 computes the same value, but in a more efficient manner (including
9380 calls to other builtin functions).
9381
9382 The call may contain arguments which need to be evaluated, but
9383 which are not useful to determine the result of the call. In
9384 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9385 COMPOUND_EXPR will be an argument which must be evaluated.
9386 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9387 COMPOUND_EXPR in the chain will contain the tree for the simplified
9388 form of the builtin function call. */
9389
9390static tree
389dd41b 9391fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 9392{
c2f47e15 9393 if (!validate_arg (s1, POINTER_TYPE)
9394 || !validate_arg (s2, POINTER_TYPE))
9395 return NULL_TREE;
4ee9c684 9396 else
9397 {
4ee9c684 9398 tree fn;
9399 const char *p1, *p2;
9400
9401 p2 = c_getstr (s2);
9402 if (p2 == NULL)
c2f47e15 9403 return NULL_TREE;
4ee9c684 9404
9405 p1 = c_getstr (s1);
9406 if (p1 != NULL)
9407 {
9408 const char *r = strpbrk (p1, p2);
daa1d5f5 9409 tree tem;
4ee9c684 9410
9411 if (r == NULL)
779b4c41 9412 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 9413
9414 /* Return an offset into the constant string argument. */
2cc66f2a 9415 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 9416 return fold_convert_loc (loc, type, tem);
4ee9c684 9417 }
9418
9419 if (p2[0] == '\0')
05abc81b 9420 /* strpbrk(x, "") == NULL.
9421 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 9422 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 9423
9424 if (p2[1] != '\0')
c2f47e15 9425 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 9426
b9a16870 9427 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 9428 if (!fn)
c2f47e15 9429 return NULL_TREE;
4ee9c684 9430
9431 /* New argument list transforming strpbrk(s1, s2) to
9432 strchr(s1, s2[0]). */
7002a1c8 9433 return build_call_expr_loc (loc, fn, 2, s1,
9434 build_int_cst (integer_type_node, p2[0]));
4ee9c684 9435 }
9436}
9437
c2f47e15 9438/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9439 to the call.
4ee9c684 9440
c2f47e15 9441 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9442 simplified form of the call as a tree.
9443
9444 The simplified form may be a constant or other expression which
9445 computes the same value, but in a more efficient manner (including
9446 calls to other builtin functions).
9447
9448 The call may contain arguments which need to be evaluated, but
9449 which are not useful to determine the result of the call. In
9450 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9451 COMPOUND_EXPR will be an argument which must be evaluated.
9452 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9453 COMPOUND_EXPR in the chain will contain the tree for the simplified
9454 form of the builtin function call. */
9455
9456static tree
389dd41b 9457fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 9458{
c2f47e15 9459 if (!validate_arg (s1, POINTER_TYPE)
9460 || !validate_arg (s2, POINTER_TYPE))
9461 return NULL_TREE;
4ee9c684 9462 else
9463 {
4ee9c684 9464 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9465
c2f47e15 9466 /* If either argument is "", return NULL_TREE. */
4ee9c684 9467 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 9468 /* Evaluate and ignore both arguments in case either one has
9469 side-effects. */
389dd41b 9470 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 9471 s1, s2);
c2f47e15 9472 return NULL_TREE;
4ee9c684 9473 }
9474}
9475
c2f47e15 9476/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9477 to the call.
4ee9c684 9478
c2f47e15 9479 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9480 simplified form of the call as a tree.
9481
9482 The simplified form may be a constant or other expression which
9483 computes the same value, but in a more efficient manner (including
9484 calls to other builtin functions).
9485
9486 The call may contain arguments which need to be evaluated, but
9487 which are not useful to determine the result of the call. In
9488 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9489 COMPOUND_EXPR will be an argument which must be evaluated.
9490 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9491 COMPOUND_EXPR in the chain will contain the tree for the simplified
9492 form of the builtin function call. */
9493
9494static tree
389dd41b 9495fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 9496{
c2f47e15 9497 if (!validate_arg (s1, POINTER_TYPE)
9498 || !validate_arg (s2, POINTER_TYPE))
9499 return NULL_TREE;
4ee9c684 9500 else
9501 {
c2f47e15 9502 /* If the first argument is "", return NULL_TREE. */
b5e46e2c 9503 const char *p1 = c_getstr (s1);
4ee9c684 9504 if (p1 && *p1 == '\0')
9505 {
9506 /* Evaluate and ignore argument s2 in case it has
9507 side-effects. */
389dd41b 9508 return omit_one_operand_loc (loc, size_type_node,
39761420 9509 size_zero_node, s2);
4ee9c684 9510 }
9511
9512 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 9513 const char *p2 = c_getstr (s2);
4ee9c684 9514 if (p2 && *p2 == '\0')
9515 {
b9a16870 9516 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 9517
9518 /* If the replacement _DECL isn't initialized, don't do the
9519 transformation. */
9520 if (!fn)
c2f47e15 9521 return NULL_TREE;
4ee9c684 9522
389dd41b 9523 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 9524 }
c2f47e15 9525 return NULL_TREE;
4ee9c684 9526 }
9527}
9528
c2f47e15 9529/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 9530 produced. False otherwise. This is done so that we don't output the error
9531 or warning twice or three times. */
75a70cf9 9532
743b0c6a 9533bool
c2f47e15 9534fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 9535{
9536 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 9537 int nargs = call_expr_nargs (exp);
9538 tree arg;
d98fd4a4 9539 /* There is good chance the current input_location points inside the
9540 definition of the va_start macro (perhaps on the token for
9541 builtin) in a system header, so warnings will not be emitted.
9542 Use the location in real source code. */
9543 source_location current_location =
9544 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9545 NULL);
4ee9c684 9546
257d99c3 9547 if (!stdarg_p (fntype))
743b0c6a 9548 {
9549 error ("%<va_start%> used in function with fixed args");
9550 return true;
9551 }
c2f47e15 9552
9553 if (va_start_p)
79012a9d 9554 {
c2f47e15 9555 if (va_start_p && (nargs != 2))
9556 {
9557 error ("wrong number of arguments to function %<va_start%>");
9558 return true;
9559 }
9560 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 9561 }
9562 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9563 when we checked the arguments and if needed issued a warning. */
c2f47e15 9564 else
4ee9c684 9565 {
c2f47e15 9566 if (nargs == 0)
9567 {
9568 /* Evidently an out of date version of <stdarg.h>; can't validate
9569 va_start's second argument, but can still work as intended. */
d98fd4a4 9570 warning_at (current_location,
7edb1062 9571 OPT_Wvarargs,
9572 "%<__builtin_next_arg%> called without an argument");
c2f47e15 9573 return true;
9574 }
9575 else if (nargs > 1)
a0c938f0 9576 {
c2f47e15 9577 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 9578 return true;
9579 }
c2f47e15 9580 arg = CALL_EXPR_ARG (exp, 0);
9581 }
9582
a8dd994c 9583 if (TREE_CODE (arg) == SSA_NAME)
9584 arg = SSA_NAME_VAR (arg);
9585
c2f47e15 9586 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 9587 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 9588 the arguments and if needed issuing a warning. */
9589 if (!integer_zerop (arg))
9590 {
9591 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 9592
4ee9c684 9593 /* Strip off all nops for the sake of the comparison. This
9594 is not quite the same as STRIP_NOPS. It does more.
9595 We must also strip off INDIRECT_EXPR for C++ reference
9596 parameters. */
72dd6141 9597 while (CONVERT_EXPR_P (arg)
4ee9c684 9598 || TREE_CODE (arg) == INDIRECT_REF)
9599 arg = TREE_OPERAND (arg, 0);
9600 if (arg != last_parm)
a0c938f0 9601 {
b08cf617 9602 /* FIXME: Sometimes with the tree optimizers we can get the
9603 not the last argument even though the user used the last
9604 argument. We just warn and set the arg to be the last
9605 argument so that we will get wrong-code because of
9606 it. */
d98fd4a4 9607 warning_at (current_location,
7edb1062 9608 OPT_Wvarargs,
d98fd4a4 9609 "second parameter of %<va_start%> not last named argument");
743b0c6a 9610 }
24158ad7 9611
9612 /* Undefined by C99 7.15.1.4p4 (va_start):
9613 "If the parameter parmN is declared with the register storage
9614 class, with a function or array type, or with a type that is
9615 not compatible with the type that results after application of
9616 the default argument promotions, the behavior is undefined."
9617 */
9618 else if (DECL_REGISTER (arg))
d98fd4a4 9619 {
9620 warning_at (current_location,
7edb1062 9621 OPT_Wvarargs,
67cf9b55 9622 "undefined behavior when second parameter of "
d98fd4a4 9623 "%<va_start%> is declared with %<register%> storage");
9624 }
24158ad7 9625
79012a9d 9626 /* We want to verify the second parameter just once before the tree
a0c938f0 9627 optimizers are run and then avoid keeping it in the tree,
9628 as otherwise we could warn even for correct code like:
9629 void foo (int i, ...)
9630 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 9631 if (va_start_p)
9632 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9633 else
9634 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 9635 }
9636 return false;
4ee9c684 9637}
9638
9639
c2f47e15 9640/* Expand a call EXP to __builtin_object_size. */
0a39fd54 9641
f7715905 9642static rtx
0a39fd54 9643expand_builtin_object_size (tree exp)
9644{
9645 tree ost;
9646 int object_size_type;
9647 tree fndecl = get_callee_fndecl (exp);
0a39fd54 9648
c2f47e15 9649 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 9650 {
8c41abe8 9651 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
b8c23db3 9652 exp, fndecl);
0a39fd54 9653 expand_builtin_trap ();
9654 return const0_rtx;
9655 }
9656
c2f47e15 9657 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 9658 STRIP_NOPS (ost);
9659
9660 if (TREE_CODE (ost) != INTEGER_CST
9661 || tree_int_cst_sgn (ost) < 0
9662 || compare_tree_int (ost, 3) > 0)
9663 {
8c41abe8 9664 error ("%Klast argument of %qD is not integer constant between 0 and 3",
b8c23db3 9665 exp, fndecl);
0a39fd54 9666 expand_builtin_trap ();
9667 return const0_rtx;
9668 }
9669
e913b5cd 9670 object_size_type = tree_to_shwi (ost);
0a39fd54 9671
9672 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9673}
9674
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* The second argument is an integer for __memset_chk (the fill byte)
     and a pointer for the copy/move variants.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Diagnose a compile-time detectable overflow; SIZES_OK records
     whether the check passed so we avoid transforming known-bad calls
     below.  */
  bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
			       exp, len, /*maxlen=*/NULL_TREE,
			       /*str=*/NULL_TREE, size);

  /* Without a constant object size we cannot justify dropping the
     runtime check.  */
  if (!tree_fits_uhwi_p (size))
    return NULL_RTX;

  /* An all-ones SIZE is the "unknown object size" sentinel, in which
     case the checking variant degenerates to the plain function.  */
  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      /* Avoid transforming the checking call to an ordinary one when
	 an overflow has been detected or when the call couldn't be
	 validated because the size is not constant.  */
      if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
	return NULL_RTX;

      tree fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* Rebuild the call as the unchecked variant and expand that,
	 preserving the tail-call flag of the original call.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* __mempcpy_chk returns DEST + LEN rather than DEST.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case. */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
9794
/* Emit warning if a buffer overflow is detected at compile time.
   EXP is the call expression and FCODE identifies which __*_chk
   builtin it is; the argument positions of the interesting operands
   differ per builtin, so they are picked apart in the switch below.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxlen = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxlen = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxlen = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;
    default:
      gcc_unreachable ();
    }

  /* CATSTR && MAXLEN together identify __strncat_chk, which needs its
     own checker because the destination's existing length matters.  */
  if (catstr && maxlen)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* All other cases are handled by the generic size checker.  */
  check_sizes (OPT_Wstringop_overflow_, exp,
	       /*size=*/NULL_TREE, maxlen, srcstr, objsize);
}
9863
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  EXP is the call and FCODE
   distinguishes the two variants.  Only formats whose output length
   can be determined exactly (no conversions, or a single "%s" with a
   known-length string argument) are checked.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  /* An all-ones SIZE means the object size is unknown; nothing to
     check then, nor when SIZE is not a compile-time constant.  */
  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
  check_sizes (OPT_Wstringop_overflow_,
	       exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
}
9920
2c281b15 9921/* Emit warning if a free is called with address of a variable. */
9922
9923static void
9924maybe_emit_free_warning (tree exp)
9925{
9926 tree arg = CALL_EXPR_ARG (exp, 0);
9927
9928 STRIP_NOPS (arg);
9929 if (TREE_CODE (arg) != ADDR_EXPR)
9930 return;
9931
9932 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 9933 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 9934 return;
9935
9936 if (SSA_VAR_P (arg))
f74ea1c2 9937 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9938 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 9939 else
f74ea1c2 9940 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9941 "%Kattempt to free a non-heap object", exp);
2c281b15 9942}
9943
c2f47e15 9944/* Fold a call to __builtin_object_size with arguments PTR and OST,
9945 if possible. */
0a39fd54 9946
f7715905 9947static tree
c2f47e15 9948fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 9949{
a6caa15f 9950 unsigned HOST_WIDE_INT bytes;
0a39fd54 9951 int object_size_type;
9952
c2f47e15 9953 if (!validate_arg (ptr, POINTER_TYPE)
9954 || !validate_arg (ost, INTEGER_TYPE))
9955 return NULL_TREE;
0a39fd54 9956
0a39fd54 9957 STRIP_NOPS (ost);
9958
9959 if (TREE_CODE (ost) != INTEGER_CST
9960 || tree_int_cst_sgn (ost) < 0
9961 || compare_tree_int (ost, 3) > 0)
c2f47e15 9962 return NULL_TREE;
0a39fd54 9963
e913b5cd 9964 object_size_type = tree_to_shwi (ost);
0a39fd54 9965
9966 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9967 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9968 and (size_t) 0 for types 2 and 3. */
9969 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 9970 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 9971
9972 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 9973 {
4e91a07b 9974 compute_builtin_object_size (ptr, object_size_type, &bytes);
6da74b21 9975 if (wi::fits_to_tree_p (bytes, size_type_node))
9976 return build_int_cstu (size_type_node, bytes);
a6caa15f 9977 }
0a39fd54 9978 else if (TREE_CODE (ptr) == SSA_NAME)
9979 {
0a39fd54 9980 /* If object size is not known yet, delay folding until
9981 later. Maybe subsequent passes will help determining
9982 it. */
4e91a07b 9983 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9984 && wi::fits_to_tree_p (bytes, size_type_node))
6da74b21 9985 return build_int_cstu (size_type_node, bytes);
0a39fd54 9986 }
9987
a6caa15f 9988 return NULL_TREE;
0a39fd54 9989}
9990
12f08300 9991/* Builtins with folding operations that operate on "..." arguments
9992 need special handling; we need to store the arguments in a convenient
9993 data structure before attempting any folding. Fortunately there are
9994 only a few builtins that fall into this category. FNDECL is the
9995 function, EXP is the CALL_EXPR for the call. */
9996
9997static tree
9998fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
9999{
10000 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10001 tree ret = NULL_TREE;
10002
10003 switch (fcode)
10004 {
10005 case BUILT_IN_FPCLASSIFY:
10006 ret = fold_builtin_fpclassify (loc, args, nargs);
10007 break;
10008
10009 default:
10010 break;
10011 }
10012 if (ret)
10013 {
10014 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10015 SET_EXPR_LOCATION (ret, loc);
10016 TREE_NO_WARNING (ret) = 1;
10017 return ret;
10018 }
10019 return NULL_TREE;
10020}
10021
99eabcc1 10022/* Initialize format string characters in the target charset. */
10023
b9ea678c 10024bool
99eabcc1 10025init_target_chars (void)
10026{
10027 static bool init;
10028 if (!init)
10029 {
10030 target_newline = lang_hooks.to_target_charset ('\n');
10031 target_percent = lang_hooks.to_target_charset ('%');
10032 target_c = lang_hooks.to_target_charset ('c');
10033 target_s = lang_hooks.to_target_charset ('s');
10034 if (target_newline == 0 || target_percent == 0 || target_c == 0
10035 || target_s == 0)
10036 return false;
10037
10038 target_percent_c[0] = target_percent;
10039 target_percent_c[1] = target_c;
10040 target_percent_c[2] = '\0';
10041
10042 target_percent_s[0] = target_percent;
10043 target_percent_s[1] = target_s;
10044 target_percent_s[2] = '\0';
10045
10046 target_percent_s_newline[0] = target_percent;
10047 target_percent_s_newline[1] = target_s;
10048 target_percent_s_newline[2] = target_newline;
10049 target_percent_s_newline[3] = '\0';
a0c938f0 10050
99eabcc1 10051 init = true;
10052 }
10053 return true;
10054}
bffb7645 10055
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      /* Round the MPFR value into GCC's internal representation.  */
      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
10092
/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail, if
   FORCE_CONVERT is true, then bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      /* Convert both parts; TYPE is the complex type, so each part has
	 TREE_TYPE (type).  */
      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}
239d491a 10139
/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  Returns NULL_TREE when the fold cannot be
   performed exactly.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  /* Clear flags first: do_mpfr_ckconv inspects them to decide
	     whether the computation was exact and in range.  */
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
e84da7c1 10213
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  Returns NULL_TREE when the fold cannot be done.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  /* Clear flags first: do_mpfr_ckconv inspects them.  */
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
75a70cf9 10278
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Clear flags first: do_mpc_ckconv inspects them to decide
	     whether the computation was exact and in range.  */
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
239d491a 10337
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  STMT is the call
   statement; IGNORE is true when its value is not used.  Returns the
   folded replacement expression or NULL_TREE if no fold applies.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  /* Only direct calls to builtins are foldable; calls expecting
     __builtin_va_arg_pack expansion must be left alone.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  /* Machine-dependent builtins are folded by the target.  */
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  /* Look through the NOP_EXPR that folders wrap
		     results in (see fold_builtin_varargs).  */
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
7bfefa9d 10389
b9a16870 10390/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 10391 and set ASMSPEC as its user assembler name. DECL must be a
10392 function decl that declares a builtin. */
10393
10394void
10395set_builtin_user_assembler_name (tree decl, const char *asmspec)
10396{
7bfefa9d 10397 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10398 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10399 && asmspec != 0);
10400
61ffc71a 10401 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 10402 set_user_assembler_name (builtin, asmspec);
61ffc71a 10403
10404 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10405 && INT_TYPE_SIZE < BITS_PER_WORD)
7bfefa9d 10406 {
61ffc71a 10407 set_user_assembler_libfunc ("ffs", asmspec);
10408 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10409 "ffs");
7bfefa9d 10410 }
10411}
a6b74a67 10412
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  Used by callers that want to treat such calls as nearly
   free.  DECL may be NULL_TREE, in which case false is returned.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
10449
/* Return true if DECL is a builtin that is not expensive, i.e., they are
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  Machine-dependent builtins are assumed
   inexpensive; DECL may be NULL_TREE.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Cheap arithmetic, bit twiddling and classification builtins
	   that expand to a handful of instructions.  */
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
507a998e 10535
10536/* Return true if T is a constant and the value cast to a target char
10537 can be represented by a host char.
10538 Store the casted char constant in *P if so. */
10539
10540bool
10541target_char_cst_p (tree t, char *p)
10542{
10543 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10544 return false;
10545
10546 *p = (char)tree_to_uhwi (t);
10547 return true;
10548}