]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
gccgo: suppress "ar rcD" and "-zdefs" on AIX
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
53800dbe 1/* Expand builtin functions.
8e8f6434 2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
7c2ecb89 20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
53800dbe 24#include "config.h"
25#include "system.h"
805e22b2 26#include "coretypes.h"
9ef16211 27#include "backend.h"
7c29e30e 28#include "target.h"
29#include "rtl.h"
9ef16211 30#include "tree.h"
ea36272b 31#include "memmodel.h"
9ef16211 32#include "gimple.h"
7c29e30e 33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
c296f633 36#include "tree-vrp.h"
7c29e30e 37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
7c29e30e 40#include "emit-rtl.h"
41#include "recog.h"
7c29e30e 42#include "diagnostic-core.h"
b20a8bb4 43#include "alias.h"
b20a8bb4 44#include "fold-const.h"
6c21be92 45#include "fold-const-call.h"
e6a18b5a 46#include "gimple-ssa-warn-restrict.h"
9ed99284 47#include "stor-layout.h"
48#include "calls.h"
49#include "varasm.h"
50#include "tree-object-size.h"
dae0b5cb 51#include "realmpfr.h"
94ea8568 52#include "cfgrtl.h"
53800dbe 53#include "except.h"
d53441c8 54#include "dojump.h"
55#include "explow.h"
d53441c8 56#include "stmt.h"
53800dbe 57#include "expr.h"
d8fc4d0b 58#include "libfuncs.h"
53800dbe 59#include "output.h"
60#include "typeclass.h"
63c62881 61#include "langhooks.h"
162719b3 62#include "value-prof.h"
3b9c3a16 63#include "builtins.h"
30a86690 64#include "stringpool.h"
65#include "attribs.h"
f9acf11a 66#include "asan.h"
058a1b7a 67#include "tree-chkp.h"
68#include "rtl-chkp.h"
1f24b8e9 69#include "internal-fn.h"
e3240774 70#include "case-cfn-macros.h"
732905bb 71#include "gimple-fold.h"
5aef8938 72#include "intl.h"
859b51f8 73#include "file-prefix-map.h" /* remap_macro_filename() */
5383fb56 74
3b9c3a16 75struct target_builtins default_target_builtins;
76#if SWITCHABLE_TARGET
77struct target_builtins *this_target_builtins = &default_target_builtins;
78#endif
79
ab7943b9 80/* Define the names of the builtin function types and codes. */
96423453 81const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 82 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83
9cfddb70 84#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 85const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 86{
87#include "builtins.def"
88};
ab7943b9 89
cffdfb3d 90/* Setup an array of builtin_info_type, make sure each element decl is
df94cd3b 91 initialized to NULL_TREE. */
cffdfb3d 92builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 93
0b049e15 94/* Non-zero if __builtin_constant_p should be folded right away. */
95bool force_folding_builtin_constant_p;
96
f77c4496 97static rtx c_readstr (const char *, scalar_int_mode);
aecda0d6 98static int target_char_cast (tree, char *);
d8ae1baa 99static rtx get_memory_rtx (tree, tree);
aecda0d6 100static int apply_args_size (void);
101static int apply_result_size (void);
aecda0d6 102static rtx result_vector (int, rtx);
aecda0d6 103static void expand_builtin_prefetch (tree);
104static rtx expand_builtin_apply_args (void);
105static rtx expand_builtin_apply_args_1 (void);
106static rtx expand_builtin_apply (rtx, rtx, rtx);
107static void expand_builtin_return (rtx);
108static enum type_class type_to_class (tree);
109static rtx expand_builtin_classify_type (tree);
6b43bae4 110static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 111static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 112static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 113static rtx expand_builtin_sincos (tree);
f97eea22 114static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 115static rtx expand_builtin_int_roundingfn (tree, rtx);
116static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 117static rtx expand_builtin_next_arg (void);
aecda0d6 118static rtx expand_builtin_va_start (tree);
119static rtx expand_builtin_va_end (tree);
120static rtx expand_builtin_va_copy (tree);
a65c4d64 121static rtx expand_builtin_strcmp (tree, rtx);
3754d046 122static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
f77c4496 123static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
8d6c6ef5 124static rtx expand_builtin_memchr (tree, rtx);
a65c4d64 125static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 126static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
d0fbba1a 127static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
128 rtx target, tree exp, int endp);
4d317237 129static rtx expand_builtin_memmove (tree, rtx);
d0fbba1a 130static rtx expand_builtin_mempcpy (tree, rtx);
131static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
132static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
5aef8938 133static rtx expand_builtin_strcat (tree, rtx);
a65c4d64 134static rtx expand_builtin_strcpy (tree, rtx);
135static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 136static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
4d317237 137static rtx expand_builtin_stpncpy (tree, rtx);
5aef8938 138static rtx expand_builtin_strncat (tree, rtx);
a65c4d64 139static rtx expand_builtin_strncpy (tree, rtx);
f77c4496 140static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
3754d046 141static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 142static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 143static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 144static rtx expand_builtin_bzero (tree);
3754d046 145static rtx expand_builtin_strlen (tree, rtx, machine_mode);
2b29cc6a 146static rtx expand_builtin_alloca (tree);
3754d046 147static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 148static rtx expand_builtin_frame_address (tree, tree);
389dd41b 149static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 150static rtx expand_builtin_expect (tree, rtx);
151static tree fold_builtin_constant_p (tree);
152static tree fold_builtin_classify_type (tree);
c7cbde74 153static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 154static tree fold_builtin_inf (location_t, tree, int);
389dd41b 155static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
184fac50 156static bool validate_arg (const_tree, enum tree_code code);
aecda0d6 157static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 158static rtx expand_builtin_signbit (tree, rtx);
389dd41b 159static tree fold_builtin_memcmp (location_t, tree, tree, tree);
389dd41b 160static tree fold_builtin_isascii (location_t, tree);
161static tree fold_builtin_toascii (location_t, tree);
162static tree fold_builtin_isdigit (location_t, tree);
163static tree fold_builtin_fabs (location_t, tree, tree);
164static tree fold_builtin_abs (location_t, tree, tree);
165static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 166 enum tree_code);
e80cc485 167static tree fold_builtin_0 (location_t, tree);
168static tree fold_builtin_1 (location_t, tree, tree);
169static tree fold_builtin_2 (location_t, tree, tree, tree);
170static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
12f08300 171static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 172
173static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
389dd41b 174static tree fold_builtin_strspn (location_t, tree, tree);
175static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 176
0a39fd54 177static rtx expand_builtin_object_size (tree);
3754d046 178static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 179 enum built_in_function);
180static void maybe_emit_chk_warning (tree, enum built_in_function);
181static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 182static void maybe_emit_free_warning (tree);
c2f47e15 183static tree fold_builtin_object_size (tree, tree);
99eabcc1 184
e788f202 185unsigned HOST_WIDE_INT target_newline;
b9ea678c 186unsigned HOST_WIDE_INT target_percent;
99eabcc1 187static unsigned HOST_WIDE_INT target_c;
188static unsigned HOST_WIDE_INT target_s;
aea88c77 189char target_percent_c[3];
b9ea678c 190char target_percent_s[3];
e788f202 191char target_percent_s_newline[4];
e5407ca6 192static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 193static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 194static void expand_builtin_sync_synchronize (void);
0a39fd54 195
/* Return true if NAME starts with one of the prefixes reserved for
   built-in functions: "__builtin_", "__sync_" or "__atomic_".  */

static bool
is_builtin_name (const char *name)
{
  static const struct { const char *str; size_t len; } prefixes[] = {
    { "__builtin_", 10 },
    { "__sync_", 7 },
    { "__atomic_", 9 }
  };

  for (size_t i = 0; i < sizeof prefixes / sizeof prefixes[0]; i++)
    if (strncmp (name, prefixes[i].str, prefixes[i].len) == 0)
      return true;

  return false;
}
4ee9c684 209
7bfefa9d 210
211/* Return true if DECL is a function symbol representing a built-in. */
212
213bool
214is_builtin_fn (tree decl)
215{
216 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
217}
218
1c47b3e8 219/* Return true if NODE should be considered for inline expansion regardless
220 of the optimization level. This means whenever a function is invoked with
221 its "internal" name, which normally contains the prefix "__builtin". */
222
ae62deea 223bool
1c47b3e8 224called_as_built_in (tree node)
225{
226 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
227 we want the name used to call the function, not the name it
228 will have. */
229 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
230 return is_builtin_name (name);
231}
232
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  /* Conservative starting point; raised below as more is learned.  */
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;  /* Labels contribute no alignment information.  */
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      /* Other declarations carry their alignment directly.  */
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned (via BIT_AND_EXPR with a
	 constant mask), handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  /* An unscaled second index makes the offset unpredictable.  */
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}
390
3482bf13 391/* For a memory reference expression EXP compute values M and N such that M
392 divides (&EXP - N) and such that N < M. If these numbers can be determined,
393 store M in alignp and N in *BITPOSP and return true. Otherwise return false
394 and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp. */
395
396bool
397get_object_alignment_1 (tree exp, unsigned int *alignp,
398 unsigned HOST_WIDE_INT *bitposp)
399{
400 return get_object_alignment_2 (exp, alignp, bitposp, false);
401}
402
957d0361 403/* Return the alignment in bits of EXP, an object. */
0c883ef3 404
405unsigned int
957d0361 406get_object_alignment (tree exp)
0c883ef3 407{
408 unsigned HOST_WIDE_INT bitpos = 0;
409 unsigned int align;
410
ceea063b 411 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 412
98ab9e8f 413 /* align and bitpos now specify known low bits of the pointer.
414 ptr & (align - 1) == bitpos. */
415
416 if (bitpos != 0)
ac29ece2 417 align = least_bit_hwi (bitpos);
957d0361 418 return align;
698537d1 419}
420
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    /* &object: derive the answer from the object itself.  ADDR_P is true
       since we only have the address, not a guaranteed access.  */
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      /* base + offset: recurse on the base, then fold in the offset.  */
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	/* Constant offset: shifts the misalignment, alignment unchanged.  */
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  /* Variable offset: only its known trailing zero bits can be
	     relied upon, which may reduce the alignment.  */
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      /* Pointer SSA names may carry alignment info from earlier analyses.  */
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      /* A literal address: every bit of it is known exactly.  */
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  /* Anything else: nothing is known beyond byte alignment.  */
  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}
495
69fbc3aa 496/* Return the alignment in bits of EXP, a pointer valued expression.
497 The alignment returned is, by default, the alignment of the thing that
498 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
499
500 Otherwise, look at the expression to see if we can do better, i.e., if the
501 expression is actually pointing at an object whose alignment is tighter. */
502
503unsigned int
504get_pointer_alignment (tree exp)
505{
506 unsigned HOST_WIDE_INT bitpos = 0;
507 unsigned int align;
ceea063b 508
509 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 510
511 /* align and bitpos now specify known low bits of the pointer.
512 ptr & (align - 1) == bitpos. */
513
514 if (bitpos != 0)
ac29ece2 515 align = least_bit_hwi (bitpos);
69fbc3aa 516
517 return align;
518}
519
c62d63d4 520/* Return the number of non-zero elements in the sequence
521 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
522 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
523
524static unsigned
525string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
526{
527 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
528
529 unsigned n;
530
531 if (eltsize == 1)
532 {
533 /* Optimize the common case of plain char. */
534 for (n = 0; n < maxelts; n++)
535 {
536 const char *elt = (const char*) ptr + n;
537 if (!*elt)
538 break;
539 }
540 }
541 else
542 {
543 for (n = 0; n < maxelts; n++)
544 {
545 const char *elt = (const char*) ptr + n * eltsize;
546 if (!memcmp (elt, "\0\0\0\0", eltsize))
547 break;
548 }
549 }
550 return n;
551}
552
/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  STRIP_NOPS (src);
  /* For a conditional, the length is known only if both arms agree
     (and evaluating the condition can safely be skipped).  */
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For (e1, e2) only the second operand determines the length.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  src = string_constant (src, &byteoff);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  unsigned eltsize
    = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  */
  unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      if (string_length (ptr, eltsize, maxelts) < maxelts)
	{
	  /* Return when an embedded null character is found.  */
	  return NULL_TREE;
	}

      if (!maxelts)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_shwi_p (byteoff))
    eltoff = -1;
  else
    eltoff = tree_to_shwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff > maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, OPT_Warray_bounds,
		      "offset %qwi outside bounds of constant string",
		      eltoff);
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				maxelts - eltoff);

  return ssize_int (len);
}
677
e913b5cd 678/* Return a constant integer corresponding to target reading
8c85fcb7 679 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 680
6840589f 681static rtx
f77c4496 682c_readstr (const char *str, scalar_int_mode mode)
6840589f 683{
6840589f 684 HOST_WIDE_INT ch;
685 unsigned int i, j;
e913b5cd 686 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 687
688 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 689 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
690 / HOST_BITS_PER_WIDE_INT;
691
a12aa4cc 692 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 693 for (i = 0; i < len; i++)
694 tmp[i] = 0;
6840589f 695
6840589f 696 ch = 1;
697 for (i = 0; i < GET_MODE_SIZE (mode); i++)
698 {
699 j = i;
700 if (WORDS_BIG_ENDIAN)
701 j = GET_MODE_SIZE (mode) - i - 1;
702 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 703 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 704 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
705 j *= BITS_PER_UNIT;
7d3f6cc7 706
6840589f 707 if (ch)
708 ch = (unsigned char) str[i];
e913b5cd 709 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 710 }
ddb1be65 711
ab2c1de8 712 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 713 return immed_wide_int_const (c, mode);
6840589f 714}
715
ecc318ff 716/* Cast a target constant CST to target CHAR and if that value fits into
5206b159 717 host char type, return zero and put that value into variable pointed to by
ecc318ff 718 P. */
719
720static int
aecda0d6 721target_char_cast (tree cst, char *p)
ecc318ff 722{
723 unsigned HOST_WIDE_INT val, hostval;
724
c19686c5 725 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 726 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
727 return 1;
728
e913b5cd 729 /* Do not care if it fits or not right here. */
f9ae6f95 730 val = TREE_INT_CST_LOW (cst);
e913b5cd 731
ecc318ff 732 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
edc19fd0 733 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ecc318ff 734
735 hostval = val;
736 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
edc19fd0 737 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ecc318ff 738
739 if (val != hostval)
740 return 1;
741
742 *p = hostval;
743 return 0;
744}
745
4ee9c684 746/* Similar to save_expr, but assumes that arbitrary code is not executed
747 in between the multiple evaluations. In particular, we assume that a
748 non-addressable local variable will not be modified. */
749
750static tree
751builtin_save_expr (tree exp)
752{
f6c35aa4 753 if (TREE_CODE (exp) == SSA_NAME
754 || (TREE_ADDRESSABLE (exp) == 0
755 && (TREE_CODE (exp) == PARM_DECL
53e9c5c4 756 || (VAR_P (exp) && !TREE_STATIC (exp)))))
4ee9c684 757 return exp;
758
759 return save_expr (exp);
760}
761
/* Starting from a pointer to the current stack frame, follow the dynamic
   chain COUNT times to get the address of either a higher stack frame, or
   a return address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  /* Start from the target's notion of the initial frame address, if any.  */
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  /* Let the target set up whatever it needs for walking frames.  */
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address sits one word above the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
829
/* Alias set used for setjmp buffer.  -1 means "not yet allocated";
   it is lazily created via new_alias_set () on first use.  */
static alias_set_type setjmp_alias_set = -1;
f7c44134 832
6b7f6858 833/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 834 return to RECEIVER_LABEL. This is also called directly by the SJLJ
835 exception handling code. */
53800dbe 836
6b7f6858 837void
aecda0d6 838expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 839{
3754d046 840 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 841 rtx stack_save;
f7c44134 842 rtx mem;
53800dbe 843
f7c44134 844 if (setjmp_alias_set == -1)
845 setjmp_alias_set = new_alias_set ();
846
85d654dd 847 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 848
37ae8504 849 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 850
6b7f6858 851 /* We store the frame pointer and the address of receiver_label in
852 the buffer and use the rest of it for the stack save area, which
853 is machine-dependent. */
53800dbe 854
f7c44134 855 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 856 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 857 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 858
29c05e22 859 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
860 GET_MODE_SIZE (Pmode))),
ab6ab77e 861 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 862
863 emit_move_insn (validize_mem (mem),
6b7f6858 864 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 865
866 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 867 plus_constant (Pmode, buf_addr,
53800dbe 868 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 869 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 870 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 871
872 /* If there is further processing to do, do it. */
a3c81e61 873 if (targetm.have_builtin_setjmp_setup ())
874 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 875
29f09705 876 /* We have a nonlocal label. */
18d50ae6 877 cfun->has_nonlocal_label = 1;
6b7f6858 878}
53800dbe 879
/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  /* Prefer the target's dedicated receiver pattern; fall back to the
     generic nonlocal-goto receiver, or do nothing at all.  */
  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
53800dbe 960
/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   BUF_ADDR is the jump buffer set up by expand_builtin_setjmp_setup;
   VALUE must be const1_rtx (the front end enforces this).  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  /* Remember where the expansion starts so the backward search below can
     assert it never walks past our own insns.  */
  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      /* Buffer layout: word 0 = FP, word 1 = label, words 2+ = stack
	 save area (see expand_builtin_setjmp_setup).  */
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}
1045
0e80b01d 1046static inline bool
1047more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1048{
1049 return (iter->i < iter->n);
1050}
1051
1052/* This function validates the types of a function call argument list
1053 against a specified list of tree_codes. If the last specifier is a 0,
5cfa3fc8 1054 that represents an ellipsis, otherwise the last specifier must be a
0e80b01d 1055 VOID_TYPE. */
1056
1057static bool
1058validate_arglist (const_tree callexpr, ...)
1059{
1060 enum tree_code code;
1061 bool res = 0;
1062 va_list ap;
1063 const_call_expr_arg_iterator iter;
1064 const_tree arg;
1065
1066 va_start (ap, callexpr);
1067 init_const_call_expr_arg_iterator (callexpr, &iter);
1068
5cfa3fc8 1069 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
184fac50 1070 tree fn = CALL_EXPR_FN (callexpr);
1071 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
5cfa3fc8 1072
1073 for (unsigned argno = 1; ; ++argno)
0e80b01d 1074 {
1075 code = (enum tree_code) va_arg (ap, int);
5cfa3fc8 1076
0e80b01d 1077 switch (code)
1078 {
1079 case 0:
1080 /* This signifies an ellipses, any further arguments are all ok. */
1081 res = true;
1082 goto end;
1083 case VOID_TYPE:
1084 /* This signifies an endlink, if no arguments remain, return
1085 true, otherwise return false. */
1086 res = !more_const_call_expr_args_p (&iter);
1087 goto end;
5cfa3fc8 1088 case POINTER_TYPE:
1089 /* The actual argument must be nonnull when either the whole
1090 called function has been declared nonnull, or when the formal
1091 argument corresponding to the actual argument has been. */
184fac50 1092 if (argmap
1093 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1094 {
1095 arg = next_const_call_expr_arg (&iter);
1096 if (!validate_arg (arg, code) || integer_zerop (arg))
1097 goto end;
1098 break;
1099 }
5cfa3fc8 1100 /* FALLTHRU */
0e80b01d 1101 default:
1102 /* If no parameters remain or the parameter's code does not
1103 match the specified code, return false. Otherwise continue
1104 checking any remaining arguments. */
1105 arg = next_const_call_expr_arg (&iter);
184fac50 1106 if (!validate_arg (arg, code))
0e80b01d 1107 goto end;
1108 break;
1109 }
1110 }
0e80b01d 1111
1112 /* We need gotos here since we can only have one VA_CLOSE in a
1113 function. */
1114 end: ;
1115 va_end (ap);
1116
5cfa3fc8 1117 BITMAP_FREE (argmap);
1118
0e80b01d 1119 return res;
1120}
1121
/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.

   EXP is the CALL_EXPR; returns const0_rtx on success (control never
   actually falls through a nonlocal goto) or NULL_RTX if the argument
   list is malformed.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.   */
  r_save_area = copy_to_reg (r_save_area);
  /* Save area layout: word 0 = frame pointer, following words = saved
     stack pointer.  */
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}
1202
843d08a9 1203/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1204 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1205 It updates the stack pointer in that block to the current value. This is
1206 also called directly by the SJLJ exception handling code. */
843d08a9 1207
97354ae4 1208void
843d08a9 1209expand_builtin_update_setjmp_buf (rtx buf_addr)
1210{
3754d046 1211 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
77e843a0 1212 buf_addr = convert_memory_address (Pmode, buf_addr);
d1ff492e 1213 rtx stack_save
843d08a9 1214 = gen_rtx_MEM (sa_mode,
1215 memory_address
1216 (sa_mode,
29c05e22 1217 plus_constant (Pmode, buf_addr,
1218 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1219
e9c97615 1220 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1221}
1222
/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.

   EXP is the CALL_EXPR.  Argument 0 is the address; optional argument 1
   is the read/write flag (0 or 1); optional argument 2 is the locality
   hint (0..3).  Invalid constant values are diagnosed and replaced.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  /* Emit the target's prefetch insn if it has one and it accepts these
     operands; otherwise fall through to the side-effect handling.  */
  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
1300
/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  /* Expand the ORIGINAL expression for the address (so any SAVE_EXPR is
     honored) and wrap it in a BLKmode MEM.  */
  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      /* Unknown extent: use a range starting at zero with NULL upper
	 bound so the access covers the whole base object.  */
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  /* Alias set 0: may alias anything, as required for string builtins.  */
  set_mem_alias_set (mem, 0);
  return mem;
}
1355\f
/* Built-in functions to perform an untyped call and return.  */

/* Per-register mode tables live in per-target storage so that
   switchable targets each keep their own copy.  */
#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)
53800dbe 1362
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.

   The result is cached in a function-local static; the layout is:
   arg pointer, optional struct-value address, then each argument
   register, each aligned to its mode's alignment.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round SIZE up to this mode's alignment before laying the
	       register down.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    /* VOIDmode marks registers not used for argument passing.  */
	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
	  }
    }
  return size;
}
1404
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.

   Mirrors apply_args_size, but for function-value registers; the result
   is likewise cached in a function-local static.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    /* Round SIZE up to this mode's alignment.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  /* VOIDmode marks registers that never carry return values.  */
	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
1443
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.

   Returns a PARALLEL of SETs, one per result register, copying between
   each register and its slot in RESULT (layout as in apply_result_size).  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  fixed_size_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	/* When restoring, values come back in the INCOMING register.  */
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
53800dbe 1472
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.

   Lays out a stack block (format as computed by apply_args_size):
   incoming arg pointer, optional struct-value address, then every
   argument register.  Returns the block's address in a register.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  fixed_size_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed them to us, not
     as we might have pretended they were passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
1533
/* __builtin_apply_args returns block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    /* Build the save sequence in isolation so it can be relocated.  */
    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}
1577
1578/* Perform an untyped call and save the state required to perform an
1579 untyped return of whatever value was returned by the given function. */
1580
1581static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  /* Where the callee expects an "invisible" structure-return pointer,
     if the target uses one (may be a REG or a MEM, or NULL).  */
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  /* On upward-growing stacks the saved arg pointer is past the
     arguments, so step back over them.  */
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.
     The offsets here must mirror the layout produced when the
     ARGUMENTS block was saved (see apply_args_size).  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}
1739
/* Perform an untyped return.  RESULT is a (ptr_mode) pointer to the
   block of saved return registers produced by a prior untyped call;
   reload each hard return register from it and jump to the function's
   return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  fixed_size_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  /* Called for its side effect: it computes apply_result_mode[],
     which the loop below reads.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  /* If the target has a dedicated untyped_return pattern, use it.  */
  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.
     The offset layout must match the one used when the block was
     saved (alignment padding between differently-aligned modes).  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate the USEs in a separate sequence so they can all
	   be emitted together just before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1788
539a3a92 1789/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1790
539a3a92 1791static enum type_class
aecda0d6 1792type_to_class (tree type)
539a3a92 1793{
1794 switch (TREE_CODE (type))
1795 {
1796 case VOID_TYPE: return void_type_class;
1797 case INTEGER_TYPE: return integer_type_class;
539a3a92 1798 case ENUMERAL_TYPE: return enumeral_type_class;
1799 case BOOLEAN_TYPE: return boolean_type_class;
1800 case POINTER_TYPE: return pointer_type_class;
1801 case REFERENCE_TYPE: return reference_type_class;
1802 case OFFSET_TYPE: return offset_type_class;
1803 case REAL_TYPE: return real_type_class;
1804 case COMPLEX_TYPE: return complex_type_class;
1805 case FUNCTION_TYPE: return function_type_class;
1806 case METHOD_TYPE: return method_type_class;
1807 case RECORD_TYPE: return record_type_class;
1808 case UNION_TYPE:
1809 case QUAL_UNION_TYPE: return union_type_class;
1810 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1811 ? string_type_class : array_type_class);
539a3a92 1812 case LANG_TYPE: return lang_type_class;
1813 default: return no_type_class;
1814 }
1815}
bf8e3599 1816
c2f47e15 1817/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1818
53800dbe 1819static rtx
c2f47e15 1820expand_builtin_classify_type (tree exp)
53800dbe 1821{
c2f47e15 1822 if (call_expr_nargs (exp))
1823 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1824 return GEN_INT (no_type_class);
1825}
1826
/* This helper macro, meant to be used in mathfn_built_in below, determines
   which among a set of builtin math functions is appropriate for a given type
   mode.  The `F' (float) and `L' (long double) are automatically generated
   from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
   types, there are additional types that are considered with 'F32', 'F64',
   'F128', etc. suffixes.  These macros expand to switch case labels plus
   assignments; they rely on the local variables fcode, fcodef, fcodel
   (and the fcodefN[x] variants) declared in mathfn_built_in_2 being in
   scope at the expansion site.  */
#define CASE_MATHFN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; break;
/* Similar to the above, but also add support for the _Float<N> and _Float<N>X
   types.  */
#define CASE_MATHFN_FLOATN(MATHFN) \
  CASE_CFN_##MATHFN: \
  fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
  fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
  fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
  fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
  fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
  break;
/* Similar to above, but appends _R after any F/L suffix (for the
   reentrant variants such as lgamma_r).  */
#define CASE_MATHFN_REENT(MATHFN) \
  case CFN_BUILT_IN_##MATHFN##_R: \
  case CFN_BUILT_IN_##MATHFN##F_R: \
  case CFN_BUILT_IN_##MATHFN##L_R: \
  fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
  fcodel = BUILT_IN_##MATHFN##L_R ; break;
07976da7 1854
/* Return a function equivalent to FN but operating on floating-point
   values of type TYPE, or END_BUILTINS if no such function exists.
   This is purely an operation on function codes; it does not guarantee
   that the target actually has an implementation of the function.  */

static built_in_function
mathfn_built_in_2 (tree type, combined_fn fn)
{
  tree mtype;
  /* Candidate codes for each supported floating-point type; the switch
     below (via the CASE_MATHFN* macros) fills in the ones that exist
     for FN, and the type comparison at the end picks the right one.  */
  built_in_function fcode, fcodef, fcodel;
  built_in_function fcodef16 = END_BUILTINS;
  built_in_function fcodef32 = END_BUILTINS;
  built_in_function fcodef64 = END_BUILTINS;
  built_in_function fcodef128 = END_BUILTINS;
  built_in_function fcodef32x = END_BUILTINS;
  built_in_function fcodef64x = END_BUILTINS;
  built_in_function fcodef128x = END_BUILTINS;

  switch (fn)
    {
    CASE_MATHFN (ACOS)
    CASE_MATHFN (ACOSH)
    CASE_MATHFN (ASIN)
    CASE_MATHFN (ASINH)
    CASE_MATHFN (ATAN)
    CASE_MATHFN (ATAN2)
    CASE_MATHFN (ATANH)
    CASE_MATHFN (CBRT)
    CASE_MATHFN_FLOATN (CEIL)
    CASE_MATHFN (CEXPI)
    CASE_MATHFN_FLOATN (COPYSIGN)
    CASE_MATHFN (COS)
    CASE_MATHFN (COSH)
    CASE_MATHFN (DREM)
    CASE_MATHFN (ERF)
    CASE_MATHFN (ERFC)
    CASE_MATHFN (EXP)
    CASE_MATHFN (EXP10)
    CASE_MATHFN (EXP2)
    CASE_MATHFN (EXPM1)
    CASE_MATHFN (FABS)
    CASE_MATHFN (FDIM)
    CASE_MATHFN_FLOATN (FLOOR)
    CASE_MATHFN_FLOATN (FMA)
    CASE_MATHFN_FLOATN (FMAX)
    CASE_MATHFN_FLOATN (FMIN)
    CASE_MATHFN (FMOD)
    CASE_MATHFN (FREXP)
    CASE_MATHFN (GAMMA)
    CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
    CASE_MATHFN (HUGE_VAL)
    CASE_MATHFN (HYPOT)
    CASE_MATHFN (ILOGB)
    CASE_MATHFN (ICEIL)
    CASE_MATHFN (IFLOOR)
    CASE_MATHFN (INF)
    CASE_MATHFN (IRINT)
    CASE_MATHFN (IROUND)
    CASE_MATHFN (ISINF)
    CASE_MATHFN (J0)
    CASE_MATHFN (J1)
    CASE_MATHFN (JN)
    CASE_MATHFN (LCEIL)
    CASE_MATHFN (LDEXP)
    CASE_MATHFN (LFLOOR)
    CASE_MATHFN (LGAMMA)
    CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (LLCEIL)
    CASE_MATHFN (LLFLOOR)
    CASE_MATHFN (LLRINT)
    CASE_MATHFN (LLROUND)
    CASE_MATHFN (LOG)
    CASE_MATHFN (LOG10)
    CASE_MATHFN (LOG1P)
    CASE_MATHFN (LOG2)
    CASE_MATHFN (LOGB)
    CASE_MATHFN (LRINT)
    CASE_MATHFN (LROUND)
    CASE_MATHFN (MODF)
    CASE_MATHFN (NAN)
    CASE_MATHFN (NANS)
    CASE_MATHFN_FLOATN (NEARBYINT)
    CASE_MATHFN (NEXTAFTER)
    CASE_MATHFN (NEXTTOWARD)
    CASE_MATHFN (POW)
    CASE_MATHFN (POWI)
    CASE_MATHFN (POW10)
    CASE_MATHFN (REMAINDER)
    CASE_MATHFN (REMQUO)
    CASE_MATHFN_FLOATN (RINT)
    CASE_MATHFN_FLOATN (ROUND)
    CASE_MATHFN (SCALB)
    CASE_MATHFN (SCALBLN)
    CASE_MATHFN (SCALBN)
    CASE_MATHFN (SIGNBIT)
    CASE_MATHFN (SIGNIFICAND)
    CASE_MATHFN (SIN)
    CASE_MATHFN (SINCOS)
    CASE_MATHFN (SINH)
    CASE_MATHFN_FLOATN (SQRT)
    CASE_MATHFN (TAN)
    CASE_MATHFN (TANH)
    CASE_MATHFN (TGAMMA)
    CASE_MATHFN_FLOATN (TRUNC)
    CASE_MATHFN (Y0)
    CASE_MATHFN (Y1)
    CASE_MATHFN (YN)

    default:
      return END_BUILTINS;
    }

  /* Compare against the main variant so qualified versions of the
     standard floating types still match.  */
  mtype = TYPE_MAIN_VARIANT (type);
  if (mtype == double_type_node)
    return fcode;
  else if (mtype == float_type_node)
    return fcodef;
  else if (mtype == long_double_type_node)
    return fcodel;
  else if (mtype == float16_type_node)
    return fcodef16;
  else if (mtype == float32_type_node)
    return fcodef32;
  else if (mtype == float64_type_node)
    return fcodef64;
  else if (mtype == float128_type_node)
    return fcodef128;
  else if (mtype == float32x_type_node)
    return fcodef32x;
  else if (mtype == float64x_type_node)
    return fcodef64x;
  else if (mtype == float128x_type_node)
    return fcodef128x;
  else
    return END_BUILTINS;
}
1991
1992/* Return mathematic function equivalent to FN but operating directly on TYPE,
1993 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1994 otherwise use the explicit declaration. If we can't do the conversion,
1995 return null. */
1996
1997static tree
e3240774 1998mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 1999{
2000 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2001 if (fcode2 == END_BUILTINS)
c2f47e15 2002 return NULL_TREE;
b9a16870 2003
2004 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2005 return NULL_TREE;
2006
2007 return builtin_decl_explicit (fcode2);
0a68165a 2008}
2009
e3240774 2010/* Like mathfn_built_in_1, but always use the implicit array. */
c319d56a 2011
2012tree
e3240774 2013mathfn_built_in (tree type, combined_fn fn)
c319d56a 2014{
2015 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2016}
2017
e3240774 2018/* Like mathfn_built_in_1, but take a built_in_function and
2019 always use the implicit array. */
2020
2021tree
2022mathfn_built_in (tree type, enum built_in_function fn)
2023{
2024 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2025}
2026
/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
   return its code, otherwise return IFN_LAST.  Note that this function
   only tests whether the function is defined in internals.def, not whether
   it is actually available on the target.  */

internal_fn
associated_internal_fn (tree fndecl)
{
  gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
  tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    /* The bulk of the mapping is generated by expanding internal-fn.def
       with these macros, one case label set per internal function.  */
#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
    CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
#include "internal-fn.def"

    /* A few builtins map to internal functions under different names.  */
    CASE_FLT_FN (BUILT_IN_POW10):
      return IFN_EXP10;

    CASE_FLT_FN (BUILT_IN_DREM):
      return IFN_REMAINDER;

    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      /* scalbn is ldexp only when the radix is 2.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
	return IFN_LDEXP;
      return IFN_LAST;

    default:
      return IFN_LAST;
    }
}
2064
2065/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2066 on the current target by a call to an internal function, return the
2067 code of that internal function, otherwise return IFN_LAST. The caller
2068 is responsible for ensuring that any side-effects of the built-in
2069 call are dealt with correctly. E.g. if CALL sets errno, the caller
2070 must decide that the errno result isn't needed or make it available
2071 in some other way. */
2072
2073internal_fn
2074replacement_internal_fn (gcall *call)
2075{
2076 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2077 {
2078 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2079 if (ifn != IFN_LAST)
2080 {
2081 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 2082 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2083 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 2084 return ifn;
2085 }
2086 }
2087 return IFN_LAST;
2088}
2089
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
    CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so that if we fall back to a
     library call below the arguments are not re-evaluated.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2164
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos computes both values at once; discard the one we
	     don't need by passing 0 for that output.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2265
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  builtin_optab stays
	 unknown_optab, so we return CODE_FOR_nothing below.  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2309
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Keep the original argument so the CALL_EXPR can be restored if
	 expansion fails and we fall back to a normal call.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* Expansion failed: roll back any insns emitted and restore the
	 unmodified call so it can be emitted as a library call.  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2360
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  sincos (x, &s, &c) stores sin (x) through the second
   argument and cos (x) through the third; on success const0_rtx is
   returned since the call itself yields no value.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs for the output pointers so the stores get correct
     alias information.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2414
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  Three strategies are tried in
   order: a sincos optab, a libc sincos call, and finally a cexp call.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* sincos writes through pointers; give it the addresses of two
	 stack temporaries.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i); build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2523
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.
   The resulting call carries source location LOC.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* Build the function address explicitly rather than going through
     build_call_expr, which would fold the call.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
a65c4d64 2542
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab for the direct float->integer rounding insn and
     the math builtin to lower to if the optab is unsupported.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  /* Build an unfolded call so the fallback is not re-recognized as
     a rounding builtin and expanded again.  */
  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2680
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* The i{rint,round} variants fall through to the l* cases: they share
     the optab and additionally record an l* fallback builtin.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      gcc_fallthrough ();
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1
	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns long; convert to the mode of EXP's type.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2782
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Expand both arguments, converting each to the mode the libcall
     expects if expansion produced a different mode.  */
  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode,
				    op0, mode, op1, mode2);

  return target;
}
2824
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed, the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  struct expand_operand ops[4];
  rtx pat;
  tree len;
  tree src = CALL_EXPR_ARG (exp, 0);
  rtx src_reg;
  rtx_insn *before_strlen;
  machine_mode insn_mode;
  enum insn_code icode = CODE_FOR_nothing;
  unsigned int align;

  /* If the length can be computed at compile-time, return it.  */
  len = c_strlen (src, 0);
  if (len)
    return expand_expr (len, target, target_mode, EXPAND_NORMAL);

  /* If the length can be computed at compile-time and is constant
     integer, but there are side-effects in src, evaluate
     src for side-effects, then return len.
     E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
     can be optimized into: i++; x = 3;  */
  len = c_strlen (src, 1);
  if (len && TREE_CODE (len) == INTEGER_CST)
    {
      expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (len, target, target_mode, EXPAND_NORMAL);
    }

  align = get_pointer_alignment (src) / BITS_PER_UNIT;

  /* If SRC is not a pointer type, don't do this operation inline.  */
  if (align == 0)
    return NULL_RTX;

  /* Bail out if we can't compute strlen in the right mode.  */
  FOR_EACH_MODE_FROM (insn_mode, target_mode)
    {
      icode = optab_handler (strlen_optab, insn_mode);
      if (icode != CODE_FOR_nothing)
	break;
    }
  if (insn_mode == VOIDmode)
    return NULL_RTX;

  /* Make a place to hold the source address.  We will not expand
     the actual source until we are sure that the expansion will
     not fail -- there are trees that cannot be expanded twice.  */
  src_reg = gen_reg_rtx (Pmode);

  /* Mark the beginning of the strlen sequence so we can emit the
     source operand later.  */
  before_strlen = get_last_insn ();

  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
  create_integer_operand (&ops[2], 0);
  create_integer_operand (&ops[3], align);
  if (!maybe_expand_insn (icode, 4, ops))
    return NULL_RTX;

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);

  /* Now that we are assured of success, expand the source.  */
  start_sequence ();
  pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
  if (pat != src_reg)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (pat) != Pmode)
	pat = convert_to_mode (Pmode, pat,
			       POINTERS_EXTEND_UNSIGNED);
#endif
      emit_move_insn (src_reg, pat);
    }
  pat = get_insns ();
  end_sequence ();

  /* Insert the source-address computation ahead of the strlen insn
     emitted above (or at the very start if nothing preceded it).  */
  if (before_strlen)
    emit_insn_after (pat, before_strlen);
  else
    emit_insn_before (pat, get_insns ());

  /* Return the value in the proper mode for this function.  */
  if (GET_MODE (ops[0].value) == target_mode)
    target = ops[0].value;
  else if (target != 0)
    convert_move (target, ops[0].value, 0);
  else
    target = convert_to_mode (target_mode, ops[0].value, 0);

  return target;
}
2930
/* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as target
   constant.  OFFSET must be non-negative and the read, including the
   terminating nul, must stay within the string.  */

static rtx
builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
			 scalar_int_mode mode)
{
  const char *str = (const char *) data;

  /* The read must lie entirely within DATA's nul-terminated contents.  */
  gcc_assert (offset >= 0
	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));

  return c_readstr (str + offset, mode);
}
2947
/* LEN specifies the length of the block of a memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make a very likely guess on the max size, then we
   set it into PROBABLE_MAX_SIZE.  LEN_RTX is LEN already expanded to RTL;
   when it is a constant all three outputs are set to that value.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA range info if available.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine the minimal size as N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     produces an anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3014
/* Try to verify that the sizes and lengths of the arguments to a string
   manipulation function given by EXP are within valid bounds and that
   the operation does not lead to buffer overflow or read past the end.
   Arguments other than EXP may be null.  When non-null, the arguments
   have the following meaning:
   DST is the destination of a copy call or NULL otherwise.
   SRC is the source of a copy call or NULL otherwise.
   DSTWRITE is the number of bytes written into the destination obtained
   from the user-supplied size argument to the function (such as in
   memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
   MAXREAD is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the number
   of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
   SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
   expression EXP is a string function call (as opposed to a memory call
   like memcpy).  As an exception, SRCSTR can also be an integer denoting
   the precomputed size of the source string or object (for functions like
   memcpy).
   DSTSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
   DSTSIZE)).

   When DSTWRITE is null LEN is checked to verify that it doesn't exceed
   SIZE_MAX.

   If the call is successfully verified as safe return true, otherwise
   return false.  */

static bool
check_access (tree exp, tree, tree, tree dstwrite,
	      tree maxread, tree srcstr, tree dstsize)
{
  int opt = OPT_Wstringop_overflow_;

  /* The size of the largest object is half the address space, or
     PTRDIFF_MAX.  (This is way too permissive.)  */
  tree maxobjsize = max_object_size ();

  /* Either the length of the source string for string functions or
     the size of the source object for raw memory functions.  */
  tree slen = NULL_TREE;

  tree range[2] = { NULL_TREE, NULL_TREE };

  /* Set to true when the exact number of bytes written by a string
     function like strcpy is not known and the only thing that is
     known is that it must be at least one (for the terminating nul).  */
  bool at_least_one = false;
  if (srcstr)
    {
      /* SRCSTR is normally a pointer to string but as a special case
	 it can be an integer denoting the length of a string.  */
      if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
	{
	  /* Try to determine the range of lengths the source string
	     refers to.  If it can be determined and is less than
	     the upper bound given by MAXREAD add one to it for
	     the terminating nul.  Otherwise, set it to one for
	     the same reason, or to MAXREAD as appropriate.  */
	  get_range_strlen (srcstr, range);
	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
	    {
	      if (maxread && tree_int_cst_le (maxread, range[0]))
		range[0] = range[1] = maxread;
	      else
		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
					range[0], size_one_node);

	      if (maxread && tree_int_cst_le (maxread, range[1]))
		range[1] = maxread;
	      else if (!integer_all_onesp (range[1]))
		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
					range[1], size_one_node);

	      slen = range[0];
	    }
	  else
	    {
	      at_least_one = true;
	      slen = size_one_node;
	    }
	}
      else
	slen = srcstr;
    }

  if (!dstwrite && !maxread)
    {
      /* When the only available piece of data is the object size
	 there is nothing to do.  */
      if (!slen)
	return true;

      /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set DSTWRITE to it.  */
      if (!range[0])
	dstwrite = slen;
    }

  if (!dstsize)
    dstsize = maxobjsize;

  /* RANGE is reused below to hold the range of DSTWRITE.  */
  if (dstwrite)
    get_size_range (dstwrite, range);

  tree func = get_callee_fndecl (exp);

  /* First check the number of bytes to be written against the maximum
     object size.  */
  if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
    {
      location_t loc = tree_nonartificial_location (exp);
      loc = expansion_point_location_if_in_system_header (loc);

      if (range[0] == range[1])
	warning_at (loc, opt,
		    "%K%qD specified size %E "
		    "exceeds maximum object size %E",
		    exp, func, range[0], maxobjsize);
      else
	warning_at (loc, opt,
		    "%K%qD specified size between %E and %E "
		    "exceeds maximum object size %E",
		    exp, func,
		    range[0], range[1], maxobjsize);
      return false;
    }

  /* The number of bytes to write is "exact" if DSTWRITE is non-null,
     constant, and in range of unsigned HOST_WIDE_INT.  */
  bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);

  /* Next check the number of bytes to be written against the destination
     object size.  */
  if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
    {
      if (range[0]
	  && ((tree_fits_uhwi_p (dstsize)
	       && tree_int_cst_lt (dstsize, range[0]))
	      || (tree_fits_uhwi_p (dstwrite)
		  && tree_int_cst_lt (dstwrite, range[0]))))
	{
	  /* Don't warn (again) for expressions already diagnosed.  */
	  if (TREE_NO_WARNING (exp))
	    return false;

	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (dstwrite == slen && at_least_one)
	    {
	      /* This is a call to strcpy with a destination of 0 size
		 and a source of unknown length.  The call will write
		 at least one byte past the end of the destination.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else if (tree_int_cst_equal (range[0], range[1]))
	    warning_n (loc, opt, tree_to_uhwi (range[0]),
		       "%K%qD writing %E byte into a region "
		       "of size %E overflows the destination",
		       "%K%qD writing %E bytes into a region "
		       "of size %E overflows the destination",
		       exp, func, range[0], dstsize);
	  else if (tree_int_cst_sign_bit (range[1]))
	    {
	      /* Avoid printing the upper bound if it's invalid.  */
	      warning_at (loc, opt,
			  "%K%qD writing %E or more bytes into a region "
			  "of size %E overflows the destination",
			  exp, func, range[0], dstsize);
	    }
	  else
	    warning_at (loc, opt,
			"%K%qD writing between %E and %E bytes into "
			"a region of size %E overflows the destination",
			exp, func, range[0], range[1],
			dstsize);

	  /* Return error when an overflow has been detected.  */
	  return false;
	}
    }

  /* Check the maximum length of the source sequence against the size
     of the destination object if known, or against the maximum size
     of an object.  */
  if (maxread)
    {
      get_size_range (maxread, range);

      /* Use the lower end for MAXREAD from now on.  */
      if (range[0])
	maxread = range[0];

      if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
	{
	  location_t loc = tree_nonartificial_location (exp);
	  loc = expansion_point_location_if_in_system_header (loc);

	  if (tree_int_cst_lt (maxobjsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      /* Warn about crazy big sizes first since that's more
		 likely to be meaningful than saying that the bound
		 is greater than the object size if both are big.  */
	      if (range[0] == range[1])
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], maxobjsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds maximum object size %E",
			    exp, func,
			    range[0], range[1], maxobjsize);

	      return false;
	    }

	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
	    {
	      if (TREE_NO_WARNING (exp))
		return false;

	      if (tree_int_cst_equal (range[0], range[1]))
		warning_at (loc, opt,
			    "%K%qD specified bound %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], dstsize);
	      else
		warning_at (loc, opt,
			    "%K%qD specified bound between %E and %E "
			    "exceeds destination size %E",
			    exp, func,
			    range[0], range[1], dstsize);
	      return false;
	    }
	}
    }

  /* Check for reading past the end of SRC.  SLEN == SRCSTR holds when
     SRCSTR was an integer denoting the precomputed source size.  */
  if (slen
      && slen == srcstr
      && dstwrite && range[0]
      && tree_int_cst_lt (slen, range[0]))
    {
      if (TREE_NO_WARNING (exp))
	return false;

      location_t loc = tree_nonartificial_location (exp);

      if (tree_int_cst_equal (range[0], range[1]))
	warning_n (loc, opt, tree_to_uhwi (range[0]),
		   "%K%qD reading %E byte from a region of size %E",
		   "%K%qD reading %E bytes from a region of size %E",
		   exp, func, range[0], slen);
      else if (tree_int_cst_sign_bit (range[1]))
	{
	  /* Avoid printing the upper bound if it's invalid.  */
	  warning_at (loc, opt,
		      "%K%qD reading %E or more bytes from a region "
		      "of size %E",
		      exp, func, range[0], slen);
	}
      else
	warning_at (loc, opt,
		    "%K%qD reading between %E and %E bytes from a region "
		    "of size %E",
		    exp, func, range[0], range[1], slen);
      return false;
    }

  return true;
}
3297
/* Helper to compute the size of the object referenced by the DEST
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).  Return
   an estimate of the size of the object if successful or NULL when
   the size cannot be determined.  When the referenced object involves
   a non-constant offset in some range the returned value represents
   the largest size given the smallest non-negative offset in the
   range.  The function is intended for diagnostics and should not
   be used to influence code generation or optimization.  */

tree
compute_objsize (tree dest, int ostype)
{
  unsigned HOST_WIDE_INT size;

  /* Only the two least significant bits are meaningful.  */
  ostype &= 3;

  if (compute_builtin_object_size (dest, ostype, &size))
    return build_int_cst (sizetype, size);

  if (TREE_CODE (dest) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (dest);
      if (!is_gimple_assign (stmt))
	return NULL_TREE;

      dest = gimple_assign_rhs1 (stmt);

      tree_code code = gimple_assign_rhs_code (stmt);
      if (code == POINTER_PLUS_EXPR)
	{
	  /* compute_builtin_object_size fails for addresses with
	     non-constant offsets.  Try to determine the range of
	     such an offset here and use it to adjust the constant
	     size.  */
	  tree off = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (off) == SSA_NAME
	      && INTEGRAL_TYPE_P (TREE_TYPE (off)))
	    {
	      wide_int min, max;
	      enum value_range_type rng = get_range_info (off, &min, &max);

	      if (rng == VR_RANGE)
		{
		  /* Recurse on the base pointer (the rhs1 of the
		     POINTER_PLUS_EXPR assigned to DEST above).  */
		  if (tree size = compute_objsize (dest, ostype))
		    {
		      wide_int wisiz = wi::to_wide (size);

		      /* Ignore negative offsets for now.  For others,
			 use the lower bound as the most optimistic
			 estimate of the (remaining) size.  */
		      if (wi::sign_mask (min))
			;
		      else if (wi::ltu_p (min, wisiz))
			return wide_int_to_tree (TREE_TYPE (size),
						 wi::sub (wisiz, min));
		      else
			return size_zero_node;
		    }
		}
	    }
	}
      else if (code != ADDR_EXPR)
	return NULL_TREE;
    }

  /* Unless computing the largest size (for memcpy and other raw memory
     functions), try to determine the size of the object from its type.  */
  if (!ostype)
    return NULL_TREE;

  if (TREE_CODE (dest) != ADDR_EXPR)
    return NULL_TREE;

  tree type = TREE_TYPE (dest);
  if (TREE_CODE (type) == POINTER_TYPE)
    type = TREE_TYPE (type);

  type = TYPE_MAIN_VARIANT (type);

  if (TREE_CODE (type) == ARRAY_TYPE
      && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
    {
      /* Return the constant size unless it's zero (that's a zero-length
	 array likely at the end of a struct).  */
      tree size = TYPE_SIZE_UNIT (type);
      if (size && TREE_CODE (size) == INTEGER_CST
	  && !integer_zerop (size))
	return size;
    }

  return NULL_TREE;
}
3392
3393/* Helper to determine and check the sizes of the source and the destination
8d6c6ef5 3394 of calls to __builtin_{bzero,memcpy,mempcpy,memset} calls. EXP is the
3395 call expression, DEST is the destination argument, SRC is the source
3396 argument or null, and LEN is the number of bytes. Use Object Size type-0
3397 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5aef8938 3398 (no overflow or invalid sizes), false otherwise. */
3399
3400static bool
e6a18b5a 3401check_memop_access (tree exp, tree dest, tree src, tree size)
5aef8938 3402{
5aef8938 3403 /* For functions like memset and memcpy that operate on raw memory
8d6c6ef5 3404 try to determine the size of the largest source and destination
3405 object using type-0 Object Size regardless of the object size
3406 type specified by the option. */
3407 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3408 tree dstsize = compute_objsize (dest, 0);
5aef8938 3409
e6a18b5a 3410 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3411 srcsize, dstsize);
8d6c6ef5 3412}
3413
3414/* Validate memchr arguments without performing any expansion.
3415 Return NULL_RTX. */
3416
3417static rtx
3418expand_builtin_memchr (tree exp, rtx)
3419{
3420 if (!validate_arglist (exp,
3421 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3422 return NULL_RTX;
3423
3424 tree arg1 = CALL_EXPR_ARG (exp, 0);
3425 tree len = CALL_EXPR_ARG (exp, 2);
3426
3427 /* Diagnose calls where the specified length exceeds the size
3428 of the object. */
3429 if (warn_stringop_overflow)
3430 {
3431 tree size = compute_objsize (arg1, 0);
e6a18b5a 3432 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3433 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
8d6c6ef5 3434 }
3435
3436 return NULL_RTX;
5aef8938 3437}
3438
c2f47e15 3439/* Expand a call EXP to the memcpy builtin.
3440 Return NULL_RTX if we failed, the caller should emit a normal call,
3b824fa6 3441 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3442 mode MODE if that's convenient). */
c2f47e15 3443
53800dbe 3444static rtx
a65c4d64 3445expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3446{
c2f47e15 3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
5aef8938 3450
3451 tree dest = CALL_EXPR_ARG (exp, 0);
3452 tree src = CALL_EXPR_ARG (exp, 1);
3453 tree len = CALL_EXPR_ARG (exp, 2);
3454
e6a18b5a 3455 check_memop_access (exp, dest, src, len);
5aef8938 3456
d0fbba1a 3457 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3458 /*endp=*/ 0);
f21337ef 3459}
6840589f 3460
4d317237 3461/* Check a call EXP to the memmove built-in for validity.
3462 Return NULL_RTX on both success and failure. */
3463
3464static rtx
3465expand_builtin_memmove (tree exp, rtx)
3466{
3467 if (!validate_arglist (exp,
3468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3469 return NULL_RTX;
3470
3471 tree dest = CALL_EXPR_ARG (exp, 0);
8d6c6ef5 3472 tree src = CALL_EXPR_ARG (exp, 1);
4d317237 3473 tree len = CALL_EXPR_ARG (exp, 2);
3474
e6a18b5a 3475 check_memop_access (exp, dest, src, len);
4d317237 3476
3477 return NULL_RTX;
3478}
3479
f21337ef 3480/* Expand an instrumented call EXP to the memcpy builtin.
3481 Return NULL_RTX if we failed, the caller should emit a normal call,
3482 otherwise try to get the result in TARGET, if convenient (and in
3483 mode MODE if that's convenient). */
53800dbe 3484
f21337ef 3485static rtx
3486expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3487{
3488 if (!validate_arglist (exp,
3489 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3490 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3491 INTEGER_TYPE, VOID_TYPE))
3492 return NULL_RTX;
3493 else
3494 {
3495 tree dest = CALL_EXPR_ARG (exp, 0);
3496 tree src = CALL_EXPR_ARG (exp, 2);
3497 tree len = CALL_EXPR_ARG (exp, 4);
d0fbba1a 3498 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3499 /*end_p=*/ 0);
53800dbe 3500
f21337ef 3501 /* Return src bounds with the result. */
3502 if (res)
e5716f7e 3503 {
17d388d8 3504 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3505 expand_normal (CALL_EXPR_ARG (exp, 1)));
3506 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3507 }
f21337ef 3508 return res;
53800dbe 3509 }
3510}
3511
c2f47e15 3512/* Expand a call EXP to the mempcpy builtin.
3513 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3514 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3515 mode MODE if that's convenient). If ENDP is 0 return the
3516 destination pointer, if ENDP is 1 return the end pointer ala
3517 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3518 stpcpy. */
647661c6 3519
3520static rtx
d0fbba1a 3521expand_builtin_mempcpy (tree exp, rtx target)
647661c6 3522{
c2f47e15 3523 if (!validate_arglist (exp,
3524 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 return NULL_RTX;
5aef8938 3526
3527 tree dest = CALL_EXPR_ARG (exp, 0);
3528 tree src = CALL_EXPR_ARG (exp, 1);
3529 tree len = CALL_EXPR_ARG (exp, 2);
3530
24e3b821 3531 /* Policy does not generally allow using compute_objsize (which
3532 is used internally by check_memop_size) to change code generation
3533 or drive optimization decisions.
3534
3535 In this instance it is safe because the code we generate has
3536 the same semantics regardless of the return value of
3537 check_memop_sizes. Exactly the same amount of data is copied
3538 and the return value is exactly the same in both cases.
3539
3540 Furthermore, check_memop_size always uses mode 0 for the call to
3541 compute_objsize, so the imprecise nature of compute_objsize is
3542 avoided. */
3543
5aef8938 3544 /* Avoid expanding mempcpy into memcpy when the call is determined
3545 to overflow the buffer. This also prevents the same overflow
3546 from being diagnosed again when expanding memcpy. */
e6a18b5a 3547 if (!check_memop_access (exp, dest, src, len))
5aef8938 3548 return NULL_RTX;
3549
3550 return expand_builtin_mempcpy_args (dest, src, len,
d0fbba1a 3551 target, exp, /*endp=*/ 1);
f21337ef 3552}
3553
3554/* Expand an instrumented call EXP to the mempcpy builtin.
3555 Return NULL_RTX if we failed, the caller should emit a normal call,
3556 otherwise try to get the result in TARGET, if convenient (and in
3557 mode MODE if that's convenient). */
3558
3559static rtx
d0fbba1a 3560expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
f21337ef 3561{
3562 if (!validate_arglist (exp,
3563 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3564 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3565 INTEGER_TYPE, VOID_TYPE))
3566 return NULL_RTX;
3567 else
3568 {
3569 tree dest = CALL_EXPR_ARG (exp, 0);
3570 tree src = CALL_EXPR_ARG (exp, 2);
3571 tree len = CALL_EXPR_ARG (exp, 4);
3572 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
d0fbba1a 3573 exp, 1);
f21337ef 3574
3575 /* Return src bounds with the result. */
3576 if (res)
3577 {
17d388d8 3578 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3579 expand_normal (CALL_EXPR_ARG (exp, 1)));
3580 res = chkp_join_splitted_slot (res, bnd);
3581 }
3582 return res;
c2f47e15 3583 }
3584}
3585
d0fbba1a 3586/* Helper function to do the actual work for expand of memory copy family
3587 functions (memcpy, mempcpy, stpcpy). Expansing should assign LEN bytes
3588 of memory from SRC to DEST and assign to TARGET if convenient.
3589 If ENDP is 0 return the
3590 destination pointer, if ENDP is 1 return the end pointer ala
3591 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3592 stpcpy. */
c2f47e15 3593
3594static rtx
d0fbba1a 3595expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3596 rtx target, tree exp, int endp)
c2f47e15 3597{
d0fbba1a 3598 const char *src_str;
3599 unsigned int src_align = get_pointer_alignment (src);
3600 unsigned int dest_align = get_pointer_alignment (dest);
3601 rtx dest_mem, src_mem, dest_addr, len_rtx;
3602 HOST_WIDE_INT expected_size = -1;
3603 unsigned int expected_align = 0;
3604 unsigned HOST_WIDE_INT min_size;
3605 unsigned HOST_WIDE_INT max_size;
3606 unsigned HOST_WIDE_INT probable_max_size;
f21337ef 3607
d0fbba1a 3608 /* If DEST is not a pointer type, call the normal function. */
3609 if (dest_align == 0)
3610 return NULL_RTX;
a0c938f0 3611
d0fbba1a 3612 /* If either SRC is not a pointer type, don't do this
3613 operation in-line. */
3614 if (src_align == 0)
3615 return NULL_RTX;
9fe0e1b8 3616
d0fbba1a 3617 if (currently_expanding_gimple_stmt)
3618 stringop_block_profile (currently_expanding_gimple_stmt,
3619 &expected_align, &expected_size);
0862b7e9 3620
d0fbba1a 3621 if (expected_align < dest_align)
3622 expected_align = dest_align;
3623 dest_mem = get_memory_rtx (dest, len);
3624 set_mem_align (dest_mem, dest_align);
3625 len_rtx = expand_normal (len);
3626 determine_block_size (len, len_rtx, &min_size, &max_size,
3627 &probable_max_size);
3628 src_str = c_getstr (src);
647661c6 3629
d0fbba1a 3630 /* If SRC is a string constant and block move would be done
3631 by pieces, we can avoid loading the string from memory
3632 and only stored the computed constants. */
3633 if (src_str
3634 && CONST_INT_P (len_rtx)
3635 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3636 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3637 CONST_CAST (char *, src_str),
3638 dest_align, false))
3639 {
3640 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3641 builtin_memcpy_read_str,
d72123ce 3642 CONST_CAST (char *, src_str),
d0fbba1a 3643 dest_align, false, endp);
3644 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3645 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3646 return dest_mem;
3647 }
647661c6 3648
d0fbba1a 3649 src_mem = get_memory_rtx (src, len);
3650 set_mem_align (src_mem, src_align);
9fe0e1b8 3651
d0fbba1a 3652 /* Copy word part most expediently. */
21781799 3653 enum block_op_methods method = BLOCK_OP_NORMAL;
3654 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3655 method = BLOCK_OP_TAILCALL;
3656 if (endp == 1 && target != const0_rtx)
3657 method = BLOCK_OP_NO_LIBCALL_RET;
3658 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
d0fbba1a 3659 expected_align, expected_size,
3660 min_size, max_size, probable_max_size);
21781799 3661 if (dest_addr == pc_rtx)
3662 return NULL_RTX;
d0fbba1a 3663
3664 if (dest_addr == 0)
3665 {
3666 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3667 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3668 }
3669
3670 if (endp && target != const0_rtx)
3671 {
3672 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3673 /* stpcpy pointer to last byte. */
3674 if (endp == 2)
3675 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
d72123ce 3676 }
d0fbba1a 3677
3678 return dest_addr;
3679}
3680
3681static rtx
3682expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3683 rtx target, tree orig_exp, int endp)
3684{
3685 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3686 endp);
647661c6 3687}
3688
c2f47e15 3689/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3690 we failed, the caller should emit a normal call, otherwise try to
3691 get the result in TARGET, if convenient. If ENDP is 0 return the
3692 destination pointer, if ENDP is 1 return the end pointer ala
3693 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3694 stpcpy. */
3695
3696static rtx
3697expand_movstr (tree dest, tree src, rtx target, int endp)
3698{
8786db1e 3699 struct expand_operand ops[3];
727c62dd 3700 rtx dest_mem;
3701 rtx src_mem;
727c62dd 3702
8d74dc42 3703 if (!targetm.have_movstr ())
c2f47e15 3704 return NULL_RTX;
727c62dd 3705
d8ae1baa 3706 dest_mem = get_memory_rtx (dest, NULL);
3707 src_mem = get_memory_rtx (src, NULL);
727c62dd 3708 if (!endp)
3709 {
3710 target = force_reg (Pmode, XEXP (dest_mem, 0));
3711 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3712 }
3713
8786db1e 3714 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3715 create_fixed_operand (&ops[1], dest_mem);
3716 create_fixed_operand (&ops[2], src_mem);
8d74dc42 3717 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
1e1d5623 3718 return NULL_RTX;
727c62dd 3719
8786db1e 3720 if (endp && target != const0_rtx)
c5aba89c 3721 {
8786db1e 3722 target = ops[0].value;
3723 /* movstr is supposed to set end to the address of the NUL
3724 terminator. If the caller requested a mempcpy-like return value,
3725 adjust it. */
3726 if (endp == 1)
3727 {
29c05e22 3728 rtx tem = plus_constant (GET_MODE (target),
3729 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3730 emit_move_insn (target, force_operand (tem, NULL_RTX));
3731 }
c5aba89c 3732 }
727c62dd 3733 return target;
3734}
3735
5aef8938 3736/* Do some very basic size validation of a call to the strcpy builtin
3737 given by EXP. Return NULL_RTX to have the built-in expand to a call
3738 to the library function. */
3739
3740static rtx
3741expand_builtin_strcat (tree exp, rtx)
3742{
3743 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3744 || !warn_stringop_overflow)
3745 return NULL_RTX;
3746
3747 tree dest = CALL_EXPR_ARG (exp, 0);
3748 tree src = CALL_EXPR_ARG (exp, 1);
3749
3750 /* There is no way here to determine the length of the string in
3751 the destination to which the SRC string is being appended so
3752 just diagnose cases when the souce string is longer than
3753 the destination object. */
3754
8d6c6ef5 3755 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3756
e6a18b5a 3757 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3758 destsize);
5aef8938 3759
3760 return NULL_RTX;
3761}
3762
48e1416a 3763/* Expand expression EXP, which is a call to the strcpy builtin. Return
3764 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 3765 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3766 convenient). */
902de8ed 3767
53800dbe 3768static rtx
a65c4d64 3769expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3770{
5aef8938 3771 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3772 return NULL_RTX;
3773
3774 tree dest = CALL_EXPR_ARG (exp, 0);
3775 tree src = CALL_EXPR_ARG (exp, 1);
3776
3777 if (warn_stringop_overflow)
3778 {
8d6c6ef5 3779 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
e6a18b5a 3780 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3781 src, destsize);
5aef8938 3782 }
3783
3784 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3785}
3786
3787/* Helper function to do the actual work for expand_builtin_strcpy. The
3788 arguments to the builtin_strcpy call DEST and SRC are broken out
3789 so that this can also be called without constructing an actual CALL_EXPR.
3790 The other arguments and return value are the same as for
3791 expand_builtin_strcpy. */
3792
3793static rtx
a65c4d64 3794expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3795{
c2f47e15 3796 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3797}
3798
c2f47e15 3799/* Expand a call EXP to the stpcpy builtin.
3800 Return NULL_RTX if we failed the caller should emit a normal call,
3b824fa6 3801 otherwise try to get the result in TARGET, if convenient (and in
3802 mode MODE if that's convenient). */
3803
3804static rtx
3754d046 3805expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3806{
c2f47e15 3807 tree dst, src;
389dd41b 3808 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3809
3810 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3811 return NULL_RTX;
3812
3813 dst = CALL_EXPR_ARG (exp, 0);
3814 src = CALL_EXPR_ARG (exp, 1);
3815
4d317237 3816 if (warn_stringop_overflow)
3817 {
8d6c6ef5 3818 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
e6a18b5a 3819 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3820 src, destsize);
4d317237 3821 }
3822
727c62dd 3823 /* If return value is ignored, transform stpcpy into strcpy. */
b9a16870 3824 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3825 {
b9a16870 3826 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3827 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3828 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3829 }
3b824fa6 3830 else
3831 {
c2f47e15 3832 tree len, lenp1;
727c62dd 3833 rtx ret;
647661c6 3834
9fe0e1b8 3835 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3836 compile-time, not an expression containing a string. This is
3837 because the latter will potentially produce pessimized code
3838 when used to produce the return value. */
681fab1e 3839 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3840 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3841
389dd41b 3842 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3843 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
d0fbba1a 3844 target, exp, /*endp=*/2);
727c62dd 3845
3846 if (ret)
3847 return ret;
3848
3849 if (TREE_CODE (len) == INTEGER_CST)
3850 {
8ec3c5c2 3851 rtx len_rtx = expand_normal (len);
727c62dd 3852
971ba038 3853 if (CONST_INT_P (len_rtx))
727c62dd 3854 {
a65c4d64 3855 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3856
3857 if (ret)
3858 {
3859 if (! target)
7ac87324 3860 {
3861 if (mode != VOIDmode)
3862 target = gen_reg_rtx (mode);
3863 else
3864 target = gen_reg_rtx (GET_MODE (ret));
3865 }
727c62dd 3866 if (GET_MODE (target) != GET_MODE (ret))
3867 ret = gen_lowpart (GET_MODE (target), ret);
3868
29c05e22 3869 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3870 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3871 gcc_assert (ret);
727c62dd 3872
3873 return target;
3874 }
3875 }
3876 }
3877
c2f47e15 3878 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3879 }
3880}
3881
4d317237 3882/* Check a call EXP to the stpncpy built-in for validity.
3883 Return NULL_RTX on both success and failure. */
3884
3885static rtx
3886expand_builtin_stpncpy (tree exp, rtx)
3887{
3888 if (!validate_arglist (exp,
3889 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3890 || !warn_stringop_overflow)
3891 return NULL_RTX;
3892
aca1a787 3893 /* The source and destination of the call. */
4d317237 3894 tree dest = CALL_EXPR_ARG (exp, 0);
3895 tree src = CALL_EXPR_ARG (exp, 1);
3896
aca1a787 3897 /* The exact number of bytes to write (not the maximum). */
4d317237 3898 tree len = CALL_EXPR_ARG (exp, 2);
4d317237 3899
aca1a787 3900 /* The size of the destination object. */
8d6c6ef5 3901 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4d317237 3902
e6a18b5a 3903 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4d317237 3904
3905 return NULL_RTX;
3906}
3907
6840589f 3908/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3909 bytes from constant string DATA + OFFSET and return it as target
3910 constant. */
3911
09879952 3912rtx
aecda0d6 3913builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
f77c4496 3914 scalar_int_mode mode)
6840589f 3915{
3916 const char *str = (const char *) data;
3917
3918 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3919 return const0_rtx;
3920
3921 return c_readstr (str + offset, mode);
3922}
3923
5aef8938 3924/* Helper to check the sizes of sequences and the destination of calls
3925 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3926 success (no overflow or invalid sizes), false otherwise. */
3927
3928static bool
3929check_strncat_sizes (tree exp, tree objsize)
3930{
3931 tree dest = CALL_EXPR_ARG (exp, 0);
3932 tree src = CALL_EXPR_ARG (exp, 1);
e6a18b5a 3933 tree maxread = CALL_EXPR_ARG (exp, 2);
5aef8938 3934
3935 /* Try to determine the range of lengths that the source expression
3936 refers to. */
3937 tree lenrange[2];
3938 get_range_strlen (src, lenrange);
3939
3940 /* Try to verify that the destination is big enough for the shortest
3941 string. */
3942
3943 if (!objsize && warn_stringop_overflow)
3944 {
3945 /* If it hasn't been provided by __strncat_chk, try to determine
3946 the size of the destination object into which the source is
3947 being copied. */
8d6c6ef5 3948 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 3949 }
3950
3951 /* Add one for the terminating nul. */
3952 tree srclen = (lenrange[0]
3953 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3954 size_one_node)
3955 : NULL_TREE);
3956
e6a18b5a 3957 /* The strncat function copies at most MAXREAD bytes and always appends
3958 the terminating nul so the specified upper bound should never be equal
3959 to (or greater than) the size of the destination. */
3960 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3961 && tree_int_cst_equal (objsize, maxread))
5aef8938 3962 {
4d317237 3963 location_t loc = tree_nonartificial_location (exp);
3964 loc = expansion_point_location_if_in_system_header (loc);
3965
3966 warning_at (loc, OPT_Wstringop_overflow_,
9098b938 3967 "%K%qD specified bound %E equals destination size",
e6a18b5a 3968 exp, get_callee_fndecl (exp), maxread);
5aef8938 3969
3970 return false;
3971 }
3972
3973 if (!srclen
e6a18b5a 3974 || (maxread && tree_fits_uhwi_p (maxread)
5aef8938 3975 && tree_fits_uhwi_p (srclen)
e6a18b5a 3976 && tree_int_cst_lt (maxread, srclen)))
3977 srclen = maxread;
5aef8938 3978
e6a18b5a 3979 /* The number of bytes to write is LEN but check_access will also
5aef8938 3980 check SRCLEN if LEN's value isn't known. */
e6a18b5a 3981 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3982 objsize);
5aef8938 3983}
3984
3985/* Similar to expand_builtin_strcat, do some very basic size validation
3986 of a call to the strcpy builtin given by EXP. Return NULL_RTX to have
3987 the built-in expand to a call to the library function. */
3988
3989static rtx
3990expand_builtin_strncat (tree exp, rtx)
3991{
3992 if (!validate_arglist (exp,
3993 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3994 || !warn_stringop_overflow)
3995 return NULL_RTX;
3996
3997 tree dest = CALL_EXPR_ARG (exp, 0);
3998 tree src = CALL_EXPR_ARG (exp, 1);
3999 /* The upper bound on the number of bytes to write. */
e6a18b5a 4000 tree maxread = CALL_EXPR_ARG (exp, 2);
5aef8938 4001 /* The length of the source sequence. */
4002 tree slen = c_strlen (src, 1);
4003
4004 /* Try to determine the range of lengths that the source expression
4005 refers to. */
4006 tree lenrange[2];
4007 if (slen)
4008 lenrange[0] = lenrange[1] = slen;
4009 else
4010 get_range_strlen (src, lenrange);
4011
4012 /* Try to verify that the destination is big enough for the shortest
4013 string. First try to determine the size of the destination object
4014 into which the source is being copied. */
8d6c6ef5 4015 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
5aef8938 4016
4017 /* Add one for the terminating nul. */
4018 tree srclen = (lenrange[0]
4019 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4020 size_one_node)
4021 : NULL_TREE);
4022
e6a18b5a 4023 /* The strncat function copies at most MAXREAD bytes and always appends
4024 the terminating nul so the specified upper bound should never be equal
4025 to (or greater than) the size of the destination. */
4026 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4027 && tree_int_cst_equal (destsize, maxread))
5aef8938 4028 {
4d317237 4029 location_t loc = tree_nonartificial_location (exp);
4030 loc = expansion_point_location_if_in_system_header (loc);
4031
4032 warning_at (loc, OPT_Wstringop_overflow_,
9098b938 4033 "%K%qD specified bound %E equals destination size",
e6a18b5a 4034 exp, get_callee_fndecl (exp), maxread);
5aef8938 4035
4036 return NULL_RTX;
4037 }
4038
4039 if (!srclen
e6a18b5a 4040 || (maxread && tree_fits_uhwi_p (maxread)
5aef8938 4041 && tree_fits_uhwi_p (srclen)
e6a18b5a 4042 && tree_int_cst_lt (maxread, srclen)))
4043 srclen = maxread;
5aef8938 4044
e6a18b5a 4045 /* The number of bytes to write is SRCLEN. */
4046 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
5aef8938 4047
4048 return NULL_RTX;
4049}
4050
48e1416a 4051/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 4052 NULL_RTX if we failed the caller should emit a normal call. */
ed09096d 4053
4054static rtx
a65c4d64 4055expand_builtin_strncpy (tree exp, rtx target)
ed09096d 4056{
389dd41b 4057 location_t loc = EXPR_LOCATION (exp);
c2f47e15 4058
4059 if (validate_arglist (exp,
4060 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 4061 {
c2f47e15 4062 tree dest = CALL_EXPR_ARG (exp, 0);
4063 tree src = CALL_EXPR_ARG (exp, 1);
5aef8938 4064 /* The number of bytes to write (not the maximum). */
c2f47e15 4065 tree len = CALL_EXPR_ARG (exp, 2);
5aef8938 4066 /* The length of the source sequence. */
c2f47e15 4067 tree slen = c_strlen (src, 1);
6840589f 4068
e6a18b5a 4069 if (warn_stringop_overflow)
4070 {
4071 tree destsize = compute_objsize (dest,
4072 warn_stringop_overflow - 1);
4073
4074 /* The number of bytes to write is LEN but check_access will also
4075 check SLEN if LEN's value isn't known. */
4076 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4077 destsize);
4078 }
5aef8938 4079
8ff6a5cd 4080 /* We must be passed a constant len and src parameter. */
e913b5cd 4081 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 4082 return NULL_RTX;
ed09096d 4083
389dd41b 4084 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 4085
4086 /* We're required to pad with trailing zeros if the requested
a0c938f0 4087 len is greater than strlen(s2)+1. In that case try to
6840589f 4088 use store_by_pieces, if it fails, punt. */
ed09096d 4089 if (tree_int_cst_lt (slen, len))
6840589f 4090 {
957d0361 4091 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 4092 const char *p = c_getstr (src);
6840589f 4093 rtx dest_mem;
4094
e913b5cd 4095 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4096 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 4097 builtin_strncpy_read_str,
364c0c59 4098 CONST_CAST (char *, p),
4099 dest_align, false))
c2f47e15 4100 return NULL_RTX;
6840589f 4101
d8ae1baa 4102 dest_mem = get_memory_rtx (dest, len);
e913b5cd 4103 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 4104 builtin_strncpy_read_str,
364c0c59 4105 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 4106 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 4107 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 4108 return dest_mem;
6840589f 4109 }
ed09096d 4110 }
c2f47e15 4111 return NULL_RTX;
ed09096d 4112}
4113
ecc318ff 4114/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4115 bytes from constant string DATA + OFFSET and return it as target
4116 constant. */
4117
f656b751 4118rtx
aecda0d6 4119builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4120 scalar_int_mode mode)
ecc318ff 4121{
4122 const char *c = (const char *) data;
364c0c59 4123 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 4124
4125 memset (p, *c, GET_MODE_SIZE (mode));
4126
4127 return c_readstr (p, mode);
4128}
4129
a7ec6974 4130/* Callback routine for store_by_pieces. Return the RTL of a register
4131 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4132 char value given in the RTL register data. For example, if mode is
4133 4 bytes wide, return the RTL for 0x01010101*data. */
4134
4135static rtx
aecda0d6 4136builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
f77c4496 4137 scalar_int_mode mode)
a7ec6974 4138{
4139 rtx target, coeff;
4140 size_t size;
4141 char *p;
4142
4143 size = GET_MODE_SIZE (mode);
f0ce3b1f 4144 if (size == 1)
4145 return (rtx) data;
a7ec6974 4146
364c0c59 4147 p = XALLOCAVEC (char, size);
a7ec6974 4148 memset (p, 1, size);
4149 coeff = c_readstr (p, mode);
4150
f0ce3b1f 4151 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 4152 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4153 return force_reg (mode, target);
4154}
4155
48e1416a 4156/* Expand expression EXP, which is a call to the memset builtin. Return
4157 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 4158 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 4159 convenient). */
902de8ed 4160
53800dbe 4161static rtx
3754d046 4162expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 4163{
c2f47e15 4164 if (!validate_arglist (exp,
4165 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4166 return NULL_RTX;
5aef8938 4167
4168 tree dest = CALL_EXPR_ARG (exp, 0);
4169 tree val = CALL_EXPR_ARG (exp, 1);
4170 tree len = CALL_EXPR_ARG (exp, 2);
4171
e6a18b5a 4172 check_memop_access (exp, dest, NULL_TREE, len);
5aef8938 4173
4174 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
c2f47e15 4175}
53800dbe 4176
f21337ef 4177/* Expand expression EXP, which is an instrumented call to the memset builtin.
4178 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
4179 try to get the result in TARGET, if convenient (and in mode MODE if that's
4180 convenient). */
4181
4182static rtx
4183expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4184{
4185 if (!validate_arglist (exp,
4186 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4187 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4188 return NULL_RTX;
4189 else
4190 {
4191 tree dest = CALL_EXPR_ARG (exp, 0);
4192 tree val = CALL_EXPR_ARG (exp, 2);
4193 tree len = CALL_EXPR_ARG (exp, 3);
4194 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4195
4196 /* Return src bounds with the result. */
4197 if (res)
4198 {
17d388d8 4199 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 4200 expand_normal (CALL_EXPR_ARG (exp, 1)));
4201 res = chkp_join_splitted_slot (res, bnd);
4202 }
4203 return res;
4204 }
4205}
4206
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  ORIG_EXP is still needed for the fallback
   library call (callee decl, location, tail-call flag).  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Refine the expected alignment/size from profile feedback, if any.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  The do_libcall path
     re-uses DEST/VAL/LEN, so they must be evaluated only once.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;	/* Dummy byte for the can_store_by_pieces feasibility probe.  */
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* VAL is a constant; narrow it to a single byte.  */
  if (target_char_cast (val, &c))
    goto do_libcall;

  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* The filler byte is zero: use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

  /* Inline expansion failed: emit a library call to the original callee
     (memset, its chkp variant, or bzero), preserving tail-call status.  */
 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
4348
48e1416a 4349/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 4350 NULL_RTX if we failed the caller should emit a normal call. */
27d0c333 4351
ffc83088 4352static rtx
0b25db21 4353expand_builtin_bzero (tree exp)
ffc83088 4354{
c2f47e15 4355 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 4356 return NULL_RTX;
ffc83088 4357
5aef8938 4358 tree dest = CALL_EXPR_ARG (exp, 0);
4359 tree size = CALL_EXPR_ARG (exp, 1);
4360
e6a18b5a 4361 check_memop_access (exp, dest, NULL_TREE, size);
bf8e3599 4362
7369e7ba 4363 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 4364 memset(ptr x, int 0, size_t y). This is done this way
4365 so that if it isn't expanded inline, we fallback to
4366 calling bzero instead of memset. */
bf8e3599 4367
5aef8938 4368 location_t loc = EXPR_LOCATION (exp);
4369
c2f47e15 4370 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 4371 fold_convert_loc (loc,
4372 size_type_node, size),
c2f47e15 4373 const0_rtx, VOIDmode, exp);
ffc83088 4374}
4375
d6f01a40 4376/* Try to expand cmpstr operation ICODE with the given operands.
4377 Return the result rtx on success, otherwise return null. */
4378
4379static rtx
4380expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4381 HOST_WIDE_INT align)
4382{
4383 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4384
4385 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4386 target = NULL_RTX;
4387
4388 struct expand_operand ops[4];
4389 create_output_operand (&ops[0], target, insn_mode);
4390 create_fixed_operand (&ops[1], arg1_rtx);
4391 create_fixed_operand (&ops[2], arg2_rtx);
4392 create_integer_operand (&ops[3], align);
4393 if (maybe_expand_insn (icode, 4, ops))
4394 return ops[0].value;
4395 return NULL_RTX;
4396}
4397
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  if (warn_stringop_overflow)
    {
      tree size = compute_objsize (arg1, 0);
      /* Only check ARG2 if ARG1 passed, to avoid duplicate warnings.  */
      if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
			/*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
	{
	  size = compute_objsize (arg2, 0);
	  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
			/*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
	}
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  /* For equality-only comparisons the operand order is irrelevant, so a
     constant string in either argument can be moved to the second slot.  */
  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				     TREE_TYPE (len), target,
				     result_eq, constfn,
				     CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}
4490
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Inline expansion needs at least one of the cmpstr/cmpstrn patterns.  */
  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  rtx result = NULL_RTX;
  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      /* +1 so that the terminating nul participates in the comparison.  */
      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  tree fndecl = get_callee_fndecl (exp);
  maybe_warn_nonstring_arg (fndecl, exp);

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
53800dbe 4602
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should then emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  /* +1 so that the terminating nul participates in the comparison.  */
  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
					 arg2_rtx, TREE_TYPE (len), arg3_rtx,
					 MIN (arg1_align, arg2_align));

  /* Check to see if the argument was declared attribute nonstring
     and if so, issue a warning since at this point it's not known
     to be nul-terminated.  */
  tree fndecl = get_callee_fndecl (exp);
  maybe_warn_nonstring_arg (fndecl, exp);

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}
4706
/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  The real work is delegated to the target hook
   targetm.calls.expand_builtin_saveregs; the insns it emits are moved
   to the start of the function.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}
4744
79012a9d 4745/* Expand a call to __builtin_next_arg. */
27d0c333 4746
53800dbe 4747static rtx
79012a9d 4748expand_builtin_next_arg (void)
53800dbe 4749{
79012a9d 4750 /* Checking arguments is already done in fold_builtin_next_arg
4751 that must be called before this function. */
940ddc5c 4752 return expand_binop (ptr_mode, add_optab,
abe32cce 4753 crtl->args.internal_arg_pointer,
4754 crtl->args.arg_offset_rtx,
53800dbe 4755 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4756}
4757
/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  LOC is the location to use for the folds;
   NEEDS_LVALUE is nonzero when the caller must be able to assign through
   the returned tree.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      /* Take the address once, then dereference it, so side effects run
	 exactly one time.  */
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}
4807
/* The "standard" definition of va_list is void*.  (Default
   implementation of the corresponding target hook — presumably
   TARGET_BUILD_BUILTIN_VA_LIST; confirm against target.def.)  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}
4815
/* The "standard" abi va_list is va_list_type_node.  FNDECL is unused
   here; targets with per-function ABIs may inspect it.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}
4823
4824/* The "standard" type of va_list is va_list_type_node. */
4825
4826tree
4827std_canonical_va_list_type (tree type)
4828{
4829 tree wtype, htype;
4830
5f57a8b1 4831 wtype = va_list_type_node;
4832 htype = type;
b6da2e41 4833
4834 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4835 {
4836 /* If va_list is an array type, the argument may have decayed
4837 to a pointer type, e.g. by being passed to another function.
4838 In that case, unwrap both types so that we can compare the
4839 underlying records. */
4840 if (TREE_CODE (htype) == ARRAY_TYPE
4841 || POINTER_TYPE_P (htype))
4842 {
4843 wtype = TREE_TYPE (wtype);
4844 htype = TREE_TYPE (htype);
4845 }
4846 }
4847 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4848 return va_list_type_node;
4849
4850 return NULL_TREE;
4851}
4852
/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable VALIST.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);

  /* We do not have any valid bounds for the pointer, so
     just store zero bounds for it.  */
  if (chkp_function_instrumented_p (current_function_decl))
    chkp_expand_bounds_reset_for_mem (valist,
				      make_tree (TREE_TYPE (valist),
						 nextarg));
}
4869
/* Expand EXP, a call to __builtin_va_start.  Always returns const0_rtx;
   diagnoses a missing second argument.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  /* fold_builtin_next_arg also diagnoses misuse; bail out quietly if it
     already reported a problem.  */
  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  /* Prefer the target hook; fall back to the standard expansion.  */
  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}
4898
c2f47e15 4899/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4900
a66c9326 4901static rtx
c2f47e15 4902expand_builtin_va_end (tree exp)
a66c9326 4903{
c2f47e15 4904 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4905
8a15c04a 4906 /* Evaluate for side effects, if needed. I hate macros that don't
4907 do that. */
4908 if (TREE_SIDE_EFFECTS (valist))
4909 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4910
4911 return const0_rtx;
4912}
4913
/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* Only DST needs to be an lvalue.  */
  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      /* Scalar va_list: a plain assignment suffices.  */
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      /* Array va_list: copy the whole object with a block move.  */
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}
4965
53800dbe 4966/* Expand a call to one of the builtin functions __builtin_frame_address or
4967 __builtin_return_address. */
27d0c333 4968
53800dbe 4969static rtx
c2f47e15 4970expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4971{
53800dbe 4972 /* The argument must be a nonnegative integer constant.
4973 It counts the number of frames to scan up the stack.
5b252e95 4974 The value is either the frame pointer value or the return
4975 address saved in that frame. */
c2f47e15 4976 if (call_expr_nargs (exp) == 0)
53800dbe 4977 /* Warning about missing arg was already issued. */
4978 return const0_rtx;
e913b5cd 4979 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4980 {
5b252e95 4981 error ("invalid argument to %qD", fndecl);
53800dbe 4982 return const0_rtx;
4983 }
4984 else
4985 {
5b252e95 4986 /* Number of frames to scan up the stack. */
4987 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4988
4989 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 4990
4991 /* Some ports cannot access arbitrary stack frames. */
4992 if (tem == NULL)
4993 {
5b252e95 4994 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 4995 return const0_rtx;
4996 }
4997
5b252e95 4998 if (count)
4999 {
5000 /* Warn since no effort is made to ensure that any frame
5001 beyond the current one exists or can be safely reached. */
5002 warning (OPT_Wframe_address, "calling %qD with "
5003 "a nonzero argument is unsafe", fndecl);
5004 }
5005
53800dbe 5006 /* For __builtin_frame_address, return what we've got. */
5007 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5008 return tem;
5009
8ad4c111 5010 if (!REG_P (tem)
53800dbe 5011 && ! CONSTANT_P (tem))
99182918 5012 tem = copy_addr_to_reg (tem);
53800dbe 5013 return tem;
5014 }
5015}
5016
/* Expand EXP, a call to the alloca builtin (plain alloca, or one of the
   internal _WITH_ALIGN / _WITH_ALIGN_AND_MAX variants).  Return NULL_RTX
   if we failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  /* The three variants take 1, 2, or 3 integer arguments.  */
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
       : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var && !warn_vla_limit)
      || (!alloca_for_var && !warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings override
	 the more general -Walloc-size-larger-than so unless either of
	 the former options is specified check the alloca arguments for
	 overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}
5074
/* Emit a call to __asan_allocas_unpoison call in EXP.  Replace second argument
   of the call with virtual_stack_dynamic_rtx because in asan pass we emit a
   dummy value into second parameter relying on this function to perform the
   change.  See motivation for this in comment to handle_builtin_stack_restore
   function.  Returns the rtx holding the libcall's return value.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  /* The dummy second argument is ignored; the real bottom-of-range is
     the current dynamic stack boundary.  */
  rtx bot = convert_memory_address (ptr_mode, virtual_stack_dynamic_rtx);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}
5092
74bdbe96 5093/* Expand a call to bswap builtin in EXP.
5094 Return NULL_RTX if a normal call should be emitted rather than expanding the
5095 function in-line. If convenient, the result should be placed in TARGET.
5096 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 5097
5098static rtx
3754d046 5099expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 5100 rtx subtarget)
42791117 5101{
42791117 5102 tree arg;
5103 rtx op0;
5104
c2f47e15 5105 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5106 return NULL_RTX;
42791117 5107
c2f47e15 5108 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 5109 op0 = expand_expr (arg,
5110 subtarget && GET_MODE (subtarget) == target_mode
5111 ? subtarget : NULL_RTX,
5112 target_mode, EXPAND_NORMAL);
5113 if (GET_MODE (op0) != target_mode)
5114 op0 = convert_to_mode (target_mode, op0, 1);
42791117 5115
74bdbe96 5116 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 5117
5118 gcc_assert (target);
5119
74bdbe96 5120 return convert_to_mode (target_mode, target, 1);
42791117 5121}
5122
c2f47e15 5123/* Expand a call to a unary builtin in EXP.
5124 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 5125 function in-line. If convenient, the result should be placed in TARGET.
5126 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 5127
53800dbe 5128static rtx
3754d046 5129expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 5130 rtx subtarget, optab op_optab)
53800dbe 5131{
5132 rtx op0;
c2f47e15 5133
5134 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5135 return NULL_RTX;
53800dbe 5136
5137 /* Compute the argument. */
f97eea22 5138 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5139 (subtarget
5140 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5141 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 5142 VOIDmode, EXPAND_NORMAL);
6a08d0ab 5143 /* Compute op, into TARGET if possible.
53800dbe 5144 Set TARGET to wherever the result comes back. */
c2f47e15 5145 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 5146 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 5147 gcc_assert (target);
7d3f6cc7 5148
efb070c8 5149 return convert_to_mode (target_mode, target, 0);
53800dbe 5150}
89cfe6e5 5151
48e1416a 5152/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 5153 as the builtin_expect semantic should've been already executed by
5154 tree branch prediction pass. */
89cfe6e5 5155
5156static rtx
c2f47e15 5157expand_builtin_expect (tree exp, rtx target)
89cfe6e5 5158{
1e4adcfc 5159 tree arg;
89cfe6e5 5160
c2f47e15 5161 if (call_expr_nargs (exp) < 2)
89cfe6e5 5162 return const0_rtx;
c2f47e15 5163 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 5164
c2f47e15 5165 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 5166 /* When guessing was done, the hints should be already stripped away. */
07311427 5167 gcc_assert (!flag_guess_branch_prob
852f689e 5168 || optimize == 0 || seen_error ());
89cfe6e5 5169 return target;
5170}
689df48e 5171
fca0886c 5172/* Expand a call to __builtin_assume_aligned. We just return our first
5173 argument as the builtin_assume_aligned semantic should've been already
5174 executed by CCP. */
5175
5176static rtx
5177expand_builtin_assume_aligned (tree exp, rtx target)
5178{
5179 if (call_expr_nargs (exp) < 2)
5180 return const0_rtx;
5181 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5182 EXPAND_NORMAL);
5183 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5184 && (call_expr_nargs (exp) < 3
5185 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5186 return target;
5187}
5188
c22de3f0 5189void
aecda0d6 5190expand_builtin_trap (void)
a0ef1725 5191{
4db8dd0c 5192 if (targetm.have_trap ())
f73960eb 5193 {
4db8dd0c 5194 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 5195 /* For trap insns when not accumulating outgoing args force
5196 REG_ARGS_SIZE note to prevent crossjumping of calls with
5197 different args sizes. */
5198 if (!ACCUMULATE_OUTGOING_ARGS)
f6a1fc98 5199 add_args_size_note (insn, stack_pointer_delta);
f73960eb 5200 }
a0ef1725 5201 else
61ffc71a 5202 {
5203 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5204 tree call_expr = build_call_expr (fn, 0);
5205 expand_call (call_expr, NULL_RTX, false);
5206 }
5207
a0ef1725 5208 emit_barrier ();
5209}
78a74442 5210
/* Expand a call to __builtin_unreachable.  Nothing is emitted except a
   barrier noting that control flow never passes this point.

   The program being compiled is responsible for guaranteeing that
   control never actually reaches __builtin_unreachable.  */

static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
5221
c2f47e15 5222/* Expand EXP, a call to fabs, fabsf or fabsl.
5223 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 5224 the function inline. If convenient, the result should be placed
5225 in TARGET. SUBTARGET may be used as the target for computing
5226 the operand. */
5227
5228static rtx
c2f47e15 5229expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 5230{
3754d046 5231 machine_mode mode;
78a74442 5232 tree arg;
5233 rtx op0;
5234
c2f47e15 5235 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5236 return NULL_RTX;
78a74442 5237
c2f47e15 5238 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 5239 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 5240 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 5241 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 5242 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5243}
5244
c2f47e15 5245/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 5246 Return NULL is a normal call should be emitted rather than expanding the
5247 function inline. If convenient, the result should be placed in TARGET.
5248 SUBTARGET may be used as the target for computing the operand. */
5249
5250static rtx
c2f47e15 5251expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 5252{
5253 rtx op0, op1;
5254 tree arg;
5255
c2f47e15 5256 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5257 return NULL_RTX;
270436f3 5258
c2f47e15 5259 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 5260 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 5261
c2f47e15 5262 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 5263 op1 = expand_normal (arg);
270436f3 5264
5265 return expand_copysign (op0, op1, target);
5266}
5267
ac8fb6db 5268/* Expand a call to __builtin___clear_cache. */
5269
5270static rtx
32e17df0 5271expand_builtin___clear_cache (tree exp)
ac8fb6db 5272{
32e17df0 5273 if (!targetm.code_for_clear_cache)
5274 {
ac8fb6db 5275#ifdef CLEAR_INSN_CACHE
32e17df0 5276 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5277 does something. Just do the default expansion to a call to
5278 __clear_cache(). */
5279 return NULL_RTX;
ac8fb6db 5280#else
32e17df0 5281 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5282 does nothing. There is no need to call it. Do nothing. */
5283 return const0_rtx;
ac8fb6db 5284#endif /* CLEAR_INSN_CACHE */
32e17df0 5285 }
5286
ac8fb6db 5287 /* We have a "clear_cache" insn, and it will handle everything. */
5288 tree begin, end;
5289 rtx begin_rtx, end_rtx;
ac8fb6db 5290
5291 /* We must not expand to a library call. If we did, any
5292 fallback library function in libgcc that might contain a call to
5293 __builtin___clear_cache() would recurse infinitely. */
5294 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5295 {
5296 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5297 return const0_rtx;
5298 }
5299
32e17df0 5300 if (targetm.have_clear_cache ())
ac8fb6db 5301 {
8786db1e 5302 struct expand_operand ops[2];
ac8fb6db 5303
5304 begin = CALL_EXPR_ARG (exp, 0);
5305 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5306
5307 end = CALL_EXPR_ARG (exp, 1);
5308 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5309
8786db1e 5310 create_address_operand (&ops[0], begin_rtx);
5311 create_address_operand (&ops[1], end_rtx);
32e17df0 5312 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 5313 return const0_rtx;
ac8fb6db 5314 }
5315 return const0_rtx;
ac8fb6db 5316}
5317
4ee9c684 5318/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5319
5320static rtx
5321round_trampoline_addr (rtx tramp)
5322{
5323 rtx temp, addend, mask;
5324
5325 /* If we don't need too much alignment, we'll have been guaranteed
5326 proper alignment by get_trampoline_type. */
5327 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5328 return tramp;
5329
5330 /* Round address up to desired boundary. */
5331 temp = gen_reg_rtx (Pmode);
0359f9f5 5332 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5333 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 5334
5335 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5336 temp, 0, OPTAB_LIB_WIDEN);
5337 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5338 temp, 0, OPTAB_LIB_WIDEN);
5339
5340 return tramp;
5341}
5342
5343static rtx
c307f106 5344expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 5345{
5346 tree t_tramp, t_func, t_chain;
82c7907c 5347 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 5348
c2f47e15 5349 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 5350 POINTER_TYPE, VOID_TYPE))
5351 return NULL_RTX;
5352
c2f47e15 5353 t_tramp = CALL_EXPR_ARG (exp, 0);
5354 t_func = CALL_EXPR_ARG (exp, 1);
5355 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 5356
8ec3c5c2 5357 r_tramp = expand_normal (t_tramp);
82c7907c 5358 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5359 MEM_NOTRAP_P (m_tramp) = 1;
5360
c307f106 5361 /* If ONSTACK, the TRAMP argument should be the address of a field
5362 within the local function's FRAME decl. Either way, let's see if
5363 we can fill in the MEM_ATTRs for this memory. */
82c7907c 5364 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 5365 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 5366
c307f106 5367 /* Creator of a heap trampoline is responsible for making sure the
5368 address is aligned to at least STACK_BOUNDARY. Normally malloc
5369 will ensure this anyhow. */
82c7907c 5370 tmp = round_trampoline_addr (r_tramp);
5371 if (tmp != r_tramp)
5372 {
5373 m_tramp = change_address (m_tramp, BLKmode, tmp);
5374 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 5375 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 5376 }
5377
5378 /* The FUNC argument should be the address of the nested function.
5379 Extract the actual function decl to pass to the hook. */
5380 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5381 t_func = TREE_OPERAND (t_func, 0);
5382 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5383
8ec3c5c2 5384 r_chain = expand_normal (t_chain);
4ee9c684 5385
5386 /* Generate insns to initialize the trampoline. */
82c7907c 5387 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 5388
c307f106 5389 if (onstack)
5390 {
5391 trampolines_created = 1;
8bc8a8f4 5392
a27e3913 5393 if (targetm.calls.custom_function_descriptors != 0)
5394 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5395 "trampoline generated for nested function %qD", t_func);
c307f106 5396 }
8bc8a8f4 5397
4ee9c684 5398 return const0_rtx;
5399}
5400
5401static rtx
c2f47e15 5402expand_builtin_adjust_trampoline (tree exp)
4ee9c684 5403{
5404 rtx tramp;
5405
c2f47e15 5406 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 5407 return NULL_RTX;
5408
c2f47e15 5409 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 5410 tramp = round_trampoline_addr (tramp);
82c7907c 5411 if (targetm.calls.trampoline_adjust_address)
5412 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 5413
5414 return tramp;
5415}
5416
a27e3913 5417/* Expand a call to the builtin descriptor initialization routine.
5418 A descriptor is made up of a couple of pointers to the static
5419 chain and the code entry in this order. */
5420
5421static rtx
5422expand_builtin_init_descriptor (tree exp)
5423{
5424 tree t_descr, t_func, t_chain;
5425 rtx m_descr, r_descr, r_func, r_chain;
5426
5427 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5428 VOID_TYPE))
5429 return NULL_RTX;
5430
5431 t_descr = CALL_EXPR_ARG (exp, 0);
5432 t_func = CALL_EXPR_ARG (exp, 1);
5433 t_chain = CALL_EXPR_ARG (exp, 2);
5434
5435 r_descr = expand_normal (t_descr);
5436 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5437 MEM_NOTRAP_P (m_descr) = 1;
5438
5439 r_func = expand_normal (t_func);
5440 r_chain = expand_normal (t_chain);
5441
5442 /* Generate insns to initialize the descriptor. */
5443 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5444 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5445 POINTER_SIZE / BITS_PER_UNIT), r_func);
5446
5447 return const0_rtx;
5448}
5449
5450/* Expand a call to the builtin descriptor adjustment routine. */
5451
5452static rtx
5453expand_builtin_adjust_descriptor (tree exp)
5454{
5455 rtx tramp;
5456
5457 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5458 return NULL_RTX;
5459
5460 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5461
5462 /* Unalign the descriptor to allow runtime identification. */
5463 tramp = plus_constant (ptr_mode, tramp,
5464 targetm.calls.custom_function_descriptors);
5465
5466 return force_operand (tramp, NULL_RTX);
5467}
5468
93f564d6 5469/* Expand the call EXP to the built-in signbit, signbitf or signbitl
5470 function. The function first checks whether the back end provides
5471 an insn to implement signbit for the respective mode. If not, it
5472 checks whether the floating point format of the value is such that
10902624 5473 the sign bit can be extracted. If that is not the case, error out.
5474 EXP is the expression that is a call to the builtin function; if
5475 convenient, the result should be placed in TARGET. */
27f261ef 5476static rtx
5477expand_builtin_signbit (tree exp, rtx target)
5478{
5479 const struct real_format *fmt;
299dd9fa 5480 scalar_float_mode fmode;
f77c4496 5481 scalar_int_mode rmode, imode;
c2f47e15 5482 tree arg;
ca4f1f5b 5483 int word, bitpos;
27eda240 5484 enum insn_code icode;
27f261ef 5485 rtx temp;
389dd41b 5486 location_t loc = EXPR_LOCATION (exp);
27f261ef 5487
c2f47e15 5488 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5489 return NULL_RTX;
27f261ef 5490
c2f47e15 5491 arg = CALL_EXPR_ARG (exp, 0);
299dd9fa 5492 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
03b7a719 5493 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
27f261ef 5494 fmt = REAL_MODE_FORMAT (fmode);
5495
93f564d6 5496 arg = builtin_save_expr (arg);
5497
5498 /* Expand the argument yielding a RTX expression. */
5499 temp = expand_normal (arg);
5500
5501 /* Check if the back end provides an insn that handles signbit for the
5502 argument's mode. */
d6bf3b14 5503 icode = optab_handler (signbit_optab, fmode);
27eda240 5504 if (icode != CODE_FOR_nothing)
93f564d6 5505 {
1e0c0b35 5506 rtx_insn *last = get_last_insn ();
93f564d6 5507 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 5508 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5509 return target;
5510 delete_insns_since (last);
93f564d6 5511 }
5512
27f261ef 5513 /* For floating point formats without a sign bit, implement signbit
5514 as "ARG < 0.0". */
8d564692 5515 bitpos = fmt->signbit_ro;
ca4f1f5b 5516 if (bitpos < 0)
27f261ef 5517 {
5518 /* But we can't do this if the format supports signed zero. */
10902624 5519 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
27f261ef 5520
389dd41b 5521 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5522 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5523 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5524 }
5525
ca4f1f5b 5526 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5527 {
2cf1bb25 5528 imode = int_mode_for_mode (fmode).require ();
ca4f1f5b 5529 temp = gen_lowpart (imode, temp);
24fd4260 5530 }
5531 else
5532 {
ca4f1f5b 5533 imode = word_mode;
5534 /* Handle targets with different FP word orders. */
5535 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5536 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5537 else
a0c938f0 5538 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5539 temp = operand_subword_force (temp, word, fmode);
5540 bitpos = bitpos % BITS_PER_WORD;
5541 }
5542
44b0f1d0 5543 /* Force the intermediate word_mode (or narrower) result into a
5544 register. This avoids attempting to create paradoxical SUBREGs
5545 of floating point modes below. */
5546 temp = force_reg (imode, temp);
5547
ca4f1f5b 5548 /* If the bitpos is within the "result mode" lowpart, the operation
5549 can be implement with a single bitwise AND. Otherwise, we need
5550 a right shift and an AND. */
5551
5552 if (bitpos < GET_MODE_BITSIZE (rmode))
5553 {
796b6678 5554 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5555
4a46f016 5556 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5557 temp = gen_lowpart (rmode, temp);
24fd4260 5558 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5559 immed_wide_int_const (mask, rmode),
ca4f1f5b 5560 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5561 }
ca4f1f5b 5562 else
5563 {
5564 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5565 significant bit, then truncate the result to the desired mode
ca4f1f5b 5566 and mask just this bit. */
f5ff0b21 5567 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5568 temp = gen_lowpart (rmode, temp);
5569 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5570 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5571 }
5572
27f261ef 5573 return temp;
5574}
73673831 5575
5576/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5577 call. EXP is the call. FN is the
73673831 5578 identificator of the actual function. IGNORE is nonzero if the
5579 value is to be ignored. */
5580
5581static rtx
c2f47e15 5582expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5583{
5584 tree id, decl;
5585 tree call;
5586
5587 /* If we are not profiling, just call the function. */
5588 if (!profile_arc_flag)
5589 return NULL_RTX;
5590
5591 /* Otherwise call the wrapper. This should be equivalent for the rest of
5592 compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5593 code necessary for keeping the profiling sane. */
73673831 5594
5595 switch (DECL_FUNCTION_CODE (fn))
5596 {
5597 case BUILT_IN_FORK:
5598 id = get_identifier ("__gcov_fork");
5599 break;
5600
5601 case BUILT_IN_EXECL:
5602 id = get_identifier ("__gcov_execl");
5603 break;
5604
5605 case BUILT_IN_EXECV:
5606 id = get_identifier ("__gcov_execv");
5607 break;
5608
5609 case BUILT_IN_EXECLP:
5610 id = get_identifier ("__gcov_execlp");
5611 break;
5612
5613 case BUILT_IN_EXECLE:
5614 id = get_identifier ("__gcov_execle");
5615 break;
5616
5617 case BUILT_IN_EXECVP:
5618 id = get_identifier ("__gcov_execvp");
5619 break;
5620
5621 case BUILT_IN_EXECVE:
5622 id = get_identifier ("__gcov_execve");
5623 break;
5624
5625 default:
64db345d 5626 gcc_unreachable ();
73673831 5627 }
5628
e60a6f7b 5629 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5630 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5631 DECL_EXTERNAL (decl) = 1;
5632 TREE_PUBLIC (decl) = 1;
5633 DECL_ARTIFICIAL (decl) = 1;
5634 TREE_NOTHROW (decl) = 1;
e82d310b 5635 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5636 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5637 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5638 return expand_call (call, target, ignore);
c2f47e15 5639 }
48e1416a 5640
b6a5fc45 5641
5642\f
3e272de8 5643/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5644 the pointer in these functions is void*, the tree optimizers may remove
5645 casts. The mode computed in expand_builtin isn't reliable either, due
5646 to __sync_bool_compare_and_swap.
5647
5648 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5649 group of builtins. This gives us log2 of the mode size. */
5650
3754d046 5651static inline machine_mode
3e272de8 5652get_builtin_sync_mode (int fcode_diff)
5653{
ad3a13b5 5654 /* The size is not negotiable, so ask not to get BLKmode in return
5655 if the target indicates that a smaller size would be better. */
517be012 5656 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
3e272de8 5657}
5658
041e0215 5659/* Expand the memory expression LOC and return the appropriate memory operand
5660 for the builtin_sync operations. */
5661
5662static rtx
3754d046 5663get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5664{
5665 rtx addr, mem;
5666
7f4d56ad 5667 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5668 addr = convert_memory_address (Pmode, addr);
041e0215 5669
5670 /* Note that we explicitly do not want any alias information for this
5671 memory, so that we kill all other live memories. Otherwise we don't
5672 satisfy the full barrier semantics of the intrinsic. */
5673 mem = validize_mem (gen_rtx_MEM (mode, addr));
5674
153c3b50 5675 /* The alignment needs to be at least according to that of the mode. */
5676 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5677 get_pointer_alignment (loc)));
c94cfd1c 5678 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5679 MEM_VOLATILE_P (mem) = 1;
5680
5681 return mem;
5682}
5683
1cd6e20d 5684/* Make sure an argument is in the right mode.
5685 EXP is the tree argument.
5686 MODE is the mode it should be in. */
5687
5688static rtx
3754d046 5689expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5690{
5691 rtx val;
3754d046 5692 machine_mode old_mode;
1cd6e20d 5693
5694 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5695 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5696 of CONST_INTs, where we know the old_mode only from the call argument. */
5697
5698 old_mode = GET_MODE (val);
5699 if (old_mode == VOIDmode)
5700 old_mode = TYPE_MODE (TREE_TYPE (exp));
5701 val = convert_modes (mode, old_mode, val, 1);
5702 return val;
5703}
5704
5705
b6a5fc45 5706/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5707 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5708 that corresponds to the arithmetic or logical operation from the name;
5709 an exception here is that NOT actually means NAND. TARGET is an optional
5710 place for us to store the results; AFTER is true if this is the
1cd6e20d 5711 fetch_and_xxx form. */
b6a5fc45 5712
5713static rtx
3754d046 5714expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5715 enum rtx_code code, bool after,
1cd6e20d 5716 rtx target)
b6a5fc45 5717{
041e0215 5718 rtx val, mem;
e60a6f7b 5719 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5720
cf73e559 5721 if (code == NOT && warn_sync_nand)
5722 {
5723 tree fndecl = get_callee_fndecl (exp);
5724 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5725
5726 static bool warned_f_a_n, warned_n_a_f;
5727
5728 switch (fcode)
5729 {
2797f13a 5730 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5731 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5732 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5733 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5734 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5735 if (warned_f_a_n)
5736 break;
5737
b9a16870 5738 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5739 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5740 warned_f_a_n = true;
5741 break;
5742
2797f13a 5743 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5744 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5745 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5746 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5747 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5748 if (warned_n_a_f)
5749 break;
5750
b9a16870 5751 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5752 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5753 warned_n_a_f = true;
5754 break;
5755
5756 default:
5757 gcc_unreachable ();
5758 }
5759 }
5760
b6a5fc45 5761 /* Expand the operands. */
c2f47e15 5762 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5763 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5764
a372f7ca 5765 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 5766 after);
b6a5fc45 5767}
5768
5769/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5770 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5771 true if this is the boolean form. TARGET is a place for us to store the
5772 results; this is NOT optional if IS_BOOL is true. */
5773
5774static rtx
3754d046 5775expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5776 bool is_bool, rtx target)
b6a5fc45 5777{
041e0215 5778 rtx old_val, new_val, mem;
ba885f6a 5779 rtx *pbool, *poval;
b6a5fc45 5780
5781 /* Expand the operands. */
c2f47e15 5782 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5783 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5784 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5785
ba885f6a 5786 pbool = poval = NULL;
5787 if (target != const0_rtx)
5788 {
5789 if (is_bool)
5790 pbool = &target;
5791 else
5792 poval = &target;
5793 }
5794 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 5795 false, MEMMODEL_SYNC_SEQ_CST,
5796 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 5797 return NULL_RTX;
c2f47e15 5798
1cd6e20d 5799 return target;
b6a5fc45 5800}
5801
5802/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5803 general form is actually an atomic exchange, and some targets only
5804 support a reduced form with the second argument being a constant 1.
48e1416a 5805 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5806 the results. */
b6a5fc45 5807
5808static rtx
3754d046 5809expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5810 rtx target)
b6a5fc45 5811{
041e0215 5812 rtx val, mem;
b6a5fc45 5813
5814 /* Expand the operands. */
c2f47e15 5815 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5816 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5817
7821cde1 5818 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5819}
5820
5821/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5822
5823static void
3754d046 5824expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5825{
5826 rtx mem;
5827
5828 /* Expand the operands. */
5829 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5830
a372f7ca 5831 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5832}
5833
5834/* Given an integer representing an ``enum memmodel'', verify its
5835 correctness and return the memory model enum. */
5836
5837static enum memmodel
5838get_memmodel (tree exp)
5839{
5840 rtx op;
7f738025 5841 unsigned HOST_WIDE_INT val;
2cb724f9 5842 source_location loc
5843 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5844
5845 /* If the parameter is not a constant, it's a run time value so we'll just
5846 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5847 if (TREE_CODE (exp) != INTEGER_CST)
5848 return MEMMODEL_SEQ_CST;
5849
5850 op = expand_normal (exp);
7f738025 5851
5852 val = INTVAL (op);
5853 if (targetm.memmodel_check)
5854 val = targetm.memmodel_check (val);
5855 else if (val & ~MEMMODEL_MASK)
5856 {
2cb724f9 5857 warning_at (loc, OPT_Winvalid_memory_model,
5858 "unknown architecture specifier in memory model to builtin");
7f738025 5859 return MEMMODEL_SEQ_CST;
5860 }
5861
a372f7ca 5862 /* Should never see a user explicit SYNC memodel model, so >= LAST works. */
5863 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5864 {
2cb724f9 5865 warning_at (loc, OPT_Winvalid_memory_model,
5866 "invalid memory model argument to builtin");
1cd6e20d 5867 return MEMMODEL_SEQ_CST;
5868 }
7f738025 5869
3070f133 5870 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5871 be conservative and promote consume to acquire. */
5872 if (val == MEMMODEL_CONSUME)
5873 val = MEMMODEL_ACQUIRE;
5874
7f738025 5875 return (enum memmodel) val;
1cd6e20d 5876}
5877
5878/* Expand the __atomic_exchange intrinsic:
5879 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5880 EXP is the CALL_EXPR.
5881 TARGET is an optional place for us to store the results. */
5882
5883static rtx
3754d046 5884expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5885{
5886 rtx val, mem;
5887 enum memmodel model;
5888
5889 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5890
5891 if (!flag_inline_atomics)
5892 return NULL_RTX;
5893
5894 /* Expand the operands. */
5895 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5896 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5897
7821cde1 5898 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5899}
5900
5901/* Expand the __atomic_compare_exchange intrinsic:
5902 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5903 TYPE desired, BOOL weak,
5904 enum memmodel success,
5905 enum memmodel failure)
5906 EXP is the CALL_EXPR.
5907 TARGET is an optional place for us to store the results. */
5908
5909static rtx
3754d046 5910expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5911 rtx target)
5912{
1e0c0b35 5913 rtx expect, desired, mem, oldval;
5914 rtx_code_label *label;
1cd6e20d 5915 enum memmodel success, failure;
5916 tree weak;
5917 bool is_weak;
2cb724f9 5918 source_location loc
5919 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5920
5921 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5922 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5923
086f4e33 5924 if (failure > success)
5925 {
2cb724f9 5926 warning_at (loc, OPT_Winvalid_memory_model,
5927 "failure memory model cannot be stronger than success "
5928 "memory model for %<__atomic_compare_exchange%>");
086f4e33 5929 success = MEMMODEL_SEQ_CST;
5930 }
5931
a372f7ca 5932 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5933 {
2cb724f9 5934 warning_at (loc, OPT_Winvalid_memory_model,
5935 "invalid failure memory model for "
5936 "%<__atomic_compare_exchange%>");
086f4e33 5937 failure = MEMMODEL_SEQ_CST;
5938 success = MEMMODEL_SEQ_CST;
1cd6e20d 5939 }
5940
086f4e33 5941
1cd6e20d 5942 if (!flag_inline_atomics)
5943 return NULL_RTX;
5944
5945 /* Expand the operands. */
5946 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5947
5948 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5949 expect = convert_memory_address (Pmode, expect);
c401b131 5950 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5951 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5952
5953 weak = CALL_EXPR_ARG (exp, 3);
5954 is_weak = false;
e913b5cd 5955 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5956 is_weak = true;
5957
d86e3752 5958 if (target == const0_rtx)
5959 target = NULL;
d86e3752 5960
3c29a9ea 5961 /* Lest the rtl backend create a race condition with an imporoper store
5962 to memory, always create a new pseudo for OLDVAL. */
5963 oldval = NULL;
5964
5965 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5966 is_weak, success, failure))
1cd6e20d 5967 return NULL_RTX;
5968
d86e3752 5969 /* Conditionally store back to EXPECT, lest we create a race condition
5970 with an improper store to memory. */
5971 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5972 the normal case where EXPECT is totally private, i.e. a register. At
5973 which point the store can be unconditional. */
5974 label = gen_label_rtx ();
62589f76 5975 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5976 GET_MODE (target), 1, label);
d86e3752 5977 emit_move_insn (expect, oldval);
5978 emit_label (label);
c401b131 5979
1cd6e20d 5980 return target;
5981}
5982
5a5ef659 5983/* Helper function for expand_ifn_atomic_compare_exchange - expand
5984 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5985 call. The weak parameter must be dropped to match the expected parameter
5986 list and the expected argument changed from value to pointer to memory
5987 slot. */
5988
static void
expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
{
  unsigned int z;
  vec<tree, va_gc> *vec;

  /* The library function takes five arguments: ptr, &expected, desired,
     success memory model, failure memory model.  */
  vec_alloc (vec, 5);
  vec->quick_push (gimple_call_arg (call, 0));
  tree expected = gimple_call_arg (call, 1);
  /* Spill the expected value to a stack slot, since the library interface
     takes it by address (and writes the old value back through it).  */
  rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
				      TREE_TYPE (expected));
  rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
  if (expd != x)
    emit_move_insn (x, expd);
  tree v = make_tree (TREE_TYPE (expected), x);
  vec->quick_push (build1 (ADDR_EXPR,
			   build_pointer_type (TREE_TYPE (expected)), v));
  vec->quick_push (gimple_call_arg (call, 2));
  /* Skip the boolean weak parameter.  */
  for (z = 4; z < 6; z++)
    vec->quick_push (gimple_call_arg (call, z));
  /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
  unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
  gcc_assert (bytes_log2 < 5);
  /* Select the _N variant matching the access size.  */
  built_in_function fncode
    = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
			   + bytes_log2);
  tree fndecl = builtin_decl_explicit (fncode);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
		    fndecl);
  tree exp = build_call_vec (boolean_type_node, fn, vec);
  tree lhs = gimple_call_lhs (call);
  rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
  if (lhs)
    {
      /* The internal function's result is a complex pair: the boolean
	 success flag and the old value read back from the stack slot.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      x = force_reg (mode, x);
      write_complex_part (target, boolret, true);
      write_complex_part (target, x, false);
    }
}
6032
6033/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6034
void
expand_ifn_atomic_compare_exchange (gcall *call)
{
  /* Argument 3 encodes the access size in its low byte; bit 256 holds
     the "weak" flag (extracted further below).  */
  int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
  gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
  machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
  rtx expect, desired, mem, oldval, boolret;
  enum memmodel success, failure;
  tree lhs;
  bool is_weak;
  source_location loc
    = expansion_point_location_if_in_system_header (gimple_location (call));

  success = get_memmodel (gimple_call_arg (call, 4));
  failure = get_memmodel (gimple_call_arg (call, 5));

  /* Diagnose invalid memory-model combinations and degrade to seq_cst,
     mirroring the checks done for the user-level builtin.  */
  if (failure > success)
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "failure memory model cannot be stronger than success "
		  "memory model for %<__atomic_compare_exchange%>");
      success = MEMMODEL_SEQ_CST;
    }

  if (is_mm_release (failure) || is_mm_acq_rel (failure))
    {
      warning_at (loc, OPT_Winvalid_memory_model,
		  "invalid failure memory model for "
		  "%<__atomic_compare_exchange%>");
      failure = MEMMODEL_SEQ_CST;
      success = MEMMODEL_SEQ_CST;
    }

  /* Without inlining of atomics, lower to an external library call.  */
  if (!flag_inline_atomics)
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  /* Expand the operands.  */
  mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);

  expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
  desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);

  is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;

  boolret = NULL;
  oldval = NULL;

  /* If no suitable instruction sequence exists, fall back to the
     library call as well.  */
  if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
				       is_weak, success, failure))
    {
      expand_ifn_atomic_compare_exchange_into_call (call, mode);
      return;
    }

  lhs = gimple_call_lhs (call);
  if (lhs)
    {
      /* Store the pair (success flag, old value) into the complex lhs.  */
      rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
      if (GET_MODE (boolret) != mode)
	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
      write_complex_part (target, boolret, true);
      write_complex_part (target, oldval, false);
    }
}
6102
1cd6e20d 6103/* Expand the __atomic_load intrinsic:
6104 TYPE __atomic_load (TYPE *object, enum memmodel)
6105 EXP is the CALL_EXPR.
6106 TARGET is an optional place for us to store the results. */
6107
6108static rtx
3754d046 6109expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 6110{
6111 rtx mem;
6112 enum memmodel model;
6113
6114 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 6115 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 6116 {
2cb724f9 6117 source_location loc
6118 = expansion_point_location_if_in_system_header (input_location);
6119 warning_at (loc, OPT_Winvalid_memory_model,
6120 "invalid memory model for %<__atomic_load%>");
086f4e33 6121 model = MEMMODEL_SEQ_CST;
1cd6e20d 6122 }
6123
6124 if (!flag_inline_atomics)
6125 return NULL_RTX;
6126
6127 /* Expand the operand. */
6128 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6129
6130 return expand_atomic_load (target, mem, model);
6131}
6132
6133
6134/* Expand the __atomic_store intrinsic:
6135 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6136 EXP is the CALL_EXPR.
6137 TARGET is an optional place for us to store the results. */
6138
6139static rtx
3754d046 6140expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 6141{
6142 rtx mem, val;
6143 enum memmodel model;
6144
6145 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 6146 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6147 || is_mm_release (model)))
1cd6e20d 6148 {
2cb724f9 6149 source_location loc
6150 = expansion_point_location_if_in_system_header (input_location);
6151 warning_at (loc, OPT_Winvalid_memory_model,
6152 "invalid memory model for %<__atomic_store%>");
086f4e33 6153 model = MEMMODEL_SEQ_CST;
1cd6e20d 6154 }
6155
6156 if (!flag_inline_atomics)
6157 return NULL_RTX;
6158
6159 /* Expand the operands. */
6160 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6161 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6162
8808bf16 6163 return expand_atomic_store (mem, val, model, false);
1cd6e20d 6164}
6165
6166/* Expand the __atomic_fetch_XXX intrinsic:
6167 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6168 EXP is the CALL_EXPR.
6169 TARGET is an optional place for us to store the results.
6170 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
6171 FETCH_AFTER is true if returning the result of the operation.
6172 FETCH_AFTER is false if returning the value before the operation.
6173 IGNORE is true if the result is not used.
6174 EXT_CALL is the correct builtin for an external call if this cannot be
6175 resolved to an instruction sequence. */
6176
6177static rtx
3754d046 6178expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 6179 enum rtx_code code, bool fetch_after,
6180 bool ignore, enum built_in_function ext_call)
6181{
6182 rtx val, mem, ret;
6183 enum memmodel model;
6184 tree fndecl;
6185 tree addr;
6186
6187 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6188
6189 /* Expand the operands. */
6190 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6191 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6192
6193 /* Only try generating instructions if inlining is turned on. */
6194 if (flag_inline_atomics)
6195 {
6196 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6197 if (ret)
6198 return ret;
6199 }
6200
6201 /* Return if a different routine isn't needed for the library call. */
6202 if (ext_call == BUILT_IN_NONE)
6203 return NULL_RTX;
6204
6205 /* Change the call to the specified function. */
6206 fndecl = get_callee_fndecl (exp);
6207 addr = CALL_EXPR_FN (exp);
6208 STRIP_NOPS (addr);
6209
6210 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 6211 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 6212
a2f95d97 6213 /* If we will emit code after the call, the call can not be a tail call.
6214 If it is emitted as a tail call, a barrier is emitted after it, and
6215 then all trailing code is removed. */
6216 if (!ignore)
6217 CALL_EXPR_TAILCALL (exp) = 0;
6218
1cd6e20d 6219 /* Expand the call here so we can emit trailing code. */
6220 ret = expand_call (exp, target, ignore);
6221
6222 /* Replace the original function just in case it matters. */
6223 TREE_OPERAND (addr, 0) = fndecl;
6224
6225 /* Then issue the arithmetic correction to return the right result. */
6226 if (!ignore)
c449f851 6227 {
6228 if (code == NOT)
6229 {
6230 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6231 OPTAB_LIB_WIDEN);
6232 ret = expand_simple_unop (mode, NOT, ret, target, true);
6233 }
6234 else
6235 ret = expand_simple_binop (mode, code, ret, val, target, true,
6236 OPTAB_LIB_WIDEN);
6237 }
1cd6e20d 6238 return ret;
6239}
6240
9c1a31e4 6241/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6242
void
expand_ifn_atomic_bit_test_and (gcall *call)
{
  tree ptr = gimple_call_arg (call, 0);
  tree bit = gimple_call_arg (call, 1);
  tree flag = gimple_call_arg (call, 2);
  tree lhs = gimple_call_lhs (call);
  enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
  machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
  enum rtx_code code;
  optab optab;
  struct expand_operand ops[5];

  gcc_assert (flag_inline_atomics);

  /* An optional fourth argument carries an explicit memory model.  */
  if (gimple_call_num_args (call) == 4)
    model = get_memmodel (gimple_call_arg (call, 3));

  rtx mem = get_builtin_sync_mem (ptr, mode);
  rtx val = expand_expr_force_mode (bit, mode);

  /* Map the internal function to the rtx operation and its optab.  */
  switch (gimple_call_internal_fn (call))
    {
    case IFN_ATOMIC_BIT_TEST_AND_SET:
      code = IOR;
      optab = atomic_bit_test_and_set_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
      code = XOR;
      optab = atomic_bit_test_and_complement_optab;
      break;
    case IFN_ATOMIC_BIT_TEST_AND_RESET:
      code = AND;
      optab = atomic_bit_test_and_reset_optab;
      break;
    default:
      gcc_unreachable ();
    }

  /* If the result is unused, emit a plain atomic fetch-op on the
     computed mask (inverted for the AND/reset case) and be done.  */
  if (lhs == NULL_TREE)
    {
      val = expand_simple_binop (mode, ASHIFT, const1_rtx,
				 val, NULL_RTX, true, OPTAB_DIRECT);
      if (code == AND)
	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
      expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
      return;
    }

  /* First try the target's dedicated bit-test-and pattern.  */
  rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
  enum insn_code icode = direct_optab_handler (optab, mode);
  gcc_assert (icode != CODE_FOR_nothing);
  create_output_operand (&ops[0], target, mode);
  create_fixed_operand (&ops[1], mem);
  create_convert_operand_to (&ops[2], val, mode, true);
  create_integer_operand (&ops[3], model);
  create_integer_operand (&ops[4], integer_onep (flag));
  if (maybe_expand_insn (icode, 5, ops))
    return;

  /* Fall back: do an atomic fetch-op with the bit mask and extract the
     tested bit from the fetched value by hand.  */
  rtx bitval = val;
  val = expand_simple_binop (mode, ASHIFT, const1_rtx,
			     val, NULL_RTX, true, OPTAB_DIRECT);
  rtx maskval = val;
  if (code == AND)
    val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
  rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
				       code, model, false);
  /* FLAG selects whether the result is normalized to 0/1 (shift the
     tested bit down) or left as the masked bit value.  */
  if (integer_onep (flag))
    {
      result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
				    NULL_RTX, true, OPTAB_DIRECT);
      result = expand_simple_binop (mode, AND, result, const1_rtx, target,
				    true, OPTAB_DIRECT);
    }
  else
    result = expand_simple_binop (mode, AND, result, maskval, target, true,
				  OPTAB_DIRECT);
  if (result != target)
    emit_move_insn (target, result);
}
6324
10b744a3 6325/* Expand an atomic clear operation.
6326 void _atomic_clear (BOOL *obj, enum memmodel)
6327 EXP is the call expression. */
6328
6329static rtx
6330expand_builtin_atomic_clear (tree exp)
6331{
3754d046 6332 machine_mode mode;
10b744a3 6333 rtx mem, ret;
6334 enum memmodel model;
6335
517be012 6336 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
10b744a3 6337 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6338 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6339
a372f7ca 6340 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 6341 {
2cb724f9 6342 source_location loc
6343 = expansion_point_location_if_in_system_header (input_location);
6344 warning_at (loc, OPT_Winvalid_memory_model,
6345 "invalid memory model for %<__atomic_store%>");
086f4e33 6346 model = MEMMODEL_SEQ_CST;
10b744a3 6347 }
6348
6349 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6350 Failing that, a store is issued by __atomic_store. The only way this can
6351 fail is if the bool type is larger than a word size. Unlikely, but
6352 handle it anyway for completeness. Assume a single threaded model since
6353 there is no atomic support in this case, and no barriers are required. */
6354 ret = expand_atomic_store (mem, const0_rtx, model, true);
6355 if (!ret)
6356 emit_move_insn (mem, const0_rtx);
6357 return const0_rtx;
6358}
6359
6360/* Expand an atomic test_and_set operation.
6361 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6362 EXP is the call expression. */
6363
6364static rtx
7821cde1 6365expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 6366{
7821cde1 6367 rtx mem;
10b744a3 6368 enum memmodel model;
3754d046 6369 machine_mode mode;
10b744a3 6370
517be012 6371 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
10b744a3 6372 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6373 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6374
7821cde1 6375 return expand_atomic_test_and_set (target, mem, model);
10b744a3 6376}
6377
6378
1cd6e20d 6379/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6380 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6381
static tree
fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
{
  int size;
  machine_mode mode;
  unsigned int mode_align, type_align;

  /* Without a constant size nothing can be decided at compile time.  */
  if (TREE_CODE (arg0) != INTEGER_CST)
    return NULL_TREE;

  /* We need a corresponding integer mode for the access to be lock-free.  */
  size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
  if (!int_mode_for_size (size, 0).exists (&mode))
    return boolean_false_node;

  mode_align = GET_MODE_ALIGNMENT (mode);

  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));

      /* Either this argument is null, or it's a fake pointer encoding
	 the alignment of the object.  */
      val = least_bit_hwi (val);
      val *= BITS_PER_UNIT;

      /* A null "pointer" or over-alignment both clamp to the mode's
	 natural alignment.  */
      if (val == 0 || mode_align < val)
	type_align = mode_align;
      else
	type_align = val;
    }
  else
    {
      tree ttype = TREE_TYPE (arg1);

      /* This function is usually invoked and folded immediately by the front
	 end before anything else has a chance to look at it.  The pointer
	 parameter at this point is usually cast to a void *, so check for that
	 and look past the cast.  */
      if (CONVERT_EXPR_P (arg1)
	  && POINTER_TYPE_P (ttype)
	  && VOID_TYPE_P (TREE_TYPE (ttype))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	arg1 = TREE_OPERAND (arg1, 0);

      ttype = TREE_TYPE (arg1);
      gcc_assert (POINTER_TYPE_P (ttype));

      /* Get the underlying type of the object.  */
      ttype = TREE_TYPE (ttype);
      type_align = TYPE_ALIGN (ttype);
    }

  /* If the object has smaller alignment, the lock free routines cannot
     be used.  */
  if (type_align < mode_align)
    return boolean_false_node;

  /* Check if a compare_and_swap pattern exists for the mode which represents
     the required size.  The pattern is not allowed to fail, so the existence
     of the pattern indicates support is present.  Also require that an
     atomic load exists for the required size.  */
  if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
    return boolean_true_node;
  else
    return boolean_false_node;
}
6449
6450/* Return true if the parameters to call EXP represent an object which will
6451 always generate lock free instructions. The first argument represents the
6452 size of the object, and the second parameter is a pointer to the object
6453 itself. If NULL is passed for the object, then the result is based on
6454 typical alignment for an object of the specified size. Otherwise return
6455 false. */
6456
6457static rtx
6458expand_builtin_atomic_always_lock_free (tree exp)
6459{
6460 tree size;
6461 tree arg0 = CALL_EXPR_ARG (exp, 0);
6462 tree arg1 = CALL_EXPR_ARG (exp, 1);
6463
6464 if (TREE_CODE (arg0) != INTEGER_CST)
6465 {
6466 error ("non-constant argument 1 to __atomic_always_lock_free");
6467 return const0_rtx;
6468 }
6469
6470 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 6471 if (size == boolean_true_node)
1cd6e20d 6472 return const1_rtx;
6473 return const0_rtx;
6474}
6475
/* Return one or zero if it can be determined that object ARG1 of size ARG0
   is lock free on this architecture.  */
6478
6479static tree
6480fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6481{
6482 if (!flag_inline_atomics)
6483 return NULL_TREE;
6484
6485 /* If it isn't always lock free, don't generate a result. */
06308d2a 6486 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6487 return boolean_true_node;
1cd6e20d 6488
6489 return NULL_TREE;
6490}
6491
/* Return true if the parameters to call EXP represent an object which will
   always generate lock free instructions.  The first argument represents the
   size of the object, and the second parameter is a pointer to the object
   itself.  If NULL is passed for the object, then the result is based on
   typical alignment for an object of the specified size.  Otherwise return
   NULL.  */
6498
6499static rtx
6500expand_builtin_atomic_is_lock_free (tree exp)
6501{
6502 tree size;
6503 tree arg0 = CALL_EXPR_ARG (exp, 0);
6504 tree arg1 = CALL_EXPR_ARG (exp, 1);
6505
6506 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6507 {
6508 error ("non-integer argument 1 to __atomic_is_lock_free");
6509 return NULL_RTX;
6510 }
6511
6512 if (!flag_inline_atomics)
6513 return NULL_RTX;
6514
6515 /* If the value is known at compile time, return the RTX for it. */
6516 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 6517 if (size == boolean_true_node)
1cd6e20d 6518 return const1_rtx;
6519
6520 return NULL_RTX;
6521}
6522
1cd6e20d 6523/* Expand the __atomic_thread_fence intrinsic:
6524 void __atomic_thread_fence (enum memmodel)
6525 EXP is the CALL_EXPR. */
6526
6527static void
6528expand_builtin_atomic_thread_fence (tree exp)
6529{
fe54c06b 6530 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6531 expand_mem_thread_fence (model);
1cd6e20d 6532}
6533
6534/* Expand the __atomic_signal_fence intrinsic:
6535 void __atomic_signal_fence (enum memmodel)
6536 EXP is the CALL_EXPR. */
6537
6538static void
6539expand_builtin_atomic_signal_fence (tree exp)
6540{
fe54c06b 6541 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6542 expand_mem_signal_fence (model);
b6a5fc45 6543}
6544
6545/* Expand the __sync_synchronize intrinsic. */
6546
6547static void
2797f13a 6548expand_builtin_sync_synchronize (void)
b6a5fc45 6549{
a372f7ca 6550 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 6551}
6552
badaa04c 6553static rtx
6554expand_builtin_thread_pointer (tree exp, rtx target)
6555{
6556 enum insn_code icode;
6557 if (!validate_arglist (exp, VOID_TYPE))
6558 return const0_rtx;
6559 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6560 if (icode != CODE_FOR_nothing)
6561 {
6562 struct expand_operand op;
3ed779c3 6563 /* If the target is not sutitable then create a new target. */
6564 if (target == NULL_RTX
6565 || !REG_P (target)
6566 || GET_MODE (target) != Pmode)
badaa04c 6567 target = gen_reg_rtx (Pmode);
6568 create_output_operand (&op, target, Pmode);
6569 expand_insn (icode, 1, &op);
6570 return target;
6571 }
6572 error ("__builtin_thread_pointer is not supported on this target");
6573 return const0_rtx;
6574}
6575
6576static void
6577expand_builtin_set_thread_pointer (tree exp)
6578{
6579 enum insn_code icode;
6580 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6581 return;
6582 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6583 if (icode != CODE_FOR_nothing)
6584 {
6585 struct expand_operand op;
6586 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6587 Pmode, EXPAND_NORMAL);
6f343c10 6588 create_input_operand (&op, val, Pmode);
badaa04c 6589 expand_insn (icode, 1, &op);
6590 return;
6591 }
6592 error ("__builtin_set_thread_pointer is not supported on this target");
6593}
6594
53800dbe 6595\f
0e80b01d 6596/* Emit code to restore the current value of stack. */
6597
6598static void
6599expand_stack_restore (tree var)
6600{
1e0c0b35 6601 rtx_insn *prev;
6602 rtx sa = expand_normal (var);
0e80b01d 6603
6604 sa = convert_memory_address (Pmode, sa);
6605
6606 prev = get_last_insn ();
6607 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 6608
6609 record_new_stack_level ();
6610
0e80b01d 6611 fixup_args_size_notes (prev, get_last_insn (), 0);
6612}
6613
0e80b01d 6614/* Emit code to save the current value of stack. */
6615
6616static rtx
6617expand_stack_save (void)
6618{
6619 rtx ret = NULL_RTX;
6620
0e80b01d 6621 emit_stack_save (SAVE_BLOCK, &ret);
6622 return ret;
6623}
6624
ca4c3545 6625
53800dbe 6626/* Expand an expression EXP that calls a built-in function,
6627 with result going to TARGET if that's convenient
6628 (and in mode MODE if that's convenient).
6629 SUBTARGET may be used as the target for computing one of EXP's operands.
6630 IGNORE is nonzero if the value is to be ignored. */
6631
6632rtx
3754d046 6633expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 6634 int ignore)
53800dbe 6635{
c6e6ecb1 6636 tree fndecl = get_callee_fndecl (exp);
53800dbe 6637 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 6638 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 6639 int flags;
53800dbe 6640
4e2f4ed5 6641 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6642 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6643
f9acf11a 6644 /* When ASan is enabled, we don't want to expand some memory/string
6645 builtins and rely on libsanitizer's hooks. This allows us to avoid
6646 redundant checks and be sure, that possible overflow will be detected
6647 by ASan. */
6648
6649 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6650 return expand_call (exp, target, ignore);
6651
53800dbe 6652 /* When not optimizing, generate calls to library functions for a certain
6653 set of builtins. */
cd9ff771 6654 if (!optimize
b6a5fc45 6655 && !called_as_built_in (fndecl)
73037a1e 6656 && fcode != BUILT_IN_FORK
6657 && fcode != BUILT_IN_EXECL
6658 && fcode != BUILT_IN_EXECV
6659 && fcode != BUILT_IN_EXECLP
6660 && fcode != BUILT_IN_EXECLE
6661 && fcode != BUILT_IN_EXECVP
6662 && fcode != BUILT_IN_EXECVE
2b34677f 6663 && !ALLOCA_FUNCTION_CODE_P (fcode)
058a1b7a 6664 && fcode != BUILT_IN_FREE
6665 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6666 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6667 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6668 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6669 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6670 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6671 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6672 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6673 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6674 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6675 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6676 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 6677 return expand_call (exp, target, ignore);
53800dbe 6678
8d6d7930 6679 /* The built-in function expanders test for target == const0_rtx
6680 to determine whether the function's result will be ignored. */
6681 if (ignore)
6682 target = const0_rtx;
6683
6684 /* If the result of a pure or const built-in function is ignored, and
6685 none of its arguments are volatile, we can avoid expanding the
6686 built-in call and just evaluate the arguments for side-effects. */
6687 if (target == const0_rtx
67fa4078 6688 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6689 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 6690 {
6691 bool volatilep = false;
6692 tree arg;
c2f47e15 6693 call_expr_arg_iterator iter;
8d6d7930 6694
c2f47e15 6695 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6696 if (TREE_THIS_VOLATILE (arg))
8d6d7930 6697 {
6698 volatilep = true;
6699 break;
6700 }
6701
6702 if (! volatilep)
6703 {
c2f47e15 6704 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6705 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 6706 return const0_rtx;
6707 }
6708 }
6709
f21337ef 6710 /* expand_builtin_with_bounds is supposed to be used for
6711 instrumented builtin calls. */
058a1b7a 6712 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6713
53800dbe 6714 switch (fcode)
6715 {
4f35b1fc 6716 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 6717 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 6718 case BUILT_IN_FABSD32:
6719 case BUILT_IN_FABSD64:
6720 case BUILT_IN_FABSD128:
c2f47e15 6721 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6722 if (target)
a0c938f0 6723 return target;
78a74442 6724 break;
6725
4f35b1fc 6726 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 6727 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 6728 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6729 if (target)
6730 return target;
6731 break;
6732
7d3f6cc7 6733 /* Just do a normal library call if we were unable to fold
6734 the values. */
4f35b1fc 6735 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6736 break;
53800dbe 6737
7e0713b1 6738 CASE_FLT_FN (BUILT_IN_FMA):
8c32188e 6739 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7e0713b1 6740 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6741 if (target)
6742 return target;
6743 break;
6744
a67a90e5 6745 CASE_FLT_FN (BUILT_IN_ILOGB):
6746 if (! flag_unsafe_math_optimizations)
6747 break;
12f08300 6748 gcc_fallthrough ();
6749 CASE_FLT_FN (BUILT_IN_ISINF):
6750 CASE_FLT_FN (BUILT_IN_FINITE):
6751 case BUILT_IN_ISFINITE:
6752 case BUILT_IN_ISNORMAL:
f97eea22 6753 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6754 if (target)
6755 return target;
6756 break;
6757
80ff6494 6758 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6759 CASE_FLT_FN (BUILT_IN_LCEIL):
6760 CASE_FLT_FN (BUILT_IN_LLCEIL):
6761 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6762 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6763 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6764 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6765 if (target)
6766 return target;
6767 break;
6768
80ff6494 6769 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6770 CASE_FLT_FN (BUILT_IN_LRINT):
6771 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6772 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6773 CASE_FLT_FN (BUILT_IN_LROUND):
6774 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6775 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6776 if (target)
6777 return target;
6778 break;
6779
4f35b1fc 6780 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6781 target = expand_builtin_powi (exp, target);
757c219d 6782 if (target)
6783 return target;
6784 break;
6785
d735c391 6786 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6787 target = expand_builtin_cexpi (exp, target);
d735c391 6788 gcc_assert (target);
6789 return target;
6790
4f35b1fc 6791 CASE_FLT_FN (BUILT_IN_SIN):
6792 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6793 if (! flag_unsafe_math_optimizations)
6794 break;
6795 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6796 if (target)
6797 return target;
6798 break;
6799
c3147c1a 6800 CASE_FLT_FN (BUILT_IN_SINCOS):
6801 if (! flag_unsafe_math_optimizations)
6802 break;
6803 target = expand_builtin_sincos (exp);
6804 if (target)
6805 return target;
6806 break;
6807
53800dbe 6808 case BUILT_IN_APPLY_ARGS:
6809 return expand_builtin_apply_args ();
6810
6811 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6812 FUNCTION with a copy of the parameters described by
6813 ARGUMENTS, and ARGSIZE. It returns a block of memory
6814 allocated on the stack into which is stored all the registers
6815 that might possibly be used for returning the result of a
6816 function. ARGUMENTS is the value returned by
6817 __builtin_apply_args. ARGSIZE is the number of bytes of
6818 arguments that must be copied. ??? How should this value be
6819 computed? We'll also need a safe worst case value for varargs
6820 functions. */
6821 case BUILT_IN_APPLY:
c2f47e15 6822 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6823 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6824 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6825 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6826 return const0_rtx;
6827 else
6828 {
53800dbe 6829 rtx ops[3];
6830
c2f47e15 6831 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6832 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6833 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6834
6835 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6836 }
6837
6838 /* __builtin_return (RESULT) causes the function to return the
6839 value described by RESULT. RESULT is address of the block of
6840 memory returned by __builtin_apply. */
6841 case BUILT_IN_RETURN:
c2f47e15 6842 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6843 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6844 return const0_rtx;
6845
6846 case BUILT_IN_SAVEREGS:
a66c9326 6847 return expand_builtin_saveregs ();
53800dbe 6848
48dc2227 6849 case BUILT_IN_VA_ARG_PACK:
6850 /* All valid uses of __builtin_va_arg_pack () are removed during
6851 inlining. */
b8c23db3 6852 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6853 return const0_rtx;
6854
4e1d7ea4 6855 case BUILT_IN_VA_ARG_PACK_LEN:
6856 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6857 inlining. */
b8c23db3 6858 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6859 return const0_rtx;
6860
53800dbe 6861 /* Return the address of the first anonymous stack arg. */
6862 case BUILT_IN_NEXT_ARG:
c2f47e15 6863 if (fold_builtin_next_arg (exp, false))
a0c938f0 6864 return const0_rtx;
79012a9d 6865 return expand_builtin_next_arg ();
53800dbe 6866
ac8fb6db 6867 case BUILT_IN_CLEAR_CACHE:
6868 target = expand_builtin___clear_cache (exp);
6869 if (target)
6870 return target;
6871 break;
6872
53800dbe 6873 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6874 return expand_builtin_classify_type (exp);
53800dbe 6875
6876 case BUILT_IN_CONSTANT_P:
4ee9c684 6877 return const0_rtx;
53800dbe 6878
6879 case BUILT_IN_FRAME_ADDRESS:
6880 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6881 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6882
6883 /* Returns the address of the area where the structure is returned.
6884 0 otherwise. */
6885 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6886 if (call_expr_nargs (exp) != 0
9342ee68 6887 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6888 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6889 return const0_rtx;
53800dbe 6890 else
9342ee68 6891 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6892
2b34677f 6893 CASE_BUILT_IN_ALLOCA:
2b29cc6a 6894 target = expand_builtin_alloca (exp);
53800dbe 6895 if (target)
6896 return target;
6897 break;
6898
d08919a7 6899 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6900 return expand_asan_emit_allocas_unpoison (exp);
6901
4ee9c684 6902 case BUILT_IN_STACK_SAVE:
6903 return expand_stack_save ();
6904
6905 case BUILT_IN_STACK_RESTORE:
c2f47e15 6906 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6907 return const0_rtx;
6908
74bdbe96 6909 case BUILT_IN_BSWAP16:
42791117 6910 case BUILT_IN_BSWAP32:
6911 case BUILT_IN_BSWAP64:
74bdbe96 6912 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6913 if (target)
6914 return target;
6915 break;
6916
4f35b1fc 6917 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6918 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6919 subtarget, ffs_optab);
6a08d0ab 6920 if (target)
6921 return target;
6922 break;
6923
4f35b1fc 6924 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6925 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6926 subtarget, clz_optab);
6a08d0ab 6927 if (target)
6928 return target;
6929 break;
6930
4f35b1fc 6931 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6932 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6933 subtarget, ctz_optab);
6a08d0ab 6934 if (target)
6935 return target;
6936 break;
6937
d8492bd3 6938 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6939 target = expand_builtin_unop (target_mode, exp, target,
6940 subtarget, clrsb_optab);
6941 if (target)
6942 return target;
6943 break;
6944
4f35b1fc 6945 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6946 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6947 subtarget, popcount_optab);
6a08d0ab 6948 if (target)
6949 return target;
6950 break;
6951
4f35b1fc 6952 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6953 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6954 subtarget, parity_optab);
53800dbe 6955 if (target)
6956 return target;
6957 break;
6958
6959 case BUILT_IN_STRLEN:
c2f47e15 6960 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6961 if (target)
6962 return target;
6963 break;
6964
5aef8938 6965 case BUILT_IN_STRCAT:
6966 target = expand_builtin_strcat (exp, target);
6967 if (target)
6968 return target;
6969 break;
6970
53800dbe 6971 case BUILT_IN_STRCPY:
a65c4d64 6972 target = expand_builtin_strcpy (exp, target);
53800dbe 6973 if (target)
6974 return target;
6975 break;
bf8e3599 6976
5aef8938 6977 case BUILT_IN_STRNCAT:
6978 target = expand_builtin_strncat (exp, target);
6979 if (target)
6980 return target;
6981 break;
6982
ed09096d 6983 case BUILT_IN_STRNCPY:
a65c4d64 6984 target = expand_builtin_strncpy (exp, target);
ed09096d 6985 if (target)
6986 return target;
6987 break;
bf8e3599 6988
3b824fa6 6989 case BUILT_IN_STPCPY:
dc369150 6990 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6991 if (target)
6992 return target;
6993 break;
6994
4d317237 6995 case BUILT_IN_STPNCPY:
6996 target = expand_builtin_stpncpy (exp, target);
6997 if (target)
6998 return target;
6999 break;
7000
8d6c6ef5 7001 case BUILT_IN_MEMCHR:
7002 target = expand_builtin_memchr (exp, target);
7003 if (target)
7004 return target;
7005 break;
7006
53800dbe 7007 case BUILT_IN_MEMCPY:
a65c4d64 7008 target = expand_builtin_memcpy (exp, target);
3b824fa6 7009 if (target)
7010 return target;
7011 break;
7012
4d317237 7013 case BUILT_IN_MEMMOVE:
7014 target = expand_builtin_memmove (exp, target);
7015 if (target)
7016 return target;
7017 break;
7018
3b824fa6 7019 case BUILT_IN_MEMPCPY:
d0fbba1a 7020 target = expand_builtin_mempcpy (exp, target);
53800dbe 7021 if (target)
7022 return target;
7023 break;
7024
7025 case BUILT_IN_MEMSET:
c2f47e15 7026 target = expand_builtin_memset (exp, target, mode);
53800dbe 7027 if (target)
7028 return target;
7029 break;
7030
ffc83088 7031 case BUILT_IN_BZERO:
0b25db21 7032 target = expand_builtin_bzero (exp);
ffc83088 7033 if (target)
7034 return target;
7035 break;
7036
53800dbe 7037 case BUILT_IN_STRCMP:
a65c4d64 7038 target = expand_builtin_strcmp (exp, target);
53800dbe 7039 if (target)
7040 return target;
7041 break;
7042
ed09096d 7043 case BUILT_IN_STRNCMP:
7044 target = expand_builtin_strncmp (exp, target, mode);
7045 if (target)
7046 return target;
7047 break;
7048
071f1696 7049 case BUILT_IN_BCMP:
53800dbe 7050 case BUILT_IN_MEMCMP:
3e346f54 7051 case BUILT_IN_MEMCMP_EQ:
7052 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 7053 if (target)
7054 return target;
3e346f54 7055 if (fcode == BUILT_IN_MEMCMP_EQ)
7056 {
7057 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7058 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7059 }
53800dbe 7060 break;
53800dbe 7061
7062 case BUILT_IN_SETJMP:
12f08300 7063 /* This should have been lowered to the builtins below. */
2c8a1497 7064 gcc_unreachable ();
7065
7066 case BUILT_IN_SETJMP_SETUP:
7067 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7068 and the receiver label. */
c2f47e15 7069 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 7070 {
c2f47e15 7071 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 7072 VOIDmode, EXPAND_NORMAL);
c2f47e15 7073 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 7074 rtx_insn *label_r = label_rtx (label);
2c8a1497 7075
7076 /* This is copied from the handling of non-local gotos. */
7077 expand_builtin_setjmp_setup (buf_addr, label_r);
7078 nonlocal_goto_handler_labels
a4de1c23 7079 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 7080 nonlocal_goto_handler_labels);
7081 /* ??? Do not let expand_label treat us as such since we would
7082 not want to be both on the list of non-local labels and on
7083 the list of forced labels. */
7084 FORCED_LABEL (label) = 0;
7085 return const0_rtx;
7086 }
7087 break;
7088
2c8a1497 7089 case BUILT_IN_SETJMP_RECEIVER:
7090 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 7091 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 7092 {
c2f47e15 7093 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 7094 rtx_insn *label_r = label_rtx (label);
2c8a1497 7095
7096 expand_builtin_setjmp_receiver (label_r);
7097 return const0_rtx;
7098 }
6b7f6858 7099 break;
53800dbe 7100
7101 /* __builtin_longjmp is passed a pointer to an array of five words.
7102 It's similar to the C library longjmp function but works with
7103 __builtin_setjmp above. */
7104 case BUILT_IN_LONGJMP:
c2f47e15 7105 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 7106 {
c2f47e15 7107 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 7108 VOIDmode, EXPAND_NORMAL);
c2f47e15 7109 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 7110
7111 if (value != const1_rtx)
7112 {
1e5fcbe2 7113 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 7114 return const0_rtx;
7115 }
7116
7117 expand_builtin_longjmp (buf_addr, value);
7118 return const0_rtx;
7119 }
2c8a1497 7120 break;
53800dbe 7121
4ee9c684 7122 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 7123 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 7124 if (target)
7125 return target;
7126 break;
7127
843d08a9 7128 /* This updates the setjmp buffer that is its argument with the value
7129 of the current stack pointer. */
7130 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 7131 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 7132 {
7133 rtx buf_addr
c2f47e15 7134 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 7135
7136 expand_builtin_update_setjmp_buf (buf_addr);
7137 return const0_rtx;
7138 }
7139 break;
7140
53800dbe 7141 case BUILT_IN_TRAP:
a0ef1725 7142 expand_builtin_trap ();
53800dbe 7143 return const0_rtx;
7144
d2b48f0c 7145 case BUILT_IN_UNREACHABLE:
7146 expand_builtin_unreachable ();
7147 return const0_rtx;
7148
4f35b1fc 7149 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 7150 case BUILT_IN_SIGNBITD32:
7151 case BUILT_IN_SIGNBITD64:
7152 case BUILT_IN_SIGNBITD128:
27f261ef 7153 target = expand_builtin_signbit (exp, target);
7154 if (target)
7155 return target;
7156 break;
7157
53800dbe 7158 /* Various hooks for the DWARF 2 __throw routine. */
7159 case BUILT_IN_UNWIND_INIT:
7160 expand_builtin_unwind_init ();
7161 return const0_rtx;
7162 case BUILT_IN_DWARF_CFA:
7163 return virtual_cfa_rtx;
7164#ifdef DWARF2_UNWIND_INFO
f8f023a5 7165 case BUILT_IN_DWARF_SP_COLUMN:
7166 return expand_builtin_dwarf_sp_column ();
695e919b 7167 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 7168 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 7169 return const0_rtx;
53800dbe 7170#endif
7171 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 7172 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7173 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 7174 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 7175 case BUILT_IN_EH_RETURN:
c2f47e15 7176 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7177 CALL_EXPR_ARG (exp, 1));
53800dbe 7178 return const0_rtx;
df4b504c 7179 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 7180 return expand_builtin_eh_return_data_regno (exp);
26093bf4 7181 case BUILT_IN_EXTEND_POINTER:
c2f47e15 7182 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 7183 case BUILT_IN_EH_POINTER:
7184 return expand_builtin_eh_pointer (exp);
7185 case BUILT_IN_EH_FILTER:
7186 return expand_builtin_eh_filter (exp);
7187 case BUILT_IN_EH_COPY_VALUES:
7188 return expand_builtin_eh_copy_values (exp);
26093bf4 7189
7ccc713a 7190 case BUILT_IN_VA_START:
c2f47e15 7191 return expand_builtin_va_start (exp);
a66c9326 7192 case BUILT_IN_VA_END:
c2f47e15 7193 return expand_builtin_va_end (exp);
a66c9326 7194 case BUILT_IN_VA_COPY:
c2f47e15 7195 return expand_builtin_va_copy (exp);
89cfe6e5 7196 case BUILT_IN_EXPECT:
c2f47e15 7197 return expand_builtin_expect (exp, target);
fca0886c 7198 case BUILT_IN_ASSUME_ALIGNED:
7199 return expand_builtin_assume_aligned (exp, target);
5e3608d8 7200 case BUILT_IN_PREFETCH:
c2f47e15 7201 expand_builtin_prefetch (exp);
5e3608d8 7202 return const0_rtx;
7203
4ee9c684 7204 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 7205 return expand_builtin_init_trampoline (exp, true);
7206 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7207 return expand_builtin_init_trampoline (exp, false);
4ee9c684 7208 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 7209 return expand_builtin_adjust_trampoline (exp);
4ee9c684 7210
a27e3913 7211 case BUILT_IN_INIT_DESCRIPTOR:
7212 return expand_builtin_init_descriptor (exp);
7213 case BUILT_IN_ADJUST_DESCRIPTOR:
7214 return expand_builtin_adjust_descriptor (exp);
7215
73673831 7216 case BUILT_IN_FORK:
7217 case BUILT_IN_EXECL:
7218 case BUILT_IN_EXECV:
7219 case BUILT_IN_EXECLP:
7220 case BUILT_IN_EXECLE:
7221 case BUILT_IN_EXECVP:
7222 case BUILT_IN_EXECVE:
c2f47e15 7223 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 7224 if (target)
7225 return target;
7226 break;
53800dbe 7227
2797f13a 7228 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7229 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7230 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7231 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7232 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7233 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 7234 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 7235 if (target)
7236 return target;
7237 break;
7238
2797f13a 7239 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7240 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7241 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7242 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7243 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7244 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 7245 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 7246 if (target)
7247 return target;
7248 break;
7249
2797f13a 7250 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7251 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7252 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7253 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7254 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7255 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 7256 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 7257 if (target)
7258 return target;
7259 break;
7260
2797f13a 7261 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7262 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7263 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7264 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7265 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7266 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 7267 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 7268 if (target)
7269 return target;
7270 break;
7271
2797f13a 7272 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7273 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7274 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7275 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7276 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7277 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 7278 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 7279 if (target)
7280 return target;
7281 break;
7282
2797f13a 7283 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7284 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7285 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7286 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7287 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7288 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 7289 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 7290 if (target)
7291 return target;
7292 break;
7293
2797f13a 7294 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7295 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7296 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7297 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7298 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7299 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 7300 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 7301 if (target)
7302 return target;
7303 break;
7304
2797f13a 7305 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7306 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7307 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7308 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7309 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7310 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 7311 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 7312 if (target)
7313 return target;
7314 break;
7315
2797f13a 7316 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7317 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7318 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7319 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7320 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7321 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 7322 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 7323 if (target)
7324 return target;
7325 break;
7326
2797f13a 7327 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7328 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7329 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7330 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7331 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7332 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 7333 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 7334 if (target)
7335 return target;
7336 break;
7337
2797f13a 7338 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7339 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7340 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7341 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7342 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7343 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 7344 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 7345 if (target)
7346 return target;
7347 break;
7348
2797f13a 7349 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7350 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7351 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7352 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7353 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7354 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 7355 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 7356 if (target)
7357 return target;
7358 break;
7359
2797f13a 7360 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7361 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7362 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7363 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7364 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 7365 if (mode == VOIDmode)
7366 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 7367 if (!target || !register_operand (target, mode))
7368 target = gen_reg_rtx (mode);
3e272de8 7369
2797f13a 7370 mode = get_builtin_sync_mode
7371 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 7372 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 7373 if (target)
7374 return target;
7375 break;
7376
2797f13a 7377 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7378 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7379 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7380 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7381 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7382 mode = get_builtin_sync_mode
7383 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 7384 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 7385 if (target)
7386 return target;
7387 break;
7388
2797f13a 7389 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7390 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7391 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7392 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7393 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7394 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7395 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 7396 if (target)
7397 return target;
7398 break;
7399
2797f13a 7400 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7401 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7402 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7403 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7404 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7405 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7406 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 7407 return const0_rtx;
7408
2797f13a 7409 case BUILT_IN_SYNC_SYNCHRONIZE:
7410 expand_builtin_sync_synchronize ();
b6a5fc45 7411 return const0_rtx;
7412
1cd6e20d 7413 case BUILT_IN_ATOMIC_EXCHANGE_1:
7414 case BUILT_IN_ATOMIC_EXCHANGE_2:
7415 case BUILT_IN_ATOMIC_EXCHANGE_4:
7416 case BUILT_IN_ATOMIC_EXCHANGE_8:
7417 case BUILT_IN_ATOMIC_EXCHANGE_16:
7418 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7419 target = expand_builtin_atomic_exchange (mode, exp, target);
7420 if (target)
7421 return target;
7422 break;
7423
7424 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7425 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7426 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7427 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7428 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 7429 {
7430 unsigned int nargs, z;
f1f41a6c 7431 vec<tree, va_gc> *vec;
2c201ad1 7432
7433 mode =
7434 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7435 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7436 if (target)
7437 return target;
7438
7439 /* If this is turned into an external library call, the weak parameter
7440 must be dropped to match the expected parameter list. */
7441 nargs = call_expr_nargs (exp);
f1f41a6c 7442 vec_alloc (vec, nargs - 1);
2c201ad1 7443 for (z = 0; z < 3; z++)
f1f41a6c 7444 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7445 /* Skip the boolean weak parameter. */
7446 for (z = 4; z < 6; z++)
f1f41a6c 7447 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7448 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7449 break;
7450 }
1cd6e20d 7451
7452 case BUILT_IN_ATOMIC_LOAD_1:
7453 case BUILT_IN_ATOMIC_LOAD_2:
7454 case BUILT_IN_ATOMIC_LOAD_4:
7455 case BUILT_IN_ATOMIC_LOAD_8:
7456 case BUILT_IN_ATOMIC_LOAD_16:
7457 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7458 target = expand_builtin_atomic_load (mode, exp, target);
7459 if (target)
7460 return target;
7461 break;
7462
7463 case BUILT_IN_ATOMIC_STORE_1:
7464 case BUILT_IN_ATOMIC_STORE_2:
7465 case BUILT_IN_ATOMIC_STORE_4:
7466 case BUILT_IN_ATOMIC_STORE_8:
7467 case BUILT_IN_ATOMIC_STORE_16:
7468 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7469 target = expand_builtin_atomic_store (mode, exp);
7470 if (target)
7471 return const0_rtx;
7472 break;
7473
7474 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7475 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7476 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7477 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7478 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7479 {
7480 enum built_in_function lib;
7481 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7482 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7483 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7484 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7485 ignore, lib);
7486 if (target)
7487 return target;
7488 break;
7489 }
7490 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7491 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7492 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7493 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7494 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7495 {
7496 enum built_in_function lib;
7497 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7498 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7499 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7500 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7501 ignore, lib);
7502 if (target)
7503 return target;
7504 break;
7505 }
7506 case BUILT_IN_ATOMIC_AND_FETCH_1:
7507 case BUILT_IN_ATOMIC_AND_FETCH_2:
7508 case BUILT_IN_ATOMIC_AND_FETCH_4:
7509 case BUILT_IN_ATOMIC_AND_FETCH_8:
7510 case BUILT_IN_ATOMIC_AND_FETCH_16:
7511 {
7512 enum built_in_function lib;
7513 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7514 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7515 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7516 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7517 ignore, lib);
7518 if (target)
7519 return target;
7520 break;
7521 }
7522 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7523 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7524 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7525 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7526 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7527 {
7528 enum built_in_function lib;
7529 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7530 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7531 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7532 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7533 ignore, lib);
7534 if (target)
7535 return target;
7536 break;
7537 }
7538 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7539 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7540 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7541 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7542 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7543 {
7544 enum built_in_function lib;
7545 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7546 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7547 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7548 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7549 ignore, lib);
7550 if (target)
7551 return target;
7552 break;
7553 }
7554 case BUILT_IN_ATOMIC_OR_FETCH_1:
7555 case BUILT_IN_ATOMIC_OR_FETCH_2:
7556 case BUILT_IN_ATOMIC_OR_FETCH_4:
7557 case BUILT_IN_ATOMIC_OR_FETCH_8:
7558 case BUILT_IN_ATOMIC_OR_FETCH_16:
7559 {
7560 enum built_in_function lib;
7561 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7562 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7563 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7564 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7565 ignore, lib);
7566 if (target)
7567 return target;
7568 break;
7569 }
7570 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7571 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7572 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7573 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7574 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7575 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7576 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7577 ignore, BUILT_IN_NONE);
7578 if (target)
7579 return target;
7580 break;
7581
7582 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7583 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7584 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7585 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7586 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7587 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7588 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7589 ignore, BUILT_IN_NONE);
7590 if (target)
7591 return target;
7592 break;
7593
7594 case BUILT_IN_ATOMIC_FETCH_AND_1:
7595 case BUILT_IN_ATOMIC_FETCH_AND_2:
7596 case BUILT_IN_ATOMIC_FETCH_AND_4:
7597 case BUILT_IN_ATOMIC_FETCH_AND_8:
7598 case BUILT_IN_ATOMIC_FETCH_AND_16:
7599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7600 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7601 ignore, BUILT_IN_NONE);
7602 if (target)
7603 return target;
7604 break;
7605
7606 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7607 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7608 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7609 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7610 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7611 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7612 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7613 ignore, BUILT_IN_NONE);
7614 if (target)
7615 return target;
7616 break;
7617
7618 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7619 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7620 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7621 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7622 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7623 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7624 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7625 ignore, BUILT_IN_NONE);
7626 if (target)
7627 return target;
7628 break;
7629
7630 case BUILT_IN_ATOMIC_FETCH_OR_1:
7631 case BUILT_IN_ATOMIC_FETCH_OR_2:
7632 case BUILT_IN_ATOMIC_FETCH_OR_4:
7633 case BUILT_IN_ATOMIC_FETCH_OR_8:
7634 case BUILT_IN_ATOMIC_FETCH_OR_16:
7635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7636 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7637 ignore, BUILT_IN_NONE);
7638 if (target)
7639 return target;
7640 break;
10b744a3 7641
7642 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 7643 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 7644
7645 case BUILT_IN_ATOMIC_CLEAR:
7646 return expand_builtin_atomic_clear (exp);
1cd6e20d 7647
7648 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7649 return expand_builtin_atomic_always_lock_free (exp);
7650
7651 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7652 target = expand_builtin_atomic_is_lock_free (exp);
7653 if (target)
7654 return target;
7655 break;
7656
7657 case BUILT_IN_ATOMIC_THREAD_FENCE:
7658 expand_builtin_atomic_thread_fence (exp);
7659 return const0_rtx;
7660
7661 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7662 expand_builtin_atomic_signal_fence (exp);
7663 return const0_rtx;
7664
0a39fd54 7665 case BUILT_IN_OBJECT_SIZE:
7666 return expand_builtin_object_size (exp);
7667
7668 case BUILT_IN_MEMCPY_CHK:
7669 case BUILT_IN_MEMPCPY_CHK:
7670 case BUILT_IN_MEMMOVE_CHK:
7671 case BUILT_IN_MEMSET_CHK:
7672 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7673 if (target)
7674 return target;
7675 break;
7676
7677 case BUILT_IN_STRCPY_CHK:
7678 case BUILT_IN_STPCPY_CHK:
7679 case BUILT_IN_STRNCPY_CHK:
1063acde 7680 case BUILT_IN_STPNCPY_CHK:
0a39fd54 7681 case BUILT_IN_STRCAT_CHK:
b356dfef 7682 case BUILT_IN_STRNCAT_CHK:
0a39fd54 7683 case BUILT_IN_SNPRINTF_CHK:
7684 case BUILT_IN_VSNPRINTF_CHK:
7685 maybe_emit_chk_warning (exp, fcode);
7686 break;
7687
7688 case BUILT_IN_SPRINTF_CHK:
7689 case BUILT_IN_VSPRINTF_CHK:
7690 maybe_emit_sprintf_chk_warning (exp, fcode);
7691 break;
7692
2c281b15 7693 case BUILT_IN_FREE:
f74ea1c2 7694 if (warn_free_nonheap_object)
7695 maybe_emit_free_warning (exp);
2c281b15 7696 break;
7697
badaa04c 7698 case BUILT_IN_THREAD_POINTER:
7699 return expand_builtin_thread_pointer (exp, target);
7700
7701 case BUILT_IN_SET_THREAD_POINTER:
7702 expand_builtin_set_thread_pointer (exp);
7703 return const0_rtx;
7704
058a1b7a 7705 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7706 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7707 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7708 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7709 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7710 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7711 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7712 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7713 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7714 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7715 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7716 /* We allow user CHKP builtins if Pointer Bounds
7717 Checker is off. */
7718 if (!chkp_function_instrumented_p (current_function_decl))
7719 {
7720 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7721 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7722 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7723 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7724 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7725 return expand_normal (CALL_EXPR_ARG (exp, 0));
7726 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7727 return expand_normal (size_zero_node);
7728 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7729 return expand_normal (size_int (-1));
7730 else
7731 return const0_rtx;
7732 }
7733 /* FALLTHROUGH */
7734
7735 case BUILT_IN_CHKP_BNDMK:
7736 case BUILT_IN_CHKP_BNDSTX:
7737 case BUILT_IN_CHKP_BNDCL:
7738 case BUILT_IN_CHKP_BNDCU:
7739 case BUILT_IN_CHKP_BNDLDX:
7740 case BUILT_IN_CHKP_BNDRET:
7741 case BUILT_IN_CHKP_INTERSECT:
7742 case BUILT_IN_CHKP_NARROW:
7743 case BUILT_IN_CHKP_EXTRACT_LOWER:
7744 case BUILT_IN_CHKP_EXTRACT_UPPER:
7745 /* Software implementation of Pointer Bounds Checker is NYI.
7746 Target support is required. */
7747 error ("Your target platform does not support -fcheck-pointer-bounds");
7748 break;
7749
ca4c3545 7750 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 7751 /* Do library call, if we failed to expand the builtin when
7752 folding. */
ca4c3545 7753 break;
7754
92482ee0 7755 default: /* just do library call, if unknown builtin */
146c1b4f 7756 break;
53800dbe 7757 }
7758
7759 /* The switch statement above can drop through to cause the function
7760 to be called normally. */
7761 return expand_call (exp, target, ignore);
7762}
650e4c94 7763
/* Similar to expand_builtin but is used for instrumented calls
   (calls carrying Pointer Bounds Checker bounds, CALL_WITH_BOUNDS_P).
   EXP is the CALL_EXPR, TARGET/SUBTARGET/MODE/IGNORE have the usual
   expander meanings.  Returns the RTX for the call's value, either
   from a builtin-specific expander or from a normal library call.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Only instrumented calls may come through here.  */
  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  /* Machine-dependent builtins are expanded entirely by the target.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_MEMCPY_CHKP:
    case BUILT_IN_MEMMOVE_CHKP:
    case BUILT_IN_MEMPCPY_CHKP:
      if (call_expr_nargs (exp) > 3)
	{
	  /* memcpy_chkp (void *dst, size_t dstbnd,
	                  const void *src, size_t srcbnd, size_t n)
	     and others take a pointer bound argument just after each
	     pointer argument.  Hence the pointer/size arguments sit at
	     indices 0, 2 and 4 rather than the usual 0, 1, 2.  */
	  tree dest = CALL_EXPR_ARG (exp, 0);
	  tree src = CALL_EXPR_ARG (exp, 2);
	  tree len = CALL_EXPR_ARG (exp, 4);

	  /* Diagnose overlapping/out-of-bounds accesses, then fall
	     back to the ordinary call below.  */
	  check_memop_access (exp, dest, src, len);
	  break;
	}
      /* FALLTHRU -- too few arguments to diagnose, treat as unknown.  */

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
7827
805e22b2 7828/* Determine whether a tree node represents a call to a built-in
52203a9d 7829 function. If the tree T is a call to a built-in function with
7830 the right number of arguments of the appropriate types, return
7831 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7832 Otherwise the return value is END_BUILTINS. */
aecda0d6 7833
805e22b2 7834enum built_in_function
b7bf20db 7835builtin_mathfn_code (const_tree t)
805e22b2 7836{
b7bf20db 7837 const_tree fndecl, arg, parmlist;
7838 const_tree argtype, parmtype;
7839 const_call_expr_arg_iterator iter;
805e22b2 7840
d44e3710 7841 if (TREE_CODE (t) != CALL_EXPR)
805e22b2 7842 return END_BUILTINS;
7843
c6e6ecb1 7844 fndecl = get_callee_fndecl (t);
7845 if (fndecl == NULL_TREE
52203a9d 7846 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7847 || ! DECL_BUILT_IN (fndecl)
7848 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7849 return END_BUILTINS;
7850
52203a9d 7851 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7852 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7853 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7854 {
52203a9d 7855 /* If a function doesn't take a variable number of arguments,
7856 the last element in the list will have type `void'. */
7857 parmtype = TREE_VALUE (parmlist);
7858 if (VOID_TYPE_P (parmtype))
7859 {
b7bf20db 7860 if (more_const_call_expr_args_p (&iter))
52203a9d 7861 return END_BUILTINS;
7862 return DECL_FUNCTION_CODE (fndecl);
7863 }
7864
b7bf20db 7865 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7866 return END_BUILTINS;
48e1416a 7867
b7bf20db 7868 arg = next_const_call_expr_arg (&iter);
c2f47e15 7869 argtype = TREE_TYPE (arg);
52203a9d 7870
7871 if (SCALAR_FLOAT_TYPE_P (parmtype))
7872 {
7873 if (! SCALAR_FLOAT_TYPE_P (argtype))
7874 return END_BUILTINS;
7875 }
7876 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7877 {
7878 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7879 return END_BUILTINS;
7880 }
7881 else if (POINTER_TYPE_P (parmtype))
7882 {
7883 if (! POINTER_TYPE_P (argtype))
7884 return END_BUILTINS;
7885 }
7886 else if (INTEGRAL_TYPE_P (parmtype))
7887 {
7888 if (! INTEGRAL_TYPE_P (argtype))
7889 return END_BUILTINS;
7890 }
7891 else
e9f80ff5 7892 return END_BUILTINS;
e9f80ff5 7893 }
7894
52203a9d 7895 /* Variable-length argument list. */
805e22b2 7896 return DECL_FUNCTION_CODE (fndecl);
7897}
7898
c2f47e15 7899/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7900 evaluate to a constant. */
650e4c94 7901
7902static tree
c2f47e15 7903fold_builtin_constant_p (tree arg)
650e4c94 7904{
650e4c94 7905 /* We return 1 for a numeric type that's known to be a constant
7906 value at compile-time or for an aggregate type that's a
7907 literal constant. */
c2f47e15 7908 STRIP_NOPS (arg);
650e4c94 7909
7910 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7911 if (CONSTANT_CLASS_P (arg)
7912 || (TREE_CODE (arg) == CONSTRUCTOR
7913 && TREE_CONSTANT (arg)))
650e4c94 7914 return integer_one_node;
c2f47e15 7915 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7916 {
c2f47e15 7917 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7918 if (TREE_CODE (op) == STRING_CST
7919 || (TREE_CODE (op) == ARRAY_REF
7920 && integer_zerop (TREE_OPERAND (op, 1))
7921 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7922 return integer_one_node;
7923 }
650e4c94 7924
1fb4300c 7925 /* If this expression has side effects, show we don't know it to be a
7926 constant. Likewise if it's a pointer or aggregate type since in
7927 those case we only want literals, since those are only optimized
f97c71a1 7928 when generating RTL, not later.
7929 And finally, if we are compiling an initializer, not code, we
7930 need to return a definite result now; there's not going to be any
7931 more optimization done. */
c2f47e15 7932 if (TREE_SIDE_EFFECTS (arg)
7933 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7934 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7935 || cfun == 0
0b049e15 7936 || folding_initializer
7937 || force_folding_builtin_constant_p)
650e4c94 7938 return integer_zero_node;
7939
c2f47e15 7940 return NULL_TREE;
650e4c94 7941}
7942
76f5a783 7943/* Create builtin_expect with PRED and EXPECTED as its arguments and
7944 return it as a truthvalue. */
4ee9c684 7945
7946static tree
c83059be 7947build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7948 tree predictor)
4ee9c684 7949{
76f5a783 7950 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7951
b9a16870 7952 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7953 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7954 ret_type = TREE_TYPE (TREE_TYPE (fn));
7955 pred_type = TREE_VALUE (arg_types);
7956 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7957
389dd41b 7958 pred = fold_convert_loc (loc, pred_type, pred);
7959 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7960 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7961 predictor);
76f5a783 7962
7963 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7964 build_int_cst (ret_type, 0));
7965}
7966
/* Fold a call to builtin_expect with arguments ARG0 (the tested
   expression) and ARG1 (the expected value).  ARG2 is an optional
   internal predictor argument, forwarded when distributing the
   expectation.  Return NULL_TREE if no simplification is possible.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* For (a && b) or (a || b), push the expectation onto each operand:
     expect (a && b, v) -> expect (a, v) && expect (b, v).  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);
      /* ARG1 is used twice below; protect against double evaluation.  */
      arg1 = save_expr (arg1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* A weak symbol's address is not a compile-time constant.  */
      if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
8038
c2f47e15 8039/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 8040
539a3a92 8041static tree
c2f47e15 8042fold_builtin_classify_type (tree arg)
539a3a92 8043{
c2f47e15 8044 if (arg == 0)
7002a1c8 8045 return build_int_cst (integer_type_node, no_type_class);
539a3a92 8046
7002a1c8 8047 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 8048}
8049
c2f47e15 8050/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 8051
8052static tree
c7cbde74 8053fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 8054{
c2f47e15 8055 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 8056 return NULL_TREE;
8057 else
8058 {
c2f47e15 8059 tree len = c_strlen (arg, 0);
e6e27594 8060
8061 if (len)
c7cbde74 8062 return fold_convert_loc (loc, type, len);
e6e27594 8063
8064 return NULL_TREE;
8065 }
8066}
8067
92c43e3c 8068/* Fold a call to __builtin_inf or __builtin_huge_val. */
8069
8070static tree
389dd41b 8071fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 8072{
aa870c1b 8073 REAL_VALUE_TYPE real;
8074
40f4dbd5 8075 /* __builtin_inff is intended to be usable to define INFINITY on all
8076 targets. If an infinity is not available, INFINITY expands "to a
8077 positive constant of type float that overflows at translation
8078 time", footnote "In this case, using INFINITY will violate the
8079 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8080 Thus we pedwarn to ensure this constraint violation is
8081 diagnosed. */
92c43e3c 8082 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 8083 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 8084
aa870c1b 8085 real_inf (&real);
8086 return build_real (type, real);
92c43e3c 8087}
8088
/* Fold function call to builtin sincos, sincosf, or sincosl.  ARG0 is
   the real argument, ARG1 and ARG2 are pointers receiving the sine and
   cosine respectively.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree fndecl, call = NULL_TREE;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
  if (fn == END_BUILTINS)
    return NULL_TREE;

  /* Canonicalize sincos to cexpi.  */
  if (TREE_CODE (arg0) == REAL_CST)
    {
      tree complex_type = build_complex_type (type);
      call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
    }
  if (!call)
    {
      /* No constant folding; emit an actual cexpi call, but only when
	 the C99 complex math library is known to be available.  */
      if (!targetm.libc_has_function (function_c99_math_complex)
	  || !builtin_decl_implicit_p (fn))
	return NULL_TREE;
      fndecl = builtin_decl_explicit (fn);
      call = build_call_expr_loc (loc, fndecl, 1, arg0);
      /* The call result is used twice below (imag and real parts).  */
      call = builtin_save_expr (call);
    }

  /* Build (*arg1 = imag (call), *arg2 = real (call)): the imaginary
     part of cexpi is sin, the real part is cos.  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
}
8135
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2
   and length LEN.  Return NULL_TREE if no simplification can be
   made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero.  The operands are still
     evaluated for their side effects via omit_two_operands.  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      /* Load the first byte of each operand as an unsigned char,
	 widened to int as memcmp requires.  */
      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8181
c2f47e15 8182/* Fold a call to builtin isascii with argument ARG. */
d49367d4 8183
8184static tree
389dd41b 8185fold_builtin_isascii (location_t loc, tree arg)
d49367d4 8186{
c2f47e15 8187 if (!validate_arg (arg, INTEGER_TYPE))
8188 return NULL_TREE;
d49367d4 8189 else
8190 {
8191 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 8192 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8193 build_int_cst (integer_type_node,
c90b5d40 8194 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 8195 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 8196 arg, integer_zero_node);
d49367d4 8197 }
8198}
8199
c2f47e15 8200/* Fold a call to builtin toascii with argument ARG. */
d49367d4 8201
8202static tree
389dd41b 8203fold_builtin_toascii (location_t loc, tree arg)
d49367d4 8204{
c2f47e15 8205 if (!validate_arg (arg, INTEGER_TYPE))
8206 return NULL_TREE;
48e1416a 8207
c2f47e15 8208 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 8209 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 8210 build_int_cst (integer_type_node, 0x7f));
d49367d4 8211}
8212
c2f47e15 8213/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 8214
8215static tree
389dd41b 8216fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 8217{
c2f47e15 8218 if (!validate_arg (arg, INTEGER_TYPE))
8219 return NULL_TREE;
df1cf42e 8220 else
8221 {
8222 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 8223 /* According to the C standard, isdigit is unaffected by locale.
8224 However, it definitely is affected by the target character set. */
624d37a6 8225 unsigned HOST_WIDE_INT target_digit0
8226 = lang_hooks.to_target_charset ('0');
8227
8228 if (target_digit0 == 0)
8229 return NULL_TREE;
8230
389dd41b 8231 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 8232 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8233 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 8234 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 8235 build_int_cst (unsigned_type_node, 9));
df1cf42e 8236 }
8237}
27f261ef 8238
c2f47e15 8239/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 8240
8241static tree
389dd41b 8242fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 8243{
c2f47e15 8244 if (!validate_arg (arg, REAL_TYPE))
8245 return NULL_TREE;
d1aade50 8246
389dd41b 8247 arg = fold_convert_loc (loc, type, arg);
389dd41b 8248 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8249}
8250
c2f47e15 8251/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 8252
8253static tree
389dd41b 8254fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 8255{
c2f47e15 8256 if (!validate_arg (arg, INTEGER_TYPE))
8257 return NULL_TREE;
d1aade50 8258
389dd41b 8259 arg = fold_convert_loc (loc, type, arg);
389dd41b 8260 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 8261}
8262
b9be572e 8263/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8264
8265static tree
8266fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8267{
866b3d58 8268 /* ??? Only expand to FMA_EXPR if it's directly supported. */
b9be572e 8269 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8270 && validate_arg (arg1, REAL_TYPE)
866b3d58 8271 && validate_arg (arg2, REAL_TYPE)
8272 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8273 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
b9be572e 8274
b9be572e 8275 return NULL_TREE;
8276}
8277
abe4dcf6 8278/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8279
8280static tree
389dd41b 8281fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 8282{
239d491a 8283 if (validate_arg (arg, COMPLEX_TYPE)
8284 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 8285 {
8286 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 8287
abe4dcf6 8288 if (atan2_fn)
8289 {
c2f47e15 8290 tree new_arg = builtin_save_expr (arg);
389dd41b 8291 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8292 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8293 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 8294 }
8295 }
48e1416a 8296
abe4dcf6 8297 return NULL_TREE;
8298}
8299
/* Fold a call to builtin frexp, we can assume the base is 2.  ARG0 is
   the real argument, ARG1 the int* exponent output, RETTYPE the real
   return type.  Folds only constant arguments; returns NULL_TREE
   otherwise.  */

static tree
fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
{
  if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (arg0);

  /* Only fold constant, non-overflowing real arguments.  */
  if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
    return NULL_TREE;

  arg1 = build_fold_indirect_ref_loc (loc, arg1);

  /* Proceed if a valid pointer type was passed in.  */
  if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
      tree frac, exp;

      switch (value->cl)
	{
	case rvc_zero:
	  /* For +-0, return (*exp = 0, +-0).  */
	  exp = integer_zero_node;
	  frac = arg0;
	  break;
	case rvc_nan:
	case rvc_inf:
	  /* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
	  return omit_one_operand_loc (loc, rettype, arg0, arg1);
	case rvc_normal:
	  {
	    /* Since the frexp function always expects base 2, and in
	       GCC normalized significands are already in the range
	       [0.5, 1.0), we have exactly what frexp wants.  */
	    REAL_VALUE_TYPE frac_rvt = *value;
	    SET_REAL_EXP (&frac_rvt, 0);
	    frac = build_real (rettype, frac_rvt);
	    exp = build_int_cst (integer_type_node, REAL_EXP (value));
	  }
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Create the COMPOUND_EXPR (*arg1 = trunc, frac).  */
      arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
      TREE_SIDE_EFFECTS (arg1) = 1;
      return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
    }

  return NULL_TREE;
}
8355
ebf8b4f5 8356/* Fold a call to builtin modf. */
8357
8358static tree
389dd41b 8359fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 8360{
8361 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8362 return NULL_TREE;
48e1416a 8363
ebf8b4f5 8364 STRIP_NOPS (arg0);
48e1416a 8365
ebf8b4f5 8366 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8367 return NULL_TREE;
48e1416a 8368
389dd41b 8369 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 8370
8371 /* Proceed if a valid pointer type was passed in. */
8372 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8373 {
8374 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8375 REAL_VALUE_TYPE trunc, frac;
8376
8377 switch (value->cl)
8378 {
8379 case rvc_nan:
8380 case rvc_zero:
8381 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8382 trunc = frac = *value;
8383 break;
8384 case rvc_inf:
8385 /* For +-Inf, return (*arg1 = arg0, +-0). */
8386 frac = dconst0;
8387 frac.sign = value->sign;
8388 trunc = *value;
8389 break;
8390 case rvc_normal:
8391 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8392 real_trunc (&trunc, VOIDmode, value);
8393 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8394 /* If the original number was negative and already
8395 integral, then the fractional part is -0.0. */
8396 if (value->sign && frac.cl == rvc_zero)
8397 frac.sign = value->sign;
8398 break;
8399 }
48e1416a 8400
ebf8b4f5 8401 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 8402 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 8403 build_real (rettype, trunc));
8404 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8405 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 8406 build_real (rettype, frac));
8407 }
48e1416a 8408
ebf8b4f5 8409 return NULL_TREE;
8410}
8411
/* Given a location LOC, an interclass builtin function decl FNDECL
   and its single argument ARG, return an folded expression computing
   the same, or NULL_TREE if we either couldn't or didn't want to fold
   (the latter happen if there's an RTL instruction available).  */

static tree
fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
{
  machine_mode mode;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Prefer the target's native instruction when one exists.  */
  if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
    return NULL_TREE;

  mode = TYPE_MODE (TREE_TYPE (arg));

  /* IBM extended (double-double): classification is determined solely
     by the high-order double.  */
  bool is_ibm_extended = MODE_COMPOSITE_P (mode);

  /* If there is no optab, try generic code.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
      tree result;

    CASE_FLT_FN (BUILT_IN_ISINF):
      {
	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isgr_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	return result;
      }
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_ISFINITE:
      {
	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	REAL_VALUE_TYPE r;
	char buf[128];

	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&r, buf);
	result = build_call_expr (isle_fn, 2,
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	/*result = fold_build2_loc (loc, UNGT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  fold_build1_loc (loc, ABS_EXPR, type, arg),
				  build_real (type, r));
	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
				  TREE_TYPE (TREE_TYPE (fndecl)),
				  result);*/
	return result;
      }
    case BUILT_IN_ISNORMAL:
      {
	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
	   islessequal(fabs(x),DBL_MAX).  */
	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
	tree type = TREE_TYPE (arg);
	tree orig_arg, max_exp, min_exp;
	machine_mode orig_mode = mode;
	REAL_VALUE_TYPE rmax, rmin;
	char buf[128];

	/* ARG is used several times below; save it once.  */
	orig_arg = arg = builtin_save_expr (arg);
	if (is_ibm_extended)
	  {
	    /* Use double to test the normal range of IBM extended
	       precision.  Emin for IBM extended precision is
	       different to emin for IEEE double, being 53 higher
	       since the low double exponent is at least 53 lower
	       than the high double exponent.  */
	    type = double_type_node;
	    mode = DFmode;
	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
	  }
	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);

	/* Build DBL_MAX and DBL_MIN equivalents for MODE as reals.  */
	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
	real_from_string (&rmax, buf);
	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
	real_from_string (&rmin, buf);
	max_exp = build_real (type, rmax);
	min_exp = build_real (type, rmin);

	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
	if (is_ibm_extended)
	  {
	    /* Testing the high end of the range is done just using
	       the high double, using the same test as isfinite().
	       For the subnormal end of the range we first test the
	       high double, then if its magnitude is equal to the
	       limit of 0x1p-969, we test whether the low double is
	       non-zero and opposite sign to the high double.  */
	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
				       arg, min_exp);
	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
				      complex_double_type_node, orig_arg);
	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
	    tree zero = build_real (type, dconst0);
	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
				      fold_build3 (COND_EXPR,
						   integer_type_node,
						   hilt, logt, lolt));
	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
				  eq_min, ok_lo);
	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
				   gt_min, eq_min);
	  }
	else
	  {
	    tree const isge_fn
	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
	  }
	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
			      max_exp, min_exp);
	return result;
      }
    default:
      break;
    }

  return NULL_TREE;
}
8570
/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
   ARG is the argument for the call; BUILTIN_INDEX selects which
   classification to fold; FNDECL supplies the return type.  Returns
   the folded tree or NULL_TREE.  */

static tree
fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
{
  tree type = TREE_TYPE (TREE_TYPE (fndecl));

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  switch (builtin_index)
    {
    case BUILT_IN_ISINF:
      /* Without infinities the result is trivially 0 (ARG kept for
	 side effects).  */
      if (!HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISINF_SIGN:
      {
	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
	/* In a boolean context, GCC will fold the inner COND_EXPR to
	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
	tree tmp = NULL_TREE;

	/* ARG is used by both calls; save it once.  */
	arg = builtin_save_expr (arg);

	if (signbit_fn && isinf_fn)
	  {
	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);

	    /* Normalize both calls to 0/1 truthvalues.  */
	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					    signbit_call, integer_zero_node);
	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
					  isinf_call, integer_zero_node);

	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
				   integer_minus_one_node, integer_one_node);
	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
				   isinf_call, tmp,
				   integer_zero_node);
	  }

	return tmp;
      }

    case BUILT_IN_ISFINITE:
      /* With neither NaNs nor infinities every value is finite.  */
      if (!HONOR_NANS (arg)
	  && !HONOR_INFINITIES (arg))
	return omit_one_operand_loc (loc, type, integer_one_node, arg);

      return NULL_TREE;

    case BUILT_IN_ISNAN:
      /* Without NaNs the result is trivially 0.  */
      if (!HONOR_NANS (arg))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg);

      {
	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
	if (is_ibm_extended)
	  {
	    /* NaN and Inf are encoded in the high-order double value
	       only.  The low-order value is not significant.  */
	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
	  }
      }
      /* isnan(x) -> x unordered with itself.  */
      arg = builtin_save_expr (arg);
      return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);

    default:
      gcc_unreachable ();
    }
}
8649
/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
   This builtin will generate code to return the appropriate floating
   point classification depending on the value of the floating point
   number passed in.  The possible return values must be supplied as
   int arguments to the call in the following order: FP_NAN, FP_INFINITE,
   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipses is for exactly
   one floating point argument which is "type generic".  */

static tree
fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
{
  tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
    arg, type, res, tmp;
  machine_mode mode;
  REAL_VALUE_TYPE r;
  char buf[128];

  /* Verify the required arguments in the original call.  */
  if (nargs != 6
      || !validate_arg (args[0], INTEGER_TYPE)
      || !validate_arg (args[1], INTEGER_TYPE)
      || !validate_arg (args[2], INTEGER_TYPE)
      || !validate_arg (args[3], INTEGER_TYPE)
      || !validate_arg (args[4], INTEGER_TYPE)
      || !validate_arg (args[5], REAL_TYPE))
    return NULL_TREE;

  fp_nan = args[0];
  fp_infinite = args[1];
  fp_normal = args[2];
  fp_subnormal = args[3];
  fp_zero = args[4];
  arg = args[5];
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (type);
  /* Classify fabs(arg); saved since it is used in several tests.  */
  arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));

  /* fpclassify(x) ->
     isnan(x) ? FP_NAN :
       (fabs(x) == Inf ? FP_INFINITE :
	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */

  tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			 build_real (type, dconst0));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
			 tmp, fp_zero, fp_subnormal);

  /* 0x1p<emin-1> is the smallest normal value for MODE.  */
  sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
  real_from_string (&r, buf);
  tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
			 arg, build_real (type, r));
  res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);

  /* Only emit the Inf and NaN tests when the mode can represent them.  */
  if (HONOR_INFINITIES (mode))
    {
      real_inf (&r);
      tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
			     build_real (type, r));
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
			     fp_infinite, res);
    }

  if (HONOR_NANS (mode))
    {
      tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
      res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
    }

  return res;
}
8721
9bc9f15f 8722/* Fold a call to an unordered comparison function such as
d5019fe8 8723 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 8724 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 8725 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8726 the opposite of the desired result. UNORDERED_CODE is used
8727 for modes that can hold NaNs and ORDERED_CODE is used for
8728 the rest. */
9bc9f15f 8729
8730static tree
389dd41b 8731fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 8732 enum tree_code unordered_code,
8733 enum tree_code ordered_code)
8734{
859f903a 8735 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 8736 enum tree_code code;
6978db0d 8737 tree type0, type1;
8738 enum tree_code code0, code1;
8739 tree cmp_type = NULL_TREE;
9bc9f15f 8740
6978db0d 8741 type0 = TREE_TYPE (arg0);
8742 type1 = TREE_TYPE (arg1);
a0c938f0 8743
6978db0d 8744 code0 = TREE_CODE (type0);
8745 code1 = TREE_CODE (type1);
a0c938f0 8746
6978db0d 8747 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8748 /* Choose the wider of two real types. */
8749 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8750 ? type0 : type1;
8751 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8752 cmp_type = type0;
8753 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8754 cmp_type = type1;
a0c938f0 8755
389dd41b 8756 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8757 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 8758
8759 if (unordered_code == UNORDERED_EXPR)
8760 {
93633022 8761 if (!HONOR_NANS (arg0))
389dd41b 8762 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8763 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 8764 }
9bc9f15f 8765
93633022 8766 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 8767 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8768 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 8769}
8770
0c93c8a9 8771/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8772 arithmetics if it can never overflow, or into internal functions that
8773 return both result of arithmetics and overflowed boolean flag in
732905bb 8774 a complex integer result, or some other check for overflow.
8775 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8776 checking part of that. */
0c93c8a9 8777
8778static tree
8779fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8780 tree arg0, tree arg1, tree arg2)
8781{
8782 enum internal_fn ifn = IFN_LAST;
732905bb 8783 /* The code of the expression corresponding to the type-generic
8784 built-in, or ERROR_MARK for the type-specific ones. */
8785 enum tree_code opcode = ERROR_MARK;
8786 bool ovf_only = false;
8787
0c93c8a9 8788 switch (fcode)
8789 {
732905bb 8790 case BUILT_IN_ADD_OVERFLOW_P:
8791 ovf_only = true;
8792 /* FALLTHRU */
0c93c8a9 8793 case BUILT_IN_ADD_OVERFLOW:
732905bb 8794 opcode = PLUS_EXPR;
8795 /* FALLTHRU */
0c93c8a9 8796 case BUILT_IN_SADD_OVERFLOW:
8797 case BUILT_IN_SADDL_OVERFLOW:
8798 case BUILT_IN_SADDLL_OVERFLOW:
8799 case BUILT_IN_UADD_OVERFLOW:
8800 case BUILT_IN_UADDL_OVERFLOW:
8801 case BUILT_IN_UADDLL_OVERFLOW:
8802 ifn = IFN_ADD_OVERFLOW;
8803 break;
732905bb 8804 case BUILT_IN_SUB_OVERFLOW_P:
8805 ovf_only = true;
8806 /* FALLTHRU */
0c93c8a9 8807 case BUILT_IN_SUB_OVERFLOW:
732905bb 8808 opcode = MINUS_EXPR;
8809 /* FALLTHRU */
0c93c8a9 8810 case BUILT_IN_SSUB_OVERFLOW:
8811 case BUILT_IN_SSUBL_OVERFLOW:
8812 case BUILT_IN_SSUBLL_OVERFLOW:
8813 case BUILT_IN_USUB_OVERFLOW:
8814 case BUILT_IN_USUBL_OVERFLOW:
8815 case BUILT_IN_USUBLL_OVERFLOW:
8816 ifn = IFN_SUB_OVERFLOW;
8817 break;
732905bb 8818 case BUILT_IN_MUL_OVERFLOW_P:
8819 ovf_only = true;
8820 /* FALLTHRU */
0c93c8a9 8821 case BUILT_IN_MUL_OVERFLOW:
732905bb 8822 opcode = MULT_EXPR;
8823 /* FALLTHRU */
0c93c8a9 8824 case BUILT_IN_SMUL_OVERFLOW:
8825 case BUILT_IN_SMULL_OVERFLOW:
8826 case BUILT_IN_SMULLL_OVERFLOW:
8827 case BUILT_IN_UMUL_OVERFLOW:
8828 case BUILT_IN_UMULL_OVERFLOW:
8829 case BUILT_IN_UMULLL_OVERFLOW:
8830 ifn = IFN_MUL_OVERFLOW;
8831 break;
8832 default:
8833 gcc_unreachable ();
8834 }
732905bb 8835
8836 /* For the "generic" overloads, the first two arguments can have different
8837 types and the last argument determines the target type to use to check
8838 for overflow. The arguments of the other overloads all have the same
8839 type. */
8840 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8841
8842 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8843 arguments are constant, attempt to fold the built-in call into a constant
8844 expression indicating whether or not it detected an overflow. */
8845 if (ovf_only
8846 && TREE_CODE (arg0) == INTEGER_CST
8847 && TREE_CODE (arg1) == INTEGER_CST)
8848 /* Perform the computation in the target type and check for overflow. */
8849 return omit_one_operand_loc (loc, boolean_type_node,
8850 arith_overflowed_p (opcode, type, arg0, arg1)
8851 ? boolean_true_node : boolean_false_node,
8852 arg2);
8853
0c93c8a9 8854 tree ctype = build_complex_type (type);
8855 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8856 2, arg0, arg1);
8857 tree tgt = save_expr (call);
8858 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8859 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8860 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
732905bb 8861
8862 if (ovf_only)
8863 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8864
8865 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
0c93c8a9 8866 tree store
8867 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8868 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8869}
8870
c388a0cf 8871/* Fold a call to __builtin_FILE to a constant string. */
8872
8873static inline tree
8874fold_builtin_FILE (location_t loc)
8875{
8876 if (const char *fname = LOCATION_FILE (loc))
859b51f8 8877 {
8878 /* The documentation says this builtin is equivalent to the preprocessor
8879 __FILE__ macro so it appears appropriate to use the same file prefix
8880 mappings. */
8881 fname = remap_macro_filename (fname);
c388a0cf 8882 return build_string_literal (strlen (fname) + 1, fname);
859b51f8 8883 }
c388a0cf 8884
8885 return build_string_literal (1, "");
8886}
8887
8888/* Fold a call to __builtin_FUNCTION to a constant string. */
8889
8890static inline tree
8891fold_builtin_FUNCTION ()
8892{
c2d38635 8893 const char *name = "";
8894
c388a0cf 8895 if (current_function_decl)
c2d38635 8896 name = lang_hooks.decl_printable_name (current_function_decl, 0);
c388a0cf 8897
c2d38635 8898 return build_string_literal (strlen (name) + 1, name);
c388a0cf 8899}
8900
8901/* Fold a call to __builtin_LINE to an integer constant. */
8902
8903static inline tree
8904fold_builtin_LINE (location_t loc, tree type)
8905{
8906 return build_int_cst (type, LOCATION_LINE (loc));
8907}
8908
c2f47e15 8909/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 8910 This function returns NULL_TREE if no simplification was possible. */
650e4c94 8911
4ee9c684 8912static tree
e80cc485 8913fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 8914{
e9f80ff5 8915 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 8916 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 8917 switch (fcode)
650e4c94 8918 {
c388a0cf 8919 case BUILT_IN_FILE:
8920 return fold_builtin_FILE (loc);
8921
8922 case BUILT_IN_FUNCTION:
8923 return fold_builtin_FUNCTION ();
8924
8925 case BUILT_IN_LINE:
8926 return fold_builtin_LINE (loc, type);
8927
c2f47e15 8928 CASE_FLT_FN (BUILT_IN_INF):
012f068a 8929 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
c2f47e15 8930 case BUILT_IN_INFD32:
8931 case BUILT_IN_INFD64:
8932 case BUILT_IN_INFD128:
389dd41b 8933 return fold_builtin_inf (loc, type, true);
7c2f0500 8934
c2f47e15 8935 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
012f068a 8936 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
389dd41b 8937 return fold_builtin_inf (loc, type, false);
7c2f0500 8938
c2f47e15 8939 case BUILT_IN_CLASSIFY_TYPE:
8940 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 8941
c2f47e15 8942 default:
8943 break;
8944 }
8945 return NULL_TREE;
8946}
7c2f0500 8947
c2f47e15 8948/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 8949 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 8950
c2f47e15 8951static tree
e80cc485 8952fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 8953{
8954 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8955 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 8956
8957 if (TREE_CODE (arg0) == ERROR_MARK)
8958 return NULL_TREE;
8959
744fe358 8960 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
6c21be92 8961 return ret;
8962
c2f47e15 8963 switch (fcode)
8964 {
650e4c94 8965 case BUILT_IN_CONSTANT_P:
7c2f0500 8966 {
c2f47e15 8967 tree val = fold_builtin_constant_p (arg0);
7c2f0500 8968
7c2f0500 8969 /* Gimplification will pull the CALL_EXPR for the builtin out of
8970 an if condition. When not optimizing, we'll not CSE it back.
8971 To avoid link error types of regressions, return false now. */
8972 if (!val && !optimize)
8973 val = integer_zero_node;
8974
8975 return val;
8976 }
650e4c94 8977
539a3a92 8978 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 8979 return fold_builtin_classify_type (arg0);
539a3a92 8980
650e4c94 8981 case BUILT_IN_STRLEN:
c7cbde74 8982 return fold_builtin_strlen (loc, type, arg0);
650e4c94 8983
4f35b1fc 8984 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 8985 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 8986 case BUILT_IN_FABSD32:
8987 case BUILT_IN_FABSD64:
8988 case BUILT_IN_FABSD128:
389dd41b 8989 return fold_builtin_fabs (loc, arg0, type);
d1aade50 8990
8991 case BUILT_IN_ABS:
8992 case BUILT_IN_LABS:
8993 case BUILT_IN_LLABS:
8994 case BUILT_IN_IMAXABS:
389dd41b 8995 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 8996
4f35b1fc 8997 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 8998 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 8999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9000 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 9001 break;
36d3581d 9002
4f35b1fc 9003 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 9004 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 9006 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 9007 break;
36d3581d 9008
4f35b1fc 9009 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 9010 if (validate_arg (arg0, COMPLEX_TYPE)
9011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9012 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 9013 break;
36d3581d 9014
6c21be92 9015 CASE_FLT_FN (BUILT_IN_CARG):
9016 return fold_builtin_carg (loc, arg0, type);
c2373fdb 9017
6c21be92 9018 case BUILT_IN_ISASCII:
9019 return fold_builtin_isascii (loc, arg0);
48e1416a 9020
6c21be92 9021 case BUILT_IN_TOASCII:
9022 return fold_builtin_toascii (loc, arg0);
48e1416a 9023
6c21be92 9024 case BUILT_IN_ISDIGIT:
9025 return fold_builtin_isdigit (loc, arg0);
48e1416a 9026
12f08300 9027 CASE_FLT_FN (BUILT_IN_FINITE):
9028 case BUILT_IN_FINITED32:
9029 case BUILT_IN_FINITED64:
9030 case BUILT_IN_FINITED128:
9031 case BUILT_IN_ISFINITE:
9032 {
9033 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9034 if (ret)
9035 return ret;
9036 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9037 }
9038
9039 CASE_FLT_FN (BUILT_IN_ISINF):
9040 case BUILT_IN_ISINFD32:
9041 case BUILT_IN_ISINFD64:
9042 case BUILT_IN_ISINFD128:
9043 {
9044 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9045 if (ret)
9046 return ret;
9047 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9048 }
9049
9050 case BUILT_IN_ISNORMAL:
9051 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9052
6c21be92 9053 case BUILT_IN_ISINF_SIGN:
12f08300 9054 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9055
9056 CASE_FLT_FN (BUILT_IN_ISNAN):
9057 case BUILT_IN_ISNAND32:
9058 case BUILT_IN_ISNAND64:
9059 case BUILT_IN_ISNAND128:
9060 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
48e1416a 9061
6c21be92 9062 case BUILT_IN_FREE:
9063 if (integer_zerop (arg0))
9064 return build_empty_stmt (loc);
d064d976 9065 break;
c63f4ad3 9066
6c21be92 9067 default:
8b4af95f 9068 break;
6c21be92 9069 }
805e22b2 9070
6c21be92 9071 return NULL_TREE;
3bc5c41b 9072
6c21be92 9073}
728bac60 9074
6c21be92 9075/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9076 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 9077
9078static tree
e80cc485 9079fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 9080{
9081 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9082 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9083
6c21be92 9084 if (TREE_CODE (arg0) == ERROR_MARK
9085 || TREE_CODE (arg1) == ERROR_MARK)
9086 return NULL_TREE;
e5407ca6 9087
744fe358 9088 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
6c21be92 9089 return ret;
e84da7c1 9090
6c21be92 9091 switch (fcode)
9092 {
e84da7c1 9093 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9094 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9095 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9096 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 9097 return do_mpfr_lgamma_r (arg0, arg1, type);
9098 break;
c2f47e15 9099
3838b9ae 9100 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 9101 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 9102
ebf8b4f5 9103 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 9104 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 9105
c2f47e15 9106 case BUILT_IN_STRSPN:
389dd41b 9107 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 9108
9109 case BUILT_IN_STRCSPN:
389dd41b 9110 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 9111
c2f47e15 9112 case BUILT_IN_STRPBRK:
389dd41b 9113 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 9114
9115 case BUILT_IN_EXPECT:
c83059be 9116 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 9117
9bc9f15f 9118 case BUILT_IN_ISGREATER:
389dd41b 9119 return fold_builtin_unordered_cmp (loc, fndecl,
9120 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 9121 case BUILT_IN_ISGREATEREQUAL:
389dd41b 9122 return fold_builtin_unordered_cmp (loc, fndecl,
9123 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 9124 case BUILT_IN_ISLESS:
389dd41b 9125 return fold_builtin_unordered_cmp (loc, fndecl,
9126 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 9127 case BUILT_IN_ISLESSEQUAL:
389dd41b 9128 return fold_builtin_unordered_cmp (loc, fndecl,
9129 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 9130 case BUILT_IN_ISLESSGREATER:
389dd41b 9131 return fold_builtin_unordered_cmp (loc, fndecl,
9132 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 9133 case BUILT_IN_ISUNORDERED:
389dd41b 9134 return fold_builtin_unordered_cmp (loc, fndecl,
9135 arg0, arg1, UNORDERED_EXPR,
d5019fe8 9136 NOP_EXPR);
9bc9f15f 9137
7c2f0500 9138 /* We do the folding for va_start in the expander. */
9139 case BUILT_IN_VA_START:
9140 break;
f0613857 9141
0a39fd54 9142 case BUILT_IN_OBJECT_SIZE:
c2f47e15 9143 return fold_builtin_object_size (arg0, arg1);
0a39fd54 9144
1cd6e20d 9145 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9146 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9147
9148 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9149 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9150
c2f47e15 9151 default:
9152 break;
9153 }
9154 return NULL_TREE;
9155}
9156
9157/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 9158 and ARG2.
c2f47e15 9159 This function returns NULL_TREE if no simplification was possible. */
9160
9161static tree
389dd41b 9162fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 9163 tree arg0, tree arg1, tree arg2)
c2f47e15 9164{
9165 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9166 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 9167
9168 if (TREE_CODE (arg0) == ERROR_MARK
9169 || TREE_CODE (arg1) == ERROR_MARK
9170 || TREE_CODE (arg2) == ERROR_MARK)
9171 return NULL_TREE;
9172
744fe358 9173 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9174 arg0, arg1, arg2))
6c21be92 9175 return ret;
9176
c2f47e15 9177 switch (fcode)
9178 {
9179
9180 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 9181 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 9182
9183 CASE_FLT_FN (BUILT_IN_FMA):
8c32188e 9184 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
b9be572e 9185 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 9186
e5407ca6 9187 CASE_FLT_FN (BUILT_IN_REMQUO):
9188 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9189 && validate_arg (arg1, REAL_TYPE)
9190 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 9191 return do_mpfr_remquo (arg0, arg1, arg2);
9192 break;
e5407ca6 9193
c2f47e15 9194 case BUILT_IN_MEMCMP:
7f38a6aa 9195 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 9196
c83059be 9197 case BUILT_IN_EXPECT:
9198 return fold_builtin_expect (loc, arg0, arg1, arg2);
9199
0c93c8a9 9200 case BUILT_IN_ADD_OVERFLOW:
9201 case BUILT_IN_SUB_OVERFLOW:
9202 case BUILT_IN_MUL_OVERFLOW:
732905bb 9203 case BUILT_IN_ADD_OVERFLOW_P:
9204 case BUILT_IN_SUB_OVERFLOW_P:
9205 case BUILT_IN_MUL_OVERFLOW_P:
0c93c8a9 9206 case BUILT_IN_SADD_OVERFLOW:
9207 case BUILT_IN_SADDL_OVERFLOW:
9208 case BUILT_IN_SADDLL_OVERFLOW:
9209 case BUILT_IN_SSUB_OVERFLOW:
9210 case BUILT_IN_SSUBL_OVERFLOW:
9211 case BUILT_IN_SSUBLL_OVERFLOW:
9212 case BUILT_IN_SMUL_OVERFLOW:
9213 case BUILT_IN_SMULL_OVERFLOW:
9214 case BUILT_IN_SMULLL_OVERFLOW:
9215 case BUILT_IN_UADD_OVERFLOW:
9216 case BUILT_IN_UADDL_OVERFLOW:
9217 case BUILT_IN_UADDLL_OVERFLOW:
9218 case BUILT_IN_USUB_OVERFLOW:
9219 case BUILT_IN_USUBL_OVERFLOW:
9220 case BUILT_IN_USUBLL_OVERFLOW:
9221 case BUILT_IN_UMUL_OVERFLOW:
9222 case BUILT_IN_UMULL_OVERFLOW:
9223 case BUILT_IN_UMULLL_OVERFLOW:
9224 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9225
650e4c94 9226 default:
9227 break;
9228 }
c2f47e15 9229 return NULL_TREE;
9230}
650e4c94 9231
c2f47e15 9232/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 9233 arguments. IGNORE is true if the result of the
9234 function call is ignored. This function returns NULL_TREE if no
9235 simplification was possible. */
48e1416a 9236
2165588a 9237tree
e80cc485 9238fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 9239{
9240 tree ret = NULL_TREE;
a7f5bb2d 9241
c2f47e15 9242 switch (nargs)
9243 {
9244 case 0:
e80cc485 9245 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 9246 break;
9247 case 1:
e80cc485 9248 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 9249 break;
9250 case 2:
e80cc485 9251 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 9252 break;
9253 case 3:
e80cc485 9254 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 9255 break;
c2f47e15 9256 default:
12f08300 9257 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 9258 break;
9259 }
9260 if (ret)
9261 {
75a70cf9 9262 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 9263 SET_EXPR_LOCATION (ret, loc);
c2f47e15 9264 TREE_NO_WARNING (ret) = 1;
9265 return ret;
9266 }
9267 return NULL_TREE;
9268}
9269
0e80b01d 9270/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9271 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9272 of arguments in ARGS to be omitted. OLDNARGS is the number of
9273 elements in ARGS. */
c2f47e15 9274
9275static tree
0e80b01d 9276rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9277 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 9278{
0e80b01d 9279 int nargs = oldnargs - skip + n;
9280 tree *buffer;
c2f47e15 9281
0e80b01d 9282 if (n > 0)
c2f47e15 9283 {
0e80b01d 9284 int i, j;
c2f47e15 9285
0e80b01d 9286 buffer = XALLOCAVEC (tree, nargs);
9287 for (i = 0; i < n; i++)
9288 buffer[i] = va_arg (newargs, tree);
9289 for (j = skip; j < oldnargs; j++, i++)
9290 buffer[i] = args[j];
9291 }
9292 else
9293 buffer = args + skip;
19fbe3a4 9294
0e80b01d 9295 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9296}
c2f47e15 9297
198622c0 9298/* Return true if FNDECL shouldn't be folded right now.
9299 If a built-in function has an inline attribute always_inline
9300 wrapper, defer folding it after always_inline functions have
9301 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9302 might not be performed. */
9303
51d2c51e 9304bool
198622c0 9305avoid_folding_inline_builtin (tree fndecl)
9306{
9307 return (DECL_DECLARED_INLINE_P (fndecl)
9308 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9309 && cfun
9310 && !cfun->always_inline_functions_inlined
9311 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9312}
9313
4ee9c684 9314/* A wrapper function for builtin folding that prevents warnings for
491e04ef 9315 "statement without effect" and the like, caused by removing the
4ee9c684 9316 call node earlier than the warning is generated. */
9317
9318tree
389dd41b 9319fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 9320{
c2f47e15 9321 tree ret = NULL_TREE;
9322 tree fndecl = get_callee_fndecl (exp);
9323 if (fndecl
9324 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 9325 && DECL_BUILT_IN (fndecl)
9326 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9327 yet. Defer folding until we see all the arguments
9328 (after inlining). */
9329 && !CALL_EXPR_VA_ARG_PACK (exp))
9330 {
9331 int nargs = call_expr_nargs (exp);
9332
9333 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9334 instead last argument is __builtin_va_arg_pack (). Defer folding
9335 even in that case, until arguments are finalized. */
9336 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9337 {
9338 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9339 if (fndecl2
9340 && TREE_CODE (fndecl2) == FUNCTION_DECL
9341 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9342 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9343 return NULL_TREE;
9344 }
9345
198622c0 9346 if (avoid_folding_inline_builtin (fndecl))
9347 return NULL_TREE;
9348
c2f47e15 9349 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 9350 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9351 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 9352 else
9353 {
9d884767 9354 tree *args = CALL_EXPR_ARGP (exp);
9355 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 9356 if (ret)
389dd41b 9357 return ret;
c2f47e15 9358 }
4ee9c684 9359 }
c2f47e15 9360 return NULL_TREE;
9361}
48e1416a 9362
9d884767 9363/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9364 N arguments are passed in the array ARGARRAY. Return a folded
9365 expression or NULL_TREE if no simplification was possible. */
805e22b2 9366
9367tree
9d884767 9368fold_builtin_call_array (location_t loc, tree,
d01f58f9 9369 tree fn,
9370 int n,
9371 tree *argarray)
7e15618b 9372{
9d884767 9373 if (TREE_CODE (fn) != ADDR_EXPR)
9374 return NULL_TREE;
c2f47e15 9375
9d884767 9376 tree fndecl = TREE_OPERAND (fn, 0);
9377 if (TREE_CODE (fndecl) == FUNCTION_DECL
9378 && DECL_BUILT_IN (fndecl))
9379 {
9380 /* If last argument is __builtin_va_arg_pack (), arguments to this
9381 function are not finalized yet. Defer folding until they are. */
9382 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9383 {
9384 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9385 if (fndecl2
9386 && TREE_CODE (fndecl2) == FUNCTION_DECL
9387 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9388 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9389 return NULL_TREE;
9390 }
9391 if (avoid_folding_inline_builtin (fndecl))
9392 return NULL_TREE;
9393 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9394 return targetm.fold_builtin (fndecl, n, argarray, false);
9395 else
9396 return fold_builtin_n (loc, fndecl, argarray, n, false);
9397 }
c2f47e15 9398
9d884767 9399 return NULL_TREE;
c2f47e15 9400}
9401
af1409ad 9402/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9403 along with N new arguments specified as the "..." parameters. SKIP
9404 is the number of arguments in EXP to be omitted. This function is used
9405 to do varargs-to-varargs transformations. */
9406
9407static tree
9408rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9409{
9410 va_list ap;
9411 tree t;
9412
9413 va_start (ap, n);
9414 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9415 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9416 va_end (ap);
c2f47e15 9417
af1409ad 9418 return t;
c2f47e15 9419}
9420
9421/* Validate a single argument ARG against a tree code CODE representing
184fac50 9422 a type. Return true when argument is valid. */
48e1416a 9423
c2f47e15 9424static bool
184fac50 9425validate_arg (const_tree arg, enum tree_code code)
c2f47e15 9426{
9427 if (!arg)
9428 return false;
9429 else if (code == POINTER_TYPE)
184fac50 9430 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 9431 else if (code == INTEGER_TYPE)
9432 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 9433 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 9434}
0eb671f7 9435
75a70cf9 9436/* This function validates the types of a function call argument list
9437 against a specified list of tree_codes. If the last specifier is a 0,
9438 that represents an ellipses, otherwise the last specifier must be a
9439 VOID_TYPE.
9440
9441 This is the GIMPLE version of validate_arglist. Eventually we want to
9442 completely convert builtins.c to work from GIMPLEs and the tree based
9443 validate_arglist will then be removed. */
9444
9445bool
1a91d914 9446validate_gimple_arglist (const gcall *call, ...)
75a70cf9 9447{
9448 enum tree_code code;
9449 bool res = 0;
9450 va_list ap;
9451 const_tree arg;
9452 size_t i;
9453
9454 va_start (ap, call);
9455 i = 0;
9456
9457 do
9458 {
d62e827b 9459 code = (enum tree_code) va_arg (ap, int);
75a70cf9 9460 switch (code)
9461 {
9462 case 0:
9463 /* This signifies an ellipses, any further arguments are all ok. */
9464 res = true;
9465 goto end;
9466 case VOID_TYPE:
9467 /* This signifies an endlink, if no arguments remain, return
9468 true, otherwise return false. */
9469 res = (i == gimple_call_num_args (call));
9470 goto end;
9471 default:
9472 /* If no parameters remain or the parameter's code does not
9473 match the specified code, return false. Otherwise continue
9474 checking any remaining arguments. */
9475 arg = gimple_call_arg (call, i++);
9476 if (!validate_arg (arg, code))
9477 goto end;
9478 break;
9479 }
9480 }
9481 while (1);
9482
9483 /* We need gotos here since we can only have one VA_CLOSE in a
9484 function. */
9485 end: ;
9486 va_end (ap);
9487
9488 return res;
9489}
9490
fc2a2dcb 9491/* Default target-specific builtin expander that does nothing. */
9492
9493rtx
aecda0d6 9494default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9495 rtx target ATTRIBUTE_UNUSED,
9496 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 9497 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 9498 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 9499{
9500 return NULL_RTX;
9501}
c7926a82 9502
01537105 9503/* Returns true is EXP represents data that would potentially reside
9504 in a readonly section. */
9505
b9ea678c 9506bool
01537105 9507readonly_data_expr (tree exp)
9508{
9509 STRIP_NOPS (exp);
9510
9ff0637e 9511 if (TREE_CODE (exp) != ADDR_EXPR)
9512 return false;
9513
9514 exp = get_base_address (TREE_OPERAND (exp, 0));
9515 if (!exp)
9516 return false;
9517
9518 /* Make sure we call decl_readonly_section only for trees it
9519 can handle (since it returns true for everything it doesn't
9520 understand). */
491e04ef 9521 if (TREE_CODE (exp) == STRING_CST
9ff0637e 9522 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 9523 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 9524 return decl_readonly_section (exp, 0);
01537105 9525 else
9526 return false;
9527}
4ee9c684 9528
c2f47e15 9529/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9530 to the call, and TYPE is its return type.
4ee9c684 9531
c2f47e15 9532 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9533 simplified form of the call as a tree.
9534
9535 The simplified form may be a constant or other expression which
9536 computes the same value, but in a more efficient manner (including
9537 calls to other builtin functions).
9538
9539 The call may contain arguments which need to be evaluated, but
9540 which are not useful to determine the result of the call. In
9541 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9542 COMPOUND_EXPR will be an argument which must be evaluated.
9543 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9544 COMPOUND_EXPR in the chain will contain the tree for the simplified
9545 form of the builtin function call. */
9546
9547static tree
389dd41b 9548fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 9549{
c2f47e15 9550 if (!validate_arg (s1, POINTER_TYPE)
9551 || !validate_arg (s2, POINTER_TYPE))
9552 return NULL_TREE;
4ee9c684 9553 else
9554 {
4ee9c684 9555 tree fn;
9556 const char *p1, *p2;
9557
9558 p2 = c_getstr (s2);
9559 if (p2 == NULL)
c2f47e15 9560 return NULL_TREE;
4ee9c684 9561
9562 p1 = c_getstr (s1);
9563 if (p1 != NULL)
9564 {
9565 const char *r = strpbrk (p1, p2);
daa1d5f5 9566 tree tem;
4ee9c684 9567
9568 if (r == NULL)
779b4c41 9569 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 9570
9571 /* Return an offset into the constant string argument. */
2cc66f2a 9572 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 9573 return fold_convert_loc (loc, type, tem);
4ee9c684 9574 }
9575
9576 if (p2[0] == '\0')
05abc81b 9577 /* strpbrk(x, "") == NULL.
9578 Evaluate and ignore s1 in case it had side-effects. */
44bfe16d 9579 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
4ee9c684 9580
9581 if (p2[1] != '\0')
c2f47e15 9582 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 9583
b9a16870 9584 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 9585 if (!fn)
c2f47e15 9586 return NULL_TREE;
4ee9c684 9587
9588 /* New argument list transforming strpbrk(s1, s2) to
9589 strchr(s1, s2[0]). */
7002a1c8 9590 return build_call_expr_loc (loc, fn, 2, s1,
9591 build_int_cst (integer_type_node, p2[0]));
4ee9c684 9592 }
9593}
9594
c2f47e15 9595/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9596 to the call.
4ee9c684 9597
c2f47e15 9598 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9599 simplified form of the call as a tree.
9600
9601 The simplified form may be a constant or other expression which
9602 computes the same value, but in a more efficient manner (including
9603 calls to other builtin functions).
9604
9605 The call may contain arguments which need to be evaluated, but
9606 which are not useful to determine the result of the call. In
9607 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9608 COMPOUND_EXPR will be an argument which must be evaluated.
9609 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9610 COMPOUND_EXPR in the chain will contain the tree for the simplified
9611 form of the builtin function call. */
9612
9613static tree
389dd41b 9614fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 9615{
c2f47e15 9616 if (!validate_arg (s1, POINTER_TYPE)
9617 || !validate_arg (s2, POINTER_TYPE))
9618 return NULL_TREE;
4ee9c684 9619 else
9620 {
4ee9c684 9621 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9622
c2f47e15 9623 /* If either argument is "", return NULL_TREE. */
4ee9c684 9624 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 9625 /* Evaluate and ignore both arguments in case either one has
9626 side-effects. */
389dd41b 9627 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 9628 s1, s2);
c2f47e15 9629 return NULL_TREE;
4ee9c684 9630 }
9631}
9632
c2f47e15 9633/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9634 to the call.
4ee9c684 9635
c2f47e15 9636 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9637 simplified form of the call as a tree.
9638
9639 The simplified form may be a constant or other expression which
9640 computes the same value, but in a more efficient manner (including
9641 calls to other builtin functions).
9642
9643 The call may contain arguments which need to be evaluated, but
9644 which are not useful to determine the result of the call. In
9645 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9646 COMPOUND_EXPR will be an argument which must be evaluated.
9647 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9648 COMPOUND_EXPR in the chain will contain the tree for the simplified
9649 form of the builtin function call. */
9650
9651static tree
389dd41b 9652fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 9653{
c2f47e15 9654 if (!validate_arg (s1, POINTER_TYPE)
9655 || !validate_arg (s2, POINTER_TYPE))
9656 return NULL_TREE;
4ee9c684 9657 else
9658 {
c2f47e15 9659 /* If the first argument is "", return NULL_TREE. */
b5e46e2c 9660 const char *p1 = c_getstr (s1);
4ee9c684 9661 if (p1 && *p1 == '\0')
9662 {
9663 /* Evaluate and ignore argument s2 in case it has
9664 side-effects. */
389dd41b 9665 return omit_one_operand_loc (loc, size_type_node,
39761420 9666 size_zero_node, s2);
4ee9c684 9667 }
9668
9669 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 9670 const char *p2 = c_getstr (s2);
4ee9c684 9671 if (p2 && *p2 == '\0')
9672 {
b9a16870 9673 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 9674
9675 /* If the replacement _DECL isn't initialized, don't do the
9676 transformation. */
9677 if (!fn)
c2f47e15 9678 return NULL_TREE;
4ee9c684 9679
389dd41b 9680 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 9681 }
c2f47e15 9682 return NULL_TREE;
4ee9c684 9683 }
9684}
9685
c2f47e15 9686/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 9687 produced. False otherwise. This is done so that we don't output the error
9688 or warning twice or three times. */
75a70cf9 9689
743b0c6a 9690bool
c2f47e15 9691fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 9692{
9693 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 9694 int nargs = call_expr_nargs (exp);
9695 tree arg;
d98fd4a4 9696 /* There is good chance the current input_location points inside the
9697 definition of the va_start macro (perhaps on the token for
9698 builtin) in a system header, so warnings will not be emitted.
9699 Use the location in real source code. */
9700 source_location current_location =
9701 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9702 NULL);
4ee9c684 9703
257d99c3 9704 if (!stdarg_p (fntype))
743b0c6a 9705 {
9706 error ("%<va_start%> used in function with fixed args");
9707 return true;
9708 }
c2f47e15 9709
9710 if (va_start_p)
79012a9d 9711 {
c2f47e15 9712 if (va_start_p && (nargs != 2))
9713 {
9714 error ("wrong number of arguments to function %<va_start%>");
9715 return true;
9716 }
9717 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 9718 }
9719 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9720 when we checked the arguments and if needed issued a warning. */
c2f47e15 9721 else
4ee9c684 9722 {
c2f47e15 9723 if (nargs == 0)
9724 {
9725 /* Evidently an out of date version of <stdarg.h>; can't validate
9726 va_start's second argument, but can still work as intended. */
d98fd4a4 9727 warning_at (current_location,
7edb1062 9728 OPT_Wvarargs,
9729 "%<__builtin_next_arg%> called without an argument");
c2f47e15 9730 return true;
9731 }
9732 else if (nargs > 1)
a0c938f0 9733 {
c2f47e15 9734 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 9735 return true;
9736 }
c2f47e15 9737 arg = CALL_EXPR_ARG (exp, 0);
9738 }
9739
a8dd994c 9740 if (TREE_CODE (arg) == SSA_NAME)
9741 arg = SSA_NAME_VAR (arg);
9742
c2f47e15 9743 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 9744 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 9745 the arguments and if needed issuing a warning. */
9746 if (!integer_zerop (arg))
9747 {
9748 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 9749
4ee9c684 9750 /* Strip off all nops for the sake of the comparison. This
9751 is not quite the same as STRIP_NOPS. It does more.
9752 We must also strip off INDIRECT_EXPR for C++ reference
9753 parameters. */
72dd6141 9754 while (CONVERT_EXPR_P (arg)
4ee9c684 9755 || TREE_CODE (arg) == INDIRECT_REF)
9756 arg = TREE_OPERAND (arg, 0);
9757 if (arg != last_parm)
a0c938f0 9758 {
b08cf617 9759 /* FIXME: Sometimes with the tree optimizers we can get the
9760 not the last argument even though the user used the last
9761 argument. We just warn and set the arg to be the last
9762 argument so that we will get wrong-code because of
9763 it. */
d98fd4a4 9764 warning_at (current_location,
7edb1062 9765 OPT_Wvarargs,
d98fd4a4 9766 "second parameter of %<va_start%> not last named argument");
743b0c6a 9767 }
24158ad7 9768
9769 /* Undefined by C99 7.15.1.4p4 (va_start):
9770 "If the parameter parmN is declared with the register storage
9771 class, with a function or array type, or with a type that is
9772 not compatible with the type that results after application of
9773 the default argument promotions, the behavior is undefined."
9774 */
9775 else if (DECL_REGISTER (arg))
d98fd4a4 9776 {
9777 warning_at (current_location,
7edb1062 9778 OPT_Wvarargs,
67cf9b55 9779 "undefined behavior when second parameter of "
d98fd4a4 9780 "%<va_start%> is declared with %<register%> storage");
9781 }
24158ad7 9782
79012a9d 9783 /* We want to verify the second parameter just once before the tree
a0c938f0 9784 optimizers are run and then avoid keeping it in the tree,
9785 as otherwise we could warn even for correct code like:
9786 void foo (int i, ...)
9787 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 9788 if (va_start_p)
9789 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9790 else
9791 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 9792 }
9793 return false;
4ee9c684 9794}
9795
9796
c2f47e15 9797/* Expand a call EXP to __builtin_object_size. */
0a39fd54 9798
f7715905 9799static rtx
0a39fd54 9800expand_builtin_object_size (tree exp)
9801{
9802 tree ost;
9803 int object_size_type;
9804 tree fndecl = get_callee_fndecl (exp);
0a39fd54 9805
c2f47e15 9806 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 9807 {
8c41abe8 9808 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
b8c23db3 9809 exp, fndecl);
0a39fd54 9810 expand_builtin_trap ();
9811 return const0_rtx;
9812 }
9813
c2f47e15 9814 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 9815 STRIP_NOPS (ost);
9816
9817 if (TREE_CODE (ost) != INTEGER_CST
9818 || tree_int_cst_sgn (ost) < 0
9819 || compare_tree_int (ost, 3) > 0)
9820 {
8c41abe8 9821 error ("%Klast argument of %qD is not integer constant between 0 and 3",
b8c23db3 9822 exp, fndecl);
0a39fd54 9823 expand_builtin_trap ();
9824 return const0_rtx;
9825 }
9826
e913b5cd 9827 object_size_type = tree_to_shwi (ost);
0a39fd54 9828
9829 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9830}
9831
9832/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9833 FCODE is the BUILT_IN_* to use.
c2f47e15 9834 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 9835 otherwise try to get the result in TARGET, if convenient (and in
9836 mode MODE if that's convenient). */
9837
9838static rtx
3754d046 9839expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 9840 enum built_in_function fcode)
9841{
c2f47e15 9842 if (!validate_arglist (exp,
0a39fd54 9843 POINTER_TYPE,
9844 fcode == BUILT_IN_MEMSET_CHK
9845 ? INTEGER_TYPE : POINTER_TYPE,
9846 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 9847 return NULL_RTX;
0a39fd54 9848
e6a18b5a 9849 tree dest = CALL_EXPR_ARG (exp, 0);
9850 tree src = CALL_EXPR_ARG (exp, 1);
9851 tree len = CALL_EXPR_ARG (exp, 2);
9852 tree size = CALL_EXPR_ARG (exp, 3);
0a39fd54 9853
e6a18b5a 9854 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9855 /*str=*/NULL_TREE, size);
5aef8938 9856
9857 if (!tree_fits_uhwi_p (size))
c2f47e15 9858 return NULL_RTX;
0a39fd54 9859
e913b5cd 9860 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 9861 {
5aef8938 9862 /* Avoid transforming the checking call to an ordinary one when
9863 an overflow has been detected or when the call couldn't be
9864 validated because the size is not constant. */
9865 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9866 return NULL_RTX;
0a39fd54 9867
5aef8938 9868 tree fn = NULL_TREE;
0a39fd54 9869 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9870 mem{cpy,pcpy,move,set} is available. */
9871 switch (fcode)
9872 {
9873 case BUILT_IN_MEMCPY_CHK:
b9a16870 9874 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 9875 break;
9876 case BUILT_IN_MEMPCPY_CHK:
b9a16870 9877 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 9878 break;
9879 case BUILT_IN_MEMMOVE_CHK:
b9a16870 9880 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 9881 break;
9882 case BUILT_IN_MEMSET_CHK:
b9a16870 9883 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 9884 break;
9885 default:
9886 break;
9887 }
9888
9889 if (! fn)
c2f47e15 9890 return NULL_RTX;
0a39fd54 9891
0568e9c1 9892 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 9893 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9894 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9895 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9896 }
9897 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 9898 return NULL_RTX;
0a39fd54 9899 else
9900 {
957d0361 9901 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 9902
9903 /* If DEST is not a pointer type, call the normal function. */
9904 if (dest_align == 0)
c2f47e15 9905 return NULL_RTX;
0a39fd54 9906
9907 /* If SRC and DEST are the same (and not volatile), do nothing. */
9908 if (operand_equal_p (src, dest, 0))
9909 {
9910 tree expr;
9911
9912 if (fcode != BUILT_IN_MEMPCPY_CHK)
9913 {
9914 /* Evaluate and ignore LEN in case it has side-effects. */
9915 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9916 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9917 }
9918
2cc66f2a 9919 expr = fold_build_pointer_plus (dest, len);
0a39fd54 9920 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9921 }
9922
9923 /* __memmove_chk special case. */
9924 if (fcode == BUILT_IN_MEMMOVE_CHK)
9925 {
957d0361 9926 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 9927
9928 if (src_align == 0)
c2f47e15 9929 return NULL_RTX;
0a39fd54 9930
9931 /* If src is categorized for a readonly section we can use
9932 normal __memcpy_chk. */
9933 if (readonly_data_expr (src))
9934 {
b9a16870 9935 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 9936 if (!fn)
c2f47e15 9937 return NULL_RTX;
0568e9c1 9938 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9939 dest, src, len, size);
a65c4d64 9940 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9941 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9942 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9943 }
9944 }
c2f47e15 9945 return NULL_RTX;
0a39fd54 9946 }
9947}
9948
9949/* Emit warning if a buffer overflow is detected at compile time. */
9950
9951static void
9952maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9953{
5aef8938 9954 /* The source string. */
9955 tree srcstr = NULL_TREE;
9956 /* The size of the destination object. */
9957 tree objsize = NULL_TREE;
9958 /* The string that is being concatenated with (as in __strcat_chk)
9959 or null if it isn't. */
9960 tree catstr = NULL_TREE;
9961 /* The maximum length of the source sequence in a bounded operation
9962 (such as __strncat_chk) or null if the operation isn't bounded
9963 (such as __strcat_chk). */
e6a18b5a 9964 tree maxread = NULL_TREE;
f3969b49 9965 /* The exact size of the access (such as in __strncpy_chk). */
9966 tree size = NULL_TREE;
0a39fd54 9967
9968 switch (fcode)
9969 {
9970 case BUILT_IN_STRCPY_CHK:
9971 case BUILT_IN_STPCPY_CHK:
5aef8938 9972 srcstr = CALL_EXPR_ARG (exp, 1);
9973 objsize = CALL_EXPR_ARG (exp, 2);
9974 break;
9975
0a39fd54 9976 case BUILT_IN_STRCAT_CHK:
5aef8938 9977 /* For __strcat_chk the warning will be emitted only if overflowing
9978 by at least strlen (dest) + 1 bytes. */
9979 catstr = CALL_EXPR_ARG (exp, 0);
9980 srcstr = CALL_EXPR_ARG (exp, 1);
9981 objsize = CALL_EXPR_ARG (exp, 2);
0a39fd54 9982 break;
5aef8938 9983
b356dfef 9984 case BUILT_IN_STRNCAT_CHK:
5aef8938 9985 catstr = CALL_EXPR_ARG (exp, 0);
9986 srcstr = CALL_EXPR_ARG (exp, 1);
e6a18b5a 9987 maxread = CALL_EXPR_ARG (exp, 2);
5aef8938 9988 objsize = CALL_EXPR_ARG (exp, 3);
9989 break;
9990
0a39fd54 9991 case BUILT_IN_STRNCPY_CHK:
1063acde 9992 case BUILT_IN_STPNCPY_CHK:
5aef8938 9993 srcstr = CALL_EXPR_ARG (exp, 1);
f3969b49 9994 size = CALL_EXPR_ARG (exp, 2);
5aef8938 9995 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 9996 break;
5aef8938 9997
0a39fd54 9998 case BUILT_IN_SNPRINTF_CHK:
9999 case BUILT_IN_VSNPRINTF_CHK:
e6a18b5a 10000 maxread = CALL_EXPR_ARG (exp, 1);
5aef8938 10001 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 10002 break;
10003 default:
10004 gcc_unreachable ();
10005 }
10006
e6a18b5a 10007 if (catstr && maxread)
0a39fd54 10008 {
5aef8938 10009 /* Check __strncat_chk. There is no way to determine the length
10010 of the string to which the source string is being appended so
10011 just warn when the length of the source string is not known. */
8d6c6ef5 10012 check_strncat_sizes (exp, objsize);
10013 return;
0a39fd54 10014 }
0a39fd54 10015
e6a18b5a 10016 /* The destination argument is the first one for all built-ins above. */
10017 tree dst = CALL_EXPR_ARG (exp, 0);
10018
10019 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
0a39fd54 10020}
10021
10022/* Emit warning if a buffer overflow is detected at compile time
10023 in __sprintf_chk/__vsprintf_chk calls. */
10024
10025static void
10026maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10027{
1e4adcfc 10028 tree size, len, fmt;
0a39fd54 10029 const char *fmt_str;
c2f47e15 10030 int nargs = call_expr_nargs (exp);
0a39fd54 10031
10032 /* Verify the required arguments in the original call. */
48e1416a 10033
c2f47e15 10034 if (nargs < 4)
0a39fd54 10035 return;
c2f47e15 10036 size = CALL_EXPR_ARG (exp, 2);
10037 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 10038
e913b5cd 10039 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 10040 return;
10041
10042 /* Check whether the format is a literal string constant. */
10043 fmt_str = c_getstr (fmt);
10044 if (fmt_str == NULL)
10045 return;
10046
d4473c84 10047 if (!init_target_chars ())
99eabcc1 10048 return;
10049
0a39fd54 10050 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 10051 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 10052 len = build_int_cstu (size_type_node, strlen (fmt_str));
10053 /* If the format is "%s" and first ... argument is a string literal,
10054 we know it too. */
c2f47e15 10055 else if (fcode == BUILT_IN_SPRINTF_CHK
10056 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 10057 {
10058 tree arg;
10059
c2f47e15 10060 if (nargs < 5)
0a39fd54 10061 return;
c2f47e15 10062 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 10063 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10064 return;
10065
10066 len = c_strlen (arg, 1);
e913b5cd 10067 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 10068 return;
10069 }
10070 else
10071 return;
10072
5aef8938 10073 /* Add one for the terminating nul. */
10074 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
e6a18b5a 10075
10076 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10077 /*maxread=*/NULL_TREE, len, size);
0a39fd54 10078}
10079
2c281b15 10080/* Emit warning if a free is called with address of a variable. */
10081
10082static void
10083maybe_emit_free_warning (tree exp)
10084{
10085 tree arg = CALL_EXPR_ARG (exp, 0);
10086
10087 STRIP_NOPS (arg);
10088 if (TREE_CODE (arg) != ADDR_EXPR)
10089 return;
10090
10091 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 10092 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 10093 return;
10094
10095 if (SSA_VAR_P (arg))
f74ea1c2 10096 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10097 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 10098 else
f74ea1c2 10099 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10100 "%Kattempt to free a non-heap object", exp);
2c281b15 10101}
10102
c2f47e15 10103/* Fold a call to __builtin_object_size with arguments PTR and OST,
10104 if possible. */
0a39fd54 10105
f7715905 10106static tree
c2f47e15 10107fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 10108{
a6caa15f 10109 unsigned HOST_WIDE_INT bytes;
0a39fd54 10110 int object_size_type;
10111
c2f47e15 10112 if (!validate_arg (ptr, POINTER_TYPE)
10113 || !validate_arg (ost, INTEGER_TYPE))
10114 return NULL_TREE;
0a39fd54 10115
0a39fd54 10116 STRIP_NOPS (ost);
10117
10118 if (TREE_CODE (ost) != INTEGER_CST
10119 || tree_int_cst_sgn (ost) < 0
10120 || compare_tree_int (ost, 3) > 0)
c2f47e15 10121 return NULL_TREE;
0a39fd54 10122
e913b5cd 10123 object_size_type = tree_to_shwi (ost);
0a39fd54 10124
10125 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10126 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10127 and (size_t) 0 for types 2 and 3. */
10128 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 10129 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 10130
10131 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 10132 {
4e91a07b 10133 compute_builtin_object_size (ptr, object_size_type, &bytes);
6da74b21 10134 if (wi::fits_to_tree_p (bytes, size_type_node))
10135 return build_int_cstu (size_type_node, bytes);
a6caa15f 10136 }
0a39fd54 10137 else if (TREE_CODE (ptr) == SSA_NAME)
10138 {
0a39fd54 10139 /* If object size is not known yet, delay folding until
10140 later. Maybe subsequent passes will help determining
10141 it. */
4e91a07b 10142 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10143 && wi::fits_to_tree_p (bytes, size_type_node))
6da74b21 10144 return build_int_cstu (size_type_node, bytes);
0a39fd54 10145 }
10146
a6caa15f 10147 return NULL_TREE;
0a39fd54 10148}
10149
12f08300 10150/* Builtins with folding operations that operate on "..." arguments
10151 need special handling; we need to store the arguments in a convenient
10152 data structure before attempting any folding. Fortunately there are
10153 only a few builtins that fall into this category. FNDECL is the
10154 function, EXP is the CALL_EXPR for the call. */
10155
10156static tree
10157fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10158{
10159 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10160 tree ret = NULL_TREE;
10161
10162 switch (fcode)
10163 {
10164 case BUILT_IN_FPCLASSIFY:
10165 ret = fold_builtin_fpclassify (loc, args, nargs);
10166 break;
10167
10168 default:
10169 break;
10170 }
10171 if (ret)
10172 {
10173 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10174 SET_EXPR_LOCATION (ret, loc);
10175 TREE_NO_WARNING (ret) = 1;
10176 return ret;
10177 }
10178 return NULL_TREE;
10179}
10180
99eabcc1 10181/* Initialize format string characters in the target charset. */
10182
b9ea678c 10183bool
99eabcc1 10184init_target_chars (void)
10185{
10186 static bool init;
10187 if (!init)
10188 {
10189 target_newline = lang_hooks.to_target_charset ('\n');
10190 target_percent = lang_hooks.to_target_charset ('%');
10191 target_c = lang_hooks.to_target_charset ('c');
10192 target_s = lang_hooks.to_target_charset ('s');
10193 if (target_newline == 0 || target_percent == 0 || target_c == 0
10194 || target_s == 0)
10195 return false;
10196
10197 target_percent_c[0] = target_percent;
10198 target_percent_c[1] = target_c;
10199 target_percent_c[2] = '\0';
10200
10201 target_percent_s[0] = target_percent;
10202 target_percent_s[1] = target_s;
10203 target_percent_s[2] = '\0';
10204
10205 target_percent_s_newline[0] = target_percent;
10206 target_percent_s_newline[1] = target_s;
10207 target_percent_s_newline[2] = target_newline;
10208 target_percent_s_newline[3] = '\0';
a0c938f0 10209
99eabcc1 10210 init = true;
10211 }
10212 return true;
10213}
bffb7645 10214
f0c477f2 10215/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
10216 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 10217 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 10218 function assumes that you cleared the MPFR flags and then
10219 calculated M to see if anything subsequently set a flag prior to
10220 entering this function. Return NULL_TREE if any checks fail. */
10221
10222static tree
d4473c84 10223do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 10224{
10225 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10226 overflow/underflow occurred. If -frounding-math, proceed iff the
10227 result of calling FUNC was exact. */
d4473c84 10228 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 10229 && (!flag_rounding_math || !inexact))
10230 {
10231 REAL_VALUE_TYPE rr;
10232
66fa16e6 10233 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 10234 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10235 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10236 but the mpft_t is not, then we underflowed in the
10237 conversion. */
776a7bab 10238 if (real_isfinite (&rr)
f0c477f2 10239 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10240 {
10241 REAL_VALUE_TYPE rmode;
10242
10243 real_convert (&rmode, TYPE_MODE (type), &rr);
10244 /* Proceed iff the specified mode can hold the value. */
10245 if (real_identical (&rmode, &rr))
10246 return build_real (type, rmode);
10247 }
10248 }
10249 return NULL_TREE;
10250}
10251
239d491a 10252/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
10253 number and no overflow/underflow occurred. INEXACT is true if M
10254 was not exactly calculated. TYPE is the tree type for the result.
10255 This function assumes that you cleared the MPFR flags and then
10256 calculated M to see if anything subsequently set a flag prior to
652d9409 10257 entering this function. Return NULL_TREE if any checks fail, if
10258 FORCE_CONVERT is true, then bypass the checks. */
239d491a 10259
10260static tree
652d9409 10261do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 10262{
10263 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10264 overflow/underflow occurred. If -frounding-math, proceed iff the
10265 result of calling FUNC was exact. */
652d9409 10266 if (force_convert
10267 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10268 && !mpfr_overflow_p () && !mpfr_underflow_p ()
10269 && (!flag_rounding_math || !inexact)))
239d491a 10270 {
10271 REAL_VALUE_TYPE re, im;
10272
b0e7c4d4 10273 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
10274 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 10275 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10276 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
10277 but the mpft_t is not, then we underflowed in the
10278 conversion. */
652d9409 10279 if (force_convert
10280 || (real_isfinite (&re) && real_isfinite (&im)
10281 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10282 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 10283 {
10284 REAL_VALUE_TYPE re_mode, im_mode;
10285
10286 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10287 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10288 /* Proceed iff the specified mode can hold the value. */
652d9409 10289 if (force_convert
10290 || (real_identical (&re_mode, &re)
10291 && real_identical (&im_mode, &im)))
239d491a 10292 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10293 build_real (TREE_TYPE (type), im_mode));
10294 }
10295 }
10296 return NULL_TREE;
10297}
239d491a 10298
e5407ca6 10299/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10300 the pointer *(ARG_QUO) and return the result. The type is taken
10301 from the type of ARG0 and is used for setting the precision of the
10302 calculation and results. */
10303
10304static tree
10305do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10306{
10307 tree const type = TREE_TYPE (arg0);
10308 tree result = NULL_TREE;
48e1416a 10309
e5407ca6 10310 STRIP_NOPS (arg0);
10311 STRIP_NOPS (arg1);
48e1416a 10312
e5407ca6 10313 /* To proceed, MPFR must exactly represent the target floating point
10314 format, which only happens when the target base equals two. */
10315 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10316 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10317 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10318 {
10319 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10320 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10321
776a7bab 10322 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 10323 {
e2eb2b7f 10324 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10325 const int prec = fmt->p;
10326 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e5407ca6 10327 tree result_rem;
10328 long integer_quo;
10329 mpfr_t m0, m1;
10330
10331 mpfr_inits2 (prec, m0, m1, NULL);
10332 mpfr_from_real (m0, ra0, GMP_RNDN);
10333 mpfr_from_real (m1, ra1, GMP_RNDN);
10334 mpfr_clear_flags ();
e2eb2b7f 10335 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 10336 /* Remquo is independent of the rounding mode, so pass
10337 inexact=0 to do_mpfr_ckconv(). */
10338 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10339 mpfr_clears (m0, m1, NULL);
10340 if (result_rem)
10341 {
10342 /* MPFR calculates quo in the host's long so it may
10343 return more bits in quo than the target int can hold
10344 if sizeof(host long) > sizeof(target int). This can
10345 happen even for native compilers in LP64 mode. In
10346 these cases, modulo the quo value with the largest
10347 number that the target int can hold while leaving one
10348 bit for the sign. */
10349 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10350 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10351
10352 /* Dereference the quo pointer argument. */
10353 arg_quo = build_fold_indirect_ref (arg_quo);
10354 /* Proceed iff a valid pointer type was passed in. */
10355 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10356 {
10357 /* Set the value. */
7002a1c8 10358 tree result_quo
10359 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10360 build_int_cst (TREE_TYPE (arg_quo),
10361 integer_quo));
e5407ca6 10362 TREE_SIDE_EFFECTS (result_quo) = 1;
10363 /* Combine the quo assignment with the rem. */
10364 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10365 result_quo, result_rem));
10366 }
10367 }
10368 }
10369 }
10370 return result;
10371}
e84da7c1 10372
10373/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10374 resulting value as a tree with type TYPE. The mpfr precision is
10375 set to the precision of TYPE. We assume that this mpfr function
10376 returns zero if the result could be calculated exactly within the
10377 requested precision. In addition, the integer pointer represented
10378 by ARG_SG will be dereferenced and set to the appropriate signgam
10379 (-1,1) value. */
10380
10381static tree
10382do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10383{
10384 tree result = NULL_TREE;
10385
10386 STRIP_NOPS (arg);
48e1416a 10387
e84da7c1 10388 /* To proceed, MPFR must exactly represent the target floating point
10389 format, which only happens when the target base equals two. Also
10390 verify ARG is a constant and that ARG_SG is an int pointer. */
10391 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10392 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10393 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10394 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10395 {
10396 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10397
10398 /* In addition to NaN and Inf, the argument cannot be zero or a
10399 negative integer. */
776a7bab 10400 if (real_isfinite (ra)
e84da7c1 10401 && ra->cl != rvc_zero
9af5ce0c 10402 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 10403 {
e2eb2b7f 10404 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10405 const int prec = fmt->p;
10406 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e84da7c1 10407 int inexact, sg;
10408 mpfr_t m;
10409 tree result_lg;
10410
10411 mpfr_init2 (m, prec);
10412 mpfr_from_real (m, ra, GMP_RNDN);
10413 mpfr_clear_flags ();
e2eb2b7f 10414 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 10415 result_lg = do_mpfr_ckconv (m, type, inexact);
10416 mpfr_clear (m);
10417 if (result_lg)
10418 {
10419 tree result_sg;
10420
10421 /* Dereference the arg_sg pointer argument. */
10422 arg_sg = build_fold_indirect_ref (arg_sg);
10423 /* Assign the signgam value into *arg_sg. */
10424 result_sg = fold_build2 (MODIFY_EXPR,
10425 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 10426 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 10427 TREE_SIDE_EFFECTS (result_sg) = 1;
10428 /* Combine the signgam assignment with the lgamma result. */
10429 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10430 result_sg, result_lg));
10431 }
10432 }
10433 }
10434
10435 return result;
10436}
75a70cf9 10437
/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on it and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      /* Split both complex constants into their real and imaginary
	 component values.  */
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      /* Unless the caller asked for non-finite folding, every component
	 of both operands must be finite.  */
      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  /* Mirror the target type's precision and rounding behavior
	     in the MPC/MPFR computation.  */
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  /* Reset the MPFR exception flags before the computation so
	     do_mpc_ckconv can inspect them afterwards.  */
	  mpfr_clear_flags ();
	  /* FUNC computes into M0 in place; its return value reports
	     whether the result was inexact.  */
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
239d491a 10496
75a70cf9 10497/* A wrapper function for builtin folding that prevents warnings for
10498 "statement without effect" and the like, caused by removing the
10499 call node earlier than the warning is generated. */
10500
10501tree
1a91d914 10502fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 10503{
10504 tree ret = NULL_TREE;
10505 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 10506 location_t loc = gimple_location (stmt);
75a70cf9 10507 if (fndecl
10508 && TREE_CODE (fndecl) == FUNCTION_DECL
10509 && DECL_BUILT_IN (fndecl)
10510 && !gimple_call_va_arg_pack_p (stmt))
10511 {
10512 int nargs = gimple_call_num_args (stmt);
9845fb99 10513 tree *args = (nargs > 0
10514 ? gimple_call_arg_ptr (stmt, 0)
10515 : &error_mark_node);
75a70cf9 10516
198622c0 10517 if (avoid_folding_inline_builtin (fndecl))
10518 return NULL_TREE;
75a70cf9 10519 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10520 {
9845fb99 10521 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 10522 }
10523 else
10524 {
9d884767 10525 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 10526 if (ret)
10527 {
10528 /* Propagate location information from original call to
10529 expansion of builtin. Otherwise things like
10530 maybe_emit_chk_warning, that operate on the expansion
10531 of a builtin, will use the wrong location information. */
10532 if (gimple_has_location (stmt))
10533 {
10534 tree realret = ret;
10535 if (TREE_CODE (ret) == NOP_EXPR)
10536 realret = TREE_OPERAND (ret, 0);
10537 if (CAN_HAVE_LOCATION_P (realret)
10538 && !EXPR_HAS_LOCATION (realret))
389dd41b 10539 SET_EXPR_LOCATION (realret, loc);
75a70cf9 10540 return realret;
10541 }
10542 return ret;
10543 }
10544 }
10545 }
10546 return NULL_TREE;
10547}
7bfefa9d 10548
b9a16870 10549/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 10550 and set ASMSPEC as its user assembler name. DECL must be a
10551 function decl that declares a builtin. */
10552
10553void
10554set_builtin_user_assembler_name (tree decl, const char *asmspec)
10555{
7bfefa9d 10556 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10557 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10558 && asmspec != 0);
10559
61ffc71a 10560 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 10561 set_user_assembler_name (builtin, asmspec);
61ffc71a 10562
10563 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10564 && INT_TYPE_SIZE < BITS_PER_WORD)
7bfefa9d 10565 {
44504d18 10566 scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
61ffc71a 10567 set_user_assembler_libfunc ("ffs", asmspec);
44504d18 10568 set_optab_libfunc (ffs_optab, mode, "ffs");
7bfefa9d 10569 }
10570}
a6b74a67 10571
10572/* Return true if DECL is a builtin that expands to a constant or similarly
10573 simple code. */
10574bool
10575is_simple_builtin (tree decl)
10576{
10577 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10578 switch (DECL_FUNCTION_CODE (decl))
10579 {
10580 /* Builtins that expand to constants. */
10581 case BUILT_IN_CONSTANT_P:
10582 case BUILT_IN_EXPECT:
10583 case BUILT_IN_OBJECT_SIZE:
10584 case BUILT_IN_UNREACHABLE:
10585 /* Simple register moves or loads from stack. */
fca0886c 10586 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 10587 case BUILT_IN_RETURN_ADDRESS:
10588 case BUILT_IN_EXTRACT_RETURN_ADDR:
10589 case BUILT_IN_FROB_RETURN_ADDR:
10590 case BUILT_IN_RETURN:
10591 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10592 case BUILT_IN_FRAME_ADDRESS:
10593 case BUILT_IN_VA_END:
10594 case BUILT_IN_STACK_SAVE:
10595 case BUILT_IN_STACK_RESTORE:
10596 /* Exception state returns or moves registers around. */
10597 case BUILT_IN_EH_FILTER:
10598 case BUILT_IN_EH_POINTER:
10599 case BUILT_IN_EH_COPY_VALUES:
10600 return true;
10601
10602 default:
10603 return false;
10604 }
10605
10606 return false;
10607}
10608
10609/* Return true if DECL is a builtin that is not expensive, i.e., they are
10610 most probably expanded inline into reasonably simple code. This is a
10611 superset of is_simple_builtin. */
10612bool
10613is_inexpensive_builtin (tree decl)
10614{
10615 if (!decl)
10616 return false;
10617 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10618 return true;
10619 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10620 switch (DECL_FUNCTION_CODE (decl))
10621 {
10622 case BUILT_IN_ABS:
2b34677f 10623 CASE_BUILT_IN_ALLOCA:
74bdbe96 10624 case BUILT_IN_BSWAP16:
a6b74a67 10625 case BUILT_IN_BSWAP32:
10626 case BUILT_IN_BSWAP64:
10627 case BUILT_IN_CLZ:
10628 case BUILT_IN_CLZIMAX:
10629 case BUILT_IN_CLZL:
10630 case BUILT_IN_CLZLL:
10631 case BUILT_IN_CTZ:
10632 case BUILT_IN_CTZIMAX:
10633 case BUILT_IN_CTZL:
10634 case BUILT_IN_CTZLL:
10635 case BUILT_IN_FFS:
10636 case BUILT_IN_FFSIMAX:
10637 case BUILT_IN_FFSL:
10638 case BUILT_IN_FFSLL:
10639 case BUILT_IN_IMAXABS:
10640 case BUILT_IN_FINITE:
10641 case BUILT_IN_FINITEF:
10642 case BUILT_IN_FINITEL:
10643 case BUILT_IN_FINITED32:
10644 case BUILT_IN_FINITED64:
10645 case BUILT_IN_FINITED128:
10646 case BUILT_IN_FPCLASSIFY:
10647 case BUILT_IN_ISFINITE:
10648 case BUILT_IN_ISINF_SIGN:
10649 case BUILT_IN_ISINF:
10650 case BUILT_IN_ISINFF:
10651 case BUILT_IN_ISINFL:
10652 case BUILT_IN_ISINFD32:
10653 case BUILT_IN_ISINFD64:
10654 case BUILT_IN_ISINFD128:
10655 case BUILT_IN_ISNAN:
10656 case BUILT_IN_ISNANF:
10657 case BUILT_IN_ISNANL:
10658 case BUILT_IN_ISNAND32:
10659 case BUILT_IN_ISNAND64:
10660 case BUILT_IN_ISNAND128:
10661 case BUILT_IN_ISNORMAL:
10662 case BUILT_IN_ISGREATER:
10663 case BUILT_IN_ISGREATEREQUAL:
10664 case BUILT_IN_ISLESS:
10665 case BUILT_IN_ISLESSEQUAL:
10666 case BUILT_IN_ISLESSGREATER:
10667 case BUILT_IN_ISUNORDERED:
10668 case BUILT_IN_VA_ARG_PACK:
10669 case BUILT_IN_VA_ARG_PACK_LEN:
10670 case BUILT_IN_VA_COPY:
10671 case BUILT_IN_TRAP:
10672 case BUILT_IN_SAVEREGS:
10673 case BUILT_IN_POPCOUNTL:
10674 case BUILT_IN_POPCOUNTLL:
10675 case BUILT_IN_POPCOUNTIMAX:
10676 case BUILT_IN_POPCOUNT:
10677 case BUILT_IN_PARITYL:
10678 case BUILT_IN_PARITYLL:
10679 case BUILT_IN_PARITYIMAX:
10680 case BUILT_IN_PARITY:
10681 case BUILT_IN_LABS:
10682 case BUILT_IN_LLABS:
10683 case BUILT_IN_PREFETCH:
ca4c3545 10684 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 10685 return true;
10686
10687 default:
10688 return is_simple_builtin (decl);
10689 }
10690
10691 return false;
10692}
507a998e 10693
10694/* Return true if T is a constant and the value cast to a target char
10695 can be represented by a host char.
10696 Store the casted char constant in *P if so. */
10697
10698bool
10699target_char_cst_p (tree t, char *p)
10700{
10701 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10702 return false;
10703
10704 *p = (char)tree_to_uhwi (t);
10705 return true;
10706}
e6a18b5a 10707
10708/* Return the maximum object size. */
10709
10710tree
10711max_object_size (void)
10712{
10713 /* To do: Make this a configurable parameter. */
10714 return TYPE_MAX_VALUE (ptrdiff_type_node);
10715}