53800dbe 1/* Expand builtin functions.
aad93da1 2 Copyright (C) 1988-2017 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
7c2ecb89 20/* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
53800dbe 24#include "config.h"
25#include "system.h"
805e22b2 26#include "coretypes.h"
9ef16211 27#include "backend.h"
7c29e30e 28#include "target.h"
29#include "rtl.h"
9ef16211 30#include "tree.h"
ea36272b 31#include "memmodel.h"
9ef16211 32#include "gimple.h"
7c29e30e 33#include "predict.h"
34#include "tm_p.h"
35#include "stringpool.h"
c296f633 36#include "tree-vrp.h"
7c29e30e 37#include "tree-ssanames.h"
38#include "expmed.h"
39#include "optabs.h"
7c29e30e 40#include "emit-rtl.h"
41#include "recog.h"
7c29e30e 42#include "diagnostic-core.h"
b20a8bb4 43#include "alias.h"
b20a8bb4 44#include "fold-const.h"
6c21be92 45#include "fold-const-call.h"
9ed99284 46#include "stor-layout.h"
47#include "calls.h"
48#include "varasm.h"
49#include "tree-object-size.h"
dae0b5cb 50#include "realmpfr.h"
94ea8568 51#include "cfgrtl.h"
53800dbe 52#include "except.h"
d53441c8 53#include "dojump.h"
54#include "explow.h"
d53441c8 55#include "stmt.h"
53800dbe 56#include "expr.h"
d8fc4d0b 57#include "libfuncs.h"
53800dbe 58#include "output.h"
59#include "typeclass.h"
63c62881 60#include "langhooks.h"
162719b3 61#include "value-prof.h"
3b9c3a16 62#include "builtins.h"
f9acf11a 63#include "asan.h"
d037099f 64#include "cilk.h"
058a1b7a 65#include "tree-chkp.h"
66#include "rtl-chkp.h"
1f24b8e9 67#include "internal-fn.h"
e3240774 68#include "case-cfn-macros.h"
732905bb 69#include "gimple-fold.h"
5aef8938 70#include "intl.h"
5383fb56 71
3b9c3a16 72struct target_builtins default_target_builtins;
73#if SWITCHABLE_TARGET
74struct target_builtins *this_target_builtins = &default_target_builtins;
75#endif
76
ab7943b9 77/* Define the names of the builtin function types and codes. */
96423453 78const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
9cfddb70 81#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 82const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 83{
84#include "builtins.def"
85};
ab7943b9 86
cffdfb3d 87/* Set up an array of builtin_info_type; make sure each element's decl is
df94cd3b 88 initialized to NULL_TREE. */
cffdfb3d 89builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 90
0b049e15 91/* Non-zero if __builtin_constant_p should be folded right away. */
92bool force_folding_builtin_constant_p;
93
3754d046 94static rtx c_readstr (const char *, machine_mode);
aecda0d6 95static int target_char_cast (tree, char *);
d8ae1baa 96static rtx get_memory_rtx (tree, tree);
aecda0d6 97static int apply_args_size (void);
98static int apply_result_size (void);
aecda0d6 99static rtx result_vector (int, rtx);
aecda0d6 100static void expand_builtin_prefetch (tree);
101static rtx expand_builtin_apply_args (void);
102static rtx expand_builtin_apply_args_1 (void);
103static rtx expand_builtin_apply (rtx, rtx, rtx);
104static void expand_builtin_return (rtx);
105static enum type_class type_to_class (tree);
106static rtx expand_builtin_classify_type (tree);
6b43bae4 107static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 108static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 109static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 110static rtx expand_builtin_sincos (tree);
f97eea22 111static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 112static rtx expand_builtin_int_roundingfn (tree, rtx);
113static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 114static rtx expand_builtin_next_arg (void);
aecda0d6 115static rtx expand_builtin_va_start (tree);
116static rtx expand_builtin_va_end (tree);
117static rtx expand_builtin_va_copy (tree);
a65c4d64 118static rtx expand_builtin_strcmp (tree, rtx);
3754d046 119static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
120static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
a65c4d64 121static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 122static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
123static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
3754d046 124static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
f21337ef 125static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
48e1416a 126static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
f21337ef 127 machine_mode, int, tree);
5aef8938 128static rtx expand_builtin_strcat (tree, rtx);
a65c4d64 129static rtx expand_builtin_strcpy (tree, rtx);
130static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 131static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
5aef8938 132static rtx expand_builtin_strncat (tree, rtx);
a65c4d64 133static rtx expand_builtin_strncpy (tree, rtx);
3754d046 134static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
135static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 136static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 137static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 138static rtx expand_builtin_bzero (tree);
3754d046 139static rtx expand_builtin_strlen (tree, rtx, machine_mode);
5be42b39 140static rtx expand_builtin_alloca (tree, bool);
3754d046 141static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 142static rtx expand_builtin_frame_address (tree, tree);
389dd41b 143static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 144static rtx expand_builtin_expect (tree, rtx);
145static tree fold_builtin_constant_p (tree);
146static tree fold_builtin_classify_type (tree);
c7cbde74 147static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 148static tree fold_builtin_inf (location_t, tree, int);
389dd41b 149static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
184fac50 150static bool validate_arg (const_tree, enum tree_code code);
aecda0d6 151static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 152static rtx expand_builtin_signbit (tree, rtx);
389dd41b 153static tree fold_builtin_memcmp (location_t, tree, tree, tree);
389dd41b 154static tree fold_builtin_isascii (location_t, tree);
155static tree fold_builtin_toascii (location_t, tree);
156static tree fold_builtin_isdigit (location_t, tree);
157static tree fold_builtin_fabs (location_t, tree, tree);
158static tree fold_builtin_abs (location_t, tree, tree);
159static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 160 enum tree_code);
e80cc485 161static tree fold_builtin_0 (location_t, tree);
162static tree fold_builtin_1 (location_t, tree, tree);
163static tree fold_builtin_2 (location_t, tree, tree, tree);
164static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
165static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 166
167static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
389dd41b 168static tree fold_builtin_strspn (location_t, tree, tree);
169static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 170
0a39fd54 171static rtx expand_builtin_object_size (tree);
3754d046 172static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 173 enum built_in_function);
174static void maybe_emit_chk_warning (tree, enum built_in_function);
175static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 176static void maybe_emit_free_warning (tree);
c2f47e15 177static tree fold_builtin_object_size (tree, tree);
99eabcc1 178
e788f202 179unsigned HOST_WIDE_INT target_newline;
b9ea678c 180unsigned HOST_WIDE_INT target_percent;
99eabcc1 181static unsigned HOST_WIDE_INT target_c;
182static unsigned HOST_WIDE_INT target_s;
aea88c77 183char target_percent_c[3];
b9ea678c 184char target_percent_s[3];
e788f202 185char target_percent_s_newline[4];
e5407ca6 186static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 187static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 188static void expand_builtin_sync_synchronize (void);
0a39fd54 189
7bfefa9d 190/* Return true if NAME starts with __builtin_, __sync_, or __atomic_, or names a Cilk Plus runtime helper when Cilk Plus is enabled. */
191
b29139ad 192static bool
1c47b3e8 193is_builtin_name (const char *name)
b6a5fc45 194{
b6a5fc45 195 if (strncmp (name, "__builtin_", 10) == 0)
196 return true;
197 if (strncmp (name, "__sync_", 7) == 0)
198 return true;
1cd6e20d 199 if (strncmp (name, "__atomic_", 9) == 0)
200 return true;
a89e6c15 201 if (flag_cilkplus
d037099f 202 && (!strcmp (name, "__cilkrts_detach")
203 || !strcmp (name, "__cilkrts_pop_frame")))
204 return true;
b6a5fc45 205 return false;
206}
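
/* Illustrative examples (added for exposition, not in the original
   source), assuming Cilk Plus is disabled:

     is_builtin_name ("__builtin_memcpy")       returns true
     is_builtin_name ("__sync_fetch_and_add")   returns true
     is_builtin_name ("__atomic_load_n")        returns true
     is_builtin_name ("memcpy")                 returns false  */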
4ee9c684 207
7bfefa9d 208
209/* Return true if DECL is a function symbol representing a built-in. */
210
211bool
212is_builtin_fn (tree decl)
213{
214 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
215}
216
1c47b3e8 217/* Return true if NODE should be considered for inline expansion regardless
 218 of the optimization level. This is the case whenever a function is invoked with
219 its "internal" name, which normally contains the prefix "__builtin". */
220
ae62deea 221bool
1c47b3e8 222called_as_built_in (tree node)
223{
224 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
225 we want the name used to call the function, not the name it
226 will have. */
227 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
228 return is_builtin_name (name);
229}
230
ceea063b 231/* Compute values M and N such that M divides (address of EXP - N) and such
 232 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
 233 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT in
 234 *ALIGNP and any bit-offset in *BITPOSP.
0d8f7716 235
236 Note that the address (and thus the alignment) computed here is based
237 on the address to which a symbol resolves, whereas DECL_ALIGN is based
238 on the address at which an object is actually located. These two
239 addresses are not always the same. For example, on ARM targets,
240 the address &foo of a Thumb function foo() has the lowest bit set,
3482bf13 241 whereas foo() itself starts on an even address.
698537d1 242
3482bf13 243 If ADDR_P is true we are taking the address of the memory reference EXP
244 and thus cannot rely on the access taking place. */
245
246static bool
247get_object_alignment_2 (tree exp, unsigned int *alignp,
248 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
698537d1 249{
98ab9e8f 250 HOST_WIDE_INT bitsize, bitpos;
251 tree offset;
3754d046 252 machine_mode mode;
292237f3 253 int unsignedp, reversep, volatilep;
c8a2b4ff 254 unsigned int align = BITS_PER_UNIT;
ceea063b 255 bool known_alignment = false;
698537d1 256
98ab9e8f 257 /* Get the innermost object and the constant (bitpos) and possibly
258 variable (offset) offset of the access. */
292237f3 259 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
b3b6e4b5 260 &unsignedp, &reversep, &volatilep);
98ab9e8f 261
262 /* Extract alignment information from the innermost object and
263 possibly adjust bitpos and offset. */
3482bf13 264 if (TREE_CODE (exp) == FUNCTION_DECL)
0d8f7716 265 {
3482bf13 266 /* Function addresses can encode extra information besides their
267 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
268 allows the low bit to be used as a virtual bit, we know
269 that the address itself must be at least 2-byte aligned. */
270 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
271 align = 2 * BITS_PER_UNIT;
0d8f7716 272 }
3482bf13 273 else if (TREE_CODE (exp) == LABEL_DECL)
274 ;
275 else if (TREE_CODE (exp) == CONST_DECL)
98ab9e8f 276 {
3482bf13 277 /* The alignment of a CONST_DECL is determined by its initializer. */
278 exp = DECL_INITIAL (exp);
98ab9e8f 279 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 280 if (CONSTANT_CLASS_P (exp))
281 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 282
3482bf13 283 known_alignment = true;
98ab9e8f 284 }
3482bf13 285 else if (DECL_P (exp))
ceea063b 286 {
3482bf13 287 align = DECL_ALIGN (exp);
ceea063b 288 known_alignment = true;
ceea063b 289 }
3482bf13 290 else if (TREE_CODE (exp) == INDIRECT_REF
291 || TREE_CODE (exp) == MEM_REF
292 || TREE_CODE (exp) == TARGET_MEM_REF)
98ab9e8f 293 {
294 tree addr = TREE_OPERAND (exp, 0);
ceea063b 295 unsigned ptr_align;
296 unsigned HOST_WIDE_INT ptr_bitpos;
ab1e78e5 297 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
ceea063b 298
ab1e78e5 299 /* If the address is explicitly aligned, handle that. */
98ab9e8f 300 if (TREE_CODE (addr) == BIT_AND_EXPR
301 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
302 {
ab1e78e5 303 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
304 ptr_bitmask *= BITS_PER_UNIT;
ac29ece2 305 align = least_bit_hwi (ptr_bitmask);
98ab9e8f 306 addr = TREE_OPERAND (addr, 0);
307 }
ceea063b 308
3482bf13 309 known_alignment
310 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
3482bf13 311 align = MAX (ptr_align, align);
312
ab1e78e5 313 /* Re-apply explicit alignment to the bitpos. */
314 ptr_bitpos &= ptr_bitmask;
315
4083990a 316 /* The alignment of the pointer operand in a TARGET_MEM_REF
317 has to take the variable offset parts into account. */
3482bf13 318 if (TREE_CODE (exp) == TARGET_MEM_REF)
153c3b50 319 {
3482bf13 320 if (TMR_INDEX (exp))
321 {
322 unsigned HOST_WIDE_INT step = 1;
323 if (TMR_STEP (exp))
f9ae6f95 324 step = TREE_INT_CST_LOW (TMR_STEP (exp));
ac29ece2 325 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
3482bf13 326 }
327 if (TMR_INDEX2 (exp))
328 align = BITS_PER_UNIT;
329 known_alignment = false;
153c3b50 330 }
ceea063b 331
3482bf13 332 /* When EXP is an actual memory reference then we can use
333 TYPE_ALIGN of a pointer indirection to derive alignment.
334 Do so only if get_pointer_alignment_1 did not reveal absolute
4083990a 335 alignment knowledge and if using that alignment would
336 improve the situation. */
700a9760 337 unsigned int talign;
4083990a 338 if (!addr_p && !known_alignment
700a9760 339 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
340 && talign > align)
341 align = talign;
4083990a 342 else
343 {
344 /* Else adjust bitpos accordingly. */
345 bitpos += ptr_bitpos;
346 if (TREE_CODE (exp) == MEM_REF
347 || TREE_CODE (exp) == TARGET_MEM_REF)
e913b5cd 348 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
4083990a 349 }
98ab9e8f 350 }
3482bf13 351 else if (TREE_CODE (exp) == STRING_CST)
153c3b50 352 {
3482bf13 353 /* STRING_CST are the only constant objects we allow to be not
354 wrapped inside a CONST_DECL. */
355 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 356 if (CONSTANT_CLASS_P (exp))
357 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 358
3482bf13 359 known_alignment = true;
98ab9e8f 360 }
98ab9e8f 361
362 /* If there is a non-constant offset part extract the maximum
363 alignment that can prevail. */
c8a2b4ff 364 if (offset)
98ab9e8f 365 {
ad464c56 366 unsigned int trailing_zeros = tree_ctz (offset);
c8a2b4ff 367 if (trailing_zeros < HOST_BITS_PER_INT)
98ab9e8f 368 {
c8a2b4ff 369 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
370 if (inner)
371 align = MIN (align, inner);
98ab9e8f 372 }
98ab9e8f 373 }
374
3482bf13 375 *alignp = align;
376 *bitposp = bitpos & (*alignp - 1);
ceea063b 377 return known_alignment;
0c883ef3 378}
379
3482bf13 380/* For a memory reference expression EXP compute values M and N such that M
381 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 382 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 383 and store BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP. */
384
385bool
386get_object_alignment_1 (tree exp, unsigned int *alignp,
387 unsigned HOST_WIDE_INT *bitposp)
388{
389 return get_object_alignment_2 (exp, alignp, bitposp, false);
390}
391
957d0361 392/* Return the alignment in bits of EXP, an object. */
0c883ef3 393
394unsigned int
957d0361 395get_object_alignment (tree exp)
0c883ef3 396{
397 unsigned HOST_WIDE_INT bitpos = 0;
398 unsigned int align;
399
ceea063b 400 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 401
98ab9e8f 402 /* align and bitpos now specify known low bits of the pointer.
403 ptr & (align - 1) == bitpos. */
404
405 if (bitpos != 0)
ac29ece2 406 align = least_bit_hwi (bitpos);
957d0361 407 return align;
698537d1 408}
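
/* Worked example (for exposition): if get_object_alignment_1 computes
   align = 64 and bitpos = 16, then addr & (64 - 1) == 16, and the
   strongest power-of-two guarantee is least_bit_hwi (16) = 16 bits,
   which is what get_object_alignment returns.  */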
409
ceea063b 410/* For a pointer valued expression EXP compute values M and N such that M
411 divides (EXP - N) and such that N < M. If these numbers can be determined,
3482bf13 412 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
413 the results are just a conservative approximation.
53800dbe 414
ceea063b 415 If EXP is not a pointer, false is returned too. */
53800dbe 416
ceea063b 417bool
418get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 unsigned HOST_WIDE_INT *bitposp)
53800dbe 420{
153c3b50 421 STRIP_NOPS (exp);
535e2026 422
153c3b50 423 if (TREE_CODE (exp) == ADDR_EXPR)
3482bf13 424 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
425 alignp, bitposp, true);
906a9403 426 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
427 {
428 unsigned int align;
429 unsigned HOST_WIDE_INT bitpos;
430 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
431 &align, &bitpos);
432 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
433 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
434 else
435 {
436 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
437 if (trailing_zeros < HOST_BITS_PER_INT)
438 {
439 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
440 if (inner)
441 align = MIN (align, inner);
442 }
443 }
444 *alignp = align;
445 *bitposp = bitpos & (align - 1);
446 return res;
447 }
153c3b50 448 else if (TREE_CODE (exp) == SSA_NAME
449 && POINTER_TYPE_P (TREE_TYPE (exp)))
53800dbe 450 {
ceea063b 451 unsigned int ptr_align, ptr_misalign;
153c3b50 452 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
ceea063b 453
454 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
455 {
456 *bitposp = ptr_misalign * BITS_PER_UNIT;
457 *alignp = ptr_align * BITS_PER_UNIT;
d10da77a 458 /* Make sure to return a sensible alignment when the multiplication
459 by BITS_PER_UNIT overflowed. */
460 if (*alignp == 0)
461 *alignp = 1u << (HOST_BITS_PER_INT - 1);
3482bf13 462 /* We cannot really tell whether this result is an approximation. */
b428654a 463 return false;
ceea063b 464 }
465 else
69fbc3aa 466 {
467 *bitposp = 0;
ceea063b 468 *alignp = BITS_PER_UNIT;
469 return false;
69fbc3aa 470 }
53800dbe 471 }
0bb8b39a 472 else if (TREE_CODE (exp) == INTEGER_CST)
473 {
474 *alignp = BIGGEST_ALIGNMENT;
f9ae6f95 475 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
0bb8b39a 476 & (BIGGEST_ALIGNMENT - 1));
477 return true;
478 }
153c3b50 479
69fbc3aa 480 *bitposp = 0;
ceea063b 481 *alignp = BITS_PER_UNIT;
482 return false;
53800dbe 483}
484
69fbc3aa 485/* Return the alignment in bits of EXP, a pointer valued expression.
486 The alignment returned is, by default, the alignment of the thing that
 487 EXP points to. If EXP is not a pointer-typed expression, BITS_PER_UNIT is returned.
488
489 Otherwise, look at the expression to see if we can do better, i.e., if the
490 expression is actually pointing at an object whose alignment is tighter. */
491
492unsigned int
493get_pointer_alignment (tree exp)
494{
495 unsigned HOST_WIDE_INT bitpos = 0;
496 unsigned int align;
ceea063b 497
498 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 499
500 /* align and bitpos now specify known low bits of the pointer.
501 ptr & (align - 1) == bitpos. */
502
503 if (bitpos != 0)
ac29ece2 504 align = least_bit_hwi (bitpos);
69fbc3aa 505
506 return align;
507}
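
/* Worked example (for exposition): for the POINTER_PLUS_EXPR p + 6,
   with p known to be 64-bit aligned, get_pointer_alignment_1 computes
   align = 64 and bitpos = 6 * BITS_PER_UNIT = 48, so
   get_pointer_alignment returns least_bit_hwi (48) = 16 bits, i.e. a
   two-byte guarantee.  */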
508
c62d63d4 509/* Return the number of leading non-zero elements in the sequence
510 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
511 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
512
513static unsigned
514string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
515{
516 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
517
518 unsigned n;
519
520 if (eltsize == 1)
521 {
522 /* Optimize the common case of plain char. */
523 for (n = 0; n < maxelts; n++)
524 {
525 const char *elt = (const char*) ptr + n;
526 if (!*elt)
527 break;
528 }
529 }
530 else
531 {
532 for (n = 0; n < maxelts; n++)
533 {
534 const char *elt = (const char*) ptr + n * eltsize;
535 if (!memcmp (elt, "\0\0\0\0", eltsize))
536 break;
537 }
538 }
539 return n;
540}
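
/* Worked example (for exposition): for a two-byte element type whose
   byte representation is "a\0b\0\0\0" on a little-endian target,
   string_length (ptr, 2, 3) scans elements "a\0" and "b\0", stops at
   the all-zero third element, and returns 2.  */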
541
542/* Compute the length of a null-terminated character string or wide
543 character string handling character sizes of 1, 2, and 4 bytes.
544 TREE_STRING_LENGTH is not the right way because it evaluates to
545 the size of the character array in bytes (as opposed to characters)
546 and because it can contain a zero byte in the middle.
53800dbe 547
4172d65e 548 ONLY_VALUE should be nonzero if the result is not going to be emitted
c09841f6 549 into the instruction stream and zero if it is going to be expanded.
4172d65e 550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
681fab1e 551 is returned, otherwise NULL, since
552 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
554
6bda159e 555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
558
902de8ed 559 The value returned is of type `ssizetype'.
560
53800dbe 561 Unfortunately, string_constant can't access the values of const char
562 arrays with initializers, so neither can we do so here. */
563
4ee9c684 564tree
681fab1e 565c_strlen (tree src, int only_value)
53800dbe 566{
681fab1e 567 STRIP_NOPS (src);
568 if (TREE_CODE (src) == COND_EXPR
569 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
570 {
571 tree len1, len2;
572
573 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
574 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
0862b7e9 575 if (tree_int_cst_equal (len1, len2))
681fab1e 576 return len1;
577 }
578
579 if (TREE_CODE (src) == COMPOUND_EXPR
580 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
581 return c_strlen (TREE_OPERAND (src, 1), only_value);
582
c62d63d4 583 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
da136652 584
c62d63d4 585 /* Offset from the beginning of the string in bytes. */
586 tree byteoff;
587 src = string_constant (src, &byteoff);
53800dbe 588 if (src == 0)
c2f47e15 589 return NULL_TREE;
902de8ed 590
c62d63d4 591 /* Determine the size of the string element. */
592 unsigned eltsize
593 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
594
595 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
596 length of SRC. */
597 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
598
599 /* PTR can point to the byte representation of any string type, including
600 char* and wchar_t*. */
601 const char *ptr = TREE_STRING_POINTER (src);
902de8ed 602
c62d63d4 603 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
53800dbe 604 {
605 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
606 compute the offset to the following null if we don't know where to
607 start searching for it. */
c62d63d4 608 if (string_length (ptr, eltsize, maxelts) < maxelts)
609 {
610 /* Return when an embedded null character is found. */
c2f47e15 611 return NULL_TREE;
c62d63d4 612 }
902de8ed 613
53800dbe 614 /* We don't know the starting offset, but we do know that the string
615 has no internal zero bytes. We can assume that the offset falls
616 within the bounds of the string; otherwise, the programmer deserves
617 what he gets. Subtract the offset from the length of the string,
902de8ed 618 and return that. This would perhaps not be valid if we were dealing
619 with named arrays in addition to literal string constants. */
620
c62d63d4 621 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
53800dbe 622 }
623
c62d63d4 624 /* Offset from the beginning of the string in elements. */
625 HOST_WIDE_INT eltoff;
626
53800dbe 627 /* We have a known offset into the string. Start searching there for
27d0c333 628 a null character if we can represent it as a single HOST_WIDE_INT. */
c62d63d4 629 if (byteoff == 0)
630 eltoff = 0;
631 else if (! tree_fits_shwi_p (byteoff))
632 eltoff = -1;
53800dbe 633 else
c62d63d4 634 eltoff = tree_to_shwi (byteoff) / eltsize;
902de8ed 635
1f63a7d6 636 /* If the offset is known to be out of bounds, warn, and call strlen at
637 runtime. */
c62d63d4 638 if (eltoff < 0 || eltoff > maxelts)
53800dbe 639 {
1f63a7d6 640 /* Suppress multiple warnings for propagated constant strings. */
2f1c4f17 641 if (only_value != 2
642 && !TREE_NO_WARNING (src))
1f63a7d6 643 {
c62d63d4 644 warning_at (loc, 0, "offset %qwi outside bounds of constant string",
645 eltoff);
1f63a7d6 646 TREE_NO_WARNING (src) = 1;
647 }
c2f47e15 648 return NULL_TREE;
53800dbe 649 }
902de8ed 650
53800dbe 651 /* Use strlen to search for the first zero byte. Since any strings
652 constructed with build_string will have nulls appended, we win even
653 if we get handed something like (char[4])"abcd".
654
c62d63d4 655 Since ELTOFF is our starting index into the string, no further
53800dbe 656 calculation is needed. */
c62d63d4 657 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
658 maxelts - eltoff);
659
660 return ssize_int (len);
53800dbe 661}
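
/* Usage sketch (hypothetical caller, for exposition): a folder that
   needs a compile-time length would do

     tree len = c_strlen (src, 1);
     if (len && tree_fits_uhwi_p (len))
       ... use tree_to_uhwi (len) as the known length ...

   and fall back to a runtime strlen call when NULL_TREE comes back.  */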
662
e913b5cd 663/* Return a constant integer corresponding to the target reading
8c85fcb7 664 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 665
6840589f 666static rtx
3754d046 667c_readstr (const char *str, machine_mode mode)
6840589f 668{
6840589f 669 HOST_WIDE_INT ch;
670 unsigned int i, j;
e913b5cd 671 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 672
673 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 674 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
675 / HOST_BITS_PER_WIDE_INT;
676
a12aa4cc 677 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 678 for (i = 0; i < len; i++)
679 tmp[i] = 0;
6840589f 680
6840589f 681 ch = 1;
682 for (i = 0; i < GET_MODE_SIZE (mode); i++)
683 {
684 j = i;
685 if (WORDS_BIG_ENDIAN)
686 j = GET_MODE_SIZE (mode) - i - 1;
687 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 688 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 689 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
690 j *= BITS_PER_UNIT;
7d3f6cc7 691
6840589f 692 if (ch)
693 ch = (unsigned char) str[i];
e913b5cd 694 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 695 }
ddb1be65 696
ab2c1de8 697 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 698 return immed_wide_int_const (c, mode);
6840589f 699}
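
/* Worked example (for exposition): with 32-bit SImode,
   c_readstr ("abcd", SImode) places 'a' in the least significant byte
   on a little-endian target, yielding the constant 0x64636261; on a
   big-endian target the same call yields 0x61626364.  */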
700
ecc318ff 701/* Cast a target constant CST to target CHAR, and if the value fits into
5206b159 702 the host char type, return zero and store it in the variable pointed
ecc318ff 703 to by P. */
704
705static int
aecda0d6 706target_char_cast (tree cst, char *p)
ecc318ff 707{
708 unsigned HOST_WIDE_INT val, hostval;
709
c19686c5 710 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 711 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
712 return 1;
713
e913b5cd 714 /* We do not care here whether the value fits. */
f9ae6f95 715 val = TREE_INT_CST_LOW (cst);
e913b5cd 716
ecc318ff 717 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
edc19fd0 718 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
ecc318ff 719
720 hostval = val;
721 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
edc19fd0 722 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
ecc318ff 723
724 if (val != hostval)
725 return 1;
726
727 *p = hostval;
728 return 0;
729}
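
/* Illustrative example: with 8-bit target and host chars,
   target_char_cast of the integer constant 65 stores 'A' (0x41)
   through P and returns 0.  If the target char is wider than the host
   char (say CHAR_TYPE_SIZE == 16 with 8-bit host chars), a constant
   such as 0x141 survives the target truncation but not the host one,
   so the function returns 1.  */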
730
4ee9c684 731/* Similar to save_expr, but assumes that arbitrary code is not executed
732 in between the multiple evaluations. In particular, we assume that a
733 non-addressable local variable will not be modified. */
734
735static tree
736builtin_save_expr (tree exp)
737{
f6c35aa4 738 if (TREE_CODE (exp) == SSA_NAME
739 || (TREE_ADDRESSABLE (exp) == 0
740 && (TREE_CODE (exp) == PARM_DECL
53e9c5c4 741 || (VAR_P (exp) && !TREE_STATIC (exp)))))
4ee9c684 742 return exp;
743
744 return save_expr (exp);
745}
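
/* Illustrative examples: an SSA_NAME or a non-addressable PARM_DECL is
   returned unchanged, since re-evaluating it is safe under the
   assumption above; a static variable or an arbitrary expression is
   wrapped in a SAVE_EXPR instead.  */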
746
53800dbe 747/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
748 times to get the address of either a higher stack frame, or a return
749 address located within it (depending on FNDECL_CODE). */
902de8ed 750
c626df3d 751static rtx
869d0ef0 752expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 753{
754 int i;
869d0ef0 755 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
3f840859 756 if (tem == NULL_RTX)
e3e15c50 757 {
3f840859 758 /* For a zero count with __builtin_return_address, we don't care what
759 frame address we return, because target-specific definitions will
760 override us. Therefore frame pointer elimination is OK, and using
761 the soft frame pointer is OK.
762
763 For a nonzero count, or a zero count with __builtin_frame_address,
764 we require a stable offset from the current frame pointer to the
765 previous one, so we must use the hard frame pointer, and
766 we must disable frame pointer elimination. */
767 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
768 tem = frame_pointer_rtx;
769 else
770 {
771 tem = hard_frame_pointer_rtx;
e3e15c50 772
3f840859 773 /* Tell reload not to eliminate the frame pointer. */
774 crtl->accesses_prior_frames = 1;
775 }
e3e15c50 776 }
869d0ef0 777
53800dbe 778 if (count > 0)
779 SETUP_FRAME_ADDRESSES ();
53800dbe 780
3a69c60c 781 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 782 register. There is no way to access it off of the current frame
783 pointer, but it can be accessed off the previous frame pointer by
784 reading the value from the register window save area. */
a26d6c60 785 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 786 count--;
53800dbe 787
788 /* Scan back COUNT frames to the specified frame. */
789 for (i = 0; i < count; i++)
790 {
791 /* Assume the dynamic chain pointer is in the word that the
792 frame address points to, unless otherwise specified. */
53800dbe 793 tem = DYNAMIC_CHAIN_ADDRESS (tem);
53800dbe 794 tem = memory_address (Pmode, tem);
00060fc2 795 tem = gen_frame_mem (Pmode, tem);
83fc1478 796 tem = copy_to_reg (tem);
53800dbe 797 }
798
3a69c60c 799 /* For __builtin_frame_address, return what we've got. But, on
800 the SPARC for example, we may have to add a bias. */
53800dbe 801 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 802 return FRAME_ADDR_RTX (tem);
53800dbe 803
3a69c60c 804 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 805#ifdef RETURN_ADDR_RTX
806 tem = RETURN_ADDR_RTX (count, tem);
807#else
808 tem = memory_address (Pmode,
29c05e22 809 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 810 tem = gen_frame_mem (Pmode, tem);
53800dbe 811#endif
812 return tem;
813}
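
/* Source-level sketch (for exposition) of the calls this expands:

     void *ra = __builtin_return_address (0);   the current frame
     void *fp = __builtin_frame_address (1);    one frame up

   Nonzero counts walk the dynamic chain via DYNAMIC_CHAIN_ADDRESS as
   in the loop above.  */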
814
f7c44134 815/* Alias set used for setjmp buffer. */
32c2fdea 816static alias_set_type setjmp_alias_set = -1;
f7c44134 817
6b7f6858 818/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 819 return to RECEIVER_LABEL. This is also called directly by the SJLJ
820 exception handling code. */
53800dbe 821
6b7f6858 822void
aecda0d6 823expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 824{
3754d046 825 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 826 rtx stack_save;
f7c44134 827 rtx mem;
53800dbe 828
f7c44134 829 if (setjmp_alias_set == -1)
830 setjmp_alias_set = new_alias_set ();
831
85d654dd 832 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 833
37ae8504 834 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 835
6b7f6858 836 /* We store the frame pointer and the address of receiver_label in
837 the buffer and use the rest of it for the stack save area, which
838 is machine-dependent. */
53800dbe 839
f7c44134 840 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 841 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 842 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 843
29c05e22 844 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
845 GET_MODE_SIZE (Pmode))),
ab6ab77e 846 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 847
848 emit_move_insn (validize_mem (mem),
6b7f6858 849 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 850
851 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 852 plus_constant (Pmode, buf_addr,
53800dbe 853 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 854 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 855 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 856
857 /* If there is further processing to do, do it. */
a3c81e61 858 if (targetm.have_builtin_setjmp_setup ())
859 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 860
29f09705 861 /* We have a nonlocal label. */
18d50ae6 862 cfun->has_nonlocal_label = 1;
6b7f6858 863}
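
/* Resulting buffer layout (for exposition), one Pmode word per slot
   except the save area:

     buf[0]     frame pointer value
     buf[1]     address of RECEIVER_LABEL
     buf[2]..   machine-dependent stack save area (sa_mode)  */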
53800dbe 864
2c8a1497 865/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 866 also called directly by the SJLJ exception handling code.
 868 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
6b7f6858 868
869void
a3c81e61 870expand_builtin_setjmp_receiver (rtx receiver_label)
6b7f6858 871{
82c7907c 872 rtx chain;
873
4598ade9 874 /* The FP is live when we get here, so we have to make sure it's
53800dbe 875 marked as used by this function. */
18b42941 876 emit_use (hard_frame_pointer_rtx);
53800dbe 877
878 /* Mark the static chain as clobbered here so life information
879 doesn't get messed up for it. */
82c7907c 880 chain = targetm.calls.static_chain (current_function_decl, true);
881 if (chain && REG_P (chain))
882 emit_clobber (chain);
53800dbe 883
884 /* Now put in the code to restore the frame pointer, and argument
491e04ef 885 pointer, if needed. */
a3c81e61 886 if (! targetm.have_nonlocal_goto ())
62dcb5c8 887 {
888 /* First adjust our frame pointer to its actual value. It was
889 previously set to the start of the virtual area corresponding to
890 the stacked variables when we branched here and now needs to be
891 adjusted to the actual hardware fp value.
892
893 Assignments to virtual registers are converted by
894 instantiate_virtual_regs into the corresponding assignment
895 to the underlying register (fp in this case) that makes
896 the original assignment true.
897 So the following insn will actually be decrementing fp by
898 STARTING_FRAME_OFFSET. */
899 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
900
901 /* Restoring the frame pointer also modifies the hard frame pointer.
902 Mark it used (so that the previous assignment remains live once
903 the frame pointer is eliminated) and clobbered (to represent the
904 implicit update from the assignment). */
905 emit_use (hard_frame_pointer_rtx);
906 emit_clobber (hard_frame_pointer_rtx);
907 }
53800dbe 908
a494b6d7 909 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
53800dbe 910 {
4598ade9 911 /* If the argument pointer can be eliminated in favor of the
912 frame pointer, we don't need to restore it. We assume here
913 that if such an elimination is present, it can always be used.
914 This is the case on all known machines; if we don't make this
915 assumption, we do unnecessary saving on many machines. */
53800dbe 916 size_t i;
e99c3a1d 917 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 918
3098b2d3 919 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 920 if (elim_regs[i].from == ARG_POINTER_REGNUM
921 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
922 break;
923
3098b2d3 924 if (i == ARRAY_SIZE (elim_regs))
53800dbe 925 {
926 /* Now restore our arg pointer from the address at which it
05927e40 927 was saved in our stack frame. */
27a7a23a 928 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 929 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 930 }
931 }
53800dbe 932
a3c81e61 933 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
934 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
935 else if (targetm.have_nonlocal_goto_receiver ())
936 emit_insn (targetm.gen_nonlocal_goto_receiver ());
53800dbe 937 else
a3c81e61 938 { /* Nothing */ }
57f6bb94 939
3072d30e 940 /* We must not allow the code we just generated to be reordered by
941 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 942 happen immediately, not later. */
3072d30e 943 emit_insn (gen_blockage ());
6b7f6858 944}
53800dbe 945
53800dbe 946/* __builtin_longjmp is passed a pointer to an array of five words (not
947 all will be used on all machines). It operates similarly to the C
948 library function of the same name, but is more efficient. Much of
2c8a1497 949 the code below is copied from the handling of non-local gotos. */
53800dbe 950
c626df3d 951static void
aecda0d6 952expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 953{
1e0c0b35 954 rtx fp, lab, stack;
955 rtx_insn *insn, *last;
3754d046 956 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 957
48e1416a 958 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 959 function */
960 if (SUPPORTS_STACK_ALIGNMENT)
961 crtl->need_drap = true;
962
f7c44134 963 if (setjmp_alias_set == -1)
964 setjmp_alias_set = new_alias_set ();
965
85d654dd 966 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 967
53800dbe 968 buf_addr = force_reg (Pmode, buf_addr);
969
82c7907c 970 /* We require the user to pass a second argument of 1, because
971 that is what builtin_setjmp will return. */
64db345d 972 gcc_assert (value == const1_rtx);
53800dbe 973
4712c7d6 974 last = get_last_insn ();
a3c81e61 975 if (targetm.have_builtin_longjmp ())
976 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
53800dbe 977 else
53800dbe 978 {
979 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 980 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 981 GET_MODE_SIZE (Pmode)));
982
29c05e22 983 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 984 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 985 set_mem_alias_set (fp, setjmp_alias_set);
986 set_mem_alias_set (lab, setjmp_alias_set);
987 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 988
989 /* Pick up FP, label, and SP from the block and jump. This code is
990 from expand_goto in stmt.c; see there for detailed comments. */
a3c81e61 991 if (targetm.have_nonlocal_goto ())
53800dbe 992 /* We have to pass a value to the nonlocal_goto pattern that will
993 get copied into the static_chain pointer, but it does not matter
994 what that value is, because builtin_setjmp does not use it. */
a3c81e61 995 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 996 else
53800dbe 997 {
998 lab = copy_to_reg (lab);
999
18b42941 1000 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1001 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1002
53800dbe 1003 emit_move_insn (hard_frame_pointer_rtx, fp);
e9c97615 1004 emit_stack_restore (SAVE_NONLOCAL, stack);
53800dbe 1005
18b42941 1006 emit_use (hard_frame_pointer_rtx);
1007 emit_use (stack_pointer_rtx);
53800dbe 1008 emit_indirect_jump (lab);
1009 }
1010 }
615166bb 1011
1012 /* Search backwards and mark the jump insn as a non-local goto.
1013 Note that this precludes the use of __builtin_longjmp to a
1014 __builtin_setjmp target in the same function. However, we've
1015 already cautioned the user that these functions are for
1016 internal exception handling use only. */
449c0509 1017 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1018 {
64db345d 1019 gcc_assert (insn != last);
7d3f6cc7 1020
6d7dc5b9 1021 if (JUMP_P (insn))
449c0509 1022 {
a1ddb869 1023 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1024 break;
1025 }
6d7dc5b9 1026 else if (CALL_P (insn))
9342ee68 1027 break;
449c0509 1028 }
53800dbe 1029}
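
/* Source-level usage sketch (for exposition): the two builtins pair up
   as

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       ... normal path; may call __builtin_longjmp (buf, 1) ...
     else
       ... control resumes here with __builtin_setjmp returning 1 ...

   The second argument of __builtin_longjmp must be the constant 1, as
   the assertion above enforces.  */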
1030
0e80b01d 1031static inline bool
1032more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1033{
1034 return (iter->i < iter->n);
1035}
1036
1037/* This function validates the types of a function call argument list
1038 against a specified list of tree_codes. If the last specifier is a 0,
5cfa3fc8 1039 that represents an ellipsis, otherwise the last specifier must be a
0e80b01d 1040 VOID_TYPE. */
1041
1042static bool
1043validate_arglist (const_tree callexpr, ...)
1044{
1045 enum tree_code code;
 1046 bool res = false;
1047 va_list ap;
1048 const_call_expr_arg_iterator iter;
1049 const_tree arg;
1050
1051 va_start (ap, callexpr);
1052 init_const_call_expr_arg_iterator (callexpr, &iter);
1053
5cfa3fc8 1054 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
184fac50 1055 tree fn = CALL_EXPR_FN (callexpr);
1056 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
5cfa3fc8 1057
1058 for (unsigned argno = 1; ; ++argno)
0e80b01d 1059 {
1060 code = (enum tree_code) va_arg (ap, int);
5cfa3fc8 1061
0e80b01d 1062 switch (code)
1063 {
1064 case 0:
 1065 /* This signifies an ellipsis; any further arguments are all ok. */
1066 res = true;
1067 goto end;
1068 case VOID_TYPE:
1069 /* This signifies an endlink, if no arguments remain, return
1070 true, otherwise return false. */
1071 res = !more_const_call_expr_args_p (&iter);
1072 goto end;
5cfa3fc8 1073 case POINTER_TYPE:
1074 /* The actual argument must be nonnull when either the whole
1075 called function has been declared nonnull, or when the formal
1076 argument corresponding to the actual argument has been. */
184fac50 1077 if (argmap
1078 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1079 {
1080 arg = next_const_call_expr_arg (&iter);
1081 if (!validate_arg (arg, code) || integer_zerop (arg))
1082 goto end;
1083 break;
1084 }
5cfa3fc8 1085 /* FALLTHRU */
0e80b01d 1086 default:
1087 /* If no parameters remain or the parameter's code does not
1088 match the specified code, return false. Otherwise continue
1089 checking any remaining arguments. */
1090 arg = next_const_call_expr_arg (&iter);
184fac50 1091 if (!validate_arg (arg, code))
0e80b01d 1092 goto end;
1093 break;
1094 }
1095 }
0e80b01d 1096
1097 /* We need gotos here since we can only have one VA_CLOSE in a
1098 function. */
1099 end: ;
1100 va_end (ap);
1101
5cfa3fc8 1102 BITMAP_FREE (argmap);
1103
0e80b01d 1104 return res;
1105}
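
/* Usage sketch (for exposition): expanders guard themselves with calls
   such as

     if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return NULL_RTX;

   where the trailing VOID_TYPE requires that no arguments remain; a
   trailing 0 would instead accept arbitrary further arguments.  */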
1106
4ee9c684 1107/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1108 and the address of the save area. */
1109
1110static rtx
c2f47e15 1111expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1112{
1113 tree t_label, t_save_area;
1e0c0b35 1114 rtx r_label, r_save_area, r_fp, r_sp;
1115 rtx_insn *insn;
4ee9c684 1116
c2f47e15 1117 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1118 return NULL_RTX;
1119
c2f47e15 1120 t_label = CALL_EXPR_ARG (exp, 0);
1121 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1122
8ec3c5c2 1123 r_label = expand_normal (t_label);
3dce56cc 1124 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1125 r_save_area = expand_normal (t_save_area);
3dce56cc 1126 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1127 /* Copy the address of the save location to a register just in case it was
1128 based on the frame pointer. */
51adbc8a 1129 r_save_area = copy_to_reg (r_save_area);
4ee9c684 1130 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1131 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1132 plus_constant (Pmode, r_save_area,
1133 GET_MODE_SIZE (Pmode)));
4ee9c684 1134
18d50ae6 1135 crtl->has_nonlocal_goto = 1;
4ee9c684 1136
4ee9c684 1137 /* ??? We no longer need to pass the static chain value, afaik. */
a3c81e61 1138 if (targetm.have_nonlocal_goto ())
1139 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
4ee9c684 1140 else
4ee9c684 1141 {
1142 r_label = copy_to_reg (r_label);
1143
18b42941 1144 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1145 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1146
d1ff492e 1147 /* Restore frame pointer for containing function. */
4ee9c684 1148 emit_move_insn (hard_frame_pointer_rtx, r_fp);
e9c97615 1149 emit_stack_restore (SAVE_NONLOCAL, r_sp);
491e04ef 1150
4ee9c684 1151 /* USE of hard_frame_pointer_rtx added for consistency;
1152 not clear if really needed. */
18b42941 1153 emit_use (hard_frame_pointer_rtx);
1154 emit_use (stack_pointer_rtx);
ad0d0af8 1155
1156 /* If the architecture is using a GP register, we must
1157 conservatively assume that the target function makes use of it.
1158 The prologue of functions with nonlocal gotos must therefore
1159 initialize the GP register to the appropriate value, and we
1160 must then make sure that this value is live at the point
1161 of the jump. (Note that this doesn't necessarily apply
1162 to targets with a nonlocal_goto pattern; they are free
1163 to implement it in their own way. Note also that this is
1164 a no-op if the GP register is a global invariant.) */
1e826931 1165 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1166 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
18b42941 1167 emit_use (pic_offset_table_rtx);
ad0d0af8 1168
4ee9c684 1169 emit_indirect_jump (r_label);
1170 }
491e04ef 1171
4ee9c684 1172 /* Search backwards to the jump insn and mark it as a
1173 non-local goto. */
1174 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1175 {
6d7dc5b9 1176 if (JUMP_P (insn))
4ee9c684 1177 {
a1ddb869 1178 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1179 break;
1180 }
6d7dc5b9 1181 else if (CALL_P (insn))
4ee9c684 1182 break;
1183 }
1184
1185 return const0_rtx;
1186}
1187
843d08a9 1188/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1189 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1190 It updates the stack pointer in that block to the current value. This is
1191 also called directly by the SJLJ exception handling code. */
843d08a9 1192
97354ae4 1193void
843d08a9 1194expand_builtin_update_setjmp_buf (rtx buf_addr)
1195{
3754d046 1196 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
d1ff492e 1197 rtx stack_save
843d08a9 1198 = gen_rtx_MEM (sa_mode,
1199 memory_address
1200 (sa_mode,
29c05e22 1201 plus_constant (Pmode, buf_addr,
1202 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1203
e9c97615 1204 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1205}
1206
5e3608d8 1207/* Expand a call to __builtin_prefetch. For a target that does not support
1208 data prefetch, evaluate the memory address argument in case it has side
1209 effects. */
1210
1211static void
c2f47e15 1212expand_builtin_prefetch (tree exp)
5e3608d8 1213{
1214 tree arg0, arg1, arg2;
c2f47e15 1215 int nargs;
5e3608d8 1216 rtx op0, op1, op2;
1217
c2f47e15 1218 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1219 return;
1220
c2f47e15 1221 arg0 = CALL_EXPR_ARG (exp, 0);
1222
26a5cadb 1223 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1224 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1225 locality). */
c2f47e15 1226 nargs = call_expr_nargs (exp);
1227 if (nargs > 1)
1228 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1229 else
c2f47e15 1230 arg1 = integer_zero_node;
1231 if (nargs > 2)
1232 arg2 = CALL_EXPR_ARG (exp, 2);
1233 else
2512209b 1234 arg2 = integer_three_node;
5e3608d8 1235
1236 /* Argument 0 is an address. */
1237 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1238
1239 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1240 if (TREE_CODE (arg1) != INTEGER_CST)
1241 {
07e3a3d2 1242 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1243 arg1 = integer_zero_node;
5e3608d8 1244 }
8ec3c5c2 1245 op1 = expand_normal (arg1);
5e3608d8 1246 /* Argument 1 must be either zero or one. */
1247 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1248 {
c3ceba8e 1249 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1250 " using zero");
5e3608d8 1251 op1 = const0_rtx;
1252 }
1253
1254 /* Argument 2 (locality) must be a compile-time constant int. */
1255 if (TREE_CODE (arg2) != INTEGER_CST)
1256 {
07e3a3d2 1257 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1258 arg2 = integer_zero_node;
1259 }
8ec3c5c2 1260 op2 = expand_normal (arg2);
5e3608d8 1261 /* Argument 2 must be 0, 1, 2, or 3. */
1262 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1263 {
c3ceba8e 1264 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1265 op2 = const0_rtx;
1266 }
1267
1d375a79 1268 if (targetm.have_prefetch ())
5e3608d8 1269 {
8786db1e 1270 struct expand_operand ops[3];
1271
1272 create_address_operand (&ops[0], op0);
1273 create_integer_operand (&ops[1], INTVAL (op1));
1274 create_integer_operand (&ops[2], INTVAL (op2));
1d375a79 1275 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
8786db1e 1276 return;
5e3608d8 1277 }
0a534ba7 1278
f0ce3b1f 1279 /* Don't do anything with direct references to volatile memory, but
1280 generate code to handle other side effects. */
e16ceb8e 1281 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1282 emit_insn (op0);
5e3608d8 1283}
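
/* Source-level sketch (for exposition): a call such as

     __builtin_prefetch (p, 0, 3);

   reaches this expander with op1 = 0 (read) and op2 = 3 (high
   locality); on targets without a prefetch pattern only the side
   effects of evaluating P are kept.  */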
1284
f7c44134 1285/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1286 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1287 the maximum length of the block of memory that might be accessed or
1288 NULL if unknown. */
f7c44134 1289
53800dbe 1290static rtx
d8ae1baa 1291get_memory_rtx (tree exp, tree len)
53800dbe 1292{
ad0a178f 1293 tree orig_exp = exp;
1294 rtx addr, mem;
ad0a178f 1295
 1296 /* When EXP is a SAVE_EXPR that has not been resolved, MEM_ATTRS can still be
 1297 derived from its expression; for expr->a.b only <variable>.a.b is recorded. */
1298 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1299 exp = TREE_OPERAND (exp, 0);
1300
1301 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1302 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1303
f7c44134 1304 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1305 First remove any nops. */
72dd6141 1306 while (CONVERT_EXPR_P (exp)
f7c44134 1307 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1308 exp = TREE_OPERAND (exp, 0);
1309
5dd3f78f 1310 /* Build a MEM_REF representing the whole accessed area as a byte blob
1311 (as builtin stringops may alias with anything). */
1312 exp = fold_build2 (MEM_REF,
1313 build_array_type (char_type_node,
1314 build_range_type (sizetype,
1315 size_one_node, len)),
1316 exp, build_int_cst (ptr_type_node, 0));
1317
1318 /* If the MEM_REF has no acceptable address, try to get the base object
1319 from the original address we got, and build an all-aliasing
1320 unknown-sized access to that one. */
1321 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1322 set_mem_attributes (mem, exp, 0);
1323 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1324 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1325 0))))
eec8e941 1326 {
5dd3f78f 1327 exp = build_fold_addr_expr (exp);
1328 exp = fold_build2 (MEM_REF,
1329 build_array_type (char_type_node,
1330 build_range_type (sizetype,
1331 size_zero_node,
1332 NULL)),
1333 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1334 set_mem_attributes (mem, exp, 0);
eec8e941 1335 }
5dd3f78f 1336 set_mem_alias_set (mem, 0);
53800dbe 1337 return mem;
1338}
1339\f
1340/* Built-in functions to perform an untyped call and return. */
1341
3b9c3a16 1342#define apply_args_mode \
1343 (this_target_builtins->x_apply_args_mode)
1344#define apply_result_mode \
1345 (this_target_builtins->x_apply_result_mode)
53800dbe 1346
53800dbe 1347/* Return the size required for the block returned by __builtin_apply_args,
1348 and initialize apply_args_mode. */
1349
1350static int
aecda0d6 1351apply_args_size (void)
53800dbe 1352{
1353 static int size = -1;
58e9ce8f 1354 int align;
1355 unsigned int regno;
3754d046 1356 machine_mode mode;
53800dbe 1357
1358 /* The values computed by this function never change. */
1359 if (size < 0)
1360 {
1361 /* The first value is the incoming arg-pointer. */
1362 size = GET_MODE_SIZE (Pmode);
1363
1364 /* The second value is the structure value address unless this is
1365 passed as an "invisible" first argument. */
6812c89e 1366 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1367 size += GET_MODE_SIZE (Pmode);
1368
1369 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1370 if (FUNCTION_ARG_REGNO_P (regno))
1371 {
4bac51c9 1372 mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1373
64db345d 1374 gcc_assert (mode != VOIDmode);
53800dbe 1375
1376 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1377 if (size % align != 0)
1378 size = CEIL (size, align) * align;
53800dbe 1379 size += GET_MODE_SIZE (mode);
1380 apply_args_mode[regno] = mode;
1381 }
1382 else
1383 {
1384 apply_args_mode[regno] = VOIDmode;
53800dbe 1385 }
1386 }
1387 return size;
1388}
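
/* Layout of the block sized here (for exposition), as saved by
   expand_builtin_apply_args_1 below: the incoming arg pointer, then
   the structure value address (unless it is passed as an invisible
   first argument), then each argument register padded to its mode
   alignment.  */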
1389
1390/* Return the size required for the block returned by __builtin_apply,
1391 and initialize apply_result_mode. */
1392
1393static int
aecda0d6 1394apply_result_size (void)
53800dbe 1395{
1396 static int size = -1;
1397 int align, regno;
3754d046 1398 machine_mode mode;
53800dbe 1399
1400 /* The values computed by this function never change. */
1401 if (size < 0)
1402 {
1403 size = 0;
1404
1405 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1406 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1407 {
4bac51c9 1408 mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1409
64db345d 1410 gcc_assert (mode != VOIDmode);
53800dbe 1411
1412 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1413 if (size % align != 0)
1414 size = CEIL (size, align) * align;
1415 size += GET_MODE_SIZE (mode);
1416 apply_result_mode[regno] = mode;
1417 }
1418 else
1419 apply_result_mode[regno] = VOIDmode;
1420
1421 /* Allow targets that use untyped_call and untyped_return to override
1422 the size so that machine-specific information can be stored here. */
1423#ifdef APPLY_RESULT_SIZE
1424 size = APPLY_RESULT_SIZE;
1425#endif
1426 }
1427 return size;
1428}
1429
53800dbe 1430/* Create a vector describing the result block RESULT. If SAVEP is true,
1431 the result block is used to save the values; otherwise it is used to
1432 restore the values. */
1433
1434static rtx
aecda0d6 1435result_vector (int savep, rtx result)
53800dbe 1436{
1437 int regno, size, align, nelts;
3754d046 1438 machine_mode mode;
53800dbe 1439 rtx reg, mem;
364c0c59 1440 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1441
53800dbe 1442 size = nelts = 0;
1443 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1444 if ((mode = apply_result_mode[regno]) != VOIDmode)
1445 {
1446 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1447 if (size % align != 0)
1448 size = CEIL (size, align) * align;
1449 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1450 mem = adjust_address (result, mode, size);
53800dbe 1451 savevec[nelts++] = (savep
d1f9b275 1452 ? gen_rtx_SET (mem, reg)
1453 : gen_rtx_SET (reg, mem));
53800dbe 1454 size += GET_MODE_SIZE (mode);
1455 }
1456 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1457}
53800dbe 1458
1459/* Save the state required to perform an untyped call with the same
1460 arguments as were passed to the current function. */
1461
1462static rtx
aecda0d6 1463expand_builtin_apply_args_1 (void)
53800dbe 1464{
1c7e61a7 1465 rtx registers, tem;
53800dbe 1466 int size, align, regno;
3754d046 1467 machine_mode mode;
6812c89e 1468 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1469
1470 /* Create a block where the arg-pointer, structure value address,
1471 and argument registers can be saved. */
1472 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1473
1474 /* Walk past the arg-pointer and structure value address. */
1475 size = GET_MODE_SIZE (Pmode);
6812c89e 1476 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1477 size += GET_MODE_SIZE (Pmode);
1478
1479 /* Save each register used in calling a function to the block. */
1480 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1481 if ((mode = apply_args_mode[regno]) != VOIDmode)
1482 {
53800dbe 1483 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1484 if (size % align != 0)
1485 size = CEIL (size, align) * align;
1486
1487 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1488
e513d163 1489 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1490 size += GET_MODE_SIZE (mode);
1491 }
1492
1493 /* Save the arg pointer to the block. */
27a7a23a 1494 tem = copy_to_reg (crtl->args.internal_arg_pointer);
 1c7e61a7 1495 /* We need the pointer as the caller actually passed the arguments to us, not
9d4b544c 1496 as we might have pretended they were passed. Make sure it's a valid
1497 operand, as emit_move_insn isn't expected to handle a PLUS. */
3764c94e 1498 if (STACK_GROWS_DOWNWARD)
1499 tem
1500 = force_operand (plus_constant (Pmode, tem,
1501 crtl->args.pretend_args_size),
1502 NULL_RTX);
1c7e61a7 1503 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1504
53800dbe 1505 size = GET_MODE_SIZE (Pmode);
1506
1507 /* Save the structure value address unless this is passed as an
1508 "invisible" first argument. */
45550790 1509 if (struct_incoming_value)
53800dbe 1510 {
e513d163 1511 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1512 copy_to_reg (struct_incoming_value));
53800dbe 1513 size += GET_MODE_SIZE (Pmode);
1514 }
1515
1516 /* Return the address of the block. */
1517 return copy_addr_to_reg (XEXP (registers, 0));
1518}
1519
 1520/* __builtin_apply_args returns a block of memory allocated on
1521 the stack into which is stored the arg pointer, structure
1522 value address, static chain, and all the registers that might
1523 possibly be used in performing a function call. The code is
1524 moved to the start of the function so the incoming values are
1525 saved. */
27d0c333 1526
53800dbe 1527static rtx
aecda0d6 1528expand_builtin_apply_args (void)
53800dbe 1529{
1530 /* Don't do __builtin_apply_args more than once in a function.
1531 Save the result of the first call and reuse it. */
1532 if (apply_args_value != 0)
1533 return apply_args_value;
1534 {
1535 /* When this function is called, it means that registers must be
1536 saved on entry to this function. So we migrate the
1537 call to the first insn of this function. */
1538 rtx temp;
53800dbe 1539
1540 start_sequence ();
1541 temp = expand_builtin_apply_args_1 ();
9ed997be 1542 rtx_insn *seq = get_insns ();
53800dbe 1543 end_sequence ();
1544
1545 apply_args_value = temp;
1546
31d3e01c 1547 /* Put the insns after the NOTE that starts the function.
1548 If this is inside a start_sequence, make the outer-level insn
53800dbe 1549 chain current, so the code is placed at the start of the
0ef1a651 1550 function. If internal_arg_pointer is a non-virtual pseudo,
 1551 it needs to be placed after the code that initializes
1552 that pseudo. */
53800dbe 1553 push_topmost_sequence ();
0ef1a651 1554 if (REG_P (crtl->args.internal_arg_pointer)
1555 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1556 emit_insn_before (seq, parm_birth_insn);
1557 else
1558 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1559 pop_topmost_sequence ();
1560 return temp;
1561 }
1562}
1563
1564/* Perform an untyped call and save the state required to perform an
1565 untyped return of whatever value was returned by the given function. */
1566
1567static rtx
aecda0d6 1568expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1569{
1570 int size, align, regno;
3754d046 1571 machine_mode mode;
1e0c0b35 1572 rtx incoming_args, result, reg, dest, src;
1573 rtx_call_insn *call_insn;
53800dbe 1574 rtx old_stack_level = 0;
1575 rtx call_fusage = 0;
6812c89e 1576 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1577
85d654dd 1578 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1579
53800dbe 1580 /* Create a block where the return registers can be saved. */
1581 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1582
53800dbe 1583 /* Fetch the arg pointer from the ARGUMENTS block. */
1584 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1585 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1586 if (!STACK_GROWS_DOWNWARD)
1587 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1588 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1589
04a46d40 1590 /* Push a new argument block and copy the arguments. Do not allow
1591 the (potential) memcpy call below to interfere with our stack
1592 manipulations. */
53800dbe 1593 do_pending_stack_adjust ();
04a46d40 1594 NO_DEFER_POP;
53800dbe 1595
2358393e 1596 /* Save the stack with nonlocal if available. */
71512c05 1597 if (targetm.have_save_stack_nonlocal ())
e9c97615 1598 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1599 else
e9c97615 1600 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1601
59647703 1602 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1603 arguments to the outgoing arguments address. We can pass TRUE
1604 as the 4th argument because we just saved the stack pointer
1605 and will restore it right after the call. */
5be42b39 1606 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1607
1608 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1609 may have already set current_function_calls_alloca to true.
1610 current_function_calls_alloca won't be set if argsize is zero,
1611 so we have to guarantee need_drap is true here. */
1612 if (SUPPORTS_STACK_ALIGNMENT)
1613 crtl->need_drap = true;
1614
59647703 1615 dest = virtual_outgoing_args_rtx;
3764c94e 1616 if (!STACK_GROWS_DOWNWARD)
1617 {
1618 if (CONST_INT_P (argsize))
1619 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1620 else
1621 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1622 }
2a631e19 1623 dest = gen_rtx_MEM (BLKmode, dest);
1624 set_mem_align (dest, PARM_BOUNDARY);
1625 src = gen_rtx_MEM (BLKmode, incoming_args);
1626 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1627 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1628
1629 /* Refer to the argument block. */
1630 apply_args_size ();
1631 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1632 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1633
1634 /* Walk past the arg-pointer and structure value address. */
1635 size = GET_MODE_SIZE (Pmode);
45550790 1636 if (struct_value)
53800dbe 1637 size += GET_MODE_SIZE (Pmode);
1638
1639 /* Restore each of the registers previously saved. Make USE insns
1640 for each of these registers for use in making the call. */
1641 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1642 if ((mode = apply_args_mode[regno]) != VOIDmode)
1643 {
1644 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1645 if (size % align != 0)
1646 size = CEIL (size, align) * align;
1647 reg = gen_rtx_REG (mode, regno);
e513d163 1648 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1649 use_reg (&call_fusage, reg);
1650 size += GET_MODE_SIZE (mode);
1651 }
1652
1653 /* Restore the structure value address unless this is passed as an
1654 "invisible" first argument. */
1655 size = GET_MODE_SIZE (Pmode);
45550790 1656 if (struct_value)
53800dbe 1657 {
1658 rtx value = gen_reg_rtx (Pmode);
e513d163 1659 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1660 emit_move_insn (struct_value, value);
8ad4c111 1661 if (REG_P (struct_value))
45550790 1662 use_reg (&call_fusage, struct_value);
53800dbe 1663 size += GET_MODE_SIZE (Pmode);
1664 }
1665
1666 /* All arguments and registers used for the call are set up by now! */
82c7907c 1667 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1668
 1669 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
 1670 need; and we don't want to load it into a register as an optimization,
 1671 because prepare_call_address already did it if it should be done. */
1672 if (GET_CODE (function) != SYMBOL_REF)
1673 function = memory_address (FUNCTION_MODE, function);
1674
1675 /* Generate the actual call instruction and save the return value. */
1d99ab0a 1676 if (targetm.have_untyped_call ())
1677 {
1678 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1679 emit_call_insn (targetm.gen_untyped_call (mem, result,
1680 result_vector (1, result)));
1681 }
7f265a08 1682 else if (targetm.have_call_value ())
53800dbe 1683 {
1684 rtx valreg = 0;
1685
1686 /* Locate the unique return register. It is not possible to
1687 express a call that sets more than one return register using
1688 call_value; use untyped_call for that. In fact, untyped_call
1689 only needs to save the return registers in the given block. */
1690 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1691 if ((mode = apply_result_mode[regno]) != VOIDmode)
1692 {
7f265a08 1693 gcc_assert (!valreg); /* have_untyped_call required. */
7d3f6cc7 1694
53800dbe 1695 valreg = gen_rtx_REG (mode, regno);
1696 }
1697
7f265a08 1698 emit_insn (targetm.gen_call_value (valreg,
1699 gen_rtx_MEM (FUNCTION_MODE, function),
1700 const0_rtx, NULL_RTX, const0_rtx));
53800dbe 1701
e513d163 1702 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1703 }
1704 else
64db345d 1705 gcc_unreachable ();
53800dbe 1706
d5f9786f 1707 /* Find the CALL insn we just emitted, and attach the register usage
1708 information. */
1709 call_insn = last_call_insn ();
1710 add_function_usage_to (call_insn, call_fusage);
53800dbe 1711
1712 /* Restore the stack. */
71512c05 1713 if (targetm.have_save_stack_nonlocal ())
e9c97615 1714 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1715 else
e9c97615 1716 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1717 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1718
04a46d40 1719 OK_DEFER_POP;
1720
53800dbe 1721 /* Return the address of the result block. */
85d654dd 1722 result = copy_addr_to_reg (XEXP (result, 0));
1723 return convert_memory_address (ptr_mode, result);
53800dbe 1724}
1725
1726/* Perform an untyped return. */
1727
1728static void
aecda0d6 1729expand_builtin_return (rtx result)
53800dbe 1730{
1731 int size, align, regno;
3754d046 1732 machine_mode mode;
53800dbe 1733 rtx reg;
57c26b3a 1734 rtx_insn *call_fusage = 0;
53800dbe 1735
85d654dd 1736 result = convert_memory_address (Pmode, result);
726ec87c 1737
53800dbe 1738 apply_result_size ();
1739 result = gen_rtx_MEM (BLKmode, result);
1740
1d99ab0a 1741 if (targetm.have_untyped_return ())
53800dbe 1742 {
1d99ab0a 1743 rtx vector = result_vector (0, result);
1744 emit_jump_insn (targetm.gen_untyped_return (result, vector));
53800dbe 1745 emit_barrier ();
1746 return;
1747 }
53800dbe 1748
1749 /* Restore the return value and note that each value is used. */
1750 size = 0;
1751 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1752 if ((mode = apply_result_mode[regno]) != VOIDmode)
1753 {
1754 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1755 if (size % align != 0)
1756 size = CEIL (size, align) * align;
1757 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1758 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1759
1760 push_to_sequence (call_fusage);
18b42941 1761 emit_use (reg);
53800dbe 1762 call_fusage = get_insns ();
1763 end_sequence ();
1764 size += GET_MODE_SIZE (mode);
1765 }
1766
1767 /* Put the USE insns before the return. */
31d3e01c 1768 emit_insn (call_fusage);
53800dbe 1769
 1770 /* Return whatever values were restored by jumping directly to the end
1771 of the function. */
62380d2d 1772 expand_naked_return ();
53800dbe 1773}
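
/* For illustration, the three builtins expanded above combine into the
   classic call-forwarding idiom (a GNU extension).  A sketch, with a
   hypothetical target function and an assumed 64-byte bound on the
   argument block size:

     void target_fn ();

     void
     forwarder (void)
     {
       void *args = __builtin_apply_args ();
       void *ret = __builtin_apply ((void (*)()) target_fn, args, 64);
       __builtin_return (ret);
     }

   expand_builtin_apply_args saves the incoming registers,
   expand_builtin_apply replays them for the call and saves the return
   registers, and expand_builtin_return restores those registers before
   returning.  */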
1774
539a3a92 1775/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1776
539a3a92 1777static enum type_class
aecda0d6 1778type_to_class (tree type)
539a3a92 1779{
1780 switch (TREE_CODE (type))
1781 {
1782 case VOID_TYPE: return void_type_class;
1783 case INTEGER_TYPE: return integer_type_class;
539a3a92 1784 case ENUMERAL_TYPE: return enumeral_type_class;
1785 case BOOLEAN_TYPE: return boolean_type_class;
1786 case POINTER_TYPE: return pointer_type_class;
1787 case REFERENCE_TYPE: return reference_type_class;
1788 case OFFSET_TYPE: return offset_type_class;
1789 case REAL_TYPE: return real_type_class;
1790 case COMPLEX_TYPE: return complex_type_class;
1791 case FUNCTION_TYPE: return function_type_class;
1792 case METHOD_TYPE: return method_type_class;
1793 case RECORD_TYPE: return record_type_class;
1794 case UNION_TYPE:
1795 case QUAL_UNION_TYPE: return union_type_class;
1796 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1797 ? string_type_class : array_type_class);
539a3a92 1798 case LANG_TYPE: return lang_type_class;
1799 default: return no_type_class;
1800 }
1801}
bf8e3599 1802
c2f47e15 1803/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1804
53800dbe 1805static rtx
c2f47e15 1806expand_builtin_classify_type (tree exp)
53800dbe 1807{
c2f47e15 1808 if (call_expr_nargs (exp))
1809 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1810 return GEN_INT (no_type_class);
1811}
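
/* For illustration, a sketch of what the expansion above yields for a
   few argument types (the values follow the type_class enumeration):

     __builtin_classify_type (42)           => integer_type_class
     __builtin_classify_type (3.14)         => real_type_class
     __builtin_classify_type ((void *) 0)   => pointer_type_class

   The classification is done entirely at compile time, so each call
   expands to an integer constant.  */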
1812
07976da7 1813/* This helper macro, meant to be used in mathfn_built_in below,
1814 determines which among a set of three builtin math functions is
1815 appropriate for a given type mode. The `F' and `L' cases are
1816 automatically generated from the `double' case. */
e3240774 1817#define CASE_MATHFN(MATHFN) \
1818 CASE_CFN_##MATHFN: \
1819 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1820 fcodel = BUILT_IN_##MATHFN##L ; break;
cd2656b0 1821/* Similar to above, but appends _R after any F/L suffix. */
e3240774 1822#define CASE_MATHFN_REENT(MATHFN) \
1823 case CFN_BUILT_IN_##MATHFN##_R: \
1824 case CFN_BUILT_IN_##MATHFN##F_R: \
1825 case CFN_BUILT_IN_##MATHFN##L_R: \
1826 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1827 fcodel = BUILT_IN_##MATHFN##L_R ; break;
07976da7 1828
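/* For illustration, one use of the macro above:

     CASE_MATHFN (SQRT)

   expands to roughly

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;

   where CASE_CFN_SQRT is itself a generated macro whose case labels
   cover the sqrt, sqrtf and sqrtl builtin codes as well as the
   internal function code, and the three assignments record the
   double/float/long double builtin variants.  */
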
6c21be92 1829/* Return a function equivalent to FN but operating on floating-point
1830 values of type TYPE, or END_BUILTINS if no such function exists.
e3240774 1831 This is purely an operation on function codes; it does not guarantee
1832 that the target actually has an implementation of the function. */
c319d56a 1833
6c21be92 1834static built_in_function
e3240774 1835mathfn_built_in_2 (tree type, combined_fn fn)
0a68165a 1836{
6c21be92 1837 built_in_function fcode, fcodef, fcodel;
07976da7 1838
1839 switch (fn)
1840 {
e3240774 1841 CASE_MATHFN (ACOS)
1842 CASE_MATHFN (ACOSH)
1843 CASE_MATHFN (ASIN)
1844 CASE_MATHFN (ASINH)
1845 CASE_MATHFN (ATAN)
1846 CASE_MATHFN (ATAN2)
1847 CASE_MATHFN (ATANH)
1848 CASE_MATHFN (CBRT)
1849 CASE_MATHFN (CEIL)
1850 CASE_MATHFN (CEXPI)
1851 CASE_MATHFN (COPYSIGN)
1852 CASE_MATHFN (COS)
1853 CASE_MATHFN (COSH)
1854 CASE_MATHFN (DREM)
1855 CASE_MATHFN (ERF)
1856 CASE_MATHFN (ERFC)
1857 CASE_MATHFN (EXP)
1858 CASE_MATHFN (EXP10)
1859 CASE_MATHFN (EXP2)
1860 CASE_MATHFN (EXPM1)
1861 CASE_MATHFN (FABS)
1862 CASE_MATHFN (FDIM)
1863 CASE_MATHFN (FLOOR)
1864 CASE_MATHFN (FMA)
1865 CASE_MATHFN (FMAX)
1866 CASE_MATHFN (FMIN)
1867 CASE_MATHFN (FMOD)
1868 CASE_MATHFN (FREXP)
1869 CASE_MATHFN (GAMMA)
1870 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1871 CASE_MATHFN (HUGE_VAL)
1872 CASE_MATHFN (HYPOT)
1873 CASE_MATHFN (ILOGB)
1874 CASE_MATHFN (ICEIL)
1875 CASE_MATHFN (IFLOOR)
1876 CASE_MATHFN (INF)
1877 CASE_MATHFN (IRINT)
1878 CASE_MATHFN (IROUND)
1879 CASE_MATHFN (ISINF)
1880 CASE_MATHFN (J0)
1881 CASE_MATHFN (J1)
1882 CASE_MATHFN (JN)
1883 CASE_MATHFN (LCEIL)
1884 CASE_MATHFN (LDEXP)
1885 CASE_MATHFN (LFLOOR)
1886 CASE_MATHFN (LGAMMA)
1887 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1888 CASE_MATHFN (LLCEIL)
1889 CASE_MATHFN (LLFLOOR)
1890 CASE_MATHFN (LLRINT)
1891 CASE_MATHFN (LLROUND)
1892 CASE_MATHFN (LOG)
1893 CASE_MATHFN (LOG10)
1894 CASE_MATHFN (LOG1P)
1895 CASE_MATHFN (LOG2)
1896 CASE_MATHFN (LOGB)
1897 CASE_MATHFN (LRINT)
1898 CASE_MATHFN (LROUND)
1899 CASE_MATHFN (MODF)
1900 CASE_MATHFN (NAN)
1901 CASE_MATHFN (NANS)
1902 CASE_MATHFN (NEARBYINT)
1903 CASE_MATHFN (NEXTAFTER)
1904 CASE_MATHFN (NEXTTOWARD)
1905 CASE_MATHFN (POW)
1906 CASE_MATHFN (POWI)
1907 CASE_MATHFN (POW10)
1908 CASE_MATHFN (REMAINDER)
1909 CASE_MATHFN (REMQUO)
1910 CASE_MATHFN (RINT)
1911 CASE_MATHFN (ROUND)
1912 CASE_MATHFN (SCALB)
1913 CASE_MATHFN (SCALBLN)
1914 CASE_MATHFN (SCALBN)
1915 CASE_MATHFN (SIGNBIT)
1916 CASE_MATHFN (SIGNIFICAND)
1917 CASE_MATHFN (SIN)
1918 CASE_MATHFN (SINCOS)
1919 CASE_MATHFN (SINH)
1920 CASE_MATHFN (SQRT)
1921 CASE_MATHFN (TAN)
1922 CASE_MATHFN (TANH)
1923 CASE_MATHFN (TGAMMA)
1924 CASE_MATHFN (TRUNC)
1925 CASE_MATHFN (Y0)
1926 CASE_MATHFN (Y1)
1927 CASE_MATHFN (YN)
07976da7 1928
e3240774 1929 default:
1930 return END_BUILTINS;
1931 }
07976da7 1932
96b9f485 1933 if (TYPE_MAIN_VARIANT (type) == double_type_node)
6c21be92 1934 return fcode;
96b9f485 1935 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
6c21be92 1936 return fcodef;
96b9f485 1937 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
6c21be92 1938 return fcodel;
07976da7 1939 else
6c21be92 1940 return END_BUILTINS;
1941}
1942
 1943/* Return the mathematical function equivalent to FN but operating directly on TYPE,
1944 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1945 otherwise use the explicit declaration. If we can't do the conversion,
1946 return null. */
1947
1948static tree
e3240774 1949mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 1950{
1951 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1952 if (fcode2 == END_BUILTINS)
c2f47e15 1953 return NULL_TREE;
b9a16870 1954
1955 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1956 return NULL_TREE;
1957
1958 return builtin_decl_explicit (fcode2);
0a68165a 1959}
1960
 e3240774 1961/* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
c319d56a 1962
1963tree
e3240774 1964mathfn_built_in (tree type, combined_fn fn)
c319d56a 1965{
1966 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1967}
1968
e3240774 1969/* Like mathfn_built_in_1, but take a built_in_function and
 1970 always use the implicit builtin declarations. */
1971
1972tree
1973mathfn_built_in (tree type, enum built_in_function fn)
1974{
1975 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1976}
1977
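/* For illustration, a typical query against the functions above
   (a sketch using the usual global type nodes):

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   returns the declaration of sqrtf, or NULL_TREE if that builtin is
   not implicitly available.  */
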
1f24b8e9 1978/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1979 return its code, otherwise return IFN_LAST. Note that this function
1980 only tests whether the function is defined in internals.def, not whether
1981 it is actually available on the target. */
1982
1983internal_fn
1984associated_internal_fn (tree fndecl)
1985{
1986 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1987 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1988 switch (DECL_FUNCTION_CODE (fndecl))
1989 {
1990#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1991 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
c9452b7c 1992#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1993 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1f24b8e9 1994#include "internal-fn.def"
1995
1996 CASE_FLT_FN (BUILT_IN_POW10):
1997 return IFN_EXP10;
1998
1999 CASE_FLT_FN (BUILT_IN_DREM):
2000 return IFN_REMAINDER;
2001
2002 CASE_FLT_FN (BUILT_IN_SCALBN):
2003 CASE_FLT_FN (BUILT_IN_SCALBLN):
2004 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2005 return IFN_LDEXP;
2006 return IFN_LAST;
2007
2008 default:
2009 return IFN_LAST;
2010 }
2011}
2012
2013/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2014 on the current target by a call to an internal function, return the
2015 code of that internal function, otherwise return IFN_LAST. The caller
2016 is responsible for ensuring that any side-effects of the built-in
2017 call are dealt with correctly. E.g. if CALL sets errno, the caller
2018 must decide that the errno result isn't needed or make it available
2019 in some other way. */
2020
2021internal_fn
2022replacement_internal_fn (gcall *call)
2023{
2024 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2025 {
2026 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2027 if (ifn != IFN_LAST)
2028 {
2029 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 2030 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2031 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 2032 return ifn;
2033 }
2034 }
2035 return IFN_LAST;
2036}
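
/* For illustration of the mapping implemented by the two functions
   above: a GIMPLE call

     x = __builtin_sqrtf (y);

   has IFN_SQRT as its associated internal function, and when the
   target provides a direct sqrt pattern for the float mode (and the
   caller has dealt with the errno side-effect), replacement_internal_fn
   returns IFN_SQRT so the call can become

     x = .SQRT (y);  */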
2037
 7e0713b1 2038/* Expand a call to the builtin ternary math functions (fma).
2039 Return NULL_RTX if a normal call should be emitted rather than expanding the
2040 function in-line. EXP is the expression that is a call to the builtin
2041 function; if convenient, the result should be placed in TARGET.
2042 SUBTARGET may be used as the target for computing one of EXP's
2043 operands. */
2044
2045static rtx
2046expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2047{
2048 optab builtin_optab;
1e0c0b35 2049 rtx op0, op1, op2, result;
2050 rtx_insn *insns;
7e0713b1 2051 tree fndecl = get_callee_fndecl (exp);
2052 tree arg0, arg1, arg2;
3754d046 2053 machine_mode mode;
7e0713b1 2054
2055 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2056 return NULL_RTX;
2057
2058 arg0 = CALL_EXPR_ARG (exp, 0);
2059 arg1 = CALL_EXPR_ARG (exp, 1);
2060 arg2 = CALL_EXPR_ARG (exp, 2);
2061
2062 switch (DECL_FUNCTION_CODE (fndecl))
2063 {
2064 CASE_FLT_FN (BUILT_IN_FMA):
2065 builtin_optab = fma_optab; break;
2066 default:
2067 gcc_unreachable ();
2068 }
2069
2070 /* Make a suitable register to place result in. */
2071 mode = TYPE_MODE (TREE_TYPE (exp));
2072
2073 /* Before working hard, check whether the instruction is available. */
2074 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2075 return NULL_RTX;
2076
de2e453e 2077 result = gen_reg_rtx (mode);
7e0713b1 2078
2079 /* Always stabilize the argument list. */
2080 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2081 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2082 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2083
2084 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2085 op1 = expand_normal (arg1);
2086 op2 = expand_normal (arg2);
2087
2088 start_sequence ();
2089
de2e453e 2090 /* Compute into RESULT.
2091 Set RESULT to wherever the result comes back. */
2092 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2093 result, 0);
7e0713b1 2094
2095 /* If we were unable to expand via the builtin, stop the sequence
2096 (without outputting the insns) and call to the library function
2097 with the stabilized argument list. */
de2e453e 2098 if (result == 0)
7e0713b1 2099 {
2100 end_sequence ();
2101 return expand_call (exp, target, target == const0_rtx);
2102 }
2103
2104 /* Output the entire sequence. */
2105 insns = get_insns ();
2106 end_sequence ();
2107 emit_insn (insns);
2108
de2e453e 2109 return result;
7e0713b1 2110}
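
/* For illustration: on a target whose fma_optab pattern is available
   for the argument mode, the expansion above turns

     r = __builtin_fma (a, b, c);

   into a single fused multiply-add insn; otherwise the generated
   sequence is discarded and an ordinary library call to fma is
   emitted instead.  */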
2111
6b43bae4 2112/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2113 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2114 function in-line. EXP is the expression that is a call to the builtin
2115 function; if convenient, the result should be placed in TARGET.
2116 SUBTARGET may be used as the target for computing one of EXP's
2117 operands. */
2118
2119static rtx
2120expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2121{
2122 optab builtin_optab;
1e0c0b35 2123 rtx op0;
2124 rtx_insn *insns;
6b43bae4 2125 tree fndecl = get_callee_fndecl (exp);
3754d046 2126 machine_mode mode;
abfea505 2127 tree arg;
6b43bae4 2128
c2f47e15 2129 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2130 return NULL_RTX;
6b43bae4 2131
c2f47e15 2132 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2133
2134 switch (DECL_FUNCTION_CODE (fndecl))
2135 {
4f35b1fc 2136 CASE_FLT_FN (BUILT_IN_SIN):
2137 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2138 builtin_optab = sincos_optab; break;
2139 default:
64db345d 2140 gcc_unreachable ();
6b43bae4 2141 }
2142
2143 /* Make a suitable register to place result in. */
2144 mode = TYPE_MODE (TREE_TYPE (exp));
2145
 6b43bae4 2146 /* Check if the sincos insn is available; otherwise fall back
 0bed3869 2147 to the sin or cos insn. */
d6bf3b14 2148 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2149 switch (DECL_FUNCTION_CODE (fndecl))
2150 {
4f35b1fc 2151 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2152 builtin_optab = sin_optab; break;
4f35b1fc 2153 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2154 builtin_optab = cos_optab; break;
2155 default:
64db345d 2156 gcc_unreachable ();
6b43bae4 2157 }
6b43bae4 2158
2159 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2160 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2161 {
de2e453e 2162 rtx result = gen_reg_rtx (mode);
6b43bae4 2163
2164 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2165 need to expand the argument again. This way, we will not perform
2166 side-effects more the once. */
abfea505 2167 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2168
1db6d067 2169 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2170
6b43bae4 2171 start_sequence ();
2172
de2e453e 2173 /* Compute into RESULT.
2174 Set RESULT to wherever the result comes back. */
6b43bae4 2175 if (builtin_optab == sincos_optab)
2176 {
de2e453e 2177 int ok;
7d3f6cc7 2178
6b43bae4 2179 switch (DECL_FUNCTION_CODE (fndecl))
2180 {
4f35b1fc 2181 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2182 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2183 break;
4f35b1fc 2184 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2185 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2186 break;
2187 default:
64db345d 2188 gcc_unreachable ();
6b43bae4 2189 }
de2e453e 2190 gcc_assert (ok);
6b43bae4 2191 }
2192 else
de2e453e 2193 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2194
de2e453e 2195 if (result != 0)
6b43bae4 2196 {
6b43bae4 2197 /* Output the entire sequence. */
2198 insns = get_insns ();
2199 end_sequence ();
2200 emit_insn (insns);
de2e453e 2201 return result;
6b43bae4 2202 }
2203
2204 /* If we were unable to expand via the builtin, stop the sequence
2205 (without outputting the insns) and call to the library function
2206 with the stabilized argument list. */
2207 end_sequence ();
2208 }
2209
de2e453e 2210 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2211}
2212
 a65c4d64 2213/* Given an interclass math builtin decl FNDECL and its argument ARG
2214 return an RTL instruction code that implements the functionality.
2215 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2216
a65c4d64 2217static enum insn_code
2218interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2219{
a65c4d64 2220 bool errno_set = false;
6cdd383a 2221 optab builtin_optab = unknown_optab;
3754d046 2222 machine_mode mode;
a67a90e5 2223
2224 switch (DECL_FUNCTION_CODE (fndecl))
2225 {
2226 CASE_FLT_FN (BUILT_IN_ILOGB):
2227 errno_set = true; builtin_optab = ilogb_optab; break;
69b779ea 2228 CASE_FLT_FN (BUILT_IN_ISINF):
2229 builtin_optab = isinf_optab; break;
8a1a9cb7 2230 case BUILT_IN_ISNORMAL:
cde061c1 2231 case BUILT_IN_ISFINITE:
2232 CASE_FLT_FN (BUILT_IN_FINITE):
a65c4d64 2233 case BUILT_IN_FINITED32:
2234 case BUILT_IN_FINITED64:
2235 case BUILT_IN_FINITED128:
2236 case BUILT_IN_ISINFD32:
2237 case BUILT_IN_ISINFD64:
2238 case BUILT_IN_ISINFD128:
cde061c1 2239 /* These builtins have no optabs (yet). */
2240 break;
a67a90e5 2241 default:
2242 gcc_unreachable ();
2243 }
2244
2245 /* There's no easy way to detect the case we need to set EDOM. */
2246 if (flag_errno_math && errno_set)
a65c4d64 2247 return CODE_FOR_nothing;
a67a90e5 2248
2249 /* Optab mode depends on the mode of the input argument. */
2250 mode = TYPE_MODE (TREE_TYPE (arg));
2251
cde061c1 2252 if (builtin_optab)
d6bf3b14 2253 return optab_handler (builtin_optab, mode);
a65c4d64 2254 return CODE_FOR_nothing;
2255}
2256
2257/* Expand a call to one of the builtin math functions that operate on
 2258 a floating-point argument and output an integer result (ilogb, isinf,
 2259 isnan, etc.).
2260 Return 0 if a normal call should be emitted rather than expanding the
2261 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2262 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2263
2264static rtx
f97eea22 2265expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2266{
2267 enum insn_code icode = CODE_FOR_nothing;
2268 rtx op0;
2269 tree fndecl = get_callee_fndecl (exp);
3754d046 2270 machine_mode mode;
a65c4d64 2271 tree arg;
2272
2273 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2274 return NULL_RTX;
2275
2276 arg = CALL_EXPR_ARG (exp, 0);
2277 icode = interclass_mathfn_icode (arg, fndecl);
2278 mode = TYPE_MODE (TREE_TYPE (arg));
2279
a67a90e5 2280 if (icode != CODE_FOR_nothing)
2281 {
8786db1e 2282 struct expand_operand ops[1];
1e0c0b35 2283 rtx_insn *last = get_last_insn ();
4e2a2fb4 2284 tree orig_arg = arg;
a67a90e5 2285
2286 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2287 need to expand the argument again. This way, we will not perform
 2288 side-effects more than once. */
abfea505 2289 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2290
f97eea22 2291 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2292
2293 if (mode != GET_MODE (op0))
2294 op0 = convert_to_mode (mode, op0, 0);
2295
8786db1e 2296 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2297 if (maybe_legitimize_operands (icode, 0, 1, ops)
2298 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2299 return ops[0].value;
2300
4e2a2fb4 2301 delete_insns_since (last);
2302 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2303 }
2304
a65c4d64 2305 return NULL_RTX;
a67a90e5 2306}
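
/* For illustration: with an isinf optab available for the argument's
   mode, `__builtin_isinf (x)' expands via the code above to the
   target's classification insn.  By contrast, under -fmath-errno
   ilogb is never expanded inline, since there is no easy way to set
   EDOM, as noted in interclass_mathfn_icode.  */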
2307
c3147c1a 2308/* Expand a call to the builtin sincos math function.
c2f47e15 2309 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2310 function in-line. EXP is the expression that is a call to the builtin
2311 function. */
2312
2313static rtx
2314expand_builtin_sincos (tree exp)
2315{
2316 rtx op0, op1, op2, target1, target2;
3754d046 2317 machine_mode mode;
c3147c1a 2318 tree arg, sinp, cosp;
2319 int result;
389dd41b 2320 location_t loc = EXPR_LOCATION (exp);
be5575b2 2321 tree alias_type, alias_off;
c3147c1a 2322
c2f47e15 2323 if (!validate_arglist (exp, REAL_TYPE,
2324 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2325 return NULL_RTX;
c3147c1a 2326
c2f47e15 2327 arg = CALL_EXPR_ARG (exp, 0);
2328 sinp = CALL_EXPR_ARG (exp, 1);
2329 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2330
2331 /* Make a suitable register to place result in. */
2332 mode = TYPE_MODE (TREE_TYPE (arg));
2333
2334 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2335 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2336 return NULL_RTX;
2337
2338 target1 = gen_reg_rtx (mode);
2339 target2 = gen_reg_rtx (mode);
2340
8ec3c5c2 2341 op0 = expand_normal (arg);
be5575b2 2342 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2343 alias_off = build_int_cst (alias_type, 0);
2344 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2345 sinp, alias_off));
2346 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2347 cosp, alias_off));
c3147c1a 2348
 2349 /* Compute into target1 and target2: target1 receives the sin
 2350 result and target2 the cos result. */
2351 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2352 gcc_assert (result);
2353
2354 /* Move target1 and target2 to the memory locations indicated
2355 by op1 and op2. */
2356 emit_move_insn (op1, target1);
2357 emit_move_insn (op2, target2);
2358
2359 return const0_rtx;
2360}
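
/* For illustration: when the sincos optab is available for the
   argument's mode, the expansion above turns

     sincos (x, &s, &c);

   into a single insn computing both values, followed by two stores
   through the user-supplied pointers; no library call is emitted.  */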
2361
d735c391 2362/* Expand a call to the internal cexpi builtin to the sincos math function.
2363 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2364 the result should be placed in TARGET. */
d735c391 2365
2366static rtx
f97eea22 2367expand_builtin_cexpi (tree exp, rtx target)
d735c391 2368{
2369 tree fndecl = get_callee_fndecl (exp);
d735c391 2370 tree arg, type;
3754d046 2371 machine_mode mode;
d735c391 2372 rtx op0, op1, op2;
389dd41b 2373 location_t loc = EXPR_LOCATION (exp);
d735c391 2374
c2f47e15 2375 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2376 return NULL_RTX;
d735c391 2377
c2f47e15 2378 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2379 type = TREE_TYPE (arg);
2380 mode = TYPE_MODE (TREE_TYPE (arg));
2381
2382 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2383 to sincos or cexp. We are sure we have sincos or cexp because cexpi
 2384 is only generated when at least one of them is available. */
d6bf3b14 2385 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2386 {
2387 op1 = gen_reg_rtx (mode);
2388 op2 = gen_reg_rtx (mode);
2389
f97eea22 2390 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2391
2392 /* Compute into op1 and op2. */
2393 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2394 }
30f690e0 2395 else if (targetm.libc_has_function (function_sincos))
d735c391 2396 {
c2f47e15 2397 tree call, fn = NULL_TREE;
d735c391 2398 tree top1, top2;
2399 rtx op1a, op2a;
2400
2401 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2402 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2403 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2404 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2405 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2406 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2407 else
2408 gcc_unreachable ();
48e1416a 2409
0ab48139 2410 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2411 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2412 op1a = copy_addr_to_reg (XEXP (op1, 0));
2413 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2414 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2415 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2416
d735c391 2417 /* Make sure not to fold the sincos call again. */
2418 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2419 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2420 call, 3, arg, top1, top2));
d735c391 2421 }
18b8d8ae 2422 else
2423 {
0ecbc158 2424 tree call, fn = NULL_TREE, narg;
18b8d8ae 2425 tree ctype = build_complex_type (type);
2426
0ecbc158 2427 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2428 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2429 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2430 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2431 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2432 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2433 else
2434 gcc_unreachable ();
fc0dfa6e 2435
2436 /* If we don't have a decl for cexp create one. This is the
2437 friendliest fallback if the user calls __builtin_cexpi
2438 without full target C99 function support. */
2439 if (fn == NULL_TREE)
2440 {
2441 tree fntype;
2442 const char *name = NULL;
2443
2444 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2445 name = "cexpf";
2446 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2447 name = "cexp";
2448 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2449 name = "cexpl";
2450
2451 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2452 fn = build_fn_decl (name, fntype);
2453 }
2454
389dd41b 2455 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2456 build_real (type, dconst0), arg);
2457
2458 /* Make sure not to fold the cexp call again. */
2459 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2460 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2461 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2462 }
d735c391 2463
2464 /* Now build the proper return type. */
2465 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2466 make_tree (TREE_TYPE (arg), op2),
2467 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2468 target, VOIDmode, EXPAND_NORMAL);
d735c391 2469}
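
/* For illustration, the identity the three strategies above rely on:

     __builtin_cexpi (x) == cexp (x * I) == cos (x) + sin (x) * I

   so the cos value becomes the real part of the result and the sin
   value the imaginary part, whichever of the sincos optab, sincos or
   cexp is used.  */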
2470
a65c4d64 2471/* Conveniently construct a function call expression. FNDECL names the
2472 function to be called, N is the number of arguments, and the "..."
 2473 parameters are the argument expressions. Unlike build_call_expr
2474 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2475
2476static tree
2477build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2478{
2479 va_list ap;
2480 tree fntype = TREE_TYPE (fndecl);
2481 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2482
2483 va_start (ap, n);
2484 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2485 va_end (ap);
2486 SET_EXPR_LOCATION (fn, loc);
2487 return fn;
2488}
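
/* For illustration, a typical use from later in this file:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);

   builds a one-argument CALL_EXPR to the fallback function without
   folding it, which matters when the call being built is the fallback
   for a builtin we just declined to expand.  */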
a65c4d64 2489
7d3afc77 2490/* Expand a call to one of the builtin rounding functions gcc defines
2491 as an extension (lfloor and lceil). As these are gcc extensions we
2492 do not need to worry about setting errno to EDOM.
ad52b9b7 2493 If expanding via optab fails, lower expression to (int)(floor(x)).
2494 EXP is the expression that is a call to the builtin function;
ff1b14e4 2495 if convenient, the result should be placed in TARGET. */
ad52b9b7 2496
2497static rtx
ff1b14e4 2498expand_builtin_int_roundingfn (tree exp, rtx target)
ad52b9b7 2499{
9c42dd28 2500 convert_optab builtin_optab;
1e0c0b35 2501 rtx op0, tmp;
2502 rtx_insn *insns;
ad52b9b7 2503 tree fndecl = get_callee_fndecl (exp);
ad52b9b7 2504 enum built_in_function fallback_fn;
2505 tree fallback_fndecl;
3754d046 2506 machine_mode mode;
4de0924f 2507 tree arg;
ad52b9b7 2508
c2f47e15 2509 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
ad52b9b7 2510 gcc_unreachable ();
2511
c2f47e15 2512 arg = CALL_EXPR_ARG (exp, 0);
ad52b9b7 2513
2514 switch (DECL_FUNCTION_CODE (fndecl))
2515 {
80ff6494 2516 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 2517 CASE_FLT_FN (BUILT_IN_LCEIL):
2518 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 2519 builtin_optab = lceil_optab;
2520 fallback_fn = BUILT_IN_CEIL;
2521 break;
2522
80ff6494 2523 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 2524 CASE_FLT_FN (BUILT_IN_LFLOOR):
2525 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 2526 builtin_optab = lfloor_optab;
2527 fallback_fn = BUILT_IN_FLOOR;
2528 break;
2529
2530 default:
2531 gcc_unreachable ();
2532 }
2533
2534 /* Make a suitable register to place result in. */
2535 mode = TYPE_MODE (TREE_TYPE (exp));
2536
9c42dd28 2537 target = gen_reg_rtx (mode);
ad52b9b7 2538
9c42dd28 2539 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2540 need to expand the argument again. This way, we will not perform
 2541 side-effects more than once. */
abfea505 2542 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
ad52b9b7 2543
ff1b14e4 2544 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
ad52b9b7 2545
9c42dd28 2546 start_sequence ();
ad52b9b7 2547
9c42dd28 2548 /* Compute into TARGET. */
2549 if (expand_sfix_optab (target, op0, builtin_optab))
2550 {
2551 /* Output the entire sequence. */
2552 insns = get_insns ();
ad52b9b7 2553 end_sequence ();
9c42dd28 2554 emit_insn (insns);
2555 return target;
ad52b9b7 2556 }
2557
9c42dd28 2558 /* If we were unable to expand via the builtin, stop the sequence
2559 (without outputting the insns). */
2560 end_sequence ();
2561
ad52b9b7 2562 /* Fall back to floating point rounding optab. */
2563 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
fc0dfa6e 2564
2565 /* For non-C99 targets we may end up without a fallback fndecl here
2566 if the user called __builtin_lfloor directly. In this case emit
2567 a call to the floor/ceil variants nevertheless. This should result
 2568 in the best user experience for targets lacking full C99 support. */
2569 if (fallback_fndecl == NULL_TREE)
2570 {
2571 tree fntype;
2572 const char *name = NULL;
2573
2574 switch (DECL_FUNCTION_CODE (fndecl))
2575 {
80ff6494 2576 case BUILT_IN_ICEIL:
fc0dfa6e 2577 case BUILT_IN_LCEIL:
2578 case BUILT_IN_LLCEIL:
2579 name = "ceil";
2580 break;
80ff6494 2581 case BUILT_IN_ICEILF:
fc0dfa6e 2582 case BUILT_IN_LCEILF:
2583 case BUILT_IN_LLCEILF:
2584 name = "ceilf";
2585 break;
80ff6494 2586 case BUILT_IN_ICEILL:
fc0dfa6e 2587 case BUILT_IN_LCEILL:
2588 case BUILT_IN_LLCEILL:
2589 name = "ceill";
2590 break;
80ff6494 2591 case BUILT_IN_IFLOOR:
fc0dfa6e 2592 case BUILT_IN_LFLOOR:
2593 case BUILT_IN_LLFLOOR:
2594 name = "floor";
2595 break;
80ff6494 2596 case BUILT_IN_IFLOORF:
fc0dfa6e 2597 case BUILT_IN_LFLOORF:
2598 case BUILT_IN_LLFLOORF:
2599 name = "floorf";
2600 break;
80ff6494 2601 case BUILT_IN_IFLOORL:
fc0dfa6e 2602 case BUILT_IN_LFLOORL:
2603 case BUILT_IN_LLFLOORL:
2604 name = "floorl";
2605 break;
2606 default:
2607 gcc_unreachable ();
2608 }
2609
2610 fntype = build_function_type_list (TREE_TYPE (arg),
2611 TREE_TYPE (arg), NULL_TREE);
2612 fallback_fndecl = build_fn_decl (name, fntype);
2613 }
2614
0568e9c1 2615 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
ad52b9b7 2616
d4c690af 2617 tmp = expand_normal (exp);
933eb13a 2618 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
ad52b9b7 2619
2620 /* Truncate the result of floating point optab to integer
2621 via expand_fix (). */
2622 target = gen_reg_rtx (mode);
2623 expand_fix (target, tmp, 0);
2624
2625 return target;
2626}
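
/* For illustration: on a target without an lceil/lfloor pattern, the
   fallback path above lowers

     long l = __builtin_lfloor (x);

   to the equivalent of

     long l = (long) floor (x);

   i.e. a call to the floating-point floor followed by expand_fix's
   float-to-integer conversion.  */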
2627
7d3afc77 2628/* Expand a call to one of the builtin math functions doing integer
2629 conversion (lrint).
2630 Return 0 if a normal call should be emitted rather than expanding the
2631 function in-line. EXP is the expression that is a call to the builtin
ff1b14e4 2632 function; if convenient, the result should be placed in TARGET. */
7d3afc77 2633
2634static rtx
ff1b14e4 2635expand_builtin_int_roundingfn_2 (tree exp, rtx target)
7d3afc77 2636{
5f51ee59 2637 convert_optab builtin_optab;
1e0c0b35 2638 rtx op0;
2639 rtx_insn *insns;
7d3afc77 2640 tree fndecl = get_callee_fndecl (exp);
4de0924f 2641 tree arg;
3754d046 2642 machine_mode mode;
e951f9a4 2643 enum built_in_function fallback_fn = BUILT_IN_NONE;
7d3afc77 2644
c2f47e15 2645 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2646 gcc_unreachable ();
48e1416a 2647
c2f47e15 2648 arg = CALL_EXPR_ARG (exp, 0);
7d3afc77 2649
2650 switch (DECL_FUNCTION_CODE (fndecl))
2651 {
80ff6494 2652 CASE_FLT_FN (BUILT_IN_IRINT):
e951f9a4 2653 fallback_fn = BUILT_IN_LRINT;
3c77f69c 2654 gcc_fallthrough ();
7d3afc77 2655 CASE_FLT_FN (BUILT_IN_LRINT):
2656 CASE_FLT_FN (BUILT_IN_LLRINT):
e951f9a4 2657 builtin_optab = lrint_optab;
2658 break;
80ff6494 2659
2660 CASE_FLT_FN (BUILT_IN_IROUND):
e951f9a4 2661 fallback_fn = BUILT_IN_LROUND;
3c77f69c 2662 gcc_fallthrough ();
ef2f1a10 2663 CASE_FLT_FN (BUILT_IN_LROUND):
2664 CASE_FLT_FN (BUILT_IN_LLROUND):
e951f9a4 2665 builtin_optab = lround_optab;
2666 break;
80ff6494 2667
7d3afc77 2668 default:
2669 gcc_unreachable ();
2670 }
2671
e951f9a4 2672 /* There's no easy way to detect the case we need to set EDOM. */
2673 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2674 return NULL_RTX;
2675
7d3afc77 2676 /* Make a suitable register to place result in. */
2677 mode = TYPE_MODE (TREE_TYPE (exp));
2678
e951f9a4 2679 /* There's no easy way to detect the case we need to set EDOM. */
2680 if (!flag_errno_math)
2681 {
de2e453e 2682 rtx result = gen_reg_rtx (mode);
7d3afc77 2683
e951f9a4 2684 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2685 need to expand the argument again. This way, we will not perform
 2686 side-effects more than once. */
2687 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7d3afc77 2688
e951f9a4 2689 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
7d3afc77 2690
e951f9a4 2691 start_sequence ();
7d3afc77 2692
de2e453e 2693 if (expand_sfix_optab (result, op0, builtin_optab))
e951f9a4 2694 {
2695 /* Output the entire sequence. */
2696 insns = get_insns ();
2697 end_sequence ();
2698 emit_insn (insns);
de2e453e 2699 return result;
e951f9a4 2700 }
2701
2702 /* If we were unable to expand via the builtin, stop the sequence
2703 (without outputting the insns) and call to the library function
2704 with the stabilized argument list. */
7d3afc77 2705 end_sequence ();
2706 }
2707
e951f9a4 2708 if (fallback_fn != BUILT_IN_NONE)
2709 {
2710 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2711 targets, (int) round (x) should never be transformed into
2712 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2713 a call to lround in the hope that the target provides at least some
 2714 C99 functions. This should result in the best user experience for
 2715 targets lacking full C99 support. */
e3240774 2716 tree fallback_fndecl = mathfn_built_in_1
2717 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
e951f9a4 2718
2719 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2720 fallback_fndecl, 1, arg);
2721
2722 target = expand_call (exp, NULL_RTX, target == const0_rtx);
933eb13a 2723 target = maybe_emit_group_store (target, TREE_TYPE (exp));
e951f9a4 2724 return convert_to_mode (mode, target, 0);
2725 }
5f51ee59 2726
de2e453e 2727 return expand_call (exp, target, target == const0_rtx);
7d3afc77 2728}
2729
c2f47e15 2730/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2731 a normal call should be emitted rather than expanding the function
2732 in-line. EXP is the expression that is a call to the builtin
2733 function; if convenient, the result should be placed in TARGET. */
2734
2735static rtx
f97eea22 2736expand_builtin_powi (tree exp, rtx target)
757c219d 2737{
757c219d 2738 tree arg0, arg1;
2739 rtx op0, op1;
3754d046 2740 machine_mode mode;
2741 machine_mode mode2;
757c219d 2742
c2f47e15 2743 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2744 return NULL_RTX;
757c219d 2745
c2f47e15 2746 arg0 = CALL_EXPR_ARG (exp, 0);
2747 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2748 mode = TYPE_MODE (TREE_TYPE (exp));
2749
757c219d 2750 /* Emit a libcall to libgcc. */
2751
c2f47e15 2752 /* Mode of the 2nd argument must match that of an int. */
d0405f40 2753 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2754
757c219d 2755 if (target == NULL_RTX)
2756 target = gen_reg_rtx (mode);
2757
f97eea22 2758 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2759 if (GET_MODE (op0) != mode)
2760 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2761 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2762 if (GET_MODE (op1) != mode2)
2763 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2764
f36b9f69 2765 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2dd6f9ed 2766 target, LCT_CONST, mode, 2,
d0405f40 2767 op0, mode, op1, mode2);
757c219d 2768
2769 return target;
2770}
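
/* For illustration: powi has no standard C library counterpart, so

     r = __builtin_powi (x, n);

   always becomes a call to the libgcc helper (__powidf2 for double,
   and similarly for the other modes), with the exponent first
   converted to the mode of an int.  */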
2771
48e1416a 2772/* Expand expression EXP which is a call to the strlen builtin. Return
 c2f47e15 2773 NULL_RTX if we failed, in which case the caller should emit a normal
 aed0bd19 2774 call; otherwise try to get the result in TARGET, if convenient. */
f7c44134 2775
53800dbe 2776static rtx
c2f47e15 2777expand_builtin_strlen (tree exp, rtx target,
3754d046 2778 machine_mode target_mode)
53800dbe 2779{
c2f47e15 2780 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2781 return NULL_RTX;
53800dbe 2782 else
2783 {
8786db1e 2784 struct expand_operand ops[4];
911c0150 2785 rtx pat;
c2f47e15 2786 tree len;
2787 tree src = CALL_EXPR_ARG (exp, 0);
1e0c0b35 2788 rtx src_reg;
2789 rtx_insn *before_strlen;
3754d046 2790 machine_mode insn_mode = target_mode;
ef2c4a29 2791 enum insn_code icode = CODE_FOR_nothing;
153c3b50 2792 unsigned int align;
6248e345 2793
2794 /* If the length can be computed at compile-time, return it. */
681fab1e 2795 len = c_strlen (src, 0);
6248e345 2796 if (len)
80cd7a5e 2797 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
6248e345 2798
 681fab1e 2799 /* If the length can be computed at compile-time and is a constant
2800 integer, but there are side-effects in src, evaluate
2801 src for side-effects, then return len.
2802 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2803 can be optimized into: i++; x = 3; */
2804 len = c_strlen (src, 1);
2805 if (len && TREE_CODE (len) == INTEGER_CST)
2806 {
2807 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2808 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2809 }
2810
957d0361 2811 align = get_pointer_alignment (src) / BITS_PER_UNIT;
53800dbe 2812
53800dbe 2813 /* If SRC is not a pointer type, don't do this operation inline. */
2814 if (align == 0)
c2f47e15 2815 return NULL_RTX;
53800dbe 2816
911c0150 2817 /* Bail out if we can't compute strlen in the right mode. */
53800dbe 2818 while (insn_mode != VOIDmode)
2819 {
d6bf3b14 2820 icode = optab_handler (strlen_optab, insn_mode);
53800dbe 2821 if (icode != CODE_FOR_nothing)
c28ae87f 2822 break;
53800dbe 2823
2824 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2825 }
2826 if (insn_mode == VOIDmode)
c2f47e15 2827 return NULL_RTX;
53800dbe 2828
911c0150 2829 /* Make a place to hold the source address. We will not expand
2830 the actual source until we are sure that the expansion will
2831 not fail -- there are trees that cannot be expanded twice. */
2832 src_reg = gen_reg_rtx (Pmode);
53800dbe 2833
911c0150 2834 /* Mark the beginning of the strlen sequence so we can emit the
2835 source operand later. */
f0ce3b1f 2836 before_strlen = get_last_insn ();
53800dbe 2837
8786db1e 2838 create_output_operand (&ops[0], target, insn_mode);
2839 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2840 create_integer_operand (&ops[2], 0);
2841 create_integer_operand (&ops[3], align);
2842 if (!maybe_expand_insn (icode, 4, ops))
c2f47e15 2843 return NULL_RTX;
911c0150 2844
2845 /* Now that we are assured of success, expand the source. */
2846 start_sequence ();
499eee58 2847 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
911c0150 2848 if (pat != src_reg)
499eee58 2849 {
2850#ifdef POINTERS_EXTEND_UNSIGNED
2851 if (GET_MODE (pat) != Pmode)
2852 pat = convert_to_mode (Pmode, pat,
2853 POINTERS_EXTEND_UNSIGNED);
2854#endif
2855 emit_move_insn (src_reg, pat);
2856 }
31d3e01c 2857 pat = get_insns ();
911c0150 2858 end_sequence ();
bceb0d1f 2859
2860 if (before_strlen)
2861 emit_insn_after (pat, before_strlen);
2862 else
2863 emit_insn_before (pat, get_insns ());
53800dbe 2864
2865 /* Return the value in the proper mode for this function. */
8786db1e 2866 if (GET_MODE (ops[0].value) == target_mode)
2867 target = ops[0].value;
53800dbe 2868 else if (target != 0)
8786db1e 2869 convert_move (target, ops[0].value, 0);
53800dbe 2870 else
8786db1e 2871 target = convert_to_mode (target_mode, ops[0].value, 0);
911c0150 2872
2873 return target;
53800dbe 2874 }
2875}
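
/* For illustration of the compile-time paths above:

     size_t n = strlen ("hello");

   folds via c_strlen to the constant 5 with no code emitted, and

     size_t n = strlen (i++ ? "xfoo" + 1 : "bar");

   evaluates i++ for its side-effect and then yields the constant 3,
   as the comment in expand_builtin_strlen notes.  */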
2876
6840589f 2877/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
2878 bytes from constant string DATA + OFFSET and return it as target
2879 constant. */
2880
2881static rtx
aecda0d6 2882builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 2883 machine_mode mode)
6840589f 2884{
2885 const char *str = (const char *) data;
2886
64db345d 2887 gcc_assert (offset >= 0
2888 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2889 <= strlen (str) + 1));
6840589f 2890
2891 return c_readstr (str + offset, mode);
2892}
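
/* For illustration: with DATA pointing at the string "abcdefgh",
   OFFSET 4 and a 4-byte integer mode, the callback above returns an
   RTL constant holding the bytes "efgh"; c_readstr takes care of the
   target's byte order.  */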
2893
 36d63243 2894/* LEN specifies the length of the block for a memcpy/memset operation.
 9db0f34d 2895 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
 2896 In some cases we can make a very likely guess on the max size, which
 2897 we then record in PROBABLE_MAX_SIZE. */
36d63243 2898
2899static void
2900determine_block_size (tree len, rtx len_rtx,
2901 unsigned HOST_WIDE_INT *min_size,
9db0f34d 2902 unsigned HOST_WIDE_INT *max_size,
2903 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 2904{
2905 if (CONST_INT_P (len_rtx))
2906 {
4e140a5c 2907 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 2908 return;
2909 }
2910 else
2911 {
9c1be15e 2912 wide_int min, max;
9db0f34d 2913 enum value_range_type range_type = VR_UNDEFINED;
2914
2915 /* Determine bounds from the type. */
2916 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2917 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2918 else
2919 *min_size = 0;
2920 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 2921 *probable_max_size = *max_size
2922 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 2923 else
2924 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2925
2926 if (TREE_CODE (len) == SSA_NAME)
2927 range_type = get_range_info (len, &min, &max);
2928 if (range_type == VR_RANGE)
36d63243 2929 {
fe5ad926 2930 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 2931 *min_size = min.to_uhwi ();
fe5ad926 2932 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 2933 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 2934 }
9db0f34d 2935 else if (range_type == VR_ANTI_RANGE)
36d63243 2936 {
 4a474a5a 2937 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
fe5ad926 2938 if (min == 0)
9db0f34d 2939 {
9c1be15e 2940 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2941 *min_size = max.to_uhwi () + 1;
9db0f34d 2942 }
2943 /* Code like
2944
2945 int n;
2946 if (n < 100)
4a474a5a 2947 memcpy (a, b, n)
9db0f34d 2948
 2949 Produces an anti-range allowing negative values of N. We can
 2950 still use this information and guess that N is not negative.
2951 */
fe5ad926 2952 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2953 *probable_max_size = min.to_uhwi () - 1;
36d63243 2954 }
2955 }
2956 gcc_checking_assert (*max_size <=
2957 (unsigned HOST_WIDE_INT)
2958 GET_MODE_MASK (GET_MODE (len_rtx)));
2959}
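
/* For illustration of the VR_RANGE case above (a sketch; a and b are
   assumed to be suitably sized buffers):

     extern char a[100], b[100];

     void f (unsigned n)
     {
       if (n < 100)
         memcpy (a, b, n);
     }

   Value range propagation gives the length the range [0, 99], so
   *MIN_SIZE becomes 0 and both *MAX_SIZE and *PROBABLE_MAX_SIZE
   become 99.  */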
2960
f21337ef 2961/* Helper function to do the actual work for expand_builtin_memcpy. */
2962
2963static rtx
2964expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2965{
2966 const char *src_str;
2967 unsigned int src_align = get_pointer_alignment (src);
2968 unsigned int dest_align = get_pointer_alignment (dest);
2969 rtx dest_mem, src_mem, dest_addr, len_rtx;
2970 HOST_WIDE_INT expected_size = -1;
2971 unsigned int expected_align = 0;
2972 unsigned HOST_WIDE_INT min_size;
2973 unsigned HOST_WIDE_INT max_size;
2974 unsigned HOST_WIDE_INT probable_max_size;
2975
2976 /* If DEST is not a pointer type, call the normal function. */
2977 if (dest_align == 0)
2978 return NULL_RTX;
2979
 2980 /* Likewise, if SRC is not a pointer type, don't do this
 2981 operation in-line. */
2982 if (src_align == 0)
2983 return NULL_RTX;
2984
2985 if (currently_expanding_gimple_stmt)
2986 stringop_block_profile (currently_expanding_gimple_stmt,
2987 &expected_align, &expected_size);
2988
2989 if (expected_align < dest_align)
2990 expected_align = dest_align;
2991 dest_mem = get_memory_rtx (dest, len);
2992 set_mem_align (dest_mem, dest_align);
2993 len_rtx = expand_normal (len);
2994 determine_block_size (len, len_rtx, &min_size, &max_size,
2995 &probable_max_size);
2996 src_str = c_getstr (src);
2997
2998 /* If SRC is a string constant and block move would be done
2999 by pieces, we can avoid loading the string from memory
 3000 and only store the computed constants. */
3001 if (src_str
3002 && CONST_INT_P (len_rtx)
3003 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3004 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3005 CONST_CAST (char *, src_str),
3006 dest_align, false))
3007 {
3008 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3009 builtin_memcpy_read_str,
3010 CONST_CAST (char *, src_str),
3011 dest_align, false, 0);
3012 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3013 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3014 return dest_mem;
3015 }
3016
3017 src_mem = get_memory_rtx (src, len);
3018 set_mem_align (src_mem, src_align);
3019
  /* Copy the block in the most expedient way available.  */
3021 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3022 CALL_EXPR_TAILCALL (exp)
3023 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3024 expected_align, expected_size,
3025 min_size, max_size, probable_max_size);
3026
3027 if (dest_addr == 0)
3028 {
3029 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3030 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3031 }
3032
3033 return dest_addr;
3034}
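
/* As a (hypothetical) example of the string-constant path above,

     memcpy (d, "abc", 4);

   with a sufficiently aligned D is expanded by store_by_pieces: the four
   constant bytes, including the terminating NUL, are stored directly
   rather than loaded from the string's memory.  */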
3035
5aef8938 3036/* Try to verify that the sizes and lengths of the arguments to a string
3037 manipulation function given by EXP are within valid bounds and that
3038 the operation does not lead to buffer overflow. Arguments other than
3039 EXP may be null. When non-null, the arguments have the following
3040 meaning:
   SIZE is the user-supplied size argument to the function (such as in
   memcpy(d, s, SIZE) or strncpy(d, s, SIZE)).  It specifies the exact
   number of bytes to write.
   MAXLEN is the user-supplied bound on the length of the source sequence
   (such as in strncat(d, s, N)).  It specifies the upper limit on the
   number of bytes to write.
   STR is the source string (such as in strcpy(d, s)) when the expression
   EXP is a string function call (as opposed to a memory call like memcpy).
   As an exception, STR can also be an integer denoting the precomputed
   length of the source string.
   OBJSIZE is the size of the destination object specified by the last
   argument to the _chk builtins, typically resulting from the expansion
   of __builtin_object_size (such as in __builtin___strcpy_chk(d, s,
   OBJSIZE)).

   When SIZE is null, the length of the source string is used in its
   place and checked against the maximum object size instead.

   If the call is successfully verified as safe from buffer overflow,
   the function returns true, otherwise false.  */
3061
3062static bool
3063check_sizes (int opt, tree exp, tree size, tree maxlen, tree str, tree objsize)
3064{
3065 /* The size of the largest object is half the address space, or
3066 SSIZE_MAX. (This is way too permissive.) */
3067 tree maxobjsize = TYPE_MAX_VALUE (ssizetype);
3068
3069 tree slen = NULL_TREE;
3070
3071 /* Set to true when the exact number of bytes written by a string
3072 function like strcpy is not known and the only thing that is
3073 known is that it must be at least one (for the terminating nul). */
3074 bool at_least_one = false;
3075 if (str)
3076 {
3077 /* STR is normally a pointer to string but as a special case
3078 it can be an integer denoting the length of a string. */
b317b227 3079 if (POINTER_TYPE_P (TREE_TYPE (str)))
5aef8938 3080 {
3081 /* Try to determine the range of lengths the source string
3082 refers to. If it can be determined add one to it for
3083 the terminating nul. Otherwise, set it to one for
3084 the same reason. */
3085 tree lenrange[2];
3086 get_range_strlen (str, lenrange);
3087 if (lenrange[0])
3088 slen = fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3089 size_one_node);
3090 else
3091 {
3092 at_least_one = true;
3093 slen = size_one_node;
3094 }
3095 }
3096 else
3097 slen = str;
3098 }
3099
3100 if (!size && !maxlen)
3101 {
3102 /* When the only available piece of data is the object size
3103 there is nothing to do. */
3104 if (!slen)
3105 return true;
3106
3107 /* Otherwise, when the length of the source sequence is known
	 (as with strlen), set SIZE to it.  */
3109 size = slen;
3110 }
3111
3112 if (!objsize)
3113 objsize = maxobjsize;
3114
3115 /* The SIZE is exact if it's non-null, constant, and in range of
3116 unsigned HOST_WIDE_INT. */
3117 bool exactsize = size && tree_fits_uhwi_p (size);
3118
3119 tree range[2] = { NULL_TREE, NULL_TREE };
3120 if (size)
3121 get_size_range (size, range);
3122
3123 /* First check the number of bytes to be written against the maximum
3124 object size. */
3125 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3126 {
3127 location_t loc = tree_nonartificial_location (exp);
3128
3129 if (range[0] == range[1])
3130 warning_at (loc, opt,
3131 "%K%qD: specified size %wu "
3132 "exceeds maximum object size %wu",
3133 exp, get_callee_fndecl (exp),
3134 tree_to_uhwi (range[0]),
3135 tree_to_uhwi (maxobjsize));
3136 else
3137 warning_at (loc, opt,
3138 "%K%qD: specified size between %wu and %wu "
3139 "exceeds maximum object size %wu",
3140 exp, get_callee_fndecl (exp),
3141 tree_to_uhwi (range[0]),
3142 tree_to_uhwi (range[1]),
3143 tree_to_uhwi (maxobjsize));
3144 return false;
3145 }
3146
3147 /* Next check the number of bytes to be written against the destination
3148 object size. */
3149 if (range[0] || !exactsize || integer_all_onesp (size))
3150 {
3151 if (range[0]
3152 && ((tree_fits_uhwi_p (objsize)
3153 && tree_int_cst_lt (objsize, range[0]))
3154 || (tree_fits_uhwi_p (size)
3155 && tree_int_cst_lt (size, range[0]))))
3156 {
3157 unsigned HOST_WIDE_INT uwir0 = tree_to_uhwi (range[0]);
3158
3159 location_t loc = tree_nonartificial_location (exp);
3160
3161 if (at_least_one)
3162 warning_at (loc, opt,
3163 "%K%qD: writing at least %wu byte into a region "
3164 "of size %wu overflows the destination",
3165 exp, get_callee_fndecl (exp), uwir0,
3166 tree_to_uhwi (objsize));
3167 else if (range[0] == range[1])
3168 warning_at (loc, opt,
3169 (uwir0 == 1
3170 ? G_("%K%qD: writing %wu byte into a region "
3171 "of size %wu overflows the destination")
			 : G_("%K%qD: writing %wu bytes into a region "
3173 "of size %wu overflows the destination")),
3174 exp, get_callee_fndecl (exp), uwir0,
3175 tree_to_uhwi (objsize));
3176 else
3177 warning_at (loc, opt,
3178 "%K%qD: writing between %wu and %wu bytes "
3179 "into a region of size %wu overflows "
3180 "the destination",
3181 exp, get_callee_fndecl (exp), uwir0,
3182 tree_to_uhwi (range[1]), tree_to_uhwi (objsize));
3183
	  /* Return false when an overflow has been detected.  */
3185 return false;
3186 }
3187 }
3188
3189 /* Check the maximum length of the source sequence against the size
3190 of the destination object if known, or against the maximum size
3191 of an object. */
3192 if (maxlen)
3193 {
3194 get_size_range (maxlen, range);
3195
3196 if (range[0] && objsize && tree_fits_uhwi_p (objsize))
3197 {
3198 location_t loc = tree_nonartificial_location (exp);
3199
3200 if (tree_int_cst_lt (maxobjsize, range[0]))
3201 {
3202 /* Warn about crazy big sizes first since that's more
3203 likely to be meaningful than saying that the bound
3204 is greater than the object size if both are big. */
3205 if (range[0] == range[1])
3206 warning_at (loc, opt,
3207 "%K%qD: specified bound %wu "
3208 "exceeds maximum object size %wu",
3209 exp, get_callee_fndecl (exp),
3210 tree_to_uhwi (range[0]),
3211 tree_to_uhwi (maxobjsize));
3212 else
3213 warning_at (loc, opt,
3214 "%K%qD: specified bound between %wu and %wu "
			"exceeds maximum object size %wu",
3216 exp, get_callee_fndecl (exp),
3217 tree_to_uhwi (range[0]),
3218 tree_to_uhwi (range[1]),
3219 tree_to_uhwi (maxobjsize));
3220
3221 return false;
3222 }
3223
3224 if (objsize != maxobjsize && tree_int_cst_lt (objsize, range[0]))
3225 {
3226 if (range[0] == range[1])
3227 warning_at (loc, opt,
3228 "%K%qD: specified bound %wu "
3229 "exceeds the size %wu of the destination",
3230 exp, get_callee_fndecl (exp),
3231 tree_to_uhwi (range[0]),
3232 tree_to_uhwi (objsize));
3233 else
3234 warning_at (loc, opt,
3235 "%K%qD: specified bound between %wu and %wu "
			"exceeds the size %wu of the destination",
3237 exp, get_callee_fndecl (exp),
3238 tree_to_uhwi (range[0]),
3239 tree_to_uhwi (range[1]),
3240 tree_to_uhwi (objsize));
3241 return false;
3242 }
3243 }
3244 }
3245
3246 return true;
3247}
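
/* A (hypothetical) example of a call rejected by the checks above:

     char d[3];
     memcpy (d, s, 5);

   reaches check_sizes with SIZE == 5 and OBJSIZE == 3; the constant
   range [5, 5] exceeds the destination size, so the "writing 5 bytes
   into a region of size 3 overflows the destination" warning is issued
   and the function returns false.  */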
3248
/* Helper to compute the size of the object referenced by the DEST
   expression, which must be of pointer type, using Object Size type
3251 OSTYPE (only the least significant 2 bits are used). Return
3252 the size of the object if successful or NULL when the size cannot
3253 be determined. */
3254
3255static inline tree
3256compute_dest_size (tree dest, int ostype)
3257{
3258 unsigned HOST_WIDE_INT size;
3259 if (compute_builtin_object_size (dest, ostype & 3, &size))
3260 return build_int_cst (sizetype, size);
3261
3262 return NULL_TREE;
3263}
3264
3265/* Helper to determine and check the sizes of the source and the destination
   of a call to __builtin_{bzero,memcpy,memset}.  Use Object Size type-0
3267 regardless of the OPT_Wstringop_overflow_ setting. Returns true on success
3268 (no overflow or invalid sizes), false otherwise. */
3269
3270static bool
3271check_memop_sizes (tree exp, tree dest, tree size)
3272{
3273 if (!warn_stringop_overflow)
3274 return true;
3275
3276 /* For functions like memset and memcpy that operate on raw memory
3277 try to determine the size of the largest destination object using
3278 type-0 Object Size regardless of the object size type specified
3279 by the option. */
3280 tree objsize = compute_dest_size (dest, 0);
3281
3282 return check_sizes (OPT_Wstringop_overflow_, exp,
3283 size, /*maxlen=*/NULL_TREE, /*str=*/NULL_TREE, objsize);
3284}
3285
c2f47e15 3286/* Expand a call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */
c2f47e15 3290
53800dbe 3291static rtx
a65c4d64 3292expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3293{
c2f47e15 3294 if (!validate_arglist (exp,
3295 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3296 return NULL_RTX;
5aef8938 3297
3298 tree dest = CALL_EXPR_ARG (exp, 0);
3299 tree src = CALL_EXPR_ARG (exp, 1);
3300 tree len = CALL_EXPR_ARG (exp, 2);
3301
3302 check_memop_sizes (exp, dest, len);
3303
3304 return expand_builtin_memcpy_args (dest, src, len, target, exp);
f21337ef 3305}
6840589f 3306
f21337ef 3307/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.  */
53800dbe 3311
f21337ef 3312static rtx
3313expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3314{
3315 if (!validate_arglist (exp,
3316 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3317 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3318 INTEGER_TYPE, VOID_TYPE))
3319 return NULL_RTX;
3320 else
3321 {
3322 tree dest = CALL_EXPR_ARG (exp, 0);
3323 tree src = CALL_EXPR_ARG (exp, 2);
3324 tree len = CALL_EXPR_ARG (exp, 4);
3325 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
53800dbe 3326
f21337ef 3327 /* Return src bounds with the result. */
3328 if (res)
e5716f7e 3329 {
17d388d8 3330 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3331 expand_normal (CALL_EXPR_ARG (exp, 1)));
3332 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3333 }
f21337ef 3334 return res;
53800dbe 3335 }
3336}
3337
c2f47e15 3338/* Expand a call EXP to the mempcpy builtin.
3339 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3340 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3341 mode MODE if that's convenient). If ENDP is 0 return the
3342 destination pointer, if ENDP is 1 return the end pointer ala
3343 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3344 stpcpy. */
647661c6 3345
3346static rtx
3754d046 3347expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3348{
c2f47e15 3349 if (!validate_arglist (exp,
3350 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3351 return NULL_RTX;
5aef8938 3352
3353 tree dest = CALL_EXPR_ARG (exp, 0);
3354 tree src = CALL_EXPR_ARG (exp, 1);
3355 tree len = CALL_EXPR_ARG (exp, 2);
3356
3357 /* Avoid expanding mempcpy into memcpy when the call is determined
3358 to overflow the buffer. This also prevents the same overflow
3359 from being diagnosed again when expanding memcpy. */
3360 if (!check_memop_sizes (exp, dest, len))
3361 return NULL_RTX;
3362
3363 return expand_builtin_mempcpy_args (dest, src, len,
3364 target, mode, /*endp=*/ 1,
3365 exp);
f21337ef 3366}
3367
3368/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
3370 otherwise try to get the result in TARGET, if convenient (and in
3371 mode MODE if that's convenient). */
3372
3373static rtx
3374expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3375{
3376 if (!validate_arglist (exp,
3377 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3378 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3379 INTEGER_TYPE, VOID_TYPE))
3380 return NULL_RTX;
3381 else
3382 {
3383 tree dest = CALL_EXPR_ARG (exp, 0);
3384 tree src = CALL_EXPR_ARG (exp, 2);
3385 tree len = CALL_EXPR_ARG (exp, 4);
3386 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3387 mode, 1, exp);
3388
3389 /* Return src bounds with the result. */
3390 if (res)
3391 {
17d388d8 3392 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3393 expand_normal (CALL_EXPR_ARG (exp, 1)));
3394 res = chkp_join_splitted_slot (res, bnd);
3395 }
3396 return res;
c2f47e15 3397 }
3398}
3399
3400/* Helper function to do the actual work for expand_builtin_mempcpy. The
3401 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3402 so that this can also be called without constructing an actual CALL_EXPR.
a65c4d64 3403 The other arguments and return value are the same as for
3404 expand_builtin_mempcpy. */
c2f47e15 3405
3406static rtx
a65c4d64 3407expand_builtin_mempcpy_args (tree dest, tree src, tree len,
f21337ef 3408 rtx target, machine_mode mode, int endp,
3409 tree orig_exp)
c2f47e15 3410{
f21337ef 3411 tree fndecl = get_callee_fndecl (orig_exp);
3412
c2f47e15 3413 /* If return value is ignored, transform mempcpy into memcpy. */
f21337ef 3414 if (target == const0_rtx
3415 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3416 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3417 {
3418 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3419 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3420 dest, src, len);
3421 return expand_expr (result, target, mode, EXPAND_NORMAL);
3422 }
3423 else if (target == const0_rtx
3424 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
9fe0e1b8 3425 {
b9a16870 3426 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
0568e9c1 3427 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3428 dest, src, len);
c8b17b2e 3429 return expand_expr (result, target, mode, EXPAND_NORMAL);
9fe0e1b8 3430 }
647661c6 3431 else
3432 {
9fe0e1b8 3433 const char *src_str;
957d0361 3434 unsigned int src_align = get_pointer_alignment (src);
3435 unsigned int dest_align = get_pointer_alignment (dest);
9fe0e1b8 3436 rtx dest_mem, src_mem, len_rtx;
a0c938f0 3437
7da1412b 3438 /* If either SRC or DEST is not a pointer type, don't do this
a0c938f0 3439 operation in-line. */
7da1412b 3440 if (dest_align == 0 || src_align == 0)
c2f47e15 3441 return NULL_RTX;
9fe0e1b8 3442
6217c238 3443 /* If LEN is not constant, call the normal function. */
e913b5cd 3444 if (! tree_fits_uhwi_p (len))
c2f47e15 3445 return NULL_RTX;
0862b7e9 3446
8ec3c5c2 3447 len_rtx = expand_normal (len);
9fe0e1b8 3448 src_str = c_getstr (src);
647661c6 3449
9fe0e1b8 3450 /* If SRC is a string constant and block move would be done
3451 by pieces, we can avoid loading the string from memory
	 and simply store the computed constants.  */
3453 if (src_str
971ba038 3454 && CONST_INT_P (len_rtx)
9fe0e1b8 3455 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3456 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
364c0c59 3457 CONST_CAST (char *, src_str),
3458 dest_align, false))
9fe0e1b8 3459 {
d8ae1baa 3460 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3461 set_mem_align (dest_mem, dest_align);
3462 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3463 builtin_memcpy_read_str,
364c0c59 3464 CONST_CAST (char *, src_str),
3465 dest_align, false, endp);
9fe0e1b8 3466 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3467 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3468 return dest_mem;
647661c6 3469 }
3470
971ba038 3471 if (CONST_INT_P (len_rtx)
9fe0e1b8 3472 && can_move_by_pieces (INTVAL (len_rtx),
3473 MIN (dest_align, src_align)))
3474 {
d8ae1baa 3475 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3476 set_mem_align (dest_mem, dest_align);
d8ae1baa 3477 src_mem = get_memory_rtx (src, len);
9fe0e1b8 3478 set_mem_align (src_mem, src_align);
3479 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3480 MIN (dest_align, src_align), endp);
3481 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3482 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3483 return dest_mem;
3484 }
3485
c2f47e15 3486 return NULL_RTX;
647661c6 3487 }
3488}
3489
c2f47e15 3490/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3491 we failed, the caller should emit a normal call, otherwise try to
3492 get the result in TARGET, if convenient. If ENDP is 0 return the
3493 destination pointer, if ENDP is 1 return the end pointer ala
3494 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3495 stpcpy. */
3496
3497static rtx
3498expand_movstr (tree dest, tree src, rtx target, int endp)
3499{
8786db1e 3500 struct expand_operand ops[3];
727c62dd 3501 rtx dest_mem;
3502 rtx src_mem;
727c62dd 3503
8d74dc42 3504 if (!targetm.have_movstr ())
c2f47e15 3505 return NULL_RTX;
727c62dd 3506
d8ae1baa 3507 dest_mem = get_memory_rtx (dest, NULL);
3508 src_mem = get_memory_rtx (src, NULL);
727c62dd 3509 if (!endp)
3510 {
3511 target = force_reg (Pmode, XEXP (dest_mem, 0));
3512 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3513 }
3514
8786db1e 3515 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3516 create_fixed_operand (&ops[1], dest_mem);
3517 create_fixed_operand (&ops[2], src_mem);
8d74dc42 3518 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
1e1d5623 3519 return NULL_RTX;
727c62dd 3520
8786db1e 3521 if (endp && target != const0_rtx)
c5aba89c 3522 {
8786db1e 3523 target = ops[0].value;
3524 /* movstr is supposed to set end to the address of the NUL
3525 terminator. If the caller requested a mempcpy-like return value,
3526 adjust it. */
3527 if (endp == 1)
3528 {
29c05e22 3529 rtx tem = plus_constant (GET_MODE (target),
3530 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3531 emit_move_insn (target, force_operand (tem, NULL_RTX));
3532 }
c5aba89c 3533 }
727c62dd 3534 return target;
3535}
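
/* To illustrate the ENDP convention used here and by the copy helpers
   (values assumed from the standard semantics of the library functions):
   strcpy (d, "ab") returns D (ENDP == 0), mempcpy (d, "ab", 3) returns
   D + 3 (ENDP == 1), and stpcpy (d, "ab") returns D + 2, the address of
   the terminating NUL (ENDP == 2).  */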
3536
/* Do some very basic size validation of a call to the strcat builtin
3538 given by EXP. Return NULL_RTX to have the built-in expand to a call
3539 to the library function. */
3540
3541static rtx
3542expand_builtin_strcat (tree exp, rtx)
3543{
3544 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3545 || !warn_stringop_overflow)
3546 return NULL_RTX;
3547
3548 tree dest = CALL_EXPR_ARG (exp, 0);
3549 tree src = CALL_EXPR_ARG (exp, 1);
3550
3551 /* There is no way here to determine the length of the string in
     the destination to which the SRC string is being appended, so
     just diagnose cases when the source string is longer than
3554 the destination object. */
3555
3556 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3557
3558 check_sizes (OPT_Wstringop_overflow_,
3559 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3560
3561 return NULL_RTX;
3562}
3563
/* Expand expression EXP, which is a call to the strcpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
902de8ed 3568
53800dbe 3569static rtx
a65c4d64 3570expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3571{
5aef8938 3572 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3573 return NULL_RTX;
3574
3575 tree dest = CALL_EXPR_ARG (exp, 0);
3576 tree src = CALL_EXPR_ARG (exp, 1);
3577
3578 if (warn_stringop_overflow)
3579 {
3580 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3581 check_sizes (OPT_Wstringop_overflow_,
3582 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, src, destsize);
3583 }
3584
3585 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3586}
3587
3588/* Helper function to do the actual work for expand_builtin_strcpy. The
3589 arguments to the builtin_strcpy call DEST and SRC are broken out
3590 so that this can also be called without constructing an actual CALL_EXPR.
3591 The other arguments and return value are the same as for
3592 expand_builtin_strcpy. */
3593
3594static rtx
a65c4d64 3595expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3596{
c2f47e15 3597 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3598}
3599
c2f47e15 3600/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
3b824fa6 3602 otherwise try to get the result in TARGET, if convenient (and in
3603 mode MODE if that's convenient). */
3604
3605static rtx
3754d046 3606expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3607{
c2f47e15 3608 tree dst, src;
389dd41b 3609 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3610
3611 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3612 return NULL_RTX;
3613
3614 dst = CALL_EXPR_ARG (exp, 0);
3615 src = CALL_EXPR_ARG (exp, 1);
3616
727c62dd 3617 /* If return value is ignored, transform stpcpy into strcpy. */
b9a16870 3618 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3619 {
b9a16870 3620 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3621 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3622 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3623 }
3b824fa6 3624 else
3625 {
c2f47e15 3626 tree len, lenp1;
727c62dd 3627 rtx ret;
647661c6 3628
9fe0e1b8 3629 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3630 compile-time, not an expression containing a string. This is
3631 because the latter will potentially produce pessimized code
3632 when used to produce the return value. */
681fab1e 3633 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3634 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3635
389dd41b 3636 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3637 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
f21337ef 3638 target, mode, /*endp=*/2,
3639 exp);
727c62dd 3640
3641 if (ret)
3642 return ret;
3643
3644 if (TREE_CODE (len) == INTEGER_CST)
3645 {
8ec3c5c2 3646 rtx len_rtx = expand_normal (len);
727c62dd 3647
971ba038 3648 if (CONST_INT_P (len_rtx))
727c62dd 3649 {
a65c4d64 3650 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3651
3652 if (ret)
3653 {
3654 if (! target)
7ac87324 3655 {
3656 if (mode != VOIDmode)
3657 target = gen_reg_rtx (mode);
3658 else
3659 target = gen_reg_rtx (GET_MODE (ret));
3660 }
727c62dd 3661 if (GET_MODE (target) != GET_MODE (ret))
3662 ret = gen_lowpart (GET_MODE (target), ret);
3663
29c05e22 3664 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3665 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3666 gcc_assert (ret);
727c62dd 3667
3668 return target;
3669 }
3670 }
3671 }
3672
c2f47e15 3673 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3674 }
3675}
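
/* For example, with a constant source the (hypothetical) call

     char *p = stpcpy (d, "ab");

   is expanded through the constant-length paths above: the three bytes
   (including the NUL) are copied as for mempcpy (d, "ab", 3), and the
   value returned is D + 2, the address of the terminating NUL, computed
   without rescanning the string at run time.  */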
3676
6840589f 3677/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3678 bytes from constant string DATA + OFFSET and return it as target
3679 constant. */
3680
09879952 3681rtx
aecda0d6 3682builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3683 machine_mode mode)
6840589f 3684{
3685 const char *str = (const char *) data;
3686
3687 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3688 return const0_rtx;
3689
3690 return c_readstr (str + offset, mode);
3691}
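
/* For instance, when expanding the (hypothetical) call

     strncpy (d, "ab", 8);

   this callback returns const0_rtx for offsets past strlen ("ab"),
   supplying the trailing zero padding that strncpy requires.  */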
3692
5aef8938 3693/* Helper to check the sizes of sequences and the destination of calls
3694 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3695 success (no overflow or invalid sizes), false otherwise. */
3696
3697static bool
3698check_strncat_sizes (tree exp, tree objsize)
3699{
3700 tree dest = CALL_EXPR_ARG (exp, 0);
3701 tree src = CALL_EXPR_ARG (exp, 1);
3702 tree maxlen = CALL_EXPR_ARG (exp, 2);
3703
3704 /* Try to determine the range of lengths that the source expression
3705 refers to. */
3706 tree lenrange[2];
3707 get_range_strlen (src, lenrange);
3708
3709 /* Try to verify that the destination is big enough for the shortest
3710 string. */
3711
3712 if (!objsize && warn_stringop_overflow)
3713 {
3714 /* If it hasn't been provided by __strncat_chk, try to determine
3715 the size of the destination object into which the source is
3716 being copied. */
3717 objsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3718 }
3719
3720 /* Add one for the terminating nul. */
3721 tree srclen = (lenrange[0]
3722 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3723 size_one_node)
3724 : NULL_TREE);
3725
3726 /* Strncat copies at most MAXLEN bytes and always appends the terminating
     nul, so the specified upper bound should never be equal to (or greater
3728 than) the size of the destination. */
3729 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (objsize)
3730 && tree_int_cst_equal (objsize, maxlen))
3731 {
3732 warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3733 "specified bound %wu "
3734 "equals the size of the destination",
3735 tree_to_uhwi (maxlen));
3736
3737 return false;
3738 }
3739
3740 if (!srclen
3741 || (maxlen && tree_fits_uhwi_p (maxlen)
3742 && tree_fits_uhwi_p (srclen)
3743 && tree_int_cst_lt (maxlen, srclen)))
3744 srclen = maxlen;
3745
3746 /* The number of bytes to write is LEN but check_sizes will also
3747 check SRCLEN if LEN's value isn't known. */
3748 return check_sizes (OPT_Wstringop_overflow_,
3749 exp, /*size=*/NULL_TREE, maxlen, srclen, objsize);
3750}
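
/* For instance, the (hypothetical) call

     char d[8];
     strncat (d, s, sizeof d);

   is diagnosed above because a bound equal to the destination size
   leaves no room for the terminating nul; a correct bound would be
   sizeof d - strlen (d) - 1.  */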
3751
3752/* Similar to expand_builtin_strcat, do some very basic size validation
   of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
3754 the built-in expand to a call to the library function. */
3755
3756static rtx
3757expand_builtin_strncat (tree exp, rtx)
3758{
3759 if (!validate_arglist (exp,
3760 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3761 || !warn_stringop_overflow)
3762 return NULL_RTX;
3763
3764 tree dest = CALL_EXPR_ARG (exp, 0);
3765 tree src = CALL_EXPR_ARG (exp, 1);
3766 /* The upper bound on the number of bytes to write. */
3767 tree maxlen = CALL_EXPR_ARG (exp, 2);
3768 /* The length of the source sequence. */
3769 tree slen = c_strlen (src, 1);
3770
3771 /* Try to determine the range of lengths that the source expression
3772 refers to. */
3773 tree lenrange[2];
3774 if (slen)
3775 lenrange[0] = lenrange[1] = slen;
3776 else
3777 get_range_strlen (src, lenrange);
3778
3779 /* Try to verify that the destination is big enough for the shortest
3780 string. First try to determine the size of the destination object
3781 into which the source is being copied. */
3782 tree destsize = compute_dest_size (dest, warn_stringop_overflow - 1);
3783
3784 /* Add one for the terminating nul. */
3785 tree srclen = (lenrange[0]
3786 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3787 size_one_node)
3788 : NULL_TREE);
3789
3790 /* Strncat copies at most MAXLEN bytes and always appends the terminating
     nul, so the specified upper bound should never be equal to (or greater
3792 than) the size of the destination. */
3793 if (tree_fits_uhwi_p (maxlen) && tree_fits_uhwi_p (destsize)
3794 && tree_int_cst_equal (destsize, maxlen))
3795 {
3796 warning_at (EXPR_LOCATION (exp), OPT_Wstringop_overflow_,
3797 "specified bound %wu "
3798 "equals the size of the destination",
3799 tree_to_uhwi (maxlen));
3800
3801 return NULL_RTX;
3802 }
3803
3804 if (!srclen
3805 || (maxlen && tree_fits_uhwi_p (maxlen)
3806 && tree_fits_uhwi_p (srclen)
3807 && tree_int_cst_lt (maxlen, srclen)))
3808 srclen = maxlen;
3809
3810 /* The number of bytes to write is LEN but check_sizes will also
3811 check SRCLEN if LEN's value isn't known. */
3812 check_sizes (OPT_Wstringop_overflow_,
3813 exp, /*size=*/NULL_TREE, maxlen, srclen, destsize);
3814
3815 return NULL_RTX;
3816}
3817
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
ed09096d 3820
3821static rtx
a65c4d64 3822expand_builtin_strncpy (tree exp, rtx target)
ed09096d 3823{
389dd41b 3824 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3825
3826 if (validate_arglist (exp,
3827 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 3828 {
c2f47e15 3829 tree dest = CALL_EXPR_ARG (exp, 0);
3830 tree src = CALL_EXPR_ARG (exp, 1);
5aef8938 3831 /* The number of bytes to write (not the maximum). */
c2f47e15 3832 tree len = CALL_EXPR_ARG (exp, 2);
5aef8938 3833 /* The length of the source sequence. */
c2f47e15 3834 tree slen = c_strlen (src, 1);
6840589f 3835
5aef8938 3836 if (warn_stringop_overflow)
3837 {
3838 /* Try to determine the range of lengths that the source expression
3839 refers to. */
3840 tree lenrange[2];
3841 if (slen)
3842 lenrange[0] = lenrange[1] = slen;
3843 else
3844 {
3845 get_range_strlen (src, lenrange);
3846 slen = lenrange[0];
3847 }
3848
3849 tree destsize = compute_dest_size (dest,
3850 warn_stringop_overflow - 1);
3851
3852 /* The number of bytes to write is LEN but check_sizes will also
3853 check SLEN if LEN's value isn't known. */
3854 check_sizes (OPT_Wstringop_overflow_,
3855 exp, len, /*maxlen=*/NULL_TREE, slen, destsize);
3856 }
3857
8ff6a5cd 3858 /* We must be passed a constant len and src parameter. */
e913b5cd 3859 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 3860 return NULL_RTX;
ed09096d 3861
389dd41b 3862 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 3863
3864 /* We're required to pad with trailing zeros if the requested
a0c938f0 3865 len is greater than strlen(s2)+1. In that case try to
	 use store_by_pieces; if it fails, punt.  */
ed09096d 3867 if (tree_int_cst_lt (slen, len))
6840589f 3868 {
957d0361 3869 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 3870 const char *p = c_getstr (src);
6840589f 3871 rtx dest_mem;
3872
e913b5cd 3873 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3874 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 3875 builtin_strncpy_read_str,
364c0c59 3876 CONST_CAST (char *, p),
3877 dest_align, false))
c2f47e15 3878 return NULL_RTX;
6840589f 3879
d8ae1baa 3880 dest_mem = get_memory_rtx (dest, len);
e913b5cd 3881 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 3882 builtin_strncpy_read_str,
364c0c59 3883 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 3884 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 3885 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 3886 return dest_mem;
6840589f 3887 }
ed09096d 3888 }
c2f47e15 3889 return NULL_RTX;
ed09096d 3890}
3891
ecc318ff 3892/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3893 bytes from constant string DATA + OFFSET and return it as target
3894 constant. */
3895
f656b751 3896rtx
aecda0d6 3897builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3898 machine_mode mode)
ecc318ff 3899{
3900 const char *c = (const char *) data;
364c0c59 3901 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3902
3903 memset (p, *c, GET_MODE_SIZE (mode));
3904
3905 return c_readstr (p, mode);
3906}
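
/* E.g., for a (hypothetical) memset (d, 'x', 16) expanded by pieces in
   SImode, each call of this routine yields the constant 0x78787878,
   i.e. 'x' replicated across the four bytes of the mode.  */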
3907
a7ec6974 3908/* Callback routine for store_by_pieces. Return the RTL of a register
3909 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3910 char value given in the RTL register data. For example, if mode is
3911 4 bytes wide, return the RTL for 0x01010101*data. */
3912
3913static rtx
aecda0d6 3914builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3915 machine_mode mode)
a7ec6974 3916{
3917 rtx target, coeff;
3918 size_t size;
3919 char *p;
3920
3921 size = GET_MODE_SIZE (mode);
f0ce3b1f 3922 if (size == 1)
3923 return (rtx) data;
a7ec6974 3924
364c0c59 3925 p = XALLOCAVEC (char, size);
a7ec6974 3926 memset (p, 1, size);
3927 coeff = c_readstr (p, mode);
3928
f0ce3b1f 3929 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3930 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3931 return force_reg (mode, target);
3932}
3933
/* Expand expression EXP, which is a call to the memset builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
c2f47e15 3936 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3937 convenient). */
902de8ed 3938
53800dbe 3939static rtx
3754d046 3940expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3941{
c2f47e15 3942 if (!validate_arglist (exp,
3943 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3944 return NULL_RTX;
5aef8938 3945
3946 tree dest = CALL_EXPR_ARG (exp, 0);
3947 tree val = CALL_EXPR_ARG (exp, 1);
3948 tree len = CALL_EXPR_ARG (exp, 2);
3949
3950 check_memop_sizes (exp, dest, len);
3951
3952 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
c2f47e15 3953}
53800dbe 3954
f21337ef 3955/* Expand expression EXP, which is an instrumented call to the memset builtin.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise
3957 try to get the result in TARGET, if convenient (and in mode MODE if that's
3958 convenient). */
3959
3960static rtx
3961expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3962{
3963 if (!validate_arglist (exp,
3964 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3965 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3966 return NULL_RTX;
3967 else
3968 {
3969 tree dest = CALL_EXPR_ARG (exp, 0);
3970 tree val = CALL_EXPR_ARG (exp, 2);
3971 tree len = CALL_EXPR_ARG (exp, 3);
3972 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3973
3974 /* Return src bounds with the result. */
3975 if (res)
3976 {
17d388d8 3977 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3978 expand_normal (CALL_EXPR_ARG (exp, 1)));
3979 res = chkp_join_splitted_slot (res, bnd);
3980 }
3981 return res;
3982 }
3983}
3984
c2f47e15 3985/* Helper function to do the actual work for expand_builtin_memset. The
3986 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3987 so that this can also be called without constructing an actual CALL_EXPR.
3988 The other arguments and return value are the same as for
3989 expand_builtin_memset. */
6b961939 3990
c2f47e15 3991static rtx
3992expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 3993 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 3994{
3995 tree fndecl, fn;
3996 enum built_in_function fcode;
3754d046 3997 machine_mode val_mode;
c2f47e15 3998 char c;
3999 unsigned int dest_align;
4000 rtx dest_mem, dest_addr, len_rtx;
4001 HOST_WIDE_INT expected_size = -1;
4002 unsigned int expected_align = 0;
36d63243 4003 unsigned HOST_WIDE_INT min_size;
4004 unsigned HOST_WIDE_INT max_size;
9db0f34d 4005 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 4006
957d0361 4007 dest_align = get_pointer_alignment (dest);
162719b3 4008
c2f47e15 4009 /* If DEST is not a pointer type, don't do this operation in-line. */
4010 if (dest_align == 0)
4011 return NULL_RTX;
6f428e8b 4012
8cee8dc0 4013 if (currently_expanding_gimple_stmt)
4014 stringop_block_profile (currently_expanding_gimple_stmt,
4015 &expected_align, &expected_size);
75a70cf9 4016
c2f47e15 4017 if (expected_align < dest_align)
4018 expected_align = dest_align;
6b961939 4019
c2f47e15 4020 /* If the LEN parameter is zero, return DEST. */
4021 if (integer_zerop (len))
4022 {
4023 /* Evaluate and ignore VAL in case it has side-effects. */
4024 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4025 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4026 }
7a3e5564 4027
c2f47e15 4028 /* Stabilize the arguments in case we fail. */
4029 dest = builtin_save_expr (dest);
4030 val = builtin_save_expr (val);
4031 len = builtin_save_expr (len);
a7ec6974 4032
c2f47e15 4033 len_rtx = expand_normal (len);
9db0f34d 4034 determine_block_size (len, len_rtx, &min_size, &max_size,
4035 &probable_max_size);
c2f47e15 4036 dest_mem = get_memory_rtx (dest, len);
03a5dda9 4037 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 4038
c2f47e15 4039 if (TREE_CODE (val) != INTEGER_CST)
4040 {
4041 rtx val_rtx;
a7ec6974 4042
c2f47e15 4043 val_rtx = expand_normal (val);
03a5dda9 4044 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 4045
      /* Assume that we can memset by pieces if we can store
	 the coefficients by pieces (in the required modes).
	 We can't pass builtin_memset_gen_str as that emits RTL.  */
4049 c = 1;
e913b5cd 4050 if (tree_fits_uhwi_p (len)
4051 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 4052 builtin_memset_read_str, &c, dest_align,
4053 true))
c2f47e15 4054 {
03a5dda9 4055 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 4056 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 4057 builtin_memset_gen_str, val_rtx, dest_align,
4058 true, 0);
c2f47e15 4059 }
4060 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4061 dest_align, expected_align,
9db0f34d 4062 expected_size, min_size, max_size,
4063 probable_max_size))
6b961939 4064 goto do_libcall;
48e1416a 4065
c2f47e15 4066 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4067 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4068 return dest_mem;
4069 }
53800dbe 4070
c2f47e15 4071 if (target_char_cast (val, &c))
4072 goto do_libcall;
ecc318ff 4073
c2f47e15 4074 if (c)
4075 {
e913b5cd 4076 if (tree_fits_uhwi_p (len)
4077 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 4078 builtin_memset_read_str, &c, dest_align,
4079 true))
e913b5cd 4080 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 4081 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 4082 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4083 gen_int_mode (c, val_mode),
c2f47e15 4084 dest_align, expected_align,
9db0f34d 4085 expected_size, min_size, max_size,
4086 probable_max_size))
c2f47e15 4087 goto do_libcall;
48e1416a 4088
c2f47e15 4089 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4090 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4091 return dest_mem;
4092 }
ecc318ff 4093
c2f47e15 4094 set_mem_align (dest_mem, dest_align);
4095 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4096 CALL_EXPR_TAILCALL (orig_exp)
4097 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 4098 expected_align, expected_size,
9db0f34d 4099 min_size, max_size,
4100 probable_max_size);
53800dbe 4101
c2f47e15 4102 if (dest_addr == 0)
4103 {
4104 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4105 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4106 }
53800dbe 4107
c2f47e15 4108 return dest_addr;
6b961939 4109
c2f47e15 4110 do_libcall:
4111 fndecl = get_callee_fndecl (orig_exp);
4112 fcode = DECL_FUNCTION_CODE (fndecl);
f21337ef 4113 if (fcode == BUILT_IN_MEMSET
4114 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
0568e9c1 4115 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4116 dest, val, len);
c2f47e15 4117 else if (fcode == BUILT_IN_BZERO)
0568e9c1 4118 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4119 dest, len);
c2f47e15 4120 else
4121 gcc_unreachable ();
a65c4d64 4122 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4123 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 4124 return expand_call (fn, target, target == const0_rtx);
53800dbe 4125}
4126
/* Expand expression EXP, which is a call to the bzero builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call.  */
27d0c333 4129
ffc83088 4130static rtx
0b25db21 4131expand_builtin_bzero (tree exp)
ffc83088 4132{
c2f47e15 4133 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 4134 return NULL_RTX;
ffc83088 4135
5aef8938 4136 tree dest = CALL_EXPR_ARG (exp, 0);
4137 tree size = CALL_EXPR_ARG (exp, 1);
4138
4139 check_memop_sizes (exp, dest, size);
bf8e3599 4140
7369e7ba 4141 /* New argument list transforming bzero(ptr x, int y) to
     memset(ptr x, int 0, size_t y).  This is done this way
     so that if it isn't expanded inline, we fall back to
4144 calling bzero instead of memset. */
bf8e3599 4145
5aef8938 4146 location_t loc = EXPR_LOCATION (exp);
4147
c2f47e15 4148 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 4149 fold_convert_loc (loc,
4150 size_type_node, size),
c2f47e15 4151 const0_rtx, VOIDmode, exp);
ffc83088 4152}
4153
d6f01a40 4154/* Try to expand cmpstr operation ICODE with the given operands.
4155 Return the result rtx on success, otherwise return null. */
4156
4157static rtx
4158expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4159 HOST_WIDE_INT align)
4160{
4161 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4162
4163 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4164 target = NULL_RTX;
4165
4166 struct expand_operand ops[4];
4167 create_output_operand (&ops[0], target, insn_mode);
4168 create_fixed_operand (&ops[1], arg1_rtx);
4169 create_fixed_operand (&ops[2], arg2_rtx);
4170 create_integer_operand (&ops[3], align);
4171 if (maybe_expand_insn (icode, 4, ops))
4172 return ops[0].value;
4173 return NULL_RTX;
4174}
4175
7a3f89b5 4176/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 4177 Return NULL_RTX if we failed and the caller should emit a normal call,
3e346f54 4178 otherwise try to get the result in TARGET, if convenient.
4179 RESULT_EQ is true if we can relax the returned value to be either zero
4180 or nonzero, without caring about the sign. */
27d0c333 4181
53800dbe 4182static rtx
3e346f54 4183expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
53800dbe 4184{
c2f47e15 4185 if (!validate_arglist (exp,
4186 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4187 return NULL_RTX;
6f428e8b 4188
ea368aac 4189 tree arg1 = CALL_EXPR_ARG (exp, 0);
4190 tree arg2 = CALL_EXPR_ARG (exp, 1);
4191 tree len = CALL_EXPR_ARG (exp, 2);
3e346f54 4192 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4193 location_t loc = EXPR_LOCATION (exp);
b428c0a5 4194
ea368aac 4195 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4196 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
53800dbe 4197
  /* If either argument is not a pointer type, call the library function.  */
4199 if (arg1_align == 0 || arg2_align == 0)
4200 return NULL_RTX;
53800dbe 4201
ea368aac 4202 rtx arg1_rtx = get_memory_rtx (arg1, len);
4203 rtx arg2_rtx = get_memory_rtx (arg2, len);
3e346f54 4204 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
53800dbe 4205
ea368aac 4206 /* Set MEM_SIZE as appropriate. */
3e346f54 4207 if (CONST_INT_P (len_rtx))
ea368aac 4208 {
3e346f54 4209 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4210 set_mem_size (arg2_rtx, INTVAL (len_rtx));
ea368aac 4211 }
83f88f8e 4212
3e346f54 4213 by_pieces_constfn constfn = NULL;
4214
719f3058 4215 const char *src_str = c_getstr (arg2);
4216 if (result_eq && src_str == NULL)
4217 {
4218 src_str = c_getstr (arg1);
4219 if (src_str != NULL)
092db747 4220 std::swap (arg1_rtx, arg2_rtx);
719f3058 4221 }
3e346f54 4222
4223 /* If SRC is a string constant and block move would be done
4224 by pieces, we can avoid loading the string from memory
4225 and only stored the computed constants. */
4226 if (src_str
4227 && CONST_INT_P (len_rtx)
4228 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4229 constfn = builtin_memcpy_read_str;
4230
4231 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4232 TREE_TYPE (len), target,
4233 result_eq, constfn,
4234 CONST_CAST (char *, src_str));
4235
ea368aac 4236 if (result)
4237 {
4238 /* Return the value in the proper mode for this function. */
4239 if (GET_MODE (result) == mode)
4240 return result;
83f88f8e 4241
ea368aac 4242 if (target != 0)
4243 {
4244 convert_move (target, result, 0);
4245 return target;
4246 }
0cd832f0 4247
53800dbe 4248 return convert_to_mode (mode, result, 0);
ea368aac 4249 }
53800dbe 4250
61ffc71a 4251 return NULL_RTX;
6f428e8b 4252}
4253
/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
902de8ed 4257
53800dbe 4258static rtx
a65c4d64 4259expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 4260{
c2f47e15 4261 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4262 return NULL_RTX;
bf8e3599 4263
d6f01a40 4264 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4265 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4266 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 4267 {
4268 rtx arg1_rtx, arg2_rtx;
6ac5504b 4269 tree fndecl, fn;
c2f47e15 4270 tree arg1 = CALL_EXPR_ARG (exp, 0);
4271 tree arg2 = CALL_EXPR_ARG (exp, 1);
d6f01a40 4272 rtx result = NULL_RTX;
a0c938f0 4273
957d0361 4274 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4275 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6ac5504b 4276
      /* If either argument is not a pointer type, call the library function.  */
4278 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4279 return NULL_RTX;
7a3f89b5 4280
6ac5504b 4281 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4282 arg1 = builtin_save_expr (arg1);
4283 arg2 = builtin_save_expr (arg2);
7a3f89b5 4284
d8ae1baa 4285 arg1_rtx = get_memory_rtx (arg1, NULL);
4286 arg2_rtx = get_memory_rtx (arg2, NULL);
53800dbe 4287
6ac5504b 4288 /* Try to call cmpstrsi. */
d6f01a40 4289 if (cmpstr_icode != CODE_FOR_nothing)
4290 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4291 MIN (arg1_align, arg2_align));
4292
6ac5504b 4293 /* Try to determine at least one length and call cmpstrnsi. */
d6f01a40 4294 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 4295 {
4296 tree len;
4297 rtx arg3_rtx;
4298
6ac5504b 4299 tree len1 = c_strlen (arg1, 1);
4300 tree len2 = c_strlen (arg2, 1);
4301
4302 if (len1)
4303 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4304 if (len2)
4305 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4306
4307 /* If we don't have a constant length for the first, use the length
4308 of the second, if we know it. We don't require a constant for
4309 this case; some cost analysis could be done if both are available
4310 but neither is constant. For now, assume they're equally cheap,
4311 unless one has side effects. If both strings have constant lengths,
4312 use the smaller. */
4313
4314 if (!len1)
4315 len = len2;
4316 else if (!len2)
4317 len = len1;
4318 else if (TREE_SIDE_EFFECTS (len1))
4319 len = len2;
4320 else if (TREE_SIDE_EFFECTS (len2))
4321 len = len1;
4322 else if (TREE_CODE (len1) != INTEGER_CST)
4323 len = len2;
4324 else if (TREE_CODE (len2) != INTEGER_CST)
4325 len = len1;
4326 else if (tree_int_cst_lt (len1, len2))
4327 len = len1;
4328 else
4329 len = len2;
4330
4331 /* If both arguments have side effects, we cannot optimize. */
d6f01a40 4332 if (len && !TREE_SIDE_EFFECTS (len))
4333 {
4334 arg3_rtx = expand_normal (len);
ea368aac 4335 result = expand_cmpstrn_or_cmpmem
4336 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4337 arg3_rtx, MIN (arg1_align, arg2_align));
d6f01a40 4338 }
6ac5504b 4339 }
3f8aefe2 4340
d6f01a40 4341 if (result)
6ac5504b 4342 {
6ac5504b 4343 /* Return the value in the proper mode for this function. */
d6f01a40 4344 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6ac5504b 4345 if (GET_MODE (result) == mode)
4346 return result;
4347 if (target == 0)
4348 return convert_to_mode (mode, result, 0);
4349 convert_move (target, result, 0);
4350 return target;
4351 }
902de8ed 4352
6ac5504b 4353 /* Expand the library call ourselves using a stabilized argument
4354 list to avoid re-evaluating the function's arguments twice. */
6ac5504b 4355 fndecl = get_callee_fndecl (exp);
0568e9c1 4356 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
a65c4d64 4357 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4358 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6ac5504b 4359 return expand_call (fn, target, target == const0_rtx);
4360 }
c2f47e15 4361 return NULL_RTX;
83d79705 4362}
53800dbe 4363
/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed; the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */
27d0c333 4367
ed09096d 4368static rtx
a65c4d64 4369expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4370 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4371{
a65c4d64 4372 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 4373
c2f47e15 4374 if (!validate_arglist (exp,
4375 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4376 return NULL_RTX;
ed09096d 4377
6e34e617 4378 /* If c_strlen can determine an expression for one of the string
6ac5504b 4379 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4380 using length MIN(strlen(string)+1, arg3). */
d6f01a40 4381 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4382 if (cmpstrn_icode != CODE_FOR_nothing)
7a3f89b5 4383 {
175cdef4 4384 tree len, len1, len2, len3;
7a3f89b5 4385 rtx arg1_rtx, arg2_rtx, arg3_rtx;
d6f01a40 4386 rtx result;
0b25db21 4387 tree fndecl, fn;
c2f47e15 4388 tree arg1 = CALL_EXPR_ARG (exp, 0);
4389 tree arg2 = CALL_EXPR_ARG (exp, 1);
4390 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 4391
957d0361 4392 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4393 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
bf8e3599 4394
681fab1e 4395 len1 = c_strlen (arg1, 1);
4396 len2 = c_strlen (arg2, 1);
7a3f89b5 4397
4398 if (len1)
389dd41b 4399 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 4400 if (len2)
389dd41b 4401 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 4402
175cdef4 4403 len3 = fold_convert_loc (loc, sizetype, arg3);
4404
7a3f89b5 4405 /* If we don't have a constant length for the first, use the length
175cdef4 4406 of the second, if we know it. If neither string is constant length,
4407 use the given length argument. We don't require a constant for
7a3f89b5 4408 this case; some cost analysis could be done if both are available
4409 but neither is constant. For now, assume they're equally cheap,
4410 unless one has side effects. If both strings have constant lengths,
4411 use the smaller. */
4412
175cdef4 4413 if (!len1 && !len2)
4414 len = len3;
4415 else if (!len1)
7a3f89b5 4416 len = len2;
4417 else if (!len2)
4418 len = len1;
4419 else if (TREE_SIDE_EFFECTS (len1))
4420 len = len2;
4421 else if (TREE_SIDE_EFFECTS (len2))
4422 len = len1;
4423 else if (TREE_CODE (len1) != INTEGER_CST)
4424 len = len2;
4425 else if (TREE_CODE (len2) != INTEGER_CST)
4426 len = len1;
4427 else if (tree_int_cst_lt (len1, len2))
4428 len = len1;
4429 else
4430 len = len2;
6e34e617 4431
175cdef4 4432 /* If we are not using the given length, we must incorporate it here.
4433 The actual new length parameter will be MIN(len,arg3) in this case. */
4434 if (len != len3)
4435 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
a65c4d64 4436 arg1_rtx = get_memory_rtx (arg1, len);
4437 arg2_rtx = get_memory_rtx (arg2, len);
4438 arg3_rtx = expand_normal (len);
ea368aac 4439 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4440 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4441 MIN (arg1_align, arg2_align));
d6f01a40 4442 if (result)
a65c4d64 4443 {
a65c4d64 4444 /* Return the value in the proper mode for this function. */
4445 mode = TYPE_MODE (TREE_TYPE (exp));
4446 if (GET_MODE (result) == mode)
4447 return result;
4448 if (target == 0)
4449 return convert_to_mode (mode, result, 0);
4450 convert_move (target, result, 0);
4451 return target;
4452 }
27d0c333 4453
a65c4d64 4454 /* Expand the library call ourselves using a stabilized argument
4455 list to avoid re-evaluating the function's arguments twice. */
4456 fndecl = get_callee_fndecl (exp);
0568e9c1 4457 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4458 arg1, arg2, len);
a65c4d64 4459 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4460 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4461 return expand_call (fn, target, target == const0_rtx);
4462 }
c2f47e15 4463 return NULL_RTX;
49f0327b 4464}
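
/* For instance, for the (hypothetical) call strncmp (s1, "ab", 16) the
   length used above is MIN (strlen ("ab") + 1, 16), i.e. 3, since the
   comparison cannot extend past the constant string's terminating
   NUL.  */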
4465
a66c9326 4466/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4467 if that's convenient. */
902de8ed 4468
a66c9326 4469rtx
aecda0d6 4470expand_builtin_saveregs (void)
53800dbe 4471{
1e0c0b35 4472 rtx val;
4473 rtx_insn *seq;
53800dbe 4474
4475 /* Don't do __builtin_saveregs more than once in a function.
4476 Save the result of the first call and reuse it. */
4477 if (saveregs_value != 0)
4478 return saveregs_value;
53800dbe 4479
a66c9326 4480 /* When this function is called, it means that registers must be
4481 saved on entry to this function. So we migrate the call to the
4482 first insn of this function. */
4483
4484 start_sequence ();
53800dbe 4485
a66c9326 4486 /* Do whatever the machine needs done in this case. */
45550790 4487 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4488
a66c9326 4489 seq = get_insns ();
4490 end_sequence ();
53800dbe 4491
a66c9326 4492 saveregs_value = val;
53800dbe 4493
31d3e01c 4494 /* Put the insns after the NOTE that starts the function. If this
4495 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4496 the code is placed at the start of the function. */
4497 push_topmost_sequence ();
0ec80471 4498 emit_insn_after (seq, entry_of_function ());
a66c9326 4499 pop_topmost_sequence ();
4500
4501 return val;
53800dbe 4502}
4503
79012a9d 4504/* Expand a call to __builtin_next_arg. */
27d0c333 4505
53800dbe 4506static rtx
79012a9d 4507expand_builtin_next_arg (void)
53800dbe 4508{
79012a9d 4509 /* Checking arguments is already done in fold_builtin_next_arg
4510 that must be called before this function. */
940ddc5c 4511 return expand_binop (ptr_mode, add_optab,
abe32cce 4512 crtl->args.internal_arg_pointer,
4513 crtl->args.arg_offset_rtx,
53800dbe 4514 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4515}
4516
a66c9326 4517/* Make it easier for the backends by protecting the valist argument
4518 from multiple evaluations. */
4519
4520static tree
389dd41b 4521stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4522{
5f57a8b1 4523 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4524
182cf5a9 4525 /* The current way of determining the type of valist is completely
4526 bogus. We should have the information on the va builtin instead. */
4527 if (!vatype)
4528 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4529
4530 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4531 {
2d47cc32 4532 if (TREE_SIDE_EFFECTS (valist))
4533 valist = save_expr (valist);
11a61dea 4534
2d47cc32 4535 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4536 vatype, but it's possible we've actually been given an array
4537 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4538 So fix it. */
4539 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4540 {
5f57a8b1 4541 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4542 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4543 }
a66c9326 4544 }
11a61dea 4545 else
a66c9326 4546 {
182cf5a9 4547 tree pt = build_pointer_type (vatype);
11a61dea 4548
2d47cc32 4549 if (! needs_lvalue)
4550 {
11a61dea 4551 if (! TREE_SIDE_EFFECTS (valist))
4552 return valist;
bf8e3599 4553
389dd41b 4554 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4555 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4556 }
2d47cc32 4557
11a61dea 4558 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4559 valist = save_expr (valist);
182cf5a9 4560 valist = fold_build2_loc (loc, MEM_REF,
4561 vatype, valist, build_int_cst (pt, 0));
a66c9326 4562 }
4563
4564 return valist;
4565}
4566
2e15d750 4567/* The "standard" definition of va_list is void*. */
4568
4569tree
4570std_build_builtin_va_list (void)
4571{
4572 return ptr_type_node;
4573}
4574
5f57a8b1 4575/* The "standard" abi va_list is va_list_type_node. */
4576
4577tree
4578std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4579{
4580 return va_list_type_node;
4581}
4582
4583/* The "standard" type of va_list is va_list_type_node. */
4584
4585tree
4586std_canonical_va_list_type (tree type)
4587{
4588 tree wtype, htype;
4589
5f57a8b1 4590 wtype = va_list_type_node;
4591 htype = type;
b6da2e41 4592
4593 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4594 {
4595 /* If va_list is an array type, the argument may have decayed
4596 to a pointer type, e.g. by being passed to another function.
4597 In that case, unwrap both types so that we can compare the
4598 underlying records. */
4599 if (TREE_CODE (htype) == ARRAY_TYPE
4600 || POINTER_TYPE_P (htype))
4601 {
4602 wtype = TREE_TYPE (wtype);
4603 htype = TREE_TYPE (htype);
4604 }
4605 }
4606 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4607 return va_list_type_node;
4608
4609 return NULL_TREE;
4610}
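
/* Illustration of the decay handled above (a sketch with hypothetical
   type names, not taken from any real target): where va_list is an
   array of one struct, a va_list argument decays to a pointer across a
   call boundary, which is why both types are unwrapped before the
   main variants are compared.  */
#if 0
typedef struct example_va_tag { int gp_offset; } example_va_list[1];

static void
example_callee (example_va_list ap)  /* Decays to struct example_va_tag *.  */
{
  (void) ap;
}
#endif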
4611
a66c9326 4612/* The "standard" implementation of va_start: just assign `nextarg' to
4613 the variable. */
27d0c333 4614
a66c9326 4615void
aecda0d6 4616std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4617{
f03c17bc 4618 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4619 convert_move (va_r, nextarg, 0);
058a1b7a 4620
4621 /* We do not have any valid bounds for the pointer, so
4622 just store zero bounds for it. */
4623 if (chkp_function_instrumented_p (current_function_decl))
4624 chkp_expand_bounds_reset_for_mem (valist,
4625 make_tree (TREE_TYPE (valist),
4626 nextarg));
a66c9326 4627}
4628
c2f47e15 4629/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4630
a66c9326 4631static rtx
c2f47e15 4632expand_builtin_va_start (tree exp)
a66c9326 4633{
4634 rtx nextarg;
c2f47e15 4635 tree valist;
389dd41b 4636 location_t loc = EXPR_LOCATION (exp);
a66c9326 4637
c2f47e15 4638 if (call_expr_nargs (exp) < 2)
cb166087 4639 {
389dd41b 4640 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4641 return const0_rtx;
4642 }
a66c9326 4643
c2f47e15 4644 if (fold_builtin_next_arg (exp, true))
79012a9d 4645 return const0_rtx;
7c2f0500 4646
79012a9d 4647 nextarg = expand_builtin_next_arg ();
389dd41b 4648 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4649
8a58ed0a 4650 if (targetm.expand_builtin_va_start)
4651 targetm.expand_builtin_va_start (valist, nextarg);
4652 else
4653 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4654
4655 return const0_rtx;
4656}
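
/* User-level sketch of what reaches this expander (illustrative only,
   assuming <stdarg.h>; va_start and va_end are macros over the
   builtins handled here):  */
#if 0
#include <stdarg.h>

int
sum (int n, ...)
{
  va_list ap;
  int i, s = 0;
  va_start (ap, n);             /* Becomes __builtin_va_start (ap, n).  */
  for (i = 0; i < n; i++)
    s += va_arg (ap, int);
  va_end (ap);                  /* Becomes __builtin_va_end (ap).  */
  return s;
}
#endif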
4657
c2f47e15 4658/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4659
a66c9326 4660static rtx
c2f47e15 4661expand_builtin_va_end (tree exp)
a66c9326 4662{
c2f47e15 4663 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4664
8a15c04a 4665 /* Evaluate for side effects, if needed. I hate macros that don't
4666 do that. */
4667 if (TREE_SIDE_EFFECTS (valist))
4668 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4669
4670 return const0_rtx;
4671}
4672
c2f47e15 4673/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4674 builtin rather than just as an assignment in stdarg.h because of the
4675 nastiness of array-type va_list types. */
f7c44134 4676
a66c9326 4677static rtx
c2f47e15 4678expand_builtin_va_copy (tree exp)
a66c9326 4679{
4680 tree dst, src, t;
389dd41b 4681 location_t loc = EXPR_LOCATION (exp);
a66c9326 4682
c2f47e15 4683 dst = CALL_EXPR_ARG (exp, 0);
4684 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4685
389dd41b 4686 dst = stabilize_va_list_loc (loc, dst, 1);
4687 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4688
5f57a8b1 4689 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4690
4691 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4692 {
5f57a8b1 4693 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4694 TREE_SIDE_EFFECTS (t) = 1;
4695 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4696 }
4697 else
4698 {
11a61dea 4699 rtx dstb, srcb, size;
4700
4701 /* Evaluate to pointers. */
4702 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4703 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4704 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4705 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4706
85d654dd 4707 dstb = convert_memory_address (Pmode, dstb);
4708 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4709
11a61dea 4710 /* "Dereference" to BLKmode memories. */
4711 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4712 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4713 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4714 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4715 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4716 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4717
4718 /* Copy. */
0378dbdc 4719 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4720 }
4721
4722 return const0_rtx;
4723}
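
/* Sketch of why the block move is needed (illustrative, assuming an
   ABI where va_list is an array of one struct, as on x86-64):
   assigning one va_list to another would be an array assignment, so
   va_copy copies the bytes of the fn_abi_va_list type instead.  */
#if 0
#include <stdarg.h>

void
copy_example (va_list a)        /* Parameter has decayed to a pointer.  */
{
  va_list b;
  __builtin_va_copy (b, a);     /* Expands to the emit_block_move above.  */
  __builtin_va_end (b);
}
#endif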
4724
53800dbe 4725/* Expand a call to one of the builtin functions __builtin_frame_address or
4726 __builtin_return_address. */
27d0c333 4727
53800dbe 4728static rtx
c2f47e15 4729expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4730{
53800dbe 4731 /* The argument must be a nonnegative integer constant.
4732 It counts the number of frames to scan up the stack.
5b252e95 4733 The value is either the frame pointer value or the return
4734 address saved in that frame. */
c2f47e15 4735 if (call_expr_nargs (exp) == 0)
53800dbe 4736 /* Warning about missing arg was already issued. */
4737 return const0_rtx;
e913b5cd 4738 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4739 {
5b252e95 4740 error ("invalid argument to %qD", fndecl);
53800dbe 4741 return const0_rtx;
4742 }
4743 else
4744 {
5b252e95 4745 /* Number of frames to scan up the stack. */
4746 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4747
4748 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 4749
4750 /* Some ports cannot access arbitrary stack frames. */
4751 if (tem == NULL)
4752 {
5b252e95 4753 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 4754 return const0_rtx;
4755 }
4756
5b252e95 4757 if (count)
4758 {
4759 /* Warn since no effort is made to ensure that any frame
4760 beyond the current one exists or can be safely reached. */
4761 warning (OPT_Wframe_address, "calling %qD with "
4762 "a nonzero argument is unsafe", fndecl);
4763 }
4764
53800dbe 4765 /* For __builtin_frame_address, return what we've got. */
4766 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4767 return tem;
4768
8ad4c111 4769 if (!REG_P (tem)
53800dbe 4770 && ! CONSTANT_P (tem))
99182918 4771 tem = copy_addr_to_reg (tem);
53800dbe 4772 return tem;
4773 }
4774}
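
/* Usage sketch (illustrative user code): a count of 0 is always
   supported; any larger count triggers the -Wframe-address warning
   above, since outer frames may not be safely reachable on all
   ports.  */
#if 0
void *
frame_example (void)
{
  void *fp = __builtin_frame_address (0);    /* Current frame: safe.  */
  (void) fp;
  return __builtin_return_address (0);       /* Current return address.  */
}
#endif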
4775
990495a7 4776/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5be42b39 4777 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4778 is the same as for allocate_dynamic_stack_space. */
15c6cf6b 4779
53800dbe 4780static rtx
5be42b39 4781expand_builtin_alloca (tree exp, bool cannot_accumulate)
53800dbe 4782{
4783 rtx op0;
15c6cf6b 4784 rtx result;
581bf1c2 4785 unsigned int align;
370e45b9 4786 tree fndecl = get_callee_fndecl (exp);
4787 bool alloca_with_align = (DECL_FUNCTION_CODE (fndecl)
581bf1c2 4788 == BUILT_IN_ALLOCA_WITH_ALIGN);
53800dbe 4789
370e45b9 4790 bool valid_arglist
581bf1c2 4791 = (alloca_with_align
4792 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4793 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4794
4795 if (!valid_arglist)
c2f47e15 4796 return NULL_RTX;
53800dbe 4797
370e45b9 4798 if ((alloca_with_align && !warn_vla_limit)
4799 || (!alloca_with_align && !warn_alloca_limit))
4800 {
4801 /* -Walloca-larger-than and -Wvla-larger-than settings override
4802 the more general -Walloc-size-larger-than so unless either of
4803 the former options is specified check the alloca arguments for
4804 overflow. */
4805 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
4806 int idx[] = { 0, -1 };
4807 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
4808 }
4809
53800dbe 4810 /* Compute the argument. */
c2f47e15 4811 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4812
581bf1c2 4813 /* Compute the alignment. */
4814 align = (alloca_with_align
f9ae6f95 4815 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4816 : BIGGEST_ALIGNMENT);
4817
53800dbe 4818 /* Allocate the desired space. */
581bf1c2 4819 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
85d654dd 4820 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4821
4822 return result;
53800dbe 4823}
4824
74bdbe96 4825/* Expand a call to bswap builtin in EXP.
4826 Return NULL_RTX if a normal call should be emitted rather than expanding the
4827 function in-line. If convenient, the result should be placed in TARGET.
4828 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4829
4830static rtx
3754d046 4831expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4832 rtx subtarget)
42791117 4833{
42791117 4834 tree arg;
4835 rtx op0;
4836
c2f47e15 4837 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4838 return NULL_RTX;
42791117 4839
c2f47e15 4840 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4841 op0 = expand_expr (arg,
4842 subtarget && GET_MODE (subtarget) == target_mode
4843 ? subtarget : NULL_RTX,
4844 target_mode, EXPAND_NORMAL);
4845 if (GET_MODE (op0) != target_mode)
4846 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4847
74bdbe96 4848 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4849
4850 gcc_assert (target);
4851
74bdbe96 4852 return convert_to_mode (target_mode, target, 1);
42791117 4853}
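
/* Value-level semantics expanded through bswap_optab (a reference
   example, not the expansion itself):  */
#if 0
unsigned int
bswap_example (void)
{
  return __builtin_bswap32 (0x11223344);    /* Yields 0x44332211.  */
}
#endif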
4854
c2f47e15 4855/* Expand a call to a unary builtin in EXP.
4856 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4857 function in-line. If convenient, the result should be placed in TARGET.
4858 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4859
53800dbe 4860static rtx
3754d046 4861expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4862 rtx subtarget, optab op_optab)
53800dbe 4863{
4864 rtx op0;
c2f47e15 4865
4866 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4867 return NULL_RTX;
53800dbe 4868
4869 /* Compute the argument. */
f97eea22 4870 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4871 (subtarget
4872 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4873 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4874 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4875 /* Compute op, into TARGET if possible.
53800dbe 4876 Set TARGET to wherever the result comes back. */
c2f47e15 4877 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4878 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4879 gcc_assert (target);
7d3f6cc7 4880
efb070c8 4881 return convert_to_mode (target_mode, target, 0);
53800dbe 4882}
89cfe6e5 4883
48e1416a 4884/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4885   as the builtin_expect semantics should already have been executed by
 4886   the tree branch prediction pass.  */
89cfe6e5 4887
4888static rtx
c2f47e15 4889expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4890{
1e4adcfc 4891 tree arg;
89cfe6e5 4892
c2f47e15 4893 if (call_expr_nargs (exp) < 2)
89cfe6e5 4894 return const0_rtx;
c2f47e15 4895 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4896
c2f47e15 4897 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4898 /* When guessing was done, the hints should be already stripped away. */
07311427 4899 gcc_assert (!flag_guess_branch_prob
852f689e 4900 || optimize == 0 || seen_error ());
89cfe6e5 4901 return target;
4902}
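
/* User-level sketch (illustrative; handle_error is a hypothetical
   function): the hint has already been consumed by branch prediction,
   so only the first argument survives to this point.  */
#if 0
if (__builtin_expect (err != 0, 0))   /* ERR is expected to be zero.  */
  handle_error (err);
#endif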
689df48e 4903
fca0886c 4904/* Expand a call to __builtin_assume_aligned. We just return our first
4905 argument as the builtin_assume_aligned semantic should've been already
4906 executed by CCP. */
4907
4908static rtx
4909expand_builtin_assume_aligned (tree exp, rtx target)
4910{
4911 if (call_expr_nargs (exp) < 2)
4912 return const0_rtx;
4913 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4914 EXPAND_NORMAL);
4915 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4916 && (call_expr_nargs (exp) < 3
4917 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4918 return target;
4919}
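
/* Usage sketch (illustrative): CCP consumed the alignment hint long
   before expansion, so the builtin now just forwards its argument.  */
#if 0
double *
align_example (void *p)
{
  return (double *) __builtin_assume_aligned (p, 16);  /* Returns P.  */
}
#endif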
4920
c22de3f0 4921void
aecda0d6 4922expand_builtin_trap (void)
a0ef1725 4923{
4db8dd0c 4924 if (targetm.have_trap ())
f73960eb 4925 {
4db8dd0c 4926 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 4927 /* For trap insns when not accumulating outgoing args force
4928 REG_ARGS_SIZE note to prevent crossjumping of calls with
4929 different args sizes. */
4930 if (!ACCUMULATE_OUTGOING_ARGS)
4931 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4932 }
a0ef1725 4933 else
61ffc71a 4934 {
4935 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4936 tree call_expr = build_call_expr (fn, 0);
4937 expand_call (call_expr, NULL_RTX, false);
4938 }
4939
a0ef1725 4940 emit_barrier ();
4941}
78a74442 4942
d2b48f0c 4943/* Expand a call to __builtin_unreachable. We do nothing except emit
4944 a barrier saying that control flow will not pass here.
4945
4946 It is the responsibility of the program being compiled to ensure
 4947   that control flow never reaches __builtin_unreachable.  */
4948static void
4949expand_builtin_unreachable (void)
4950{
4951 emit_barrier ();
4952}
4953
c2f47e15 4954/* Expand EXP, a call to fabs, fabsf or fabsl.
4955 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 4956 the function inline. If convenient, the result should be placed
4957 in TARGET. SUBTARGET may be used as the target for computing
4958 the operand. */
4959
4960static rtx
c2f47e15 4961expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 4962{
3754d046 4963 machine_mode mode;
78a74442 4964 tree arg;
4965 rtx op0;
4966
c2f47e15 4967 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4968 return NULL_RTX;
78a74442 4969
c2f47e15 4970 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 4971 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 4972 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 4973 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 4974 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4975}
4976
c2f47e15 4977/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 4978   Return NULL_RTX if a normal call should be emitted rather than expanding the
4979 function inline. If convenient, the result should be placed in TARGET.
4980 SUBTARGET may be used as the target for computing the operand. */
4981
4982static rtx
c2f47e15 4983expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 4984{
4985 rtx op0, op1;
4986 tree arg;
4987
c2f47e15 4988 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4989 return NULL_RTX;
270436f3 4990
c2f47e15 4991 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 4992 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 4993
c2f47e15 4994 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 4995 op1 = expand_normal (arg);
270436f3 4996
4997 return expand_copysign (op0, op1, target);
4998}
4999
ac8fb6db 5000/* Expand a call to __builtin___clear_cache. */
5001
5002static rtx
32e17df0 5003expand_builtin___clear_cache (tree exp)
ac8fb6db 5004{
32e17df0 5005 if (!targetm.code_for_clear_cache)
5006 {
ac8fb6db 5007#ifdef CLEAR_INSN_CACHE
32e17df0 5008 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5009 does something. Just do the default expansion to a call to
5010 __clear_cache(). */
5011 return NULL_RTX;
ac8fb6db 5012#else
32e17df0 5013 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5014 does nothing. There is no need to call it. Do nothing. */
5015 return const0_rtx;
ac8fb6db 5016#endif /* CLEAR_INSN_CACHE */
32e17df0 5017 }
5018
ac8fb6db 5019 /* We have a "clear_cache" insn, and it will handle everything. */
5020 tree begin, end;
5021 rtx begin_rtx, end_rtx;
ac8fb6db 5022
5023 /* We must not expand to a library call. If we did, any
5024 fallback library function in libgcc that might contain a call to
5025 __builtin___clear_cache() would recurse infinitely. */
5026 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5027 {
5028 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5029 return const0_rtx;
5030 }
5031
32e17df0 5032 if (targetm.have_clear_cache ())
ac8fb6db 5033 {
8786db1e 5034 struct expand_operand ops[2];
ac8fb6db 5035
5036 begin = CALL_EXPR_ARG (exp, 0);
5037 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5038
5039 end = CALL_EXPR_ARG (exp, 1);
5040 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 5041
8786db1e 5042 create_address_operand (&ops[0], begin_rtx);
5043 create_address_operand (&ops[1], end_rtx);
32e17df0 5044 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 5045 return const0_rtx;
ac8fb6db 5046 }
5047 return const0_rtx;
ac8fb6db 5048}
5049
4ee9c684 5050/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5051
5052static rtx
5053round_trampoline_addr (rtx tramp)
5054{
5055 rtx temp, addend, mask;
5056
5057 /* If we don't need too much alignment, we'll have been guaranteed
5058 proper alignment by get_trampoline_type. */
5059 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5060 return tramp;
5061
5062 /* Round address up to desired boundary. */
5063 temp = gen_reg_rtx (Pmode);
0359f9f5 5064 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5065 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 5066
5067 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5068 temp, 0, OPTAB_LIB_WIDEN);
5069 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5070 temp, 0, OPTAB_LIB_WIDEN);
5071
5072 return tramp;
5073}
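
/* Worked example of the round-up arithmetic above (assuming a byte
   alignment of 8, i.e. TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT == 8, and
   <stdint.h>): ADDEND is 7 and MASK is -8.  */
#if 0
uintptr_t
round_up_example (uintptr_t addr)
{
  return (addr + 7) & (uintptr_t) -8;   /* 0x1001 -> 0x1008; 0x1008 stays. */
}
#endif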
5074
5075static rtx
c307f106 5076expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 5077{
5078 tree t_tramp, t_func, t_chain;
82c7907c 5079 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 5080
c2f47e15 5081 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 5082 POINTER_TYPE, VOID_TYPE))
5083 return NULL_RTX;
5084
c2f47e15 5085 t_tramp = CALL_EXPR_ARG (exp, 0);
5086 t_func = CALL_EXPR_ARG (exp, 1);
5087 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 5088
8ec3c5c2 5089 r_tramp = expand_normal (t_tramp);
82c7907c 5090 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5091 MEM_NOTRAP_P (m_tramp) = 1;
5092
c307f106 5093 /* If ONSTACK, the TRAMP argument should be the address of a field
5094 within the local function's FRAME decl. Either way, let's see if
5095 we can fill in the MEM_ATTRs for this memory. */
82c7907c 5096 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 5097 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 5098
c307f106 5099 /* Creator of a heap trampoline is responsible for making sure the
5100 address is aligned to at least STACK_BOUNDARY. Normally malloc
5101 will ensure this anyhow. */
82c7907c 5102 tmp = round_trampoline_addr (r_tramp);
5103 if (tmp != r_tramp)
5104 {
5105 m_tramp = change_address (m_tramp, BLKmode, tmp);
5106 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 5107 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 5108 }
5109
5110 /* The FUNC argument should be the address of the nested function.
5111 Extract the actual function decl to pass to the hook. */
5112 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5113 t_func = TREE_OPERAND (t_func, 0);
5114 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5115
8ec3c5c2 5116 r_chain = expand_normal (t_chain);
4ee9c684 5117
5118 /* Generate insns to initialize the trampoline. */
82c7907c 5119 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 5120
c307f106 5121 if (onstack)
5122 {
5123 trampolines_created = 1;
8bc8a8f4 5124
a27e3913 5125 if (targetm.calls.custom_function_descriptors != 0)
5126 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5127 "trampoline generated for nested function %qD", t_func);
c307f106 5128 }
8bc8a8f4 5129
4ee9c684 5130 return const0_rtx;
5131}
5132
5133static rtx
c2f47e15 5134expand_builtin_adjust_trampoline (tree exp)
4ee9c684 5135{
5136 rtx tramp;
5137
c2f47e15 5138 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 5139 return NULL_RTX;
5140
c2f47e15 5141 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 5142 tramp = round_trampoline_addr (tramp);
82c7907c 5143 if (targetm.calls.trampoline_adjust_address)
5144 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 5145
5146 return tramp;
5147}
5148
a27e3913 5149/* Expand a call to the builtin descriptor initialization routine.
5150 A descriptor is made up of a couple of pointers to the static
5151 chain and the code entry in this order. */
5152
5153static rtx
5154expand_builtin_init_descriptor (tree exp)
5155{
5156 tree t_descr, t_func, t_chain;
5157 rtx m_descr, r_descr, r_func, r_chain;
5158
5159 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5160 VOID_TYPE))
5161 return NULL_RTX;
5162
5163 t_descr = CALL_EXPR_ARG (exp, 0);
5164 t_func = CALL_EXPR_ARG (exp, 1);
5165 t_chain = CALL_EXPR_ARG (exp, 2);
5166
5167 r_descr = expand_normal (t_descr);
5168 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5169 MEM_NOTRAP_P (m_descr) = 1;
5170
5171 r_func = expand_normal (t_func);
5172 r_chain = expand_normal (t_chain);
5173
5174 /* Generate insns to initialize the descriptor. */
5175 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5176 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5177 POINTER_SIZE / BITS_PER_UNIT), r_func);
5178
5179 return const0_rtx;
5180}
5181
5182/* Expand a call to the builtin descriptor adjustment routine. */
5183
5184static rtx
5185expand_builtin_adjust_descriptor (tree exp)
5186{
5187 rtx tramp;
5188
5189 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5190 return NULL_RTX;
5191
5192 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5193
5194 /* Unalign the descriptor to allow runtime identification. */
5195 tramp = plus_constant (ptr_mode, tramp,
5196 targetm.calls.custom_function_descriptors);
5197
5198 return force_operand (tramp, NULL_RTX);
5199}
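
/* Sketch of the tagging scheme (assuming a target that sets
   custom_function_descriptors to 1; all names here are hypothetical):
   descriptors live at odd addresses, so an indirect-call stub can test
   the low bit to distinguish a descriptor from an ordinary, aligned
   code pointer.  */
#if 0
void *static_chain, *entry;
uintptr_t addr = (uintptr_t) callee;
if (addr & 1)                           /* Odd: a descriptor.  */
  {
    void **descr = (void **) (addr - 1);
    static_chain = descr[0];            /* Static chain pointer first,  */
    entry = descr[1];                   /* then the code entry.  */
  }
else
  entry = (void *) addr;                /* Even: plain function pointer.  */
#endif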
5200
93f564d6 5201/* Expand the call EXP to the built-in signbit, signbitf or signbitl
5202 function. The function first checks whether the back end provides
5203 an insn to implement signbit for the respective mode. If not, it
5204 checks whether the floating point format of the value is such that
10902624 5205 the sign bit can be extracted. If that is not the case, error out.
5206 EXP is the expression that is a call to the builtin function; if
5207 convenient, the result should be placed in TARGET. */
27f261ef 5208static rtx
5209expand_builtin_signbit (tree exp, rtx target)
5210{
5211 const struct real_format *fmt;
3754d046 5212 machine_mode fmode, imode, rmode;
c2f47e15 5213 tree arg;
ca4f1f5b 5214 int word, bitpos;
27eda240 5215 enum insn_code icode;
27f261ef 5216 rtx temp;
389dd41b 5217 location_t loc = EXPR_LOCATION (exp);
27f261ef 5218
c2f47e15 5219 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5220 return NULL_RTX;
27f261ef 5221
c2f47e15 5222 arg = CALL_EXPR_ARG (exp, 0);
27f261ef 5223 fmode = TYPE_MODE (TREE_TYPE (arg));
5224 rmode = TYPE_MODE (TREE_TYPE (exp));
5225 fmt = REAL_MODE_FORMAT (fmode);
5226
93f564d6 5227 arg = builtin_save_expr (arg);
5228
5229 /* Expand the argument yielding a RTX expression. */
5230 temp = expand_normal (arg);
5231
5232 /* Check if the back end provides an insn that handles signbit for the
5233 argument's mode. */
d6bf3b14 5234 icode = optab_handler (signbit_optab, fmode);
27eda240 5235 if (icode != CODE_FOR_nothing)
93f564d6 5236 {
1e0c0b35 5237 rtx_insn *last = get_last_insn ();
93f564d6 5238 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 5239 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5240 return target;
5241 delete_insns_since (last);
93f564d6 5242 }
5243
27f261ef 5244 /* For floating point formats without a sign bit, implement signbit
5245 as "ARG < 0.0". */
8d564692 5246 bitpos = fmt->signbit_ro;
ca4f1f5b 5247 if (bitpos < 0)
27f261ef 5248 {
5249 /* But we can't do this if the format supports signed zero. */
10902624 5250 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
27f261ef 5251
389dd41b 5252 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5253 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5254 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5255 }
5256
ca4f1f5b 5257 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5258 {
ca4f1f5b 5259 imode = int_mode_for_mode (fmode);
10902624 5260 gcc_assert (imode != BLKmode);
ca4f1f5b 5261 temp = gen_lowpart (imode, temp);
24fd4260 5262 }
5263 else
5264 {
ca4f1f5b 5265 imode = word_mode;
5266 /* Handle targets with different FP word orders. */
5267 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5268 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5269 else
a0c938f0 5270 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5271 temp = operand_subword_force (temp, word, fmode);
5272 bitpos = bitpos % BITS_PER_WORD;
5273 }
5274
44b0f1d0 5275 /* Force the intermediate word_mode (or narrower) result into a
5276 register. This avoids attempting to create paradoxical SUBREGs
5277 of floating point modes below. */
5278 temp = force_reg (imode, temp);
5279
ca4f1f5b 5280 /* If the bitpos is within the "result mode" lowpart, the operation
 5281   can be implemented with a single bitwise AND.  Otherwise, we need
5282 a right shift and an AND. */
5283
5284 if (bitpos < GET_MODE_BITSIZE (rmode))
5285 {
796b6678 5286 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5287
4a46f016 5288 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5289 temp = gen_lowpart (rmode, temp);
24fd4260 5290 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5291 immed_wide_int_const (mask, rmode),
ca4f1f5b 5292 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5293 }
ca4f1f5b 5294 else
5295 {
5296 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5297 significant bit, then truncate the result to the desired mode
ca4f1f5b 5298 and mask just this bit. */
f5ff0b21 5299 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5300 temp = gen_lowpart (rmode, temp);
5301 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5302 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5303 }
5304
27f261ef 5305 return temp;
5306}
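
/* Worked example of the shift-and-mask fallback (assuming IEEE binary32
   float, where fmt->signbit_ro is 31):  */
#if 0
int
signbit_example (float f)
{
  unsigned int bits;
  __builtin_memcpy (&bits, &f, sizeof bits);
  return (bits >> 31) & 1;   /* Nonzero for negatives, including -0.0f.  */
}
#endif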
73673831 5307
5308/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5309 call. EXP is the call. FN is the
73673831 5310   identifier of the actual function.  IGNORE is nonzero if the
5311 value is to be ignored. */
5312
5313static rtx
c2f47e15 5314expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5315{
5316 tree id, decl;
5317 tree call;
5318
5319 /* If we are not profiling, just call the function. */
5320 if (!profile_arc_flag)
5321 return NULL_RTX;
5322
5323 /* Otherwise call the wrapper. This should be equivalent for the rest of
5324 compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5325 code necessary for keeping the profiling sane. */
73673831 5326
5327 switch (DECL_FUNCTION_CODE (fn))
5328 {
5329 case BUILT_IN_FORK:
5330 id = get_identifier ("__gcov_fork");
5331 break;
5332
5333 case BUILT_IN_EXECL:
5334 id = get_identifier ("__gcov_execl");
5335 break;
5336
5337 case BUILT_IN_EXECV:
5338 id = get_identifier ("__gcov_execv");
5339 break;
5340
5341 case BUILT_IN_EXECLP:
5342 id = get_identifier ("__gcov_execlp");
5343 break;
5344
5345 case BUILT_IN_EXECLE:
5346 id = get_identifier ("__gcov_execle");
5347 break;
5348
5349 case BUILT_IN_EXECVP:
5350 id = get_identifier ("__gcov_execvp");
5351 break;
5352
5353 case BUILT_IN_EXECVE:
5354 id = get_identifier ("__gcov_execve");
5355 break;
5356
5357 default:
64db345d 5358 gcc_unreachable ();
73673831 5359 }
5360
e60a6f7b 5361 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5362 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5363 DECL_EXTERNAL (decl) = 1;
5364 TREE_PUBLIC (decl) = 1;
5365 DECL_ARTIFICIAL (decl) = 1;
5366 TREE_NOTHROW (decl) = 1;
e82d310b 5367 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5368 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5369 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5370 return expand_call (call, target, ignore);
c2f47e15 5371 }
48e1416a 5372
b6a5fc45 5373
5374\f
3e272de8 5375/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5376 the pointer in these functions is void*, the tree optimizers may remove
5377 casts. The mode computed in expand_builtin isn't reliable either, due
5378 to __sync_bool_compare_and_swap.
5379
5380 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5381 group of builtins. This gives us log2 of the mode size. */
5382
3754d046 5383static inline machine_mode
3e272de8 5384get_builtin_sync_mode (int fcode_diff)
5385{
ad3a13b5 5386 /* The size is not negotiable, so ask not to get BLKmode in return
5387 if the target indicates that a smaller size would be better. */
5388 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 5389}
5390
041e0215 5391/* Expand the memory expression LOC and return the appropriate memory operand
5392 for the builtin_sync operations. */
5393
5394static rtx
3754d046 5395get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5396{
5397 rtx addr, mem;
5398
7f4d56ad 5399 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5400 addr = convert_memory_address (Pmode, addr);
041e0215 5401
5402 /* Note that we explicitly do not want any alias information for this
5403 memory, so that we kill all other live memories. Otherwise we don't
5404 satisfy the full barrier semantics of the intrinsic. */
5405 mem = validize_mem (gen_rtx_MEM (mode, addr));
5406
153c3b50 5407   /* The alignment needs to be at least that required by the mode.  */
5408 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5409 get_pointer_alignment (loc)));
c94cfd1c 5410 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5411 MEM_VOLATILE_P (mem) = 1;
5412
5413 return mem;
5414}
5415
1cd6e20d 5416/* Make sure an argument is in the right mode.
5417 EXP is the tree argument.
5418 MODE is the mode it should be in. */
5419
5420static rtx
3754d046 5421expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5422{
5423 rtx val;
3754d046 5424 machine_mode old_mode;
1cd6e20d 5425
5426 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5427 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5428 of CONST_INTs, where we know the old_mode only from the call argument. */
5429
5430 old_mode = GET_MODE (val);
5431 if (old_mode == VOIDmode)
5432 old_mode = TYPE_MODE (TREE_TYPE (exp));
5433 val = convert_modes (mode, old_mode, val, 1);
5434 return val;
5435}
5436
5437
b6a5fc45 5438/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5439 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5440 that corresponds to the arithmetic or logical operation from the name;
5441 an exception here is that NOT actually means NAND. TARGET is an optional
5442 place for us to store the results; AFTER is true if this is the
1cd6e20d 5443 fetch_and_xxx form. */
b6a5fc45 5444
5445static rtx
3754d046 5446expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5447 enum rtx_code code, bool after,
1cd6e20d 5448 rtx target)
b6a5fc45 5449{
041e0215 5450 rtx val, mem;
e60a6f7b 5451 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5452
cf73e559 5453 if (code == NOT && warn_sync_nand)
5454 {
5455 tree fndecl = get_callee_fndecl (exp);
5456 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5457
5458 static bool warned_f_a_n, warned_n_a_f;
5459
5460 switch (fcode)
5461 {
2797f13a 5462 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5463 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5464 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5465 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5466 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5467 if (warned_f_a_n)
5468 break;
5469
b9a16870 5470 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5471 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5472 warned_f_a_n = true;
5473 break;
5474
2797f13a 5475 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5476 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5477 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5478 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5479 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5480 if (warned_n_a_f)
5481 break;
5482
b9a16870 5483 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5484 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5485 warned_n_a_f = true;
5486 break;
5487
5488 default:
5489 gcc_unreachable ();
5490 }
5491 }
5492
b6a5fc45 5493 /* Expand the operands. */
c2f47e15 5494 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5495 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5496
a372f7ca 5497 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 5498 after);
b6a5fc45 5499}
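
/* User-level semantics sketch (reference only): AFTER selects whether
   the pre- or post-operation value is returned; NAND stores
   ~(*p & val) since GCC 4.4, as the warnings above note.  */
#if 0
int
sync_example (int *p)
{
  int old = __sync_fetch_and_add (p, 1);   /* AFTER false: old value.  */
  int now = __sync_add_and_fetch (p, 1);   /* AFTER true: new value.  */
  return old + now;
}
#endif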
5500
5501/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5502 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5503 true if this is the boolean form. TARGET is a place for us to store the
5504 results; this is NOT optional if IS_BOOL is true. */
5505
5506static rtx
3754d046 5507expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5508 bool is_bool, rtx target)
b6a5fc45 5509{
041e0215 5510 rtx old_val, new_val, mem;
ba885f6a 5511 rtx *pbool, *poval;
b6a5fc45 5512
5513 /* Expand the operands. */
c2f47e15 5514 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5515 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5516 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5517
ba885f6a 5518 pbool = poval = NULL;
5519 if (target != const0_rtx)
5520 {
5521 if (is_bool)
5522 pbool = &target;
5523 else
5524 poval = &target;
5525 }
5526 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 5527 false, MEMMODEL_SYNC_SEQ_CST,
5528 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 5529 return NULL_RTX;
c2f47e15 5530
1cd6e20d 5531 return target;
b6a5fc45 5532}
5533
5534/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5535 general form is actually an atomic exchange, and some targets only
5536 support a reduced form with the second argument being a constant 1.
48e1416a 5537 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5538 the results. */
b6a5fc45 5539
5540static rtx
3754d046 5541expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5542 rtx target)
b6a5fc45 5543{
041e0215 5544 rtx val, mem;
b6a5fc45 5545
5546 /* Expand the operands. */
c2f47e15 5547 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5548 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5549
7821cde1 5550 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5551}
5552
5553/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5554
5555static void
3754d046 5556expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5557{
5558 rtx mem;
5559
5560 /* Expand the operands. */
5561 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5562
a372f7ca 5563 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5564}
5565
5566/* Given an integer representing an ``enum memmodel'', verify its
5567 correctness and return the memory model enum. */
5568
5569static enum memmodel
5570get_memmodel (tree exp)
5571{
5572 rtx op;
7f738025 5573 unsigned HOST_WIDE_INT val;
2cb724f9 5574 source_location loc
5575 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5576
5577 /* If the parameter is not a constant, it's a run time value so we'll just
5578 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5579 if (TREE_CODE (exp) != INTEGER_CST)
5580 return MEMMODEL_SEQ_CST;
5581
5582 op = expand_normal (exp);
7f738025 5583
5584 val = INTVAL (op);
5585 if (targetm.memmodel_check)
5586 val = targetm.memmodel_check (val);
5587 else if (val & ~MEMMODEL_MASK)
5588 {
2cb724f9 5589 warning_at (loc, OPT_Winvalid_memory_model,
5590 "unknown architecture specifier in memory model to builtin");
7f738025 5591 return MEMMODEL_SEQ_CST;
5592 }
5593
a372f7ca 5594   /* We should never see an explicit user SYNC memory model, so >= LAST works.  */
5595 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5596 {
2cb724f9 5597 warning_at (loc, OPT_Winvalid_memory_model,
5598 "invalid memory model argument to builtin");
1cd6e20d 5599 return MEMMODEL_SEQ_CST;
5600 }
7f738025 5601
3070f133 5602 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5603 be conservative and promote consume to acquire. */
5604 if (val == MEMMODEL_CONSUME)
5605 val = MEMMODEL_ACQUIRE;
5606
7f738025 5607 return (enum memmodel) val;
1cd6e20d 5608}
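
/* Encoding sketch (assuming the documented __ATOMIC_* values,
   __ATOMIC_RELAXED == 0 through __ATOMIC_SEQ_CST == 5): constant
   arguments map directly, while a non-constant model is conservatively
   treated as MEMMODEL_SEQ_CST, as noted above.  */
#if 0
static int
model_example (int *p, int runtime_model)
{
  __atomic_store_n (p, 0, __ATOMIC_RELAXED);   /* Constant model.  */
  return __atomic_load_n (p, runtime_model);   /* Treated as SEQ_CST.  */
}
#endif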
5609
5610/* Expand the __atomic_exchange intrinsic:
5611 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5612 EXP is the CALL_EXPR.
5613 TARGET is an optional place for us to store the results. */
5614
5615static rtx
3754d046 5616expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5617{
5618 rtx val, mem;
5619 enum memmodel model;
5620
5621 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5622
5623 if (!flag_inline_atomics)
5624 return NULL_RTX;
5625
5626 /* Expand the operands. */
5627 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5628 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5629
7821cde1 5630 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5631}
5632
5633/* Expand the __atomic_compare_exchange intrinsic:
5634 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5635 TYPE desired, BOOL weak,
5636 enum memmodel success,
5637 enum memmodel failure)
5638 EXP is the CALL_EXPR.
5639 TARGET is an optional place for us to store the results. */
5640
5641static rtx
3754d046 5642expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5643 rtx target)
5644{
1e0c0b35 5645 rtx expect, desired, mem, oldval;
5646 rtx_code_label *label;
1cd6e20d 5647 enum memmodel success, failure;
5648 tree weak;
5649 bool is_weak;
2cb724f9 5650 source_location loc
5651 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5652
5653 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5654 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5655
086f4e33 5656 if (failure > success)
5657 {
2cb724f9 5658 warning_at (loc, OPT_Winvalid_memory_model,
5659 "failure memory model cannot be stronger than success "
5660 "memory model for %<__atomic_compare_exchange%>");
086f4e33 5661 success = MEMMODEL_SEQ_CST;
5662 }
5663
a372f7ca 5664 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5665 {
2cb724f9 5666 warning_at (loc, OPT_Winvalid_memory_model,
5667 "invalid failure memory model for "
5668 "%<__atomic_compare_exchange%>");
086f4e33 5669 failure = MEMMODEL_SEQ_CST;
5670 success = MEMMODEL_SEQ_CST;
1cd6e20d 5671 }
5672
086f4e33 5673
1cd6e20d 5674 if (!flag_inline_atomics)
5675 return NULL_RTX;
5676
5677 /* Expand the operands. */
5678 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5679
5680 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5681 expect = convert_memory_address (Pmode, expect);
c401b131 5682 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5683 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5684
5685 weak = CALL_EXPR_ARG (exp, 3);
5686 is_weak = false;
e913b5cd 5687 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5688 is_weak = true;
5689
d86e3752 5690 if (target == const0_rtx)
5691 target = NULL;
d86e3752 5692
3c29a9ea 5693   /* Lest the rtl backend create a race condition with an improper store
5694 to memory, always create a new pseudo for OLDVAL. */
5695 oldval = NULL;
5696
5697 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5698 is_weak, success, failure))
1cd6e20d 5699 return NULL_RTX;
5700
d86e3752 5701 /* Conditionally store back to EXPECT, lest we create a race condition
5702 with an improper store to memory. */
5703 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5704 the normal case where EXPECT is totally private, i.e. a register. At
5705 which point the store can be unconditional. */
5706 label = gen_label_rtx ();
62589f76 5707 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5708 GET_MODE (target), 1, label);
d86e3752 5709 emit_move_insn (expect, oldval);
5710 emit_label (label);
c401b131 5711
1cd6e20d 5712 return target;
5713}
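
/* User-level sketch of the conditional store-back above, mirroring C11
   semantics (illustrative only):  */
#if 0
_Bool
cas_example (int *p)
{
  int expected = 0;
  return __atomic_compare_exchange_n (p, &expected, 1, 0 /* weak */,
                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
  /* On failure, EXPECTED has been updated with the value seen in *P.  */
}
#endif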
5714
5a5ef659 5715/* Helper function for expand_ifn_atomic_compare_exchange - expand
5716 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5717 call. The weak parameter must be dropped to match the expected parameter
5718 list and the expected argument changed from value to pointer to memory
5719 slot. */
5720
5721static void
5722expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5723{
5724 unsigned int z;
5725 vec<tree, va_gc> *vec;
5726
5727 vec_alloc (vec, 5);
5728 vec->quick_push (gimple_call_arg (call, 0));
5729 tree expected = gimple_call_arg (call, 1);
5730 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5731 TREE_TYPE (expected));
5732 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5733 if (expd != x)
5734 emit_move_insn (x, expd);
5735 tree v = make_tree (TREE_TYPE (expected), x);
5736 vec->quick_push (build1 (ADDR_EXPR,
5737 build_pointer_type (TREE_TYPE (expected)), v));
5738 vec->quick_push (gimple_call_arg (call, 2));
5739 /* Skip the boolean weak parameter. */
5740 for (z = 4; z < 6; z++)
5741 vec->quick_push (gimple_call_arg (call, z));
5742 built_in_function fncode
5743 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5744 + exact_log2 (GET_MODE_SIZE (mode)));
5745 tree fndecl = builtin_decl_explicit (fncode);
5746 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5747 fndecl);
5748 tree exp = build_call_vec (boolean_type_node, fn, vec);
5749 tree lhs = gimple_call_lhs (call);
5750 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5751 if (lhs)
5752 {
5753 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5754 if (GET_MODE (boolret) != mode)
5755 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5756 x = force_reg (mode, x);
5757 write_complex_part (target, boolret, true);
5758 write_complex_part (target, x, false);
5759 }
5760}
5761
5762/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5763
5764void
5765expand_ifn_atomic_compare_exchange (gcall *call)
5766{
5767 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5768 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5769 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5770 rtx expect, desired, mem, oldval, boolret;
5771 enum memmodel success, failure;
5772 tree lhs;
5773 bool is_weak;
5774 source_location loc
5775 = expansion_point_location_if_in_system_header (gimple_location (call));
5776
5777 success = get_memmodel (gimple_call_arg (call, 4));
5778 failure = get_memmodel (gimple_call_arg (call, 5));
5779
5780 if (failure > success)
5781 {
5782 warning_at (loc, OPT_Winvalid_memory_model,
5783 "failure memory model cannot be stronger than success "
5784 "memory model for %<__atomic_compare_exchange%>");
5785 success = MEMMODEL_SEQ_CST;
5786 }
5787
5788 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5789 {
5790 warning_at (loc, OPT_Winvalid_memory_model,
5791 "invalid failure memory model for "
5792 "%<__atomic_compare_exchange%>");
5793 failure = MEMMODEL_SEQ_CST;
5794 success = MEMMODEL_SEQ_CST;
5795 }
5796
5797 if (!flag_inline_atomics)
5798 {
5799 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5800 return;
5801 }
5802
5803 /* Expand the operands. */
5804 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5805
5806 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5807 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5808
5809 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5810
5811 boolret = NULL;
5812 oldval = NULL;
5813
5814 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5815 is_weak, success, failure))
5816 {
5817 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5818 return;
5819 }
5820
5821 lhs = gimple_call_lhs (call);
5822 if (lhs)
5823 {
5824 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5825 if (GET_MODE (boolret) != mode)
5826 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5827 write_complex_part (target, boolret, true);
5828 write_complex_part (target, oldval, false);
5829 }
5830}
5831
1cd6e20d 5832/* Expand the __atomic_load intrinsic:
5833 TYPE __atomic_load (TYPE *object, enum memmodel)
5834 EXP is the CALL_EXPR.
5835 TARGET is an optional place for us to store the results. */
5836
5837static rtx
3754d046 5838expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5839{
5840 rtx mem;
5841 enum memmodel model;
5842
5843 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5844 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5845 {
2cb724f9 5846 source_location loc
5847 = expansion_point_location_if_in_system_header (input_location);
5848 warning_at (loc, OPT_Winvalid_memory_model,
5849 "invalid memory model for %<__atomic_load%>");
086f4e33 5850 model = MEMMODEL_SEQ_CST;
1cd6e20d 5851 }
5852
5853 if (!flag_inline_atomics)
5854 return NULL_RTX;
5855
5856 /* Expand the operand. */
5857 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5858
5859 return expand_atomic_load (target, mem, model);
5860}
5861
5862
5863/* Expand the __atomic_store intrinsic:
5864 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5865 EXP is the CALL_EXPR.
5866 TARGET is an optional place for us to store the results. */
5867
5868static rtx
3754d046 5869expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5870{
5871 rtx mem, val;
5872 enum memmodel model;
5873
5874 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 5875 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5876 || is_mm_release (model)))
1cd6e20d 5877 {
2cb724f9 5878 source_location loc
5879 = expansion_point_location_if_in_system_header (input_location);
5880 warning_at (loc, OPT_Winvalid_memory_model,
5881 "invalid memory model for %<__atomic_store%>");
086f4e33 5882 model = MEMMODEL_SEQ_CST;
1cd6e20d 5883 }
5884
5885 if (!flag_inline_atomics)
5886 return NULL_RTX;
5887
5888 /* Expand the operands. */
5889 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5890 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5891
8808bf16 5892 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5893}
5894
5895/* Expand the __atomic_fetch_XXX intrinsic:
5896 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5897 EXP is the CALL_EXPR.
5898 TARGET is an optional place for us to store the results.
 5899   CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5900 FETCH_AFTER is true if returning the result of the operation.
5901 FETCH_AFTER is false if returning the value before the operation.
5902 IGNORE is true if the result is not used.
5903 EXT_CALL is the correct builtin for an external call if this cannot be
5904 resolved to an instruction sequence. */
5905
5906static rtx
3754d046 5907expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 5908 enum rtx_code code, bool fetch_after,
5909 bool ignore, enum built_in_function ext_call)
5910{
5911 rtx val, mem, ret;
5912 enum memmodel model;
5913 tree fndecl;
5914 tree addr;
5915
5916 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5917
5918 /* Expand the operands. */
5919 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5920 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5921
5922 /* Only try generating instructions if inlining is turned on. */
5923 if (flag_inline_atomics)
5924 {
5925 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5926 if (ret)
5927 return ret;
5928 }
5929
5930 /* Return if a different routine isn't needed for the library call. */
5931 if (ext_call == BUILT_IN_NONE)
5932 return NULL_RTX;
5933
5934 /* Change the call to the specified function. */
5935 fndecl = get_callee_fndecl (exp);
5936 addr = CALL_EXPR_FN (exp);
5937 STRIP_NOPS (addr);
5938
5939 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 5940 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 5941
5942 /* Expand the call here so we can emit trailing code. */
5943 ret = expand_call (exp, target, ignore);
5944
5945 /* Replace the original function just in case it matters. */
5946 TREE_OPERAND (addr, 0) = fndecl;
5947
5948 /* Then issue the arithmetic correction to return the right result. */
5949 if (!ignore)
c449f851 5950 {
5951 if (code == NOT)
5952 {
5953 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5954 OPTAB_LIB_WIDEN);
5955 ret = expand_simple_unop (mode, NOT, ret, target, true);
5956 }
5957 else
5958 ret = expand_simple_binop (mode, code, ret, val, target, true,
5959 OPTAB_LIB_WIDEN);
5960 }
1cd6e20d 5961 return ret;
5962}
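
/* Worked example of the trailing correction (reference only): the
   library routine returns the pre-operation value, so the post value
   of an __atomic_OP_fetch variant is recomputed from it.  */
#if 0
unsigned int
add_fetch_fixup (unsigned int old, unsigned int val)
{
  return old + val;       /* Plain codes: RET = RET CODE VAL.  */
}

unsigned int
nand_fetch_fixup (unsigned int old, unsigned int val)
{
  return ~(old & val);    /* NAND (CODE == NOT): AND first, then NOT.  */
}
#endif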
5963
9c1a31e4 5964/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5965
5966void
5967expand_ifn_atomic_bit_test_and (gcall *call)
5968{
5969 tree ptr = gimple_call_arg (call, 0);
5970 tree bit = gimple_call_arg (call, 1);
5971 tree flag = gimple_call_arg (call, 2);
5972 tree lhs = gimple_call_lhs (call);
5973 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5974 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5975 enum rtx_code code;
5976 optab optab;
5977 struct expand_operand ops[5];
5978
5979 gcc_assert (flag_inline_atomics);
5980
5981 if (gimple_call_num_args (call) == 4)
5982 model = get_memmodel (gimple_call_arg (call, 3));
5983
5984 rtx mem = get_builtin_sync_mem (ptr, mode);
5985 rtx val = expand_expr_force_mode (bit, mode);
5986
5987 switch (gimple_call_internal_fn (call))
5988 {
5989 case IFN_ATOMIC_BIT_TEST_AND_SET:
5990 code = IOR;
5991 optab = atomic_bit_test_and_set_optab;
5992 break;
5993 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5994 code = XOR;
5995 optab = atomic_bit_test_and_complement_optab;
5996 break;
5997 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5998 code = AND;
5999 optab = atomic_bit_test_and_reset_optab;
6000 break;
6001 default:
6002 gcc_unreachable ();
6003 }
6004
6005 if (lhs == NULL_TREE)
6006 {
6007 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6008 val, NULL_RTX, true, OPTAB_DIRECT);
6009 if (code == AND)
6010 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6011 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6012 return;
6013 }
6014
6015 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6016 enum insn_code icode = direct_optab_handler (optab, mode);
6017 gcc_assert (icode != CODE_FOR_nothing);
6018 create_output_operand (&ops[0], target, mode);
6019 create_fixed_operand (&ops[1], mem);
6020 create_convert_operand_to (&ops[2], val, mode, true);
6021 create_integer_operand (&ops[3], model);
6022 create_integer_operand (&ops[4], integer_onep (flag));
6023 if (maybe_expand_insn (icode, 5, ops))
6024 return;
6025
6026 rtx bitval = val;
6027 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6028 val, NULL_RTX, true, OPTAB_DIRECT);
6029 rtx maskval = val;
6030 if (code == AND)
6031 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6032 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6033 code, model, false);
6034 if (integer_onep (flag))
6035 {
6036 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6037 NULL_RTX, true, OPTAB_DIRECT);
6038 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6039 true, OPTAB_DIRECT);
6040 }
6041 else
6042 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6043 OPTAB_DIRECT);
6044 if (result != target)
6045 emit_move_insn (target, result);
6046}
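
/* Semantics sketch (reference): the internal function matches idioms
   of this shape, and the fallback path above recomputes exactly this
   from the fetched value.  */
#if 0
_Bool
bts_example (unsigned int *p, unsigned int bit)
{
  return (__atomic_fetch_or (p, 1u << bit, __ATOMIC_SEQ_CST) >> bit) & 1;
}
#endif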
6047
10b744a3 6048/* Expand an atomic clear operation.
 6049   void __atomic_clear (BOOL *obj, enum memmodel)
6050 EXP is the call expression. */
6051
6052static rtx
6053expand_builtin_atomic_clear (tree exp)
6054{
3754d046 6055 machine_mode mode;
10b744a3 6056 rtx mem, ret;
6057 enum memmodel model;
6058
6059 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6060 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6061 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6062
a372f7ca 6063 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 6064 {
2cb724f9 6065 source_location loc
6066 = expansion_point_location_if_in_system_header (input_location);
6067 warning_at (loc, OPT_Winvalid_memory_model,
6068 "invalid memory model for %<__atomic_store%>");
086f4e33 6069 model = MEMMODEL_SEQ_CST;
10b744a3 6070 }
6071
6072 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6073 Failing that, a store is issued by __atomic_store. The only way this can
6074 fail is if the bool type is larger than a word size. Unlikely, but
6075 handle it anyway for completeness. Assume a single threaded model since
6076 there is no atomic support in this case, and no barriers are required. */
6077 ret = expand_atomic_store (mem, const0_rtx, model, true);
6078 if (!ret)
6079 emit_move_insn (mem, const0_rtx);
6080 return const0_rtx;
6081}
6082
6083/* Expand an atomic test_and_set operation.
 6084   bool __atomic_test_and_set (BOOL *obj, enum memmodel)
6085 EXP is the call expression. */
6086
6087static rtx
7821cde1 6088expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 6089{
7821cde1 6090 rtx mem;
10b744a3 6091 enum memmodel model;
3754d046 6092 machine_mode mode;
10b744a3 6093
6094 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
6095 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6096 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6097
7821cde1 6098 return expand_atomic_test_and_set (target, mem, model);
10b744a3 6099}
6100
6101
1cd6e20d 6102/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6103 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6104
6105static tree
6106fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6107{
6108 int size;
3754d046 6109 machine_mode mode;
1cd6e20d 6110 unsigned int mode_align, type_align;
6111
6112 if (TREE_CODE (arg0) != INTEGER_CST)
6113 return NULL_TREE;
b6a5fc45 6114
1cd6e20d 6115 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6116 mode = mode_for_size (size, MODE_INT, 0);
6117 mode_align = GET_MODE_ALIGNMENT (mode);
6118
4ca99588 6119 if (TREE_CODE (arg1) == INTEGER_CST)
6120 {
6121 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6122
6123 /* Either this argument is null, or it's a fake pointer encoding
6124 the alignment of the object. */
ac29ece2 6125 val = least_bit_hwi (val);
4ca99588 6126 val *= BITS_PER_UNIT;
6127
6128 if (val == 0 || mode_align < val)
6129 type_align = mode_align;
6130 else
6131 type_align = val;
6132 }
1cd6e20d 6133 else
6134 {
6135 tree ttype = TREE_TYPE (arg1);
6136
6137 /* This function is usually invoked and folded immediately by the front
6138 end before anything else has a chance to look at it. The pointer
6139 parameter at this point is usually cast to a void *, so check for that
6140 and look past the cast. */
2f8a2ead 6141 if (CONVERT_EXPR_P (arg1)
6142 && POINTER_TYPE_P (ttype)
6143 && VOID_TYPE_P (TREE_TYPE (ttype))
6144 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
1cd6e20d 6145 arg1 = TREE_OPERAND (arg1, 0);
6146
6147 ttype = TREE_TYPE (arg1);
6148 gcc_assert (POINTER_TYPE_P (ttype));
6149
6150 /* Get the underlying type of the object. */
6151 ttype = TREE_TYPE (ttype);
6152 type_align = TYPE_ALIGN (ttype);
6153 }
6154
47ae02b7 6155 /* If the object has smaller alignment, the lock free routines cannot
1cd6e20d 6156 be used. */
6157 if (type_align < mode_align)
06308d2a 6158 return boolean_false_node;
1cd6e20d 6159
6160 /* Check if a compare_and_swap pattern exists for the mode which represents
6161 the required size. The pattern is not allowed to fail, so the existence
d5f5fa27 6162 of the pattern indicates support is present. Also require that an
6163 atomic load exists for the required size. */
6164 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
06308d2a 6165 return boolean_true_node;
1cd6e20d 6166 else
06308d2a 6167 return boolean_false_node;
1cd6e20d 6168}
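
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   the folder above is what reduces calls like these to constants at
   compile time.  The answers shown are typical for mainstream targets,
   not guaranteed.  */
#if 0
int int_ok = __atomic_always_lock_free (sizeof (int), 0);	/* Usually 1.  */
int big_no = __atomic_always_lock_free (64, 0);			/* Usually 0.  */
#endif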
6169
6170/* Return true if the parameters to call EXP represent an object which will
6171 always generate lock free instructions. The first argument represents the
6172 size of the object, and the second parameter is a pointer to the object
6173 itself. If NULL is passed for the object, then the result is based on
6174 typical alignment for an object of the specified size. Otherwise return
6175 false. */
6176
6177static rtx
6178expand_builtin_atomic_always_lock_free (tree exp)
6179{
6180 tree size;
6181 tree arg0 = CALL_EXPR_ARG (exp, 0);
6182 tree arg1 = CALL_EXPR_ARG (exp, 1);
6183
6184 if (TREE_CODE (arg0) != INTEGER_CST)
6185 {
6186 error ("non-constant argument 1 to __atomic_always_lock_free");
6187 return const0_rtx;
6188 }
6189
6190 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 6191 if (size == boolean_true_node)
1cd6e20d 6192 return const1_rtx;
6193 return const0_rtx;
6194}
6195
 6196/* Return boolean_true_node if it can be determined that object ARG1 of size
 6197   ARG0 is lock free on this architecture, and NULL_TREE otherwise.  */
6198
6199static tree
6200fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6201{
6202 if (!flag_inline_atomics)
6203 return NULL_TREE;
6204
6205 /* If it isn't always lock free, don't generate a result. */
06308d2a 6206 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6207 return boolean_true_node;
1cd6e20d 6208
6209 return NULL_TREE;
6210}
6211
 6212/* Return a one if it can be determined that the object described by call EXP
 6213   is lock free.  The first argument represents the size of the object, and
 6214   the second parameter is a pointer to the object itself.  If NULL is passed
 6215   for the object, then the result is based on typical alignment for an
 6216   object of the specified size.  Otherwise return NULL_RTX so that the
 6217   call falls through to a runtime library call.  */
6218
6219static rtx
6220expand_builtin_atomic_is_lock_free (tree exp)
6221{
6222 tree size;
6223 tree arg0 = CALL_EXPR_ARG (exp, 0);
6224 tree arg1 = CALL_EXPR_ARG (exp, 1);
6225
6226 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6227 {
6228 error ("non-integer argument 1 to __atomic_is_lock_free");
6229 return NULL_RTX;
6230 }
6231
6232 if (!flag_inline_atomics)
6233 return NULL_RTX;
6234
6235 /* If the value is known at compile time, return the RTX for it. */
6236 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 6237 if (size == boolean_true_node)
1cd6e20d 6238 return const1_rtx;
6239
6240 return NULL_RTX;
6241}
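
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   when the folder cannot prove lock freedom, the expander returns
   NULL_RTX and the call falls through to the libatomic runtime.  The
   type and function below are hypothetical.  */
#if 0
struct big { char c[64]; };

static int
query (struct big *p)
{
  /* Not provably lock free; becomes a library call at run time.  */
  return __atomic_is_lock_free (sizeof (struct big), p);
}
#endif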
6242
1cd6e20d 6243/* Expand the __atomic_thread_fence intrinsic:
6244 void __atomic_thread_fence (enum memmodel)
6245 EXP is the CALL_EXPR. */
6246
6247static void
6248expand_builtin_atomic_thread_fence (tree exp)
6249{
fe54c06b 6250 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6251 expand_mem_thread_fence (model);
1cd6e20d 6252}
6253
6254/* Expand the __atomic_signal_fence intrinsic:
6255 void __atomic_signal_fence (enum memmodel)
6256 EXP is the CALL_EXPR. */
6257
6258static void
6259expand_builtin_atomic_signal_fence (tree exp)
6260{
fe54c06b 6261 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6262 expand_mem_signal_fence (model);
b6a5fc45 6263}
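
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   a release fence expanded through expand_mem_thread_fence, paired with
   a relaxed store.  A signal fence, by contrast, compiles to a compiler
   barrier only.  The variables and helper are hypothetical.  */
#if 0
static int payload, ready;

static void
publish (int v)
{
  payload = v;
  __atomic_thread_fence (__ATOMIC_RELEASE);
  __atomic_store_n (&ready, 1, __ATOMIC_RELAXED);
}
#endif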
6264
6265/* Expand the __sync_synchronize intrinsic. */
6266
6267static void
2797f13a 6268expand_builtin_sync_synchronize (void)
b6a5fc45 6269{
a372f7ca 6270 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 6271}
6272
badaa04c 6273static rtx
6274expand_builtin_thread_pointer (tree exp, rtx target)
6275{
6276 enum insn_code icode;
6277 if (!validate_arglist (exp, VOID_TYPE))
6278 return const0_rtx;
6279 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6280 if (icode != CODE_FOR_nothing)
6281 {
6282 struct expand_operand op;
3ed779c3 6283      /* If the target is not suitable then create a new target.  */
6284 if (target == NULL_RTX
6285 || !REG_P (target)
6286 || GET_MODE (target) != Pmode)
badaa04c 6287 target = gen_reg_rtx (Pmode);
6288 create_output_operand (&op, target, Pmode);
6289 expand_insn (icode, 1, &op);
6290 return target;
6291 }
6292 error ("__builtin_thread_pointer is not supported on this target");
6293 return const0_rtx;
6294}
6295
6296static void
6297expand_builtin_set_thread_pointer (tree exp)
6298{
6299 enum insn_code icode;
6300 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6301 return;
6302 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6303 if (icode != CODE_FOR_nothing)
6304 {
6305 struct expand_operand op;
6306 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6307 Pmode, EXPAND_NORMAL);
6f343c10 6308 create_input_operand (&op, val, Pmode);
badaa04c 6309 expand_insn (icode, 1, &op);
6310 return;
6311 }
6312 error ("__builtin_set_thread_pointer is not supported on this target");
6313}
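
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   on targets providing the optabs above (e.g. a dedicated TLS register),
   this expands to a single register read; elsewhere the errors above are
   reported.  The helper function is hypothetical.  */
#if 0
static void *
current_thread_pointer (void)
{
  return __builtin_thread_pointer ();
}
#endif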
6314
53800dbe 6315\f
0e80b01d 6316/* Emit code to restore the current value of stack. */
6317
6318static void
6319expand_stack_restore (tree var)
6320{
1e0c0b35 6321 rtx_insn *prev;
6322 rtx sa = expand_normal (var);
0e80b01d 6323
6324 sa = convert_memory_address (Pmode, sa);
6325
6326 prev = get_last_insn ();
6327 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 6328
6329 record_new_stack_level ();
6330
0e80b01d 6331 fixup_args_size_notes (prev, get_last_insn (), 0);
6332}
6333
0e80b01d 6334/* Emit code to save the current value of stack. */
6335
6336static rtx
6337expand_stack_save (void)
6338{
6339 rtx ret = NULL_RTX;
6340
0e80b01d 6341 emit_stack_save (SAVE_BLOCK, &ret);
6342 return ret;
6343}
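
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   these two expanders back the stack save/restore pairs that the
   gimplifier wraps around variable-length array scopes.  The calls below
   are pseudo-code for what the compiler inserts; the stub builtins are
   not directly callable from user code.  */
#if 0
static void
with_vla (int n)
{
  void *sp = __builtin_stack_save ();
  {
    char buf[n];		/* VLA space comes from the stack.  */
    buf[0] = 0;
  }
  __builtin_stack_restore (sp);	/* Release the VLA's stack space.  */
}
#endif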
6344
ca4c3545 6345
53800dbe 6346/* Expand an expression EXP that calls a built-in function,
6347 with result going to TARGET if that's convenient
6348 (and in mode MODE if that's convenient).
6349 SUBTARGET may be used as the target for computing one of EXP's operands.
6350 IGNORE is nonzero if the value is to be ignored. */
6351
6352rtx
3754d046 6353expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 6354 int ignore)
53800dbe 6355{
c6e6ecb1 6356 tree fndecl = get_callee_fndecl (exp);
53800dbe 6357 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 6358 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 6359 int flags;
53800dbe 6360
4e2f4ed5 6361 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6362 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6363
f9acf11a 6364 /* When ASan is enabled, we don't want to expand some memory/string
6365 builtins and rely on libsanitizer's hooks. This allows us to avoid
 6366   redundant checks and be sure that possible overflow will be detected
6367 by ASan. */
6368
6369 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6370 return expand_call (exp, target, ignore);
6371
53800dbe 6372 /* When not optimizing, generate calls to library functions for a certain
6373 set of builtins. */
cd9ff771 6374 if (!optimize
b6a5fc45 6375 && !called_as_built_in (fndecl)
73037a1e 6376 && fcode != BUILT_IN_FORK
6377 && fcode != BUILT_IN_EXECL
6378 && fcode != BUILT_IN_EXECV
6379 && fcode != BUILT_IN_EXECLP
6380 && fcode != BUILT_IN_EXECLE
6381 && fcode != BUILT_IN_EXECVP
6382 && fcode != BUILT_IN_EXECVE
2c281b15 6383 && fcode != BUILT_IN_ALLOCA
581bf1c2 6384 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 6385 && fcode != BUILT_IN_FREE
6386 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6387 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6388 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6389 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6390 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6391 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6392 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6393 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6394 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6395 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6396 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6397 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 6398 return expand_call (exp, target, ignore);
53800dbe 6399
8d6d7930 6400 /* The built-in function expanders test for target == const0_rtx
6401 to determine whether the function's result will be ignored. */
6402 if (ignore)
6403 target = const0_rtx;
6404
6405 /* If the result of a pure or const built-in function is ignored, and
6406 none of its arguments are volatile, we can avoid expanding the
6407 built-in call and just evaluate the arguments for side-effects. */
6408 if (target == const0_rtx
67fa4078 6409 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6410 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 6411 {
6412 bool volatilep = false;
6413 tree arg;
c2f47e15 6414 call_expr_arg_iterator iter;
8d6d7930 6415
c2f47e15 6416 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6417 if (TREE_THIS_VOLATILE (arg))
8d6d7930 6418 {
6419 volatilep = true;
6420 break;
6421 }
6422
6423 if (! volatilep)
6424 {
c2f47e15 6425 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6426 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 6427 return const0_rtx;
6428 }
6429 }
6430
f21337ef 6431 /* expand_builtin_with_bounds is supposed to be used for
6432 instrumented builtin calls. */
058a1b7a 6433 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6434
53800dbe 6435 switch (fcode)
6436 {
4f35b1fc 6437 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 6438 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 6439 case BUILT_IN_FABSD32:
6440 case BUILT_IN_FABSD64:
6441 case BUILT_IN_FABSD128:
c2f47e15 6442 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6443 if (target)
a0c938f0 6444 return target;
78a74442 6445 break;
6446
4f35b1fc 6447 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 6448 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 6449 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6450 if (target)
6451 return target;
6452 break;
6453
7d3f6cc7 6454 /* Just do a normal library call if we were unable to fold
6455 the values. */
4f35b1fc 6456 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6457 break;
53800dbe 6458
7e0713b1 6459 CASE_FLT_FN (BUILT_IN_FMA):
6460 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6461 if (target)
6462 return target;
6463 break;
6464
a67a90e5 6465 CASE_FLT_FN (BUILT_IN_ILOGB):
6466 if (! flag_unsafe_math_optimizations)
6467 break;
3c77f69c 6468 gcc_fallthrough ();
69b779ea 6469 CASE_FLT_FN (BUILT_IN_ISINF):
cde061c1 6470 CASE_FLT_FN (BUILT_IN_FINITE):
6471 case BUILT_IN_ISFINITE:
8a1a9cb7 6472 case BUILT_IN_ISNORMAL:
f97eea22 6473 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6474 if (target)
6475 return target;
6476 break;
6477
80ff6494 6478 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6479 CASE_FLT_FN (BUILT_IN_LCEIL):
6480 CASE_FLT_FN (BUILT_IN_LLCEIL):
6481 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6482 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6483 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6484 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6485 if (target)
6486 return target;
6487 break;
6488
80ff6494 6489 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6490 CASE_FLT_FN (BUILT_IN_LRINT):
6491 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6492 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6493 CASE_FLT_FN (BUILT_IN_LROUND):
6494 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6495 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6496 if (target)
6497 return target;
6498 break;
6499
4f35b1fc 6500 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6501 target = expand_builtin_powi (exp, target);
757c219d 6502 if (target)
6503 return target;
6504 break;
6505
d735c391 6506 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6507 target = expand_builtin_cexpi (exp, target);
d735c391 6508 gcc_assert (target);
6509 return target;
6510
4f35b1fc 6511 CASE_FLT_FN (BUILT_IN_SIN):
6512 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6513 if (! flag_unsafe_math_optimizations)
6514 break;
6515 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6516 if (target)
6517 return target;
6518 break;
6519
c3147c1a 6520 CASE_FLT_FN (BUILT_IN_SINCOS):
6521 if (! flag_unsafe_math_optimizations)
6522 break;
6523 target = expand_builtin_sincos (exp);
6524 if (target)
6525 return target;
6526 break;
6527
53800dbe 6528 case BUILT_IN_APPLY_ARGS:
6529 return expand_builtin_apply_args ();
6530
6531 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6532 FUNCTION with a copy of the parameters described by
6533 ARGUMENTS, and ARGSIZE. It returns a block of memory
6534 allocated on the stack into which is stored all the registers
6535 that might possibly be used for returning the result of a
6536 function. ARGUMENTS is the value returned by
6537 __builtin_apply_args. ARGSIZE is the number of bytes of
6538 arguments that must be copied. ??? How should this value be
6539 computed? We'll also need a safe worst case value for varargs
6540 functions. */
6541 case BUILT_IN_APPLY:
c2f47e15 6542 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6543 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6544 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6545 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6546 return const0_rtx;
6547 else
6548 {
53800dbe 6549 rtx ops[3];
6550
c2f47e15 6551 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6552 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6553 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6554
6555 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6556 }
6557
6558 /* __builtin_return (RESULT) causes the function to return the
6559 value described by RESULT. RESULT is address of the block of
6560 memory returned by __builtin_apply. */
6561 case BUILT_IN_RETURN:
c2f47e15 6562 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6563 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6564 return const0_rtx;
6565
6566 case BUILT_IN_SAVEREGS:
a66c9326 6567 return expand_builtin_saveregs ();
53800dbe 6568
48dc2227 6569 case BUILT_IN_VA_ARG_PACK:
6570 /* All valid uses of __builtin_va_arg_pack () are removed during
6571 inlining. */
b8c23db3 6572 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6573 return const0_rtx;
6574
4e1d7ea4 6575 case BUILT_IN_VA_ARG_PACK_LEN:
6576 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6577 inlining. */
b8c23db3 6578 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6579 return const0_rtx;
6580
53800dbe 6581 /* Return the address of the first anonymous stack arg. */
6582 case BUILT_IN_NEXT_ARG:
c2f47e15 6583 if (fold_builtin_next_arg (exp, false))
a0c938f0 6584 return const0_rtx;
79012a9d 6585 return expand_builtin_next_arg ();
53800dbe 6586
ac8fb6db 6587 case BUILT_IN_CLEAR_CACHE:
6588 target = expand_builtin___clear_cache (exp);
6589 if (target)
6590 return target;
6591 break;
6592
53800dbe 6593 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6594 return expand_builtin_classify_type (exp);
53800dbe 6595
6596 case BUILT_IN_CONSTANT_P:
4ee9c684 6597 return const0_rtx;
53800dbe 6598
6599 case BUILT_IN_FRAME_ADDRESS:
6600 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6601 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6602
6603 /* Returns the address of the area where the structure is returned.
6604 0 otherwise. */
6605 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6606 if (call_expr_nargs (exp) != 0
9342ee68 6607 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6608 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6609 return const0_rtx;
53800dbe 6610 else
9342ee68 6611 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6612
6613 case BUILT_IN_ALLOCA:
581bf1c2 6614 case BUILT_IN_ALLOCA_WITH_ALIGN:
990495a7 6615 /* If the allocation stems from the declaration of a variable-sized
6616 object, it cannot accumulate. */
a882d754 6617 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
53800dbe 6618 if (target)
6619 return target;
6620 break;
6621
4ee9c684 6622 case BUILT_IN_STACK_SAVE:
6623 return expand_stack_save ();
6624
6625 case BUILT_IN_STACK_RESTORE:
c2f47e15 6626 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6627 return const0_rtx;
6628
74bdbe96 6629 case BUILT_IN_BSWAP16:
42791117 6630 case BUILT_IN_BSWAP32:
6631 case BUILT_IN_BSWAP64:
74bdbe96 6632 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6633 if (target)
6634 return target;
6635 break;
6636
4f35b1fc 6637 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6638 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6639 subtarget, ffs_optab);
6a08d0ab 6640 if (target)
6641 return target;
6642 break;
6643
4f35b1fc 6644 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6645 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6646 subtarget, clz_optab);
6a08d0ab 6647 if (target)
6648 return target;
6649 break;
6650
4f35b1fc 6651 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6652 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6653 subtarget, ctz_optab);
6a08d0ab 6654 if (target)
6655 return target;
6656 break;
6657
d8492bd3 6658 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6659 target = expand_builtin_unop (target_mode, exp, target,
6660 subtarget, clrsb_optab);
6661 if (target)
6662 return target;
6663 break;
6664
4f35b1fc 6665 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6666 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6667 subtarget, popcount_optab);
6a08d0ab 6668 if (target)
6669 return target;
6670 break;
6671
4f35b1fc 6672 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6673 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6674 subtarget, parity_optab);
53800dbe 6675 if (target)
6676 return target;
6677 break;
6678
6679 case BUILT_IN_STRLEN:
c2f47e15 6680 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6681 if (target)
6682 return target;
6683 break;
6684
5aef8938 6685 case BUILT_IN_STRCAT:
6686 target = expand_builtin_strcat (exp, target);
6687 if (target)
6688 return target;
6689 break;
6690
53800dbe 6691 case BUILT_IN_STRCPY:
a65c4d64 6692 target = expand_builtin_strcpy (exp, target);
53800dbe 6693 if (target)
6694 return target;
6695 break;
bf8e3599 6696
5aef8938 6697 case BUILT_IN_STRNCAT:
6698 target = expand_builtin_strncat (exp, target);
6699 if (target)
6700 return target;
6701 break;
6702
ed09096d 6703 case BUILT_IN_STRNCPY:
a65c4d64 6704 target = expand_builtin_strncpy (exp, target);
ed09096d 6705 if (target)
6706 return target;
6707 break;
bf8e3599 6708
3b824fa6 6709 case BUILT_IN_STPCPY:
dc369150 6710 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6711 if (target)
6712 return target;
6713 break;
6714
53800dbe 6715 case BUILT_IN_MEMCPY:
a65c4d64 6716 target = expand_builtin_memcpy (exp, target);
3b824fa6 6717 if (target)
6718 return target;
6719 break;
6720
6721 case BUILT_IN_MEMPCPY:
c2f47e15 6722 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6723 if (target)
6724 return target;
6725 break;
6726
6727 case BUILT_IN_MEMSET:
c2f47e15 6728 target = expand_builtin_memset (exp, target, mode);
53800dbe 6729 if (target)
6730 return target;
6731 break;
6732
ffc83088 6733 case BUILT_IN_BZERO:
0b25db21 6734 target = expand_builtin_bzero (exp);
ffc83088 6735 if (target)
6736 return target;
6737 break;
6738
53800dbe 6739 case BUILT_IN_STRCMP:
a65c4d64 6740 target = expand_builtin_strcmp (exp, target);
53800dbe 6741 if (target)
6742 return target;
6743 break;
6744
ed09096d 6745 case BUILT_IN_STRNCMP:
6746 target = expand_builtin_strncmp (exp, target, mode);
6747 if (target)
6748 return target;
6749 break;
6750
071f1696 6751 case BUILT_IN_BCMP:
53800dbe 6752 case BUILT_IN_MEMCMP:
3e346f54 6753 case BUILT_IN_MEMCMP_EQ:
6754 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 6755 if (target)
6756 return target;
3e346f54 6757 if (fcode == BUILT_IN_MEMCMP_EQ)
6758 {
6759 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6760 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6761 }
53800dbe 6762 break;
53800dbe 6763
6764 case BUILT_IN_SETJMP:
2c8a1497 6765 /* This should have been lowered to the builtins below. */
6766 gcc_unreachable ();
6767
6768 case BUILT_IN_SETJMP_SETUP:
6769 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6770 and the receiver label. */
c2f47e15 6771 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6772 {
c2f47e15 6773 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6774 VOIDmode, EXPAND_NORMAL);
c2f47e15 6775 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6776 rtx_insn *label_r = label_rtx (label);
2c8a1497 6777
6778 /* This is copied from the handling of non-local gotos. */
6779 expand_builtin_setjmp_setup (buf_addr, label_r);
6780 nonlocal_goto_handler_labels
a4de1c23 6781 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6782 nonlocal_goto_handler_labels);
6783 /* ??? Do not let expand_label treat us as such since we would
6784 not want to be both on the list of non-local labels and on
6785 the list of forced labels. */
6786 FORCED_LABEL (label) = 0;
6787 return const0_rtx;
6788 }
6789 break;
6790
2c8a1497 6791 case BUILT_IN_SETJMP_RECEIVER:
6792 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6793 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6794 {
c2f47e15 6795 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6796 rtx_insn *label_r = label_rtx (label);
2c8a1497 6797
6798 expand_builtin_setjmp_receiver (label_r);
6799 return const0_rtx;
6800 }
6b7f6858 6801 break;
53800dbe 6802
6803 /* __builtin_longjmp is passed a pointer to an array of five words.
6804 It's similar to the C library longjmp function but works with
6805 __builtin_setjmp above. */
6806 case BUILT_IN_LONGJMP:
c2f47e15 6807 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6808 {
c2f47e15 6809 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6810 VOIDmode, EXPAND_NORMAL);
c2f47e15 6811 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6812
6813 if (value != const1_rtx)
6814 {
1e5fcbe2 6815 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6816 return const0_rtx;
6817 }
6818
6819 expand_builtin_longjmp (buf_addr, value);
6820 return const0_rtx;
6821 }
2c8a1497 6822 break;
53800dbe 6823
4ee9c684 6824 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6825 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6826 if (target)
6827 return target;
6828 break;
6829
843d08a9 6830 /* This updates the setjmp buffer that is its argument with the value
6831 of the current stack pointer. */
6832 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6833 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6834 {
6835 rtx buf_addr
c2f47e15 6836 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6837
6838 expand_builtin_update_setjmp_buf (buf_addr);
6839 return const0_rtx;
6840 }
6841 break;
6842
53800dbe 6843 case BUILT_IN_TRAP:
a0ef1725 6844 expand_builtin_trap ();
53800dbe 6845 return const0_rtx;
6846
d2b48f0c 6847 case BUILT_IN_UNREACHABLE:
6848 expand_builtin_unreachable ();
6849 return const0_rtx;
6850
4f35b1fc 6851 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6852 case BUILT_IN_SIGNBITD32:
6853 case BUILT_IN_SIGNBITD64:
6854 case BUILT_IN_SIGNBITD128:
27f261ef 6855 target = expand_builtin_signbit (exp, target);
6856 if (target)
6857 return target;
6858 break;
6859
53800dbe 6860 /* Various hooks for the DWARF 2 __throw routine. */
6861 case BUILT_IN_UNWIND_INIT:
6862 expand_builtin_unwind_init ();
6863 return const0_rtx;
6864 case BUILT_IN_DWARF_CFA:
6865 return virtual_cfa_rtx;
6866#ifdef DWARF2_UNWIND_INFO
f8f023a5 6867 case BUILT_IN_DWARF_SP_COLUMN:
6868 return expand_builtin_dwarf_sp_column ();
695e919b 6869 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6870 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6871 return const0_rtx;
53800dbe 6872#endif
6873 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6874 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6875 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6876 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6877 case BUILT_IN_EH_RETURN:
c2f47e15 6878 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6879 CALL_EXPR_ARG (exp, 1));
53800dbe 6880 return const0_rtx;
df4b504c 6881 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6882 return expand_builtin_eh_return_data_regno (exp);
26093bf4 6883 case BUILT_IN_EXTEND_POINTER:
c2f47e15 6884 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 6885 case BUILT_IN_EH_POINTER:
6886 return expand_builtin_eh_pointer (exp);
6887 case BUILT_IN_EH_FILTER:
6888 return expand_builtin_eh_filter (exp);
6889 case BUILT_IN_EH_COPY_VALUES:
6890 return expand_builtin_eh_copy_values (exp);
26093bf4 6891
7ccc713a 6892 case BUILT_IN_VA_START:
c2f47e15 6893 return expand_builtin_va_start (exp);
a66c9326 6894 case BUILT_IN_VA_END:
c2f47e15 6895 return expand_builtin_va_end (exp);
a66c9326 6896 case BUILT_IN_VA_COPY:
c2f47e15 6897 return expand_builtin_va_copy (exp);
89cfe6e5 6898 case BUILT_IN_EXPECT:
c2f47e15 6899 return expand_builtin_expect (exp, target);
fca0886c 6900 case BUILT_IN_ASSUME_ALIGNED:
6901 return expand_builtin_assume_aligned (exp, target);
5e3608d8 6902 case BUILT_IN_PREFETCH:
c2f47e15 6903 expand_builtin_prefetch (exp);
5e3608d8 6904 return const0_rtx;
6905
4ee9c684 6906 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 6907 return expand_builtin_init_trampoline (exp, true);
6908 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6909 return expand_builtin_init_trampoline (exp, false);
4ee9c684 6910 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 6911 return expand_builtin_adjust_trampoline (exp);
4ee9c684 6912
a27e3913 6913 case BUILT_IN_INIT_DESCRIPTOR:
6914 return expand_builtin_init_descriptor (exp);
6915 case BUILT_IN_ADJUST_DESCRIPTOR:
6916 return expand_builtin_adjust_descriptor (exp);
6917
73673831 6918 case BUILT_IN_FORK:
6919 case BUILT_IN_EXECL:
6920 case BUILT_IN_EXECV:
6921 case BUILT_IN_EXECLP:
6922 case BUILT_IN_EXECLE:
6923 case BUILT_IN_EXECVP:
6924 case BUILT_IN_EXECVE:
c2f47e15 6925 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 6926 if (target)
6927 return target;
6928 break;
53800dbe 6929
2797f13a 6930 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6931 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6932 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6933 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6934 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 6936 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 6937 if (target)
6938 return target;
6939 break;
6940
2797f13a 6941 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6942 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6943 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6944 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6945 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6946 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 6947 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 6948 if (target)
6949 return target;
6950 break;
6951
2797f13a 6952 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6953 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6954 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6955 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6956 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6957 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 6958 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 6959 if (target)
6960 return target;
6961 break;
6962
2797f13a 6963 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6964 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6965 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6966 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6967 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6968 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 6969 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 6970 if (target)
6971 return target;
6972 break;
6973
2797f13a 6974 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6975 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6976 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6977 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6978 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6979 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 6980 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 6981 if (target)
6982 return target;
6983 break;
6984
2797f13a 6985 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6986 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6987 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6988 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6989 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6990 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 6991 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 6992 if (target)
6993 return target;
6994 break;
6995
2797f13a 6996 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6997 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6998 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6999 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7000 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7001 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 7002 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 7003 if (target)
7004 return target;
7005 break;
7006
2797f13a 7007 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7008 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7009 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7010 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7011 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7012 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 7013 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 7014 if (target)
7015 return target;
7016 break;
7017
2797f13a 7018 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7019 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7020 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7021 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7022 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7023 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 7024 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 7025 if (target)
7026 return target;
7027 break;
7028
2797f13a 7029 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7030 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7031 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7032 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7033 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7034 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 7035 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 7036 if (target)
7037 return target;
7038 break;
7039
2797f13a 7040 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7041 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7042 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7043 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7044 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7045 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 7046 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 7047 if (target)
7048 return target;
7049 break;
7050
2797f13a 7051 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7052 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7053 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7054 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7055 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7056 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 7057 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 7058 if (target)
7059 return target;
7060 break;
7061
2797f13a 7062 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7063 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7064 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7065 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7066 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 7067 if (mode == VOIDmode)
7068 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 7069 if (!target || !register_operand (target, mode))
7070 target = gen_reg_rtx (mode);
3e272de8 7071
2797f13a 7072 mode = get_builtin_sync_mode
7073 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 7074 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 7075 if (target)
7076 return target;
7077 break;
7078
2797f13a 7079 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7080 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7081 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7082 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7083 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7084 mode = get_builtin_sync_mode
7085 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 7086 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 7087 if (target)
7088 return target;
7089 break;
7090
2797f13a 7091 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7092 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7093 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7094 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7095 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7096 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7097 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 7098 if (target)
7099 return target;
7100 break;
7101
2797f13a 7102 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7103 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7104 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7105 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7106 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7107 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7108 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 7109 return const0_rtx;
7110
2797f13a 7111 case BUILT_IN_SYNC_SYNCHRONIZE:
7112 expand_builtin_sync_synchronize ();
b6a5fc45 7113 return const0_rtx;
7114
1cd6e20d 7115 case BUILT_IN_ATOMIC_EXCHANGE_1:
7116 case BUILT_IN_ATOMIC_EXCHANGE_2:
7117 case BUILT_IN_ATOMIC_EXCHANGE_4:
7118 case BUILT_IN_ATOMIC_EXCHANGE_8:
7119 case BUILT_IN_ATOMIC_EXCHANGE_16:
7120 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7121 target = expand_builtin_atomic_exchange (mode, exp, target);
7122 if (target)
7123 return target;
7124 break;
7125
7126 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7127 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7128 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7129 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7130 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 7131 {
7132 unsigned int nargs, z;
f1f41a6c 7133 vec<tree, va_gc> *vec;
2c201ad1 7134
7135 mode =
7136 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7137 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7138 if (target)
7139 return target;
7140
7141 /* If this is turned into an external library call, the weak parameter
7142 must be dropped to match the expected parameter list. */
7143 nargs = call_expr_nargs (exp);
f1f41a6c 7144 vec_alloc (vec, nargs - 1);
2c201ad1 7145 for (z = 0; z < 3; z++)
f1f41a6c 7146 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7147 /* Skip the boolean weak parameter. */
7148 for (z = 4; z < 6; z++)
f1f41a6c 7149 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 7150 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7151 break;
7152 }
1cd6e20d 7153
7154 case BUILT_IN_ATOMIC_LOAD_1:
7155 case BUILT_IN_ATOMIC_LOAD_2:
7156 case BUILT_IN_ATOMIC_LOAD_4:
7157 case BUILT_IN_ATOMIC_LOAD_8:
7158 case BUILT_IN_ATOMIC_LOAD_16:
7159 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7160 target = expand_builtin_atomic_load (mode, exp, target);
7161 if (target)
7162 return target;
7163 break;
7164
7165 case BUILT_IN_ATOMIC_STORE_1:
7166 case BUILT_IN_ATOMIC_STORE_2:
7167 case BUILT_IN_ATOMIC_STORE_4:
7168 case BUILT_IN_ATOMIC_STORE_8:
7169 case BUILT_IN_ATOMIC_STORE_16:
7170 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7171 target = expand_builtin_atomic_store (mode, exp);
7172 if (target)
7173 return const0_rtx;
7174 break;
7175
7176 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7177 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7178 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7179 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7180 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7181 {
7182 enum built_in_function lib;
7183 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7184 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7185 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7186 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7187 ignore, lib);
7188 if (target)
7189 return target;
7190 break;
7191 }
7192 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7193 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7194 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7195 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7196 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7197 {
7198 enum built_in_function lib;
7199 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7200 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7201 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7202 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7203 ignore, lib);
7204 if (target)
7205 return target;
7206 break;
7207 }
7208 case BUILT_IN_ATOMIC_AND_FETCH_1:
7209 case BUILT_IN_ATOMIC_AND_FETCH_2:
7210 case BUILT_IN_ATOMIC_AND_FETCH_4:
7211 case BUILT_IN_ATOMIC_AND_FETCH_8:
7212 case BUILT_IN_ATOMIC_AND_FETCH_16:
7213 {
7214 enum built_in_function lib;
7215 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7216 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7217 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7218 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7219 ignore, lib);
7220 if (target)
7221 return target;
7222 break;
7223 }
7224 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7225 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7226 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7227 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7228 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7229 {
7230 enum built_in_function lib;
7231 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7232 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7233 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7234 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7235 ignore, lib);
7236 if (target)
7237 return target;
7238 break;
7239 }
7240 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7241 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7242 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7243 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7244 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7245 {
7246 enum built_in_function lib;
7247 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7248 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7249 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7250 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7251 ignore, lib);
7252 if (target)
7253 return target;
7254 break;
7255 }
7256 case BUILT_IN_ATOMIC_OR_FETCH_1:
7257 case BUILT_IN_ATOMIC_OR_FETCH_2:
7258 case BUILT_IN_ATOMIC_OR_FETCH_4:
7259 case BUILT_IN_ATOMIC_OR_FETCH_8:
7260 case BUILT_IN_ATOMIC_OR_FETCH_16:
7261 {
7262 enum built_in_function lib;
7263 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7264 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7265 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7266 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7267 ignore, lib);
7268 if (target)
7269 return target;
7270 break;
7271 }
7272 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7273 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7274 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7275 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7276 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7277 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7278 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7279 ignore, BUILT_IN_NONE);
7280 if (target)
7281 return target;
7282 break;
7283
7284 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7285 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7286 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7287 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7288 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7289 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7290 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7291 ignore, BUILT_IN_NONE);
7292 if (target)
7293 return target;
7294 break;
7295
7296 case BUILT_IN_ATOMIC_FETCH_AND_1:
7297 case BUILT_IN_ATOMIC_FETCH_AND_2:
7298 case BUILT_IN_ATOMIC_FETCH_AND_4:
7299 case BUILT_IN_ATOMIC_FETCH_AND_8:
7300 case BUILT_IN_ATOMIC_FETCH_AND_16:
7301 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7302 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7303 ignore, BUILT_IN_NONE);
7304 if (target)
7305 return target;
7306 break;
7307
7308 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7309 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7310 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7311 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7312 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7313 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7314 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7315 ignore, BUILT_IN_NONE);
7316 if (target)
7317 return target;
7318 break;
7319
7320 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7321 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7322 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7323 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7324 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7325 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7326 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7327 ignore, BUILT_IN_NONE);
7328 if (target)
7329 return target;
7330 break;
7331
7332 case BUILT_IN_ATOMIC_FETCH_OR_1:
7333 case BUILT_IN_ATOMIC_FETCH_OR_2:
7334 case BUILT_IN_ATOMIC_FETCH_OR_4:
7335 case BUILT_IN_ATOMIC_FETCH_OR_8:
7336 case BUILT_IN_ATOMIC_FETCH_OR_16:
7337 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7338 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7339 ignore, BUILT_IN_NONE);
7340 if (target)
7341 return target;
7342 break;
10b744a3 7343
7344 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 7345 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 7346
7347 case BUILT_IN_ATOMIC_CLEAR:
7348 return expand_builtin_atomic_clear (exp);
1cd6e20d 7349
7350 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7351 return expand_builtin_atomic_always_lock_free (exp);
7352
7353 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7354 target = expand_builtin_atomic_is_lock_free (exp);
7355 if (target)
7356 return target;
7357 break;
7358
7359 case BUILT_IN_ATOMIC_THREAD_FENCE:
7360 expand_builtin_atomic_thread_fence (exp);
7361 return const0_rtx;
7362
7363 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7364 expand_builtin_atomic_signal_fence (exp);
7365 return const0_rtx;
7366
0a39fd54 7367 case BUILT_IN_OBJECT_SIZE:
7368 return expand_builtin_object_size (exp);
7369
7370 case BUILT_IN_MEMCPY_CHK:
7371 case BUILT_IN_MEMPCPY_CHK:
7372 case BUILT_IN_MEMMOVE_CHK:
7373 case BUILT_IN_MEMSET_CHK:
7374 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7375 if (target)
7376 return target;
7377 break;
7378
7379 case BUILT_IN_STRCPY_CHK:
7380 case BUILT_IN_STPCPY_CHK:
7381 case BUILT_IN_STRNCPY_CHK:
1063acde 7382 case BUILT_IN_STPNCPY_CHK:
0a39fd54 7383 case BUILT_IN_STRCAT_CHK:
b356dfef 7384 case BUILT_IN_STRNCAT_CHK:
0a39fd54 7385 case BUILT_IN_SNPRINTF_CHK:
7386 case BUILT_IN_VSNPRINTF_CHK:
7387 maybe_emit_chk_warning (exp, fcode);
7388 break;
7389
7390 case BUILT_IN_SPRINTF_CHK:
7391 case BUILT_IN_VSPRINTF_CHK:
7392 maybe_emit_sprintf_chk_warning (exp, fcode);
7393 break;
7394
2c281b15 7395 case BUILT_IN_FREE:
f74ea1c2 7396 if (warn_free_nonheap_object)
7397 maybe_emit_free_warning (exp);
2c281b15 7398 break;
7399
badaa04c 7400 case BUILT_IN_THREAD_POINTER:
7401 return expand_builtin_thread_pointer (exp, target);
7402
7403 case BUILT_IN_SET_THREAD_POINTER:
7404 expand_builtin_set_thread_pointer (exp);
7405 return const0_rtx;
7406
d037099f 7407 case BUILT_IN_CILK_DETACH:
7408 expand_builtin_cilk_detach (exp);
7409 return const0_rtx;
7410
7411 case BUILT_IN_CILK_POP_FRAME:
7412 expand_builtin_cilk_pop_frame (exp);
7413 return const0_rtx;
7414
058a1b7a 7415 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7416 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7417 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7418 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7419 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7420 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7421 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7422 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7423 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7424 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7425 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7426 /* We allow user CHKP builtins if Pointer Bounds
7427 Checker is off. */
7428 if (!chkp_function_instrumented_p (current_function_decl))
7429 {
7430 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7431 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7432 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7433 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7434 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7435 return expand_normal (CALL_EXPR_ARG (exp, 0));
7436 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7437 return expand_normal (size_zero_node);
7438 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7439 return expand_normal (size_int (-1));
7440 else
7441 return const0_rtx;
7442 }
7443 /* FALLTHROUGH */
7444
7445 case BUILT_IN_CHKP_BNDMK:
7446 case BUILT_IN_CHKP_BNDSTX:
7447 case BUILT_IN_CHKP_BNDCL:
7448 case BUILT_IN_CHKP_BNDCU:
7449 case BUILT_IN_CHKP_BNDLDX:
7450 case BUILT_IN_CHKP_BNDRET:
7451 case BUILT_IN_CHKP_INTERSECT:
7452 case BUILT_IN_CHKP_NARROW:
7453 case BUILT_IN_CHKP_EXTRACT_LOWER:
7454 case BUILT_IN_CHKP_EXTRACT_UPPER:
 7455	 /* A software implementation of Pointer Bounds Checker is not yet
 7456	    implemented; target support is required.  */
 7457	 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7458 break;
7459
ca4c3545 7460 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 7461      /* Do a library call if we failed to expand the builtin when
 7462	 folding.  */
ca4c3545 7463 break;
7464
92482ee0 7465    default:	/* Just do a library call for an unknown builtin.  */
146c1b4f 7466 break;
53800dbe 7467 }
7468
7469 /* The switch statement above can drop through to cause the function
7470 to be called normally. */
7471 return expand_call (exp, target, ignore);
7472}
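
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   one consequence of the const/pure shortcut above is that a pure
   builtin whose result is ignored is never expanded; only its
   side-effecting arguments are evaluated.  The helper NEXT_INDEX is
   hypothetical.  */
#if 0
extern int next_index (void);

static void
discard (const char *s)
{
  /* Only the call to next_index () survives; the strlen vanishes.  */
  __builtin_strlen (s + next_index ());
}
#endif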
650e4c94 7473
f21337ef 7474/* Similar to expand_builtin but is used for instrumented calls. */
7475
7476rtx
7477expand_builtin_with_bounds (tree exp, rtx target,
7478 rtx subtarget ATTRIBUTE_UNUSED,
7479 machine_mode mode, int ignore)
7480{
7481 tree fndecl = get_callee_fndecl (exp);
7482 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7483
7484 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7485
7486 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7487 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7488
7489 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7490 && fcode < END_CHKP_BUILTINS);
7491
7492 switch (fcode)
7493 {
7494 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7495 target = expand_builtin_memcpy_with_bounds (exp, target);
7496 if (target)
7497 return target;
7498 break;
7499
7500 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7501 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7502 if (target)
7503 return target;
7504 break;
7505
7506 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7507 target = expand_builtin_memset_with_bounds (exp, target, mode);
7508 if (target)
7509 return target;
7510 break;
7511
7512 default:
7513 break;
7514 }
7515
7516 /* The switch statement above can drop through to cause the function
7517 to be called normally. */
7518 return expand_call (exp, target, ignore);
 7519}
7520
805e22b2 7521/* Determine whether a tree node represents a call to a built-in
52203a9d 7522 function. If the tree T is a call to a built-in function with
7523 the right number of arguments of the appropriate types, return
7524 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7525 Otherwise the return value is END_BUILTINS. */
aecda0d6 7526
805e22b2 7527enum built_in_function
b7bf20db 7528builtin_mathfn_code (const_tree t)
805e22b2 7529{
b7bf20db 7530 const_tree fndecl, arg, parmlist;
7531 const_tree argtype, parmtype;
7532 const_call_expr_arg_iterator iter;
805e22b2 7533
7534 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 7535 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 7536 return END_BUILTINS;
7537
c6e6ecb1 7538 fndecl = get_callee_fndecl (t);
7539 if (fndecl == NULL_TREE
52203a9d 7540 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7541 || ! DECL_BUILT_IN (fndecl)
7542 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7543 return END_BUILTINS;
7544
52203a9d 7545 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7546 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7547 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7548 {
52203a9d 7549 /* If a function doesn't take a variable number of arguments,
7550 the last element in the list will have type `void'. */
7551 parmtype = TREE_VALUE (parmlist);
7552 if (VOID_TYPE_P (parmtype))
7553 {
b7bf20db 7554 if (more_const_call_expr_args_p (&iter))
52203a9d 7555 return END_BUILTINS;
7556 return DECL_FUNCTION_CODE (fndecl);
7557 }
7558
b7bf20db 7559 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7560 return END_BUILTINS;
48e1416a 7561
b7bf20db 7562 arg = next_const_call_expr_arg (&iter);
c2f47e15 7563 argtype = TREE_TYPE (arg);
52203a9d 7564
7565 if (SCALAR_FLOAT_TYPE_P (parmtype))
7566 {
7567 if (! SCALAR_FLOAT_TYPE_P (argtype))
7568 return END_BUILTINS;
7569 }
7570 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7571 {
7572 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7573 return END_BUILTINS;
7574 }
7575 else if (POINTER_TYPE_P (parmtype))
7576 {
7577 if (! POINTER_TYPE_P (argtype))
7578 return END_BUILTINS;
7579 }
7580 else if (INTEGRAL_TYPE_P (parmtype))
7581 {
7582 if (! INTEGRAL_TYPE_P (argtype))
7583 return END_BUILTINS;
7584 }
7585 else
e9f80ff5 7586 return END_BUILTINS;
e9f80ff5 7587 }
7588
52203a9d 7589 /* Variable-length argument list. */
805e22b2 7590 return DECL_FUNCTION_CODE (fndecl);
7591}
7592
c2f47e15 7593/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7594 evaluate to a constant. */
650e4c94 7595
7596static tree
c2f47e15 7597fold_builtin_constant_p (tree arg)
650e4c94 7598{
650e4c94 7599 /* We return 1 for a numeric type that's known to be a constant
7600 value at compile-time or for an aggregate type that's a
7601 literal constant. */
c2f47e15 7602 STRIP_NOPS (arg);
650e4c94 7603
 7604   /* If we know this is a constant, return the constant one.  */
c2f47e15 7605 if (CONSTANT_CLASS_P (arg)
7606 || (TREE_CODE (arg) == CONSTRUCTOR
7607 && TREE_CONSTANT (arg)))
650e4c94 7608 return integer_one_node;
c2f47e15 7609 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7610 {
c2f47e15 7611 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7612 if (TREE_CODE (op) == STRING_CST
7613 || (TREE_CODE (op) == ARRAY_REF
7614 && integer_zerop (TREE_OPERAND (op, 1))
7615 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7616 return integer_one_node;
7617 }
650e4c94 7618
1fb4300c 7619 /* If this expression has side effects, show we don't know it to be a
7620 constant. Likewise if it's a pointer or aggregate type since in
 7621     those cases we only want literals, since those are only optimized
f97c71a1 7622 when generating RTL, not later.
7623 And finally, if we are compiling an initializer, not code, we
7624 need to return a definite result now; there's not going to be any
7625 more optimization done. */
c2f47e15 7626 if (TREE_SIDE_EFFECTS (arg)
7627 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7628 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7629 || cfun == 0
0b049e15 7630 || folding_initializer
7631 || force_folding_builtin_constant_p)
650e4c94 7632 return integer_zero_node;
7633
c2f47e15 7634 return NULL_TREE;
650e4c94 7635}
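
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   the three outcomes of the folder above.  In an initializer a definite
   answer is forced, so the non-constant case folds to zero instead of
   staying unresolved.  The variable X is hypothetical.  */
#if 0
extern int x;
int k1 = __builtin_constant_p (42);	/* Folds to 1.  */
int k2 = __builtin_constant_p ("abc");	/* Folds to 1.  */
int k3 = __builtin_constant_p (x);	/* Initializer context: folds to 0.  */
#endif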
7636
76f5a783 7637/* Create builtin_expect with PRED and EXPECTED as its arguments and
7638 return it as a truthvalue. */
4ee9c684 7639
7640static tree
c83059be 7641build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7642 tree predictor)
4ee9c684 7643{
76f5a783 7644 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7645
b9a16870 7646 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7647 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7648 ret_type = TREE_TYPE (TREE_TYPE (fn));
7649 pred_type = TREE_VALUE (arg_types);
7650 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7651
389dd41b 7652 pred = fold_convert_loc (loc, pred_type, pred);
7653 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7654 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7655 predictor);
76f5a783 7656
7657 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7658 build_int_cst (ret_type, 0));
7659}
7660
7661/* Fold a call to builtin_expect with arguments ARG0 and ARG1. Return
7662 NULL_TREE if no simplification is possible. */
7663
c83059be 7664tree
7665fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
76f5a783 7666{
083bada9 7667 tree inner, fndecl, inner_arg0;
76f5a783 7668 enum tree_code code;
7669
083bada9 7670 /* Distribute the expected value over short-circuiting operators.
7671 See through the cast from truthvalue_type_node to long. */
7672 inner_arg0 = arg0;
d09ef31a 7673 while (CONVERT_EXPR_P (inner_arg0)
083bada9 7674 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7675 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7676 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7677
76f5a783 7678 /* If this is a builtin_expect within a builtin_expect keep the
7679 inner one. See through a comparison against a constant. It
 7680     might have been added to create a truthvalue.  */
083bada9 7681 inner = inner_arg0;
7682
76f5a783 7683 if (COMPARISON_CLASS_P (inner)
7684 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7685 inner = TREE_OPERAND (inner, 0);
7686
7687 if (TREE_CODE (inner) == CALL_EXPR
7688 && (fndecl = get_callee_fndecl (inner))
7689 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7690 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7691 return arg0;
7692
083bada9 7693 inner = inner_arg0;
76f5a783 7694 code = TREE_CODE (inner);
7695 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7696 {
7697 tree op0 = TREE_OPERAND (inner, 0);
7698 tree op1 = TREE_OPERAND (inner, 1);
7699
c83059be 7700 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7701 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
76f5a783 7702 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7703
389dd41b 7704 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 7705 }
7706
7707 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 7708 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 7709 return NULL_TREE;
4ee9c684 7710
76f5a783 7711 /* If we expect that a comparison against the argument will fold to
7712 a constant return the constant. In practice, this means a true
7713 constant or the address of a non-weak symbol. */
083bada9 7714 inner = inner_arg0;
4ee9c684 7715 STRIP_NOPS (inner);
7716 if (TREE_CODE (inner) == ADDR_EXPR)
7717 {
7718 do
7719 {
7720 inner = TREE_OPERAND (inner, 0);
7721 }
7722 while (TREE_CODE (inner) == COMPONENT_REF
7723 || TREE_CODE (inner) == ARRAY_REF);
53e9c5c4 7724 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
c2f47e15 7725 return NULL_TREE;
4ee9c684 7726 }
7727
76f5a783 7728 /* Otherwise, ARG0 already has the proper type for the return value. */
7729 return arg0;
4ee9c684 7730}
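
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   the short-circuit distribution above rewrites a combined expectation
   into one expectation per operand.  The helper is hypothetical.  */
#if 0
static int
both_nonnull (void *p, void *q)
{
  /* Folded into the equivalent of
       __builtin_expect (p != 0, 1) && __builtin_expect (q != 0, 1).  */
  return __builtin_expect (p != 0 && q != 0, 1);
}
#endif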
7731
c2f47e15 7732/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7733
539a3a92 7734static tree
c2f47e15 7735fold_builtin_classify_type (tree arg)
539a3a92 7736{
c2f47e15 7737 if (arg == 0)
7002a1c8 7738 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7739
7002a1c8 7740 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7741}
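
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   classification folds to an integer class constant for any argument
   type, so both initializers below are compile-time constants.  */
#if 0
static const int int_class = __builtin_classify_type (0);	   /* Integer class.  */
static const int ptr_class = __builtin_classify_type ((void *) 0); /* Pointer class.  */
#endif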
7742
c2f47e15 7743/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7744
7745static tree
c7cbde74 7746fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7747{
c2f47e15 7748 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7749 return NULL_TREE;
7750 else
7751 {
c2f47e15 7752 tree len = c_strlen (arg, 0);
e6e27594 7753
7754 if (len)
c7cbde74 7755 return fold_convert_loc (loc, type, len);
e6e27594 7756
7757 return NULL_TREE;
7758 }
7759}
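
/* Editorial sketch (not part of builtins.c), guarded out of the build:
   c_strlen can measure a string literal, so the call below folds to the
   constant 5 with no library call emitted.  */
#if 0
static const unsigned long greeting_len = __builtin_strlen ("hello");
#endif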
7760
92c43e3c 7761/* Fold a call to __builtin_inf or __builtin_huge_val. */
7762
7763static tree
389dd41b 7764fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7765{
aa870c1b 7766 REAL_VALUE_TYPE real;
7767
40f4dbd5 7768 /* __builtin_inff is intended to be usable to define INFINITY on all
7769 targets. If an infinity is not available, INFINITY expands "to a
7770 positive constant of type float that overflows at translation
7771 time", footnote "In this case, using INFINITY will violate the
7772 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7773 Thus we pedwarn to ensure this constraint violation is
7774 diagnosed. */
92c43e3c 7775 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7776 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7777
aa870c1b 7778 real_inf (&real);
7779 return build_real (type, real);
92c43e3c 7780}
7781
d735c391 7782/* Fold function call to builtin sincos, sincosf, or sincosl. Return
7783 NULL_TREE if no simplification can be made. */
7784
7785static tree
389dd41b 7786fold_builtin_sincos (location_t loc,
7787 tree arg0, tree arg1, tree arg2)
d735c391 7788{
c2f47e15 7789 tree type;
6c21be92 7790 tree fndecl, call = NULL_TREE;
d735c391 7791
c2f47e15 7792 if (!validate_arg (arg0, REAL_TYPE)
7793 || !validate_arg (arg1, POINTER_TYPE)
7794 || !validate_arg (arg2, POINTER_TYPE))
d735c391 7795 return NULL_TREE;
7796
d735c391 7797 type = TREE_TYPE (arg0);
d735c391 7798
7799 /* Canonicalize sincos to cexpi. */
e3240774 7800 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
6c21be92 7801 if (fn == END_BUILTINS)
d735c391 7802 return NULL_TREE;
7803
6c21be92 7804 /* Calculate the result when the argument is a constant. */
7805 if (TREE_CODE (arg0) == REAL_CST)
7806 {
7807 tree complex_type = build_complex_type (type);
744fe358 7808 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
6c21be92 7809 }
7810 if (!call)
7811 {
7812 if (!targetm.libc_has_function (function_c99_math_complex)
7813 || !builtin_decl_implicit_p (fn))
7814 return NULL_TREE;
7815 fndecl = builtin_decl_explicit (fn);
7816 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7817 call = builtin_save_expr (call);
7818 }
d735c391 7819
a75b1c71 7820 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 7821 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7822 build_fold_indirect_ref_loc (loc, arg1),
6c21be92 7823 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
d735c391 7824 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7825 build_fold_indirect_ref_loc (loc, arg2),
6c21be92 7826 fold_build1_loc (loc, REALPART_EXPR, type, call)));
d735c391 7827}
7828
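/* Illustrative example (not part of the original source): on a target
   whose libc provides the C99 complex functions, the folder above turns
     sincos (x, &s, &c)
   into roughly
     tmp = cexpi (x); s = __imag__ tmp, c = __real__ tmp;
   since cexpi (x) computes cos (x) + i*sin (x) in a single call.  */
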
c2f47e15 7829/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7830 Return NULL_TREE if no simplification can be made. */
9c8a1629 7831
7832static tree
389dd41b 7833fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 7834{
c2f47e15 7835 if (!validate_arg (arg1, POINTER_TYPE)
7836 || !validate_arg (arg2, POINTER_TYPE)
7837 || !validate_arg (len, INTEGER_TYPE))
7838 return NULL_TREE;
9c8a1629 7839
7840 /* If the LEN parameter is zero, return zero. */
7841 if (integer_zerop (len))
389dd41b 7842 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 7843 arg1, arg2);
9c8a1629 7844
7845 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7846 if (operand_equal_p (arg1, arg2, 0))
389dd41b 7847 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 7848
c4fef134 7849 /* If the LEN parameter is one, return an expression corresponding to
7850 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
e913b5cd 7851 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 7852 {
7853 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 7854 tree cst_uchar_ptr_node
7855 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7856
389dd41b 7857 tree ind1
7858 = fold_convert_loc (loc, integer_type_node,
7859 build1 (INDIRECT_REF, cst_uchar_node,
7860 fold_convert_loc (loc,
7861 cst_uchar_ptr_node,
c4fef134 7862 arg1)));
389dd41b 7863 tree ind2
7864 = fold_convert_loc (loc, integer_type_node,
7865 build1 (INDIRECT_REF, cst_uchar_node,
7866 fold_convert_loc (loc,
7867 cst_uchar_ptr_node,
c4fef134 7868 arg2)));
389dd41b 7869 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 7870 }
9c8a1629 7871
c2f47e15 7872 return NULL_TREE;
9c8a1629 7873}
7874
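/* Illustrative examples (not part of the original source) of the folds
   performed above:
     memcmp (p, q, 0)  ->  0  (p and q retained for side effects)
     memcmp (p, p, n)  ->  0  (n retained for side effects)
     memcmp (p, q, 1)  ->  *(const unsigned char *) p
                             - *(const unsigned char *) q  */
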
c2f47e15 7875/* Fold a call to builtin isascii with argument ARG. */
d49367d4 7876
7877static tree
389dd41b 7878fold_builtin_isascii (location_t loc, tree arg)
d49367d4 7879{
c2f47e15 7880 if (!validate_arg (arg, INTEGER_TYPE))
7881 return NULL_TREE;
d49367d4 7882 else
7883 {
7884 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 7885 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 7886 build_int_cst (integer_type_node,
c90b5d40 7887 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 7888 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 7889 arg, integer_zero_node);
d49367d4 7890 }
7891}
7892
c2f47e15 7893/* Fold a call to builtin toascii with argument ARG. */
d49367d4 7894
7895static tree
389dd41b 7896fold_builtin_toascii (location_t loc, tree arg)
d49367d4 7897{
c2f47e15 7898 if (!validate_arg (arg, INTEGER_TYPE))
7899 return NULL_TREE;
48e1416a 7900
c2f47e15 7901 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 7902 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 7903 build_int_cst (integer_type_node, 0x7f));
d49367d4 7904}
7905
c2f47e15 7906/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 7907
7908static tree
389dd41b 7909fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 7910{
c2f47e15 7911 if (!validate_arg (arg, INTEGER_TYPE))
7912 return NULL_TREE;
df1cf42e 7913 else
7914 {
7915 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 7916 /* According to the C standard, isdigit is unaffected by locale.
7917 However, it definitely is affected by the target character set. */
624d37a6 7918 unsigned HOST_WIDE_INT target_digit0
7919 = lang_hooks.to_target_charset ('0');
7920
7921 if (target_digit0 == 0)
7922 return NULL_TREE;
7923
389dd41b 7924 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 7925 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7926 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 7927 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 7928 build_int_cst (unsigned_type_node, 9));
df1cf42e 7929 }
7930}
27f261ef 7931
c2f47e15 7932/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 7933
7934static tree
389dd41b 7935fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 7936{
c2f47e15 7937 if (!validate_arg (arg, REAL_TYPE))
7938 return NULL_TREE;
d1aade50 7939
389dd41b 7940 arg = fold_convert_loc (loc, type, arg);
389dd41b 7941 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 7942}
7943
c2f47e15 7944/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 7945
7946static tree
389dd41b 7947fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 7948{
c2f47e15 7949 if (!validate_arg (arg, INTEGER_TYPE))
7950 return NULL_TREE;
d1aade50 7951
389dd41b 7952 arg = fold_convert_loc (loc, type, arg);
389dd41b 7953 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 7954}
7955
b9be572e 7956/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7957
7958static tree
7959fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7960{
866b3d58 7961 /* ??? Only expand to FMA_EXPR if it's directly supported. */
b9be572e 7962 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 7963 && validate_arg (arg1, REAL_TYPE)
866b3d58 7964 && validate_arg (arg2, REAL_TYPE)
7965 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7966 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
b9be572e 7967
b9be572e 7968 return NULL_TREE;
7969}
7970
abe4dcf6 7971/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7972
7973static tree
389dd41b 7974fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 7975{
239d491a 7976 if (validate_arg (arg, COMPLEX_TYPE)
7977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 7978 {
7979 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 7980
abe4dcf6 7981 if (atan2_fn)
7982 {
c2f47e15 7983 tree new_arg = builtin_save_expr (arg);
389dd41b 7984 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7985 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7986 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 7987 }
7988 }
48e1416a 7989
abe4dcf6 7990 return NULL_TREE;
7991}
7992
3838b9ae 7993/* Fold a call to builtin frexp, we can assume the base is 2. */
7994
7995static tree
389dd41b 7996fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 7997{
7998 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7999 return NULL_TREE;
48e1416a 8000
3838b9ae 8001 STRIP_NOPS (arg0);
48e1416a 8002
3838b9ae 8003 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8004 return NULL_TREE;
48e1416a 8005
389dd41b 8006 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 8007
8008 /* Proceed if a valid pointer type was passed in. */
8009 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8010 {
8011 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8012 tree frac, exp;
48e1416a 8013
3838b9ae 8014 switch (value->cl)
8015 {
8016 case rvc_zero:
8017 /* For +-0, return (*exp = 0, +-0). */
8018 exp = integer_zero_node;
8019 frac = arg0;
8020 break;
8021 case rvc_nan:
8022 case rvc_inf:
8023 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 8024 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 8025 case rvc_normal:
8026 {
8027 /* Since the frexp function always expects base 2, and in
8028 GCC normalized significands are already in the range
8029 [0.5, 1.0), we have exactly what frexp wants. */
8030 REAL_VALUE_TYPE frac_rvt = *value;
8031 SET_REAL_EXP (&frac_rvt, 0);
8032 frac = build_real (rettype, frac_rvt);
7002a1c8 8033 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 8034 }
8035 break;
8036 default:
8037 gcc_unreachable ();
8038 }
48e1416a 8039
3838b9ae 8040 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
389dd41b 8041 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 8042 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8043 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 8044 }
8045
8046 return NULL_TREE;
8047}
8048
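/* Illustrative example (not part of the original source): for a constant
   argument the folder above yields a compound expression, e.g.
     __builtin_frexp (8.0, &e)  ->  (*&e = 4, 0.5)
   because 8.0 == 0.5 * 2**4 and the normalized significand already lies
   in [0.5, 1.0).  */
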
ebf8b4f5 8049/* Fold a call to builtin modf. */
8050
8051static tree
389dd41b 8052fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 8053{
8054 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8055 return NULL_TREE;
48e1416a 8056
ebf8b4f5 8057 STRIP_NOPS (arg0);
48e1416a 8058
ebf8b4f5 8059 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8060 return NULL_TREE;
48e1416a 8061
389dd41b 8062 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 8063
8064 /* Proceed if a valid pointer type was passed in. */
8065 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8066 {
8067 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8068 REAL_VALUE_TYPE trunc, frac;
8069
8070 switch (value->cl)
8071 {
8072 case rvc_nan:
8073 case rvc_zero:
8074 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8075 trunc = frac = *value;
8076 break;
8077 case rvc_inf:
8078 /* For +-Inf, return (*arg1 = arg0, +-0). */
8079 frac = dconst0;
8080 frac.sign = value->sign;
8081 trunc = *value;
8082 break;
8083 case rvc_normal:
8084 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8085 real_trunc (&trunc, VOIDmode, value);
8086 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8087 /* If the original number was negative and already
8088 integral, then the fractional part is -0.0. */
8089 if (value->sign && frac.cl == rvc_zero)
8090 frac.sign = value->sign;
8091 break;
8092 }
48e1416a 8093
ebf8b4f5 8094 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 8095 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 8096 build_real (rettype, trunc));
8097 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 8098 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 8099 build_real (rettype, frac));
8100 }
48e1416a 8101
ebf8b4f5 8102 return NULL_TREE;
8103}
8104
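/* Illustrative examples (not part of the original source) of the constant
   folds performed above:
     modf (3.25, &ip)  ->  (*&ip = 3.0, 0.25)
     modf (-2.0, &ip)  ->  (*&ip = -2.0, -0.0)
   the second showing the negative-zero fractional part for a negative
   integral argument.  */
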
a65c4d64 8105/* Given a location LOC, an interclass builtin function decl FNDECL
8106 and its single argument ARG, return a folded expression computing
8107 the same, or NULL_TREE if we either couldn't or didn't want to fold
8108 (the latter happens if there's an RTL instruction available). */
8109
8110static tree
8111fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8112{
3754d046 8113 machine_mode mode;
a65c4d64 8114
8115 if (!validate_arg (arg, REAL_TYPE))
8116 return NULL_TREE;
8117
8118 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8119 return NULL_TREE;
8120
8121 mode = TYPE_MODE (TREE_TYPE (arg));
8122
7f38718f 8123 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8124
a65c4d64 8125 /* If there is no optab, try generic code. */
8126 switch (DECL_FUNCTION_CODE (fndecl))
8127 {
8128 tree result;
8129
8130 CASE_FLT_FN (BUILT_IN_ISINF):
8131 {
8132 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
b9a16870 8133 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7f38718f 8134 tree type = TREE_TYPE (arg);
a65c4d64 8135 REAL_VALUE_TYPE r;
8136 char buf[128];
8137
7f38718f 8138 if (is_ibm_extended)
8139 {
8140 /* NaN and Inf are encoded in the high-order double value
8141 only. The low-order value is not significant. */
8142 type = double_type_node;
8143 mode = DFmode;
8144 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8145 }
a65c4d64 8146 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8147 real_from_string (&r, buf);
8148 result = build_call_expr (isgr_fn, 2,
8149 fold_build1_loc (loc, ABS_EXPR, type, arg),
8150 build_real (type, r));
8151 return result;
8152 }
8153 CASE_FLT_FN (BUILT_IN_FINITE):
8154 case BUILT_IN_ISFINITE:
8155 {
8156 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
b9a16870 8157 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7f38718f 8158 tree type = TREE_TYPE (arg);
a65c4d64 8159 REAL_VALUE_TYPE r;
8160 char buf[128];
8161
7f38718f 8162 if (is_ibm_extended)
8163 {
8164 /* NaN and Inf are encoded in the high-order double value
8165 only. The low-order value is not significant. */
8166 type = double_type_node;
8167 mode = DFmode;
8168 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8169 }
a65c4d64 8170 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8171 real_from_string (&r, buf);
8172 result = build_call_expr (isle_fn, 2,
8173 fold_build1_loc (loc, ABS_EXPR, type, arg),
8174 build_real (type, r));
8175 /*result = fold_build2_loc (loc, UNGT_EXPR,
8176 TREE_TYPE (TREE_TYPE (fndecl)),
8177 fold_build1_loc (loc, ABS_EXPR, type, arg),
8178 build_real (type, r));
8179 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8180 TREE_TYPE (TREE_TYPE (fndecl)),
8181 result);*/
8182 return result;
8183 }
8184 case BUILT_IN_ISNORMAL:
8185 {
8186 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8187 islessequal(fabs(x),DBL_MAX). */
b9a16870 8188 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7f38718f 8189 tree type = TREE_TYPE (arg);
8190 tree orig_arg, max_exp, min_exp;
8191 machine_mode orig_mode = mode;
a65c4d64 8192 REAL_VALUE_TYPE rmax, rmin;
8193 char buf[128];
8194
7f38718f 8195 orig_arg = arg = builtin_save_expr (arg);
8196 if (is_ibm_extended)
8197 {
8198 /* Use double to test the normal range of IBM extended
8199 precision. Emin for IBM extended precision is
8200 different to emin for IEEE double, being 53 higher
8201 since the low double exponent is at least 53 lower
8202 than the high double exponent. */
8203 type = double_type_node;
8204 mode = DFmode;
8205 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8206 }
8207 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8208
a65c4d64 8209 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8210 real_from_string (&rmax, buf);
7f38718f 8211 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
a65c4d64 8212 real_from_string (&rmin, buf);
7f38718f 8213 max_exp = build_real (type, rmax);
8214 min_exp = build_real (type, rmin);
8215
8216 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8217 if (is_ibm_extended)
8218 {
8219 /* Testing the high end of the range is done just using
8220 the high double, using the same test as isfinite().
8221 For the subnormal end of the range we first test the
8222 high double, then if its magnitude is equal to the
8223 limit of 0x1p-969, we test whether the low double is
8224 non-zero and opposite sign to the high double. */
8225 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8226 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8227 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8228 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8229 arg, min_exp);
8230 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8231 complex_double_type_node, orig_arg);
8232 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8233 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8234 tree zero = build_real (type, dconst0);
8235 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8236 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8237 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8238 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8239 fold_build3 (COND_EXPR,
8240 integer_type_node,
8241 hilt, logt, lolt));
8242 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8243 eq_min, ok_lo);
8244 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8245 gt_min, eq_min);
8246 }
8247 else
8248 {
8249 tree const isge_fn
8250 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8251 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8252 }
8253 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8254 max_exp, min_exp);
a65c4d64 8255 return result;
8256 }
8257 default:
8258 break;
8259 }
8260
8261 return NULL_TREE;
8262}
8263
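/* Illustrative example (not part of the original source): when no RTL
   instruction implements the predicate, the generic lowering above
   rewrites, for a double argument,
     __builtin_isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     __builtin_isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
   with the DBL_MAX constant obtained via get_max_float.  */
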
726069ba 8264/* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign
c2f47e15 8265 or __builtin_isfinite. ARG is the argument for the call. */
726069ba 8266
8267static tree
389dd41b 8268fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 8269{
726069ba 8270 tree type = TREE_TYPE (TREE_TYPE (fndecl));
726069ba 8271
c2f47e15 8272 if (!validate_arg (arg, REAL_TYPE))
d43cee80 8273 return NULL_TREE;
726069ba 8274
726069ba 8275 switch (builtin_index)
8276 {
8277 case BUILT_IN_ISINF:
fe994837 8278 if (!HONOR_INFINITIES (arg))
389dd41b 8279 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 8280
726069ba 8281 return NULL_TREE;
8282
c319d56a 8283 case BUILT_IN_ISINF_SIGN:
8284 {
8285 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8286 /* In a boolean context, GCC will fold the inner COND_EXPR to
8287 1. So e.g. "if (isinf_sign(x))" would be folded to just
8288 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
6cfc7001 8289 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
b9a16870 8290 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 8291 tree tmp = NULL_TREE;
8292
8293 arg = builtin_save_expr (arg);
8294
8295 if (signbit_fn && isinf_fn)
8296 {
389dd41b 8297 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8298 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 8299
389dd41b 8300 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 8301 signbit_call, integer_zero_node);
389dd41b 8302 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 8303 isinf_call, integer_zero_node);
48e1416a 8304
389dd41b 8305 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 8306 integer_minus_one_node, integer_one_node);
389dd41b 8307 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8308 isinf_call, tmp,
c319d56a 8309 integer_zero_node);
8310 }
8311
8312 return tmp;
8313 }
8314
cde061c1 8315 case BUILT_IN_ISFINITE:
93633022 8316 if (!HONOR_NANS (arg)
fe994837 8317 && !HONOR_INFINITIES (arg))
389dd41b 8318 return omit_one_operand_loc (loc, type, integer_one_node, arg);
726069ba 8319
726069ba 8320 return NULL_TREE;
8321
8322 case BUILT_IN_ISNAN:
93633022 8323 if (!HONOR_NANS (arg))
389dd41b 8324 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 8325
7f38718f 8326 {
8327 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8328 if (is_ibm_extended)
8329 {
8330 /* NaN and Inf are encoded in the high-order double value
8331 only. The low-order value is not significant. */
8332 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8333 }
8334 }
726069ba 8335 arg = builtin_save_expr (arg);
389dd41b 8336 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
726069ba 8337
8338 default:
64db345d 8339 gcc_unreachable ();
726069ba 8340 }
8341}
8342
19fbe3a4 8343/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8344 This builtin will generate code to return the appropriate floating
8345 point classification depending on the value of the floating point
8346 number passed in. The possible return values must be supplied as
921b27c0 8347 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
19fbe3a4 8348 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis stands for exactly
8349 one floating point argument, which is "type generic". */
8350
8351static tree
9d884767 8352fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
19fbe3a4 8353{
921b27c0 8354 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8355 arg, type, res, tmp;
3754d046 8356 machine_mode mode;
19fbe3a4 8357 REAL_VALUE_TYPE r;
8358 char buf[128];
48e1416a 8359
19fbe3a4 8360 /* Verify the required arguments in the original call. */
9d884767 8361 if (nargs != 6
8362 || !validate_arg (args[0], INTEGER_TYPE)
8363 || !validate_arg (args[1], INTEGER_TYPE)
8364 || !validate_arg (args[2], INTEGER_TYPE)
8365 || !validate_arg (args[3], INTEGER_TYPE)
8366 || !validate_arg (args[4], INTEGER_TYPE)
8367 || !validate_arg (args[5], REAL_TYPE))
19fbe3a4 8368 return NULL_TREE;
48e1416a 8369
9d884767 8370 fp_nan = args[0];
8371 fp_infinite = args[1];
8372 fp_normal = args[2];
8373 fp_subnormal = args[3];
8374 fp_zero = args[4];
8375 arg = args[5];
19fbe3a4 8376 type = TREE_TYPE (arg);
8377 mode = TYPE_MODE (type);
389dd41b 8378 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
19fbe3a4 8379
48e1416a 8380 /* fpclassify(x) ->
19fbe3a4 8381 isnan(x) ? FP_NAN :
921b27c0 8382 (fabs(x) == Inf ? FP_INFINITE :
19fbe3a4 8383 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8384 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
48e1416a 8385
389dd41b 8386 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 8387 build_real (type, dconst0));
389dd41b 8388 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8389 tmp, fp_zero, fp_subnormal);
19fbe3a4 8390
8391 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8392 real_from_string (&r, buf);
389dd41b 8393 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8394 arg, build_real (type, r));
8395 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
48e1416a 8396
19fbe3a4 8397 if (HONOR_INFINITIES (mode))
8398 {
8399 real_inf (&r);
389dd41b 8400 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 8401 build_real (type, r));
389dd41b 8402 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8403 fp_infinite, res);
19fbe3a4 8404 }
8405
8406 if (HONOR_NANS (mode))
8407 {
389dd41b 8408 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8409 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
19fbe3a4 8410 }
48e1416a 8411
19fbe3a4 8412 return res;
8413}
8414
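/* Illustrative example (an assumption about typical libc headers, not
   part of the original source): <math.h> commonly defines
     fpclassify (x)
   as
     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)
   which the folder above lowers to nested COND_EXPRs testing, in turn,
   x == 0, fabs (x) >= DBL_MIN, fabs (x) == Inf, and x ordered.  */
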
9bc9f15f 8415/* Fold a call to an unordered comparison function such as
d5019fe8 8416 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 8417 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 8418 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8419 the opposite of the desired result. UNORDERED_CODE is used
8420 for modes that can hold NaNs and ORDERED_CODE is used for
8421 the rest. */
9bc9f15f 8422
8423static tree
389dd41b 8424fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 8425 enum tree_code unordered_code,
8426 enum tree_code ordered_code)
8427{
859f903a 8428 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 8429 enum tree_code code;
6978db0d 8430 tree type0, type1;
8431 enum tree_code code0, code1;
8432 tree cmp_type = NULL_TREE;
9bc9f15f 8433
6978db0d 8434 type0 = TREE_TYPE (arg0);
8435 type1 = TREE_TYPE (arg1);
a0c938f0 8436
6978db0d 8437 code0 = TREE_CODE (type0);
8438 code1 = TREE_CODE (type1);
a0c938f0 8439
6978db0d 8440 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8441 /* Choose the wider of two real types. */
8442 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8443 ? type0 : type1;
8444 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8445 cmp_type = type0;
8446 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8447 cmp_type = type1;
a0c938f0 8448
389dd41b 8449 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8450 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 8451
8452 if (unordered_code == UNORDERED_EXPR)
8453 {
93633022 8454 if (!HONOR_NANS (arg0))
389dd41b 8455 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8456 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 8457 }
9bc9f15f 8458
93633022 8459 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 8460 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8461 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 8462}
8463
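/* Illustrative example (not part of the original source): with NaNs
   honored,
     __builtin_isgreater (x, y)
   folds to the negated unordered comparison
     !(x UNLE y)
   which is true exactly when both operands are ordered and x > y; when
   the operands cannot be NaN it degrades to the plain !(x <= y).  */
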
0c93c8a9 8464/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8465 arithmetic if it can never overflow, or into internal functions that
8466 return both the result of the arithmetic and an overflow boolean flag in
732905bb 8467 a complex integer result, or some other check for overflow.
8468 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8469 checking part of that. */
0c93c8a9 8470
8471static tree
8472fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8473 tree arg0, tree arg1, tree arg2)
8474{
8475 enum internal_fn ifn = IFN_LAST;
732905bb 8476 /* The code of the expression corresponding to the type-generic
8477 built-in, or ERROR_MARK for the type-specific ones. */
8478 enum tree_code opcode = ERROR_MARK;
8479 bool ovf_only = false;
8480
0c93c8a9 8481 switch (fcode)
8482 {
732905bb 8483 case BUILT_IN_ADD_OVERFLOW_P:
8484 ovf_only = true;
8485 /* FALLTHRU */
0c93c8a9 8486 case BUILT_IN_ADD_OVERFLOW:
732905bb 8487 opcode = PLUS_EXPR;
8488 /* FALLTHRU */
0c93c8a9 8489 case BUILT_IN_SADD_OVERFLOW:
8490 case BUILT_IN_SADDL_OVERFLOW:
8491 case BUILT_IN_SADDLL_OVERFLOW:
8492 case BUILT_IN_UADD_OVERFLOW:
8493 case BUILT_IN_UADDL_OVERFLOW:
8494 case BUILT_IN_UADDLL_OVERFLOW:
8495 ifn = IFN_ADD_OVERFLOW;
8496 break;
732905bb 8497 case BUILT_IN_SUB_OVERFLOW_P:
8498 ovf_only = true;
8499 /* FALLTHRU */
0c93c8a9 8500 case BUILT_IN_SUB_OVERFLOW:
732905bb 8501 opcode = MINUS_EXPR;
8502 /* FALLTHRU */
0c93c8a9 8503 case BUILT_IN_SSUB_OVERFLOW:
8504 case BUILT_IN_SSUBL_OVERFLOW:
8505 case BUILT_IN_SSUBLL_OVERFLOW:
8506 case BUILT_IN_USUB_OVERFLOW:
8507 case BUILT_IN_USUBL_OVERFLOW:
8508 case BUILT_IN_USUBLL_OVERFLOW:
8509 ifn = IFN_SUB_OVERFLOW;
8510 break;
732905bb 8511 case BUILT_IN_MUL_OVERFLOW_P:
8512 ovf_only = true;
8513 /* FALLTHRU */
0c93c8a9 8514 case BUILT_IN_MUL_OVERFLOW:
732905bb 8515 opcode = MULT_EXPR;
8516 /* FALLTHRU */
0c93c8a9 8517 case BUILT_IN_SMUL_OVERFLOW:
8518 case BUILT_IN_SMULL_OVERFLOW:
8519 case BUILT_IN_SMULLL_OVERFLOW:
8520 case BUILT_IN_UMUL_OVERFLOW:
8521 case BUILT_IN_UMULL_OVERFLOW:
8522 case BUILT_IN_UMULLL_OVERFLOW:
8523 ifn = IFN_MUL_OVERFLOW;
8524 break;
8525 default:
8526 gcc_unreachable ();
8527 }
732905bb 8528
8529 /* For the "generic" overloads, the first two arguments can have different
8530 types and the last argument determines the target type to use to check
8531 for overflow. The arguments of the other overloads all have the same
8532 type. */
8533 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8534
8535 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8536 arguments are constant, attempt to fold the built-in call into a constant
8537 expression indicating whether or not it detected an overflow. */
8538 if (ovf_only
8539 && TREE_CODE (arg0) == INTEGER_CST
8540 && TREE_CODE (arg1) == INTEGER_CST)
8541 /* Perform the computation in the target type and check for overflow. */
8542 return omit_one_operand_loc (loc, boolean_type_node,
8543 arith_overflowed_p (opcode, type, arg0, arg1)
8544 ? boolean_true_node : boolean_false_node,
8545 arg2);
8546
0c93c8a9 8547 tree ctype = build_complex_type (type);
8548 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8549 2, arg0, arg1);
8550 tree tgt = save_expr (call);
8551 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8552 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8553 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
732905bb 8554
8555 if (ovf_only)
8556 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8557
8558 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
0c93c8a9 8559 tree store
8560 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8561 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8562}
8563
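/* Illustrative examples (not part of the original source): with 32-bit
   int, the constant fold above turns
     __builtin_add_overflow_p (INT_MAX, 1, (int) 0)
   into the constant true, while
     __builtin_add_overflow (a, b, &r)
   becomes, in terms of the internal function,
     tmp = .ADD_OVERFLOW (a, b); r = REALPART (tmp);
   with IMAGPART (tmp), converted to bool, returned as the overflow
   flag.  */
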
c388a0cf 8564/* Fold a call to __builtin_FILE to a constant string. */
8565
8566static inline tree
8567fold_builtin_FILE (location_t loc)
8568{
8569 if (const char *fname = LOCATION_FILE (loc))
8570 return build_string_literal (strlen (fname) + 1, fname);
8571
8572 return build_string_literal (1, "");
8573}
8574
8575/* Fold a call to __builtin_FUNCTION to a constant string. */
8576
8577static inline tree
8578fold_builtin_FUNCTION ()
8579{
8580 if (current_function_decl)
8581 {
8582 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8583 return build_string_literal (strlen (name) + 1, name);
8584 }
8585
8586 return build_string_literal (1, "");
8587}
8588
8589/* Fold a call to __builtin_LINE to an integer constant. */
8590
8591static inline tree
8592fold_builtin_LINE (location_t loc, tree type)
8593{
8594 return build_int_cst (type, LOCATION_LINE (loc));
8595}
8596
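/* Illustrative example (not part of the original source): inside
     void f (void) { const char *s = __builtin_FUNCTION (); }
   the call folds to the string literal "f", while __builtin_FILE () and
   __builtin_LINE () fold to the file name and line number of the call's
   location.  */
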
c2f47e15 8597/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 8598 This function returns NULL_TREE if no simplification was possible. */
650e4c94 8599
4ee9c684 8600static tree
e80cc485 8601fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 8602{
e9f80ff5 8603 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 8604 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 8605 switch (fcode)
650e4c94 8606 {
c388a0cf 8607 case BUILT_IN_FILE:
8608 return fold_builtin_FILE (loc);
8609
8610 case BUILT_IN_FUNCTION:
8611 return fold_builtin_FUNCTION ();
8612
8613 case BUILT_IN_LINE:
8614 return fold_builtin_LINE (loc, type);
8615
c2f47e15 8616 CASE_FLT_FN (BUILT_IN_INF):
012f068a 8617 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
c2f47e15 8618 case BUILT_IN_INFD32:
8619 case BUILT_IN_INFD64:
8620 case BUILT_IN_INFD128:
389dd41b 8621 return fold_builtin_inf (loc, type, true);
7c2f0500 8622
c2f47e15 8623 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
012f068a 8624 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
389dd41b 8625 return fold_builtin_inf (loc, type, false);
7c2f0500 8626
c2f47e15 8627 case BUILT_IN_CLASSIFY_TYPE:
8628 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 8629
c2f47e15 8630 default:
8631 break;
8632 }
8633 return NULL_TREE;
8634}
7c2f0500 8635
c2f47e15 8636/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 8637 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 8638
c2f47e15 8639static tree
e80cc485 8640fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 8641{
8642 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8643 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 8644
8645 if (TREE_CODE (arg0) == ERROR_MARK)
8646 return NULL_TREE;
8647
744fe358 8648 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
6c21be92 8649 return ret;
8650
c2f47e15 8651 switch (fcode)
8652 {
650e4c94 8653 case BUILT_IN_CONSTANT_P:
7c2f0500 8654 {
c2f47e15 8655 tree val = fold_builtin_constant_p (arg0);
7c2f0500 8656
7c2f0500 8657 /* Gimplification will pull the CALL_EXPR for the builtin out of
8658 an if condition. When not optimizing, we'll not CSE it back.
8659 To avoid link error types of regressions, return false now. */
8660 if (!val && !optimize)
8661 val = integer_zero_node;
8662
8663 return val;
8664 }
650e4c94 8665
539a3a92 8666 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 8667 return fold_builtin_classify_type (arg0);
539a3a92 8668
650e4c94 8669 case BUILT_IN_STRLEN:
c7cbde74 8670 return fold_builtin_strlen (loc, type, arg0);
650e4c94 8671
4f35b1fc 8672 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 8673 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 8674 case BUILT_IN_FABSD32:
8675 case BUILT_IN_FABSD64:
8676 case BUILT_IN_FABSD128:
389dd41b 8677 return fold_builtin_fabs (loc, arg0, type);
d1aade50 8678
8679 case BUILT_IN_ABS:
8680 case BUILT_IN_LABS:
8681 case BUILT_IN_LLABS:
8682 case BUILT_IN_IMAXABS:
389dd41b 8683 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 8684
4f35b1fc 8685 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 8686 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 8687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 8688 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 8689 break;
36d3581d 8690
4f35b1fc 8691 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 8692 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 8693 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 8694 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 8695 break;
36d3581d 8696
4f35b1fc 8697 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 8698 if (validate_arg (arg0, COMPLEX_TYPE)
8699 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 8700 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 8701 break;
36d3581d 8702
6c21be92 8703 CASE_FLT_FN (BUILT_IN_CARG):
8704 return fold_builtin_carg (loc, arg0, type);
c2373fdb 8705
6c21be92 8706 case BUILT_IN_ISASCII:
8707 return fold_builtin_isascii (loc, arg0);
48e1416a 8708
6c21be92 8709 case BUILT_IN_TOASCII:
8710 return fold_builtin_toascii (loc, arg0);
48e1416a 8711
6c21be92 8712 case BUILT_IN_ISDIGIT:
8713 return fold_builtin_isdigit (loc, arg0);
48e1416a 8714
6c21be92 8715 CASE_FLT_FN (BUILT_IN_FINITE):
8716 case BUILT_IN_FINITED32:
8717 case BUILT_IN_FINITED64:
8718 case BUILT_IN_FINITED128:
8719 case BUILT_IN_ISFINITE:
8720 {
8721 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8722 if (ret)
8723 return ret;
8724 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8725 }
48e1416a 8726
6c21be92 8727 CASE_FLT_FN (BUILT_IN_ISINF):
8728 case BUILT_IN_ISINFD32:
8729 case BUILT_IN_ISINFD64:
8730 case BUILT_IN_ISINFD128:
8731 {
8732 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8733 if (ret)
8734 return ret;
8735 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8736 }
48e1416a 8737
6c21be92 8738 case BUILT_IN_ISNORMAL:
8739 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
48e1416a 8740
6c21be92 8741 case BUILT_IN_ISINF_SIGN:
8742 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
48e1416a 8743
6c21be92 8744 CASE_FLT_FN (BUILT_IN_ISNAN):
8745 case BUILT_IN_ISNAND32:
8746 case BUILT_IN_ISNAND64:
8747 case BUILT_IN_ISNAND128:
8748 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
48e1416a 8749
6c21be92 8750 case BUILT_IN_FREE:
8751 if (integer_zerop (arg0))
8752 return build_empty_stmt (loc);
d064d976 8753 break;
c63f4ad3 8754
6c21be92 8755 default:
8b4af95f 8756 break;
6c21be92 8757 }
805e22b2 8758
6c21be92 8759 return NULL_TREE;
6c21be92 8761}
728bac60 8762
6c21be92 8763/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8764 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 8765
8766static tree
e80cc485 8767fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 8768{
8769 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8770 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8771
6c21be92 8772 if (TREE_CODE (arg0) == ERROR_MARK
8773 || TREE_CODE (arg1) == ERROR_MARK)
8774 return NULL_TREE;
e5407ca6 8775
744fe358 8776 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
6c21be92 8777 return ret;
e84da7c1 8778
6c21be92 8779 switch (fcode)
8780 {
e84da7c1 8781 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8782 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8783 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8784 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 8785 return do_mpfr_lgamma_r (arg0, arg1, type);
8786 break;
c2f47e15 8787
3838b9ae 8788 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 8789 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 8790
ebf8b4f5 8791 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 8792 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 8793
c2f47e15 8794 case BUILT_IN_STRSPN:
389dd41b 8795 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 8796
8797 case BUILT_IN_STRCSPN:
389dd41b 8798 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 8799
c2f47e15 8800 case BUILT_IN_STRPBRK:
389dd41b 8801 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 8802
8803 case BUILT_IN_EXPECT:
c83059be 8804 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 8805
9bc9f15f 8806 case BUILT_IN_ISGREATER:
389dd41b 8807 return fold_builtin_unordered_cmp (loc, fndecl,
8808 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 8809 case BUILT_IN_ISGREATEREQUAL:
389dd41b 8810 return fold_builtin_unordered_cmp (loc, fndecl,
8811 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 8812 case BUILT_IN_ISLESS:
389dd41b 8813 return fold_builtin_unordered_cmp (loc, fndecl,
8814 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 8815 case BUILT_IN_ISLESSEQUAL:
389dd41b 8816 return fold_builtin_unordered_cmp (loc, fndecl,
8817 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 8818 case BUILT_IN_ISLESSGREATER:
389dd41b 8819 return fold_builtin_unordered_cmp (loc, fndecl,
8820 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 8821 case BUILT_IN_ISUNORDERED:
389dd41b 8822 return fold_builtin_unordered_cmp (loc, fndecl,
8823 arg0, arg1, UNORDERED_EXPR,
d5019fe8 8824 NOP_EXPR);
9bc9f15f 8825
7c2f0500 8826 /* We do the folding for va_start in the expander. */
8827 case BUILT_IN_VA_START:
8828 break;
f0613857 8829
0a39fd54 8830 case BUILT_IN_OBJECT_SIZE:
c2f47e15 8831 return fold_builtin_object_size (arg0, arg1);
0a39fd54 8832
1cd6e20d 8833 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8834 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8835
8836 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8837 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8838
c2f47e15 8839 default:
8840 break;
8841 }
8842 return NULL_TREE;
8843}
8844
8845/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 8846 and ARG2.
c2f47e15 8847 This function returns NULL_TREE if no simplification was possible. */
8848
8849static tree
389dd41b 8850fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 8851 tree arg0, tree arg1, tree arg2)
c2f47e15 8852{
8853 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8854 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 8855
8856 if (TREE_CODE (arg0) == ERROR_MARK
8857 || TREE_CODE (arg1) == ERROR_MARK
8858 || TREE_CODE (arg2) == ERROR_MARK)
8859 return NULL_TREE;
8860
744fe358 8861 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8862 arg0, arg1, arg2))
6c21be92 8863 return ret;
8864
c2f47e15 8865 switch (fcode)
8866 {
8868 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 8869 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 8870
8871 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 8872 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 8873
e5407ca6 8874 CASE_FLT_FN (BUILT_IN_REMQUO):
8875 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8876 && validate_arg (arg1, REAL_TYPE)
8877 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 8878 return do_mpfr_remquo (arg0, arg1, arg2);
8879 break;
e5407ca6 8880
c2f47e15 8881 case BUILT_IN_BCMP:
8882 case BUILT_IN_MEMCMP:
389dd41b 8883 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 8884
c83059be 8885 case BUILT_IN_EXPECT:
8886 return fold_builtin_expect (loc, arg0, arg1, arg2);
8887
0c93c8a9 8888 case BUILT_IN_ADD_OVERFLOW:
8889 case BUILT_IN_SUB_OVERFLOW:
8890 case BUILT_IN_MUL_OVERFLOW:
732905bb 8891 case BUILT_IN_ADD_OVERFLOW_P:
8892 case BUILT_IN_SUB_OVERFLOW_P:
8893 case BUILT_IN_MUL_OVERFLOW_P:
0c93c8a9 8894 case BUILT_IN_SADD_OVERFLOW:
8895 case BUILT_IN_SADDL_OVERFLOW:
8896 case BUILT_IN_SADDLL_OVERFLOW:
8897 case BUILT_IN_SSUB_OVERFLOW:
8898 case BUILT_IN_SSUBL_OVERFLOW:
8899 case BUILT_IN_SSUBLL_OVERFLOW:
8900 case BUILT_IN_SMUL_OVERFLOW:
8901 case BUILT_IN_SMULL_OVERFLOW:
8902 case BUILT_IN_SMULLL_OVERFLOW:
8903 case BUILT_IN_UADD_OVERFLOW:
8904 case BUILT_IN_UADDL_OVERFLOW:
8905 case BUILT_IN_UADDLL_OVERFLOW:
8906 case BUILT_IN_USUB_OVERFLOW:
8907 case BUILT_IN_USUBL_OVERFLOW:
8908 case BUILT_IN_USUBLL_OVERFLOW:
8909 case BUILT_IN_UMUL_OVERFLOW:
8910 case BUILT_IN_UMULL_OVERFLOW:
8911 case BUILT_IN_UMULLL_OVERFLOW:
8912 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8913
650e4c94 8914 default:
8915 break;
8916 }
c2f47e15 8917 return NULL_TREE;
8918}
650e4c94 8919
c2f47e15 8920/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 8921 arguments. The trailing bool parameter (historically IGNORE, true when
8922 the result of the call is ignored) is now unused. This function returns
8923 NULL_TREE if no simplification was possible. */
48e1416a 8924
2165588a 8925tree
e80cc485 8926fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 8927{
8928 tree ret = NULL_TREE;
a7f5bb2d 8929
c2f47e15 8930 switch (nargs)
8931 {
8932 case 0:
e80cc485 8933 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 8934 break;
8935 case 1:
e80cc485 8936 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 8937 break;
8938 case 2:
e80cc485 8939 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 8940 break;
8941 case 3:
e80cc485 8942 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 8943 break;
c2f47e15 8944 default:
e80cc485 8945 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 8946 break;
8947 }
8948 if (ret)
8949 {
75a70cf9 8950 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 8951 SET_EXPR_LOCATION (ret, loc);
c2f47e15 8952 TREE_NO_WARNING (ret) = 1;
8953 return ret;
8954 }
8955 return NULL_TREE;
8956}
8957
0e80b01d 8958/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8959 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8960 of arguments in ARGS to be omitted. OLDNARGS is the number of
8961 elements in ARGS. */
c2f47e15 8962
8963static tree
0e80b01d 8964rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8965 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 8966{
0e80b01d 8967 int nargs = oldnargs - skip + n;
8968 tree *buffer;
c2f47e15 8969
0e80b01d 8970 if (n > 0)
c2f47e15 8971 {
0e80b01d 8972 int i, j;
c2f47e15 8973
0e80b01d 8974 buffer = XALLOCAVEC (tree, nargs);
8975 for (i = 0; i < n; i++)
8976 buffer[i] = va_arg (newargs, tree);
8977 for (j = skip; j < oldnargs; j++, i++)
8978 buffer[i] = args[j];
8979 }
8980 else
8981 buffer = args + skip;
19fbe3a4 8982
0e80b01d 8983 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8984}
c2f47e15 8985
198622c0 8986/* Return true if FNDECL shouldn't be folded right now.
8987 If a built-in function has an inline attribute always_inline
8988 wrapper, defer folding it after always_inline functions have
8989 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8990 might not be performed. */
8991
51d2c51e 8992bool
198622c0 8993avoid_folding_inline_builtin (tree fndecl)
8994{
8995 return (DECL_DECLARED_INLINE_P (fndecl)
8996 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8997 && cfun
8998 && !cfun->always_inline_functions_inlined
8999 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9000}
9001
4ee9c684 9002/* A wrapper function for builtin folding that prevents warnings for
491e04ef 9003 "statement without effect" and the like, caused by removing the
4ee9c684 9004 call node earlier than the warning is generated. */
9005
9006tree
389dd41b 9007fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 9008{
c2f47e15 9009 tree ret = NULL_TREE;
9010 tree fndecl = get_callee_fndecl (exp);
9011 if (fndecl
9012 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 9013 && DECL_BUILT_IN (fndecl)
9014 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9015 yet. Defer folding until we see all the arguments
9016 (after inlining). */
9017 && !CALL_EXPR_VA_ARG_PACK (exp))
9018 {
9019 int nargs = call_expr_nargs (exp);
9020
9021 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9022 instead last argument is __builtin_va_arg_pack (). Defer folding
9023 even in that case, until arguments are finalized. */
9024 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9025 {
9026 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9027 if (fndecl2
9028 && TREE_CODE (fndecl2) == FUNCTION_DECL
9029 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9030 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9031 return NULL_TREE;
9032 }
9033
198622c0 9034 if (avoid_folding_inline_builtin (fndecl))
9035 return NULL_TREE;
9036
c2f47e15 9037 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 9038 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9039 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 9040 else
9041 {
9d884767 9042 tree *args = CALL_EXPR_ARGP (exp);
9043 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 9044 if (ret)
389dd41b 9045 return ret;
c2f47e15 9046 }
4ee9c684 9047 }
c2f47e15 9048 return NULL_TREE;
9049}
48e1416a 9050
9d884767 9051/* Fold a CALL_EXPR of type TYPE, with FN as the function expression.
9052 N arguments are passed in the array ARGARRAY. Return a folded
9053 expression or NULL_TREE if no simplification was possible. */
805e22b2 9054
9055tree
9d884767 9056fold_builtin_call_array (location_t loc, tree,
d01f58f9 9057 tree fn,
9058 int n,
9059 tree *argarray)
7e15618b 9060{
9d884767 9061 if (TREE_CODE (fn) != ADDR_EXPR)
9062 return NULL_TREE;
c2f47e15 9063
9d884767 9064 tree fndecl = TREE_OPERAND (fn, 0);
9065 if (TREE_CODE (fndecl) == FUNCTION_DECL
9066 && DECL_BUILT_IN (fndecl))
9067 {
9068 /* If last argument is __builtin_va_arg_pack (), arguments to this
9069 function are not finalized yet. Defer folding until they are. */
9070 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9071 {
9072 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9073 if (fndecl2
9074 && TREE_CODE (fndecl2) == FUNCTION_DECL
9075 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9076 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9077 return NULL_TREE;
9078 }
9079 if (avoid_folding_inline_builtin (fndecl))
9080 return NULL_TREE;
9081 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9082 return targetm.fold_builtin (fndecl, n, argarray, false);
9083 else
9084 return fold_builtin_n (loc, fndecl, argarray, n, false);
9085 }
c2f47e15 9086
9d884767 9087 return NULL_TREE;
c2f47e15 9088}
9089
af1409ad 9090/* Construct a new CALL_EXPR using the tail of the argument list of EXP
9091 along with N new arguments specified as the "..." parameters. SKIP
9092 is the number of arguments in EXP to be omitted. This function is used
9093 to do varargs-to-varargs transformations. */
9094
9095static tree
9096rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9097{
9098 va_list ap;
9099 tree t;
9100
9101 va_start (ap, n);
9102 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9103 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9104 va_end (ap);
c2f47e15 9105
af1409ad 9106 return t;
c2f47e15 9107}
9108
9109/* Validate a single argument ARG against a tree code CODE representing
184fac50 9110 a type. Return true when argument is valid. */
48e1416a 9111
c2f47e15 9112static bool
184fac50 9113validate_arg (const_tree arg, enum tree_code code)
c2f47e15 9114{
9115 if (!arg)
9116 return false;
9117 else if (code == POINTER_TYPE)
184fac50 9118 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 9119 else if (code == INTEGER_TYPE)
9120 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 9121 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 9122}
0eb671f7 9123
75a70cf9 9124/* This function validates the types of a function call argument list
9125 against a specified list of tree_codes. If the last specifier is a 0,
9126 that represents an ellipsis, otherwise the last specifier must be a
9127 VOID_TYPE.
9128
9129 This is the GIMPLE version of validate_arglist. Eventually we want to
9130 completely convert builtins.c to work from GIMPLEs and the tree based
9131 validate_arglist will then be removed. */
9132
9133bool
1a91d914 9134validate_gimple_arglist (const gcall *call, ...)
75a70cf9 9135{
9136 enum tree_code code;
9137 bool res = false;
9138 va_list ap;
9139 const_tree arg;
9140 size_t i;
9141
9142 va_start (ap, call);
9143 i = 0;
9144
9145 do
9146 {
d62e827b 9147 code = (enum tree_code) va_arg (ap, int);
75a70cf9 9148 switch (code)
9149 {
9150 case 0:
9151 /* This signifies an ellipsis; any further arguments are all ok. */
9152 res = true;
9153 goto end;
9154 case VOID_TYPE:
9155 /* This signifies an endlink, if no arguments remain, return
9156 true, otherwise return false. */
9157 res = (i == gimple_call_num_args (call));
9158 goto end;
9159 default:
9160 /* If no parameters remain or the parameter's code does not
9161 match the specified code, return false. Otherwise continue
9162 checking any remaining arguments. */
9163 arg = gimple_call_arg (call, i++);
9164 if (!validate_arg (arg, code))
9165 goto end;
9166 break;
9167 }
9168 }
9169 while (1);
9170
9171 /* We need gotos here since we can only have one VA_CLOSE in a
9172 function. */
9173 end: ;
9174 va_end (ap);
9175
9176 return res;
9177}
9178
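/* Illustrative (hypothetical) use of validate_gimple_arglist: a checker
   for a sincos-style call could be written as
     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE,
                              POINTER_TYPE, VOID_TYPE)
   where the VOID_TYPE terminates the list; passing 0 instead would,
   like an ellipsis, accept any further arguments.  */
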
fc2a2dcb 9179/* Default target-specific builtin expander that does nothing. */
9180
9181rtx
aecda0d6 9182default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9183 rtx target ATTRIBUTE_UNUSED,
9184 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 9185 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 9186 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 9187{
9188 return NULL_RTX;
9189}
c7926a82 9190
01537105 9191/* Returns true if EXP represents data that would potentially reside
9192 in a readonly section. */
9193
b9ea678c 9194bool
01537105 9195readonly_data_expr (tree exp)
9196{
9197 STRIP_NOPS (exp);
9198
9ff0637e 9199 if (TREE_CODE (exp) != ADDR_EXPR)
9200 return false;
9201
9202 exp = get_base_address (TREE_OPERAND (exp, 0));
9203 if (!exp)
9204 return false;
9205
9206 /* Make sure we call decl_readonly_section only for trees it
9207 can handle (since it returns true for everything it doesn't
9208 understand). */
491e04ef 9209 if (TREE_CODE (exp) == STRING_CST
9ff0637e 9210 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 9211 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 9212 return decl_readonly_section (exp, 0);
01537105 9213 else
9214 return false;
9215}
4ee9c684 9216
c2f47e15 9217/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9218 to the call, and TYPE is its return type.
4ee9c684 9219
c2f47e15 9220 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9221 simplified form of the call as a tree.
9222
9223 The simplified form may be a constant or other expression which
9224 computes the same value, but in a more efficient manner (including
9225 calls to other builtin functions).
9226
9227 The call may contain arguments which need to be evaluated, but
9228 which are not useful to determine the result of the call. In
9229 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9230 COMPOUND_EXPR will be an argument which must be evaluated.
9231 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9232 COMPOUND_EXPR in the chain will contain the tree for the simplified
9233 form of the builtin function call. */
9234
9235static tree
389dd41b 9236fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 9237{
c2f47e15 9238 if (!validate_arg (s1, POINTER_TYPE)
9239 || !validate_arg (s2, POINTER_TYPE))
9240 return NULL_TREE;
4ee9c684 9241 else
9242 {
4ee9c684 9243 tree fn;
9244 const char *p1, *p2;
9245
9246 p2 = c_getstr (s2);
9247 if (p2 == NULL)
c2f47e15 9248 return NULL_TREE;
4ee9c684 9249
9250 p1 = c_getstr (s1);
9251 if (p1 != NULL)
9252 {
9253 const char *r = strpbrk (p1, p2);
daa1d5f5 9254 tree tem;
4ee9c684 9255
9256 if (r == NULL)
779b4c41 9257 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 9258
9259 /* Return an offset into the constant string argument. */
2cc66f2a 9260 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 9261 return fold_convert_loc (loc, type, tem);
4ee9c684 9262 }
9263
9264 if (p2[0] == '\0')
05abc81b 9265 /* strpbrk(x, "") == NULL.
9266 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 9267 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 9268
9269 if (p2[1] != '\0')
c2f47e15 9270 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 9271
b9a16870 9272 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 9273 if (!fn)
c2f47e15 9274 return NULL_TREE;
4ee9c684 9275
9276 /* New argument list transforming strpbrk(s1, s2) to
9277 strchr(s1, s2[0]). */
7002a1c8 9278 return build_call_expr_loc (loc, fn, 2, s1,
9279 build_int_cst (integer_type_node, p2[0]));
4ee9c684 9280 }
9281}
9282
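/* Illustrative examples (not part of the original source) of the folds
   performed above:
     strpbrk ("abc", "xb")  ->  "abc" + 1
     strpbrk (s, "")        ->  (void) s, NULL
     strpbrk (s, "c")       ->  strchr (s, 'c')  */
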
c2f47e15 9283/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9284 to the call.
4ee9c684 9285
c2f47e15 9286 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9287 simplified form of the call as a tree.
9288
9289 The simplified form may be a constant or other expression which
9290 computes the same value, but in a more efficient manner (including
9291 calls to other builtin functions).
9292
9293 The call may contain arguments which need to be evaluated, but
9294 which are not useful to determine the result of the call. In
9295 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9296 COMPOUND_EXPR will be an argument which must be evaluated.
9297 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9298 COMPOUND_EXPR in the chain will contain the tree for the simplified
9299 form of the builtin function call. */
9300
9301static tree
389dd41b 9302fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 9303{
c2f47e15 9304 if (!validate_arg (s1, POINTER_TYPE)
9305 || !validate_arg (s2, POINTER_TYPE))
9306 return NULL_TREE;
4ee9c684 9307 else
9308 {
4ee9c684 9309 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9310
c2f47e15 9311 /* If either argument is "", the result is zero. */
4ee9c684 9312 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 9313 /* Evaluate and ignore both arguments in case either one has
9314 side-effects. */
389dd41b 9315 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 9316 s1, s2);
c2f47e15 9317 return NULL_TREE;
4ee9c684 9318 }
9319}
9320
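/* Illustrative example (not part of the original source): both
   strspn (s, "") and strspn ("", s) fold above to (size_t) 0, with the
   other argument retained only for its side effects.  */
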
c2f47e15 9321/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9322 to the call.
4ee9c684 9323
c2f47e15 9324 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 9325 simplified form of the call as a tree.
9326
9327 The simplified form may be a constant or other expression which
9328 computes the same value, but in a more efficient manner (including
9329 calls to other builtin functions).
9330
9331 The call may contain arguments which need to be evaluated, but
9332 which are not useful to determine the result of the call. In
9333 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9334 COMPOUND_EXPR will be an argument which must be evaluated.
9335 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9336 COMPOUND_EXPR in the chain will contain the tree for the simplified
9337 form of the builtin function call. */
9338
9339static tree
389dd41b 9340fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 9341{
c2f47e15 9342 if (!validate_arg (s1, POINTER_TYPE)
9343 || !validate_arg (s2, POINTER_TYPE))
9344 return NULL_TREE;
4ee9c684 9345 else
9346 {
c2f47e15 9347      /* If the first argument is "", the result is 0.  */
b5e46e2c 9348 const char *p1 = c_getstr (s1);
4ee9c684 9349 if (p1 && *p1 == '\0')
9350 {
9351 /* Evaluate and ignore argument s2 in case it has
9352 side-effects. */
389dd41b 9353 return omit_one_operand_loc (loc, size_type_node,
39761420 9354 size_zero_node, s2);
4ee9c684 9355 }
9356
9357 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 9358 const char *p2 = c_getstr (s2);
4ee9c684 9359 if (p2 && *p2 == '\0')
9360 {
b9a16870 9361 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 9362
9363 /* If the replacement _DECL isn't initialized, don't do the
9364 transformation. */
9365 if (!fn)
c2f47e15 9366 return NULL_TREE;
4ee9c684 9367
389dd41b 9368 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 9369 }
c2f47e15 9370 return NULL_TREE;
4ee9c684 9371 }
9372}
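
/* Illustrative sketch (not part of builtins.c): the two strcspn cases
   folded above.  */
#include <string.h>

size_t
example_strcspn_folds (const char *s)
{
  size_t a = strcspn ("", s);	/* folds to 0; s is still evaluated */
  size_t b = strcspn (s, "");	/* becomes __builtin_strlen (s) */
  return a + b;
}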
9373
c2f47e15 9374/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 9375 produced. False otherwise. This is done so that we don't output the error
9376 or warning twice or three times. */
75a70cf9 9377
743b0c6a 9378bool
c2f47e15 9379fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 9380{
9381 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 9382 int nargs = call_expr_nargs (exp);
9383 tree arg;
d98fd4a4 9384  /* There is a good chance the current input_location points inside the
 9385     definition of the va_start macro (perhaps on the token for the
 9386     builtin) in a system header, so warnings will not be emitted.
9387 Use the location in real source code. */
9388 source_location current_location =
9389 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9390 NULL);
4ee9c684 9391
257d99c3 9392 if (!stdarg_p (fntype))
743b0c6a 9393 {
9394 error ("%<va_start%> used in function with fixed args");
9395 return true;
9396 }
c2f47e15 9397
9398 if (va_start_p)
79012a9d 9399 {
c2f47e15 9400      if (nargs != 2)
9401 {
9402 error ("wrong number of arguments to function %<va_start%>");
9403 return true;
9404 }
9405 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 9406 }
 9407  /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
 9408     once we have checked the arguments and, if needed, issued a warning.  */
c2f47e15 9409 else
4ee9c684 9410 {
c2f47e15 9411 if (nargs == 0)
9412 {
9413 /* Evidently an out of date version of <stdarg.h>; can't validate
9414 va_start's second argument, but can still work as intended. */
d98fd4a4 9415 warning_at (current_location,
7edb1062 9416 OPT_Wvarargs,
9417 "%<__builtin_next_arg%> called without an argument");
c2f47e15 9418 return true;
9419 }
9420 else if (nargs > 1)
a0c938f0 9421 {
c2f47e15 9422 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 9423 return true;
9424 }
c2f47e15 9425 arg = CALL_EXPR_ARG (exp, 0);
9426 }
9427
a8dd994c 9428 if (TREE_CODE (arg) == SSA_NAME)
9429 arg = SSA_NAME_VAR (arg);
9430
c2f47e15 9431 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 9432 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 9433 the arguments and if needed issuing a warning. */
9434 if (!integer_zerop (arg))
9435 {
9436 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 9437
4ee9c684 9438 /* Strip off all nops for the sake of the comparison. This
9439 is not quite the same as STRIP_NOPS. It does more.
 9440	 We must also strip off INDIRECT_REF for C++ reference
9441 parameters. */
72dd6141 9442 while (CONVERT_EXPR_P (arg)
4ee9c684 9443 || TREE_CODE (arg) == INDIRECT_REF)
9444 arg = TREE_OPERAND (arg, 0);
9445 if (arg != last_parm)
a0c938f0 9446 {
b08cf617 9447	  /* FIXME: Sometimes with the tree optimizers we can end up with
 9448	     something other than the last argument even though the user
 9449	     used the last argument.  We just warn and continue as if it
 9450	     were the last argument, which may result in wrong code.  */
d98fd4a4 9452 warning_at (current_location,
7edb1062 9453 OPT_Wvarargs,
d98fd4a4 9454 "second parameter of %<va_start%> not last named argument");
743b0c6a 9455 }
24158ad7 9456
9457 /* Undefined by C99 7.15.1.4p4 (va_start):
9458 "If the parameter parmN is declared with the register storage
9459 class, with a function or array type, or with a type that is
9460 not compatible with the type that results after application of
9461 the default argument promotions, the behavior is undefined."
9462 */
9463 else if (DECL_REGISTER (arg))
d98fd4a4 9464 {
9465 warning_at (current_location,
7edb1062 9466 OPT_Wvarargs,
67cf9b55 9467 "undefined behavior when second parameter of "
d98fd4a4 9468 "%<va_start%> is declared with %<register%> storage");
9469 }
24158ad7 9470
79012a9d 9471 /* We want to verify the second parameter just once before the tree
a0c938f0 9472 optimizers are run and then avoid keeping it in the tree,
9473 as otherwise we could warn even for correct code like:
9474 void foo (int i, ...)
9475 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 9476 if (va_start_p)
9477 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9478 else
9479 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 9480 }
9481 return false;
4ee9c684 9482}
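
/* Illustrative sketch (not part of builtins.c): user code exercising the
   va_start checks above.  */
#include <stdarg.h>

void
example_va_start_checks (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);  /* -Wvarargs: second parameter not last named argument */
  va_end (ap);
  va_start (ap, b);  /* OK; the second argument is rewritten to 0 after the
			check so later passes do not warn again */
  va_end (ap);
}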
9483
9484
c2f47e15 9485/* Expand a call EXP to __builtin_object_size. */
0a39fd54 9486
f7715905 9487static rtx
0a39fd54 9488expand_builtin_object_size (tree exp)
9489{
9490 tree ost;
9491 int object_size_type;
9492 tree fndecl = get_callee_fndecl (exp);
0a39fd54 9493
c2f47e15 9494 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 9495 {
8c41abe8 9496      error ("%Kfirst argument of %qD must be a pointer, the second an integer constant",
b8c23db3 9497 exp, fndecl);
0a39fd54 9498 expand_builtin_trap ();
9499 return const0_rtx;
9500 }
9501
c2f47e15 9502 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 9503 STRIP_NOPS (ost);
9504
9505 if (TREE_CODE (ost) != INTEGER_CST
9506 || tree_int_cst_sgn (ost) < 0
9507 || compare_tree_int (ost, 3) > 0)
9508 {
8c41abe8 9509      error ("%Klast argument of %qD is not an integer constant between 0 and 3",
b8c23db3 9510 exp, fndecl);
0a39fd54 9511 expand_builtin_trap ();
9512 return const0_rtx;
9513 }
9514
e913b5cd 9515 object_size_type = tree_to_shwi (ost);
0a39fd54 9516
9517 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9518}
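
/* Illustrative sketch (not part of builtins.c): when the size is still
   unknown at expansion time, the fallback above yields the documented
   defaults: (size_t) -1 for types 0 and 1, and 0 for types 2 and 3.  */
#include <stddef.h>

size_t
example_object_size_defaults (void *p)
{
  size_t max = __builtin_object_size (p, 0);	/* (size_t) -1 if unknown */
  size_t min = __builtin_object_size (p, 2);	/* 0 if unknown */
  return max - min;
}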
9519
9520/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9521 FCODE is the BUILT_IN_* to use.
c2f47e15 9522 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 9523 otherwise try to get the result in TARGET, if convenient (and in
9524 mode MODE if that's convenient). */
9525
9526static rtx
3754d046 9527expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 9528 enum built_in_function fcode)
9529{
0a39fd54 9530 tree dest, src, len, size;
9531
c2f47e15 9532 if (!validate_arglist (exp,
0a39fd54 9533 POINTER_TYPE,
9534 fcode == BUILT_IN_MEMSET_CHK
9535 ? INTEGER_TYPE : POINTER_TYPE,
9536 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 9537 return NULL_RTX;
0a39fd54 9538
c2f47e15 9539 dest = CALL_EXPR_ARG (exp, 0);
9540 src = CALL_EXPR_ARG (exp, 1);
9541 len = CALL_EXPR_ARG (exp, 2);
9542 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 9543
5aef8938 9544 bool sizes_ok = check_sizes (OPT_Wstringop_overflow_,
9545 exp, len, /*maxlen=*/NULL_TREE,
9546 /*str=*/NULL_TREE, size);
9547
9548 if (!tree_fits_uhwi_p (size))
c2f47e15 9549 return NULL_RTX;
0a39fd54 9550
e913b5cd 9551 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 9552 {
5aef8938 9553 /* Avoid transforming the checking call to an ordinary one when
9554 an overflow has been detected or when the call couldn't be
9555 validated because the size is not constant. */
9556 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9557 return NULL_RTX;
0a39fd54 9558
5aef8938 9559 tree fn = NULL_TREE;
0a39fd54 9560 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9561 mem{cpy,pcpy,move,set} is available. */
9562 switch (fcode)
9563 {
9564 case BUILT_IN_MEMCPY_CHK:
b9a16870 9565 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 9566 break;
9567 case BUILT_IN_MEMPCPY_CHK:
b9a16870 9568 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 9569 break;
9570 case BUILT_IN_MEMMOVE_CHK:
b9a16870 9571 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 9572 break;
9573 case BUILT_IN_MEMSET_CHK:
b9a16870 9574 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 9575 break;
9576 default:
9577 break;
9578 }
9579
9580 if (! fn)
c2f47e15 9581 return NULL_RTX;
0a39fd54 9582
0568e9c1 9583 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 9584 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9585 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9586 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9587 }
9588 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 9589 return NULL_RTX;
0a39fd54 9590 else
9591 {
957d0361 9592 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 9593
9594 /* If DEST is not a pointer type, call the normal function. */
9595 if (dest_align == 0)
c2f47e15 9596 return NULL_RTX;
0a39fd54 9597
9598 /* If SRC and DEST are the same (and not volatile), do nothing. */
9599 if (operand_equal_p (src, dest, 0))
9600 {
9601 tree expr;
9602
9603 if (fcode != BUILT_IN_MEMPCPY_CHK)
9604 {
9605 /* Evaluate and ignore LEN in case it has side-effects. */
9606 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9607 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9608 }
9609
2cc66f2a 9610 expr = fold_build_pointer_plus (dest, len);
0a39fd54 9611 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9612 }
9613
9614 /* __memmove_chk special case. */
9615 if (fcode == BUILT_IN_MEMMOVE_CHK)
9616 {
957d0361 9617 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 9618
9619 if (src_align == 0)
c2f47e15 9620 return NULL_RTX;
0a39fd54 9621
9622 /* If src is categorized for a readonly section we can use
9623 normal __memcpy_chk. */
9624 if (readonly_data_expr (src))
9625 {
b9a16870 9626 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 9627 if (!fn)
c2f47e15 9628 return NULL_RTX;
0568e9c1 9629 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9630 dest, src, len, size);
a65c4d64 9631 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9632 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9633 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9634 }
9635 }
c2f47e15 9636 return NULL_RTX;
0a39fd54 9637 }
9638}
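
/* Illustrative sketch (not part of builtins.c): two of the rewrites
   performed above; the exact outcomes noted in the comments are
   assumptions read off the code.  */
#include <stddef.h>

void
example_mem_chk_rewrites (char *d, const char *s, size_t n)
{
  /* Unknown object size (all ones): expands as a plain memcpy.  */
  __builtin___memcpy_chk (d, s, 16, (size_t) -1);
  /* Non-constant length with src == dest: n is evaluated for side
     effects and the move itself is elided.  */
  __builtin___memmove_chk (d, d, n, 32);
}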
9639
9640/* Emit warning if a buffer overflow is detected at compile time. */
9641
9642static void
9643maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9644{
5aef8938 9645 /* The source string. */
9646 tree srcstr = NULL_TREE;
9647 /* The size of the destination object. */
9648 tree objsize = NULL_TREE;
 9649  /* The string being appended to (as in __strcat_chk),
 9650     or null if the call does not concatenate.  */
9651 tree catstr = NULL_TREE;
9652 /* The maximum length of the source sequence in a bounded operation
9653 (such as __strncat_chk) or null if the operation isn't bounded
9654 (such as __strcat_chk). */
9655 tree maxlen = NULL_TREE;
0a39fd54 9656
9657 switch (fcode)
9658 {
9659 case BUILT_IN_STRCPY_CHK:
9660 case BUILT_IN_STPCPY_CHK:
5aef8938 9661 srcstr = CALL_EXPR_ARG (exp, 1);
9662 objsize = CALL_EXPR_ARG (exp, 2);
9663 break;
9664
0a39fd54 9665 case BUILT_IN_STRCAT_CHK:
5aef8938 9666 /* For __strcat_chk the warning will be emitted only if overflowing
9667 by at least strlen (dest) + 1 bytes. */
9668 catstr = CALL_EXPR_ARG (exp, 0);
9669 srcstr = CALL_EXPR_ARG (exp, 1);
9670 objsize = CALL_EXPR_ARG (exp, 2);
0a39fd54 9671 break;
5aef8938 9672
b356dfef 9673 case BUILT_IN_STRNCAT_CHK:
5aef8938 9674 catstr = CALL_EXPR_ARG (exp, 0);
9675 srcstr = CALL_EXPR_ARG (exp, 1);
9676 maxlen = CALL_EXPR_ARG (exp, 2);
9677 objsize = CALL_EXPR_ARG (exp, 3);
9678 break;
9679
0a39fd54 9680 case BUILT_IN_STRNCPY_CHK:
1063acde 9681 case BUILT_IN_STPNCPY_CHK:
5aef8938 9682 srcstr = CALL_EXPR_ARG (exp, 1);
9683 maxlen = CALL_EXPR_ARG (exp, 2);
9684 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 9685 break;
5aef8938 9686
0a39fd54 9687 case BUILT_IN_SNPRINTF_CHK:
9688 case BUILT_IN_VSNPRINTF_CHK:
5aef8938 9689 maxlen = CALL_EXPR_ARG (exp, 1);
9690 objsize = CALL_EXPR_ARG (exp, 3);
0a39fd54 9691 break;
9692 default:
9693 gcc_unreachable ();
9694 }
9695
5aef8938 9696 if (catstr && maxlen)
0a39fd54 9697 {
5aef8938 9698 /* Check __strncat_chk. There is no way to determine the length
9699 of the string to which the source string is being appended so
9700 just warn when the length of the source string is not known. */
9701 if (!check_strncat_sizes (exp, objsize))
0a39fd54 9702 return;
9703 }
0a39fd54 9704
5aef8938 9705 check_sizes (OPT_Wstringop_overflow_, exp,
9706 /*size=*/NULL_TREE, maxlen, srcstr, objsize);
0a39fd54 9707}
9708
9709/* Emit warning if a buffer overflow is detected at compile time
9710 in __sprintf_chk/__vsprintf_chk calls. */
9711
9712static void
9713maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9714{
1e4adcfc 9715 tree size, len, fmt;
0a39fd54 9716 const char *fmt_str;
c2f47e15 9717 int nargs = call_expr_nargs (exp);
0a39fd54 9718
9719 /* Verify the required arguments in the original call. */
48e1416a 9720
c2f47e15 9721 if (nargs < 4)
0a39fd54 9722 return;
c2f47e15 9723 size = CALL_EXPR_ARG (exp, 2);
9724 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 9725
e913b5cd 9726 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 9727 return;
9728
9729 /* Check whether the format is a literal string constant. */
9730 fmt_str = c_getstr (fmt);
9731 if (fmt_str == NULL)
9732 return;
9733
d4473c84 9734 if (!init_target_chars ())
99eabcc1 9735 return;
9736
0a39fd54 9737 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 9738 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 9739 len = build_int_cstu (size_type_node, strlen (fmt_str));
 9740  /* If the format is "%s" and the first ... argument is a string literal,
9741 we know it too. */
c2f47e15 9742 else if (fcode == BUILT_IN_SPRINTF_CHK
9743 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 9744 {
9745 tree arg;
9746
c2f47e15 9747 if (nargs < 5)
0a39fd54 9748 return;
c2f47e15 9749 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 9750 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9751 return;
9752
9753 len = c_strlen (arg, 1);
e913b5cd 9754 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 9755 return;
9756 }
9757 else
9758 return;
9759
5aef8938 9760 /* Add one for the terminating nul. */
9761 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
9762 check_sizes (OPT_Wstringop_overflow_,
9763 exp, /*size=*/NULL_TREE, /*maxlen=*/NULL_TREE, len, size);
0a39fd54 9764}
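
/* Illustrative sketch (not part of builtins.c): a call the check above
   can diagnose, since a %-free format string has a known output length.  */
char example_buf[4];

void
example_sprintf_chk_warning (void)
{
  /* "hello" plus the terminating nul is 6 bytes written into a 4-byte
     object, so -Wstringop-overflow triggers at compile time.  */
  __builtin___sprintf_chk (example_buf, 0,
			   __builtin_object_size (example_buf, 0), "hello");
}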
9765
2c281b15 9766/* Emit a warning if free is called with the address of a variable.  */
9767
9768static void
9769maybe_emit_free_warning (tree exp)
9770{
9771 tree arg = CALL_EXPR_ARG (exp, 0);
9772
9773 STRIP_NOPS (arg);
9774 if (TREE_CODE (arg) != ADDR_EXPR)
9775 return;
9776
9777 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 9778 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 9779 return;
9780
9781 if (SSA_VAR_P (arg))
f74ea1c2 9782 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9783 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 9784 else
f74ea1c2 9785 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9786 "%Kattempt to free a non-heap object", exp);
2c281b15 9787}
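
/* Illustrative sketch (not part of builtins.c): a call diagnosed by the
   check above.  */
#include <stdlib.h>

void
example_free_warning (void)
{
  int x;
  free (&x);	/* -Wfree-nonheap-object: attempt to free a non-heap
		   object 'x' */
}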
9788
c2f47e15 9789/* Fold a call to __builtin_object_size with arguments PTR and OST,
9790 if possible. */
0a39fd54 9791
f7715905 9792static tree
c2f47e15 9793fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 9794{
a6caa15f 9795 unsigned HOST_WIDE_INT bytes;
0a39fd54 9796 int object_size_type;
9797
c2f47e15 9798 if (!validate_arg (ptr, POINTER_TYPE)
9799 || !validate_arg (ost, INTEGER_TYPE))
9800 return NULL_TREE;
0a39fd54 9801
0a39fd54 9802 STRIP_NOPS (ost);
9803
9804 if (TREE_CODE (ost) != INTEGER_CST
9805 || tree_int_cst_sgn (ost) < 0
9806 || compare_tree_int (ost, 3) > 0)
c2f47e15 9807 return NULL_TREE;
0a39fd54 9808
e913b5cd 9809 object_size_type = tree_to_shwi (ost);
0a39fd54 9810
9811 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9812 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9813 and (size_t) 0 for types 2 and 3. */
9814 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 9815 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 9816
9817 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 9818 {
4e91a07b 9819 compute_builtin_object_size (ptr, object_size_type, &bytes);
6da74b21 9820 if (wi::fits_to_tree_p (bytes, size_type_node))
9821 return build_int_cstu (size_type_node, bytes);
a6caa15f 9822 }
0a39fd54 9823 else if (TREE_CODE (ptr) == SSA_NAME)
9824 {
0a39fd54 9825 /* If object size is not known yet, delay folding until
 9826	 later.  Maybe subsequent passes will help determine
9827 it. */
4e91a07b 9828 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9829 && wi::fits_to_tree_p (bytes, size_type_node))
6da74b21 9830 return build_int_cstu (size_type_node, bytes);
0a39fd54 9831 }
9832
a6caa15f 9833 return NULL_TREE;
0a39fd54 9834}
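
/* Illustrative sketch (not part of builtins.c): with an ADDR_EXPR operand
   the folder above computes the size exactly at compile time.  */
#include <stddef.h>

char example_array[32];

size_t
example_bos_fold (void)
{
  return __builtin_object_size (&example_array, 0);  /* folds to 32 */
}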
9835
0e80b01d 9836/* Builtins with folding operations that operate on "..." arguments
9837 need special handling; we need to store the arguments in a convenient
9838 data structure before attempting any folding. Fortunately there are
9839 only a few builtins that fall into this category. FNDECL is the
e80cc485 9840   function, and ARGS is the array of its NARGS arguments.  */
0e80b01d 9841
9842static tree
e80cc485 9843fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
0e80b01d 9844{
9845 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9846 tree ret = NULL_TREE;
9847
9848 switch (fcode)
9849 {
0e80b01d 9850 case BUILT_IN_FPCLASSIFY:
9d884767 9851 ret = fold_builtin_fpclassify (loc, args, nargs);
0e80b01d 9852 break;
9853
9854 default:
9855 break;
9856 }
9857 if (ret)
9858 {
9859 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9860 SET_EXPR_LOCATION (ret, loc);
9861 TREE_NO_WARNING (ret) = 1;
9862 return ret;
9863 }
9864 return NULL_TREE;
9865}
9866
99eabcc1 9867/* Initialize format string characters in the target charset. */
9868
b9ea678c 9869bool
99eabcc1 9870init_target_chars (void)
9871{
9872 static bool init;
9873 if (!init)
9874 {
9875 target_newline = lang_hooks.to_target_charset ('\n');
9876 target_percent = lang_hooks.to_target_charset ('%');
9877 target_c = lang_hooks.to_target_charset ('c');
9878 target_s = lang_hooks.to_target_charset ('s');
9879 if (target_newline == 0 || target_percent == 0 || target_c == 0
9880 || target_s == 0)
9881 return false;
9882
9883 target_percent_c[0] = target_percent;
9884 target_percent_c[1] = target_c;
9885 target_percent_c[2] = '\0';
9886
9887 target_percent_s[0] = target_percent;
9888 target_percent_s[1] = target_s;
9889 target_percent_s[2] = '\0';
9890
9891 target_percent_s_newline[0] = target_percent;
9892 target_percent_s_newline[1] = target_s;
9893 target_percent_s_newline[2] = target_newline;
9894 target_percent_s_newline[3] = '\0';
a0c938f0 9895
99eabcc1 9896 init = true;
9897 }
9898 return true;
9899}
bffb7645 9900
f0c477f2 9901/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
9902 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 9903 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 9904 function assumes that you cleared the MPFR flags and then
9905 calculated M to see if anything subsequently set a flag prior to
9906 entering this function. Return NULL_TREE if any checks fail. */
9907
9908static tree
d4473c84 9909do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 9910{
9911 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9912 overflow/underflow occurred. If -frounding-math, proceed iff the
9913 result of calling FUNC was exact. */
d4473c84 9914 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 9915 && (!flag_rounding_math || !inexact))
9916 {
9917 REAL_VALUE_TYPE rr;
9918
66fa16e6 9919 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 9920 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9921 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 9922	 but the mpfr_t is not, then we underflowed in the
9923 conversion. */
776a7bab 9924 if (real_isfinite (&rr)
f0c477f2 9925 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9926 {
9927 REAL_VALUE_TYPE rmode;
9928
9929 real_convert (&rmode, TYPE_MODE (type), &rr);
9930 /* Proceed iff the specified mode can hold the value. */
9931 if (real_identical (&rmode, &rr))
9932 return build_real (type, rmode);
9933 }
9934 }
9935 return NULL_TREE;
9936}
9937
239d491a 9938/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
9939 number and no overflow/underflow occurred. INEXACT is true if M
9940 was not exactly calculated. TYPE is the tree type for the result.
9941 This function assumes that you cleared the MPFR flags and then
9942 calculated M to see if anything subsequently set a flag prior to
652d9409 9943   entering this function.  Return NULL_TREE if any checks fail;
 9944   if FORCE_CONVERT is true, the checks are bypassed.  */
239d491a 9945
9946static tree
652d9409 9947do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 9948{
9949 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9950 overflow/underflow occurred. If -frounding-math, proceed iff the
9951 result of calling FUNC was exact. */
652d9409 9952 if (force_convert
9953 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9954 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9955 && (!flag_rounding_math || !inexact)))
239d491a 9956 {
9957 REAL_VALUE_TYPE re, im;
9958
b0e7c4d4 9959 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9960 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 9961 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
9962 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 9963	 but the mpfr_t is not, then we underflowed in the
9964 conversion. */
652d9409 9965 if (force_convert
9966 || (real_isfinite (&re) && real_isfinite (&im)
9967 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9968 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 9969 {
9970 REAL_VALUE_TYPE re_mode, im_mode;
9971
9972 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9973 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9974 /* Proceed iff the specified mode can hold the value. */
652d9409 9975 if (force_convert
9976 || (real_identical (&re_mode, &re)
9977 && real_identical (&im_mode, &im)))
239d491a 9978 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9979 build_real (TREE_TYPE (type), im_mode));
9980 }
9981 }
9982 return NULL_TREE;
9983}
239d491a 9984
e5407ca6 9985/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
 9986   the value pointed to by ARG_QUO and return the result.  The type is taken
9987 from the type of ARG0 and is used for setting the precision of the
9988 calculation and results. */
9989
9990static tree
9991do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9992{
9993 tree const type = TREE_TYPE (arg0);
9994 tree result = NULL_TREE;
48e1416a 9995
e5407ca6 9996 STRIP_NOPS (arg0);
9997 STRIP_NOPS (arg1);
48e1416a 9998
e5407ca6 9999 /* To proceed, MPFR must exactly represent the target floating point
10000 format, which only happens when the target base equals two. */
10001 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10002 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10003 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10004 {
10005 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10006 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10007
776a7bab 10008 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 10009 {
e2eb2b7f 10010 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10011 const int prec = fmt->p;
 10012	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
e5407ca6 10013 tree result_rem;
10014 long integer_quo;
10015 mpfr_t m0, m1;
10016
10017 mpfr_inits2 (prec, m0, m1, NULL);
10018 mpfr_from_real (m0, ra0, GMP_RNDN);
10019 mpfr_from_real (m1, ra1, GMP_RNDN);
10020 mpfr_clear_flags ();
e2eb2b7f 10021 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 10022 /* Remquo is independent of the rounding mode, so pass
10023 inexact=0 to do_mpfr_ckconv(). */
10024 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10025 mpfr_clears (m0, m1, NULL);
10026 if (result_rem)
10027 {
10028 /* MPFR calculates quo in the host's long so it may
10029 return more bits in quo than the target int can hold
10030 if sizeof(host long) > sizeof(target int). This can
10031 happen even for native compilers in LP64 mode. In
10032 these cases, modulo the quo value with the largest
10033 number that the target int can hold while leaving one
10034 bit for the sign. */
10035 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10036 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10037
10038 /* Dereference the quo pointer argument. */
10039 arg_quo = build_fold_indirect_ref (arg_quo);
10040 /* Proceed iff a valid pointer type was passed in. */
10041 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10042 {
10043 /* Set the value. */
7002a1c8 10044 tree result_quo
10045 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10046 build_int_cst (TREE_TYPE (arg_quo),
10047 integer_quo));
e5407ca6 10048 TREE_SIDE_EFFECTS (result_quo) = 1;
10049 /* Combine the quo assignment with the rem. */
10050 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10051 result_quo, result_rem));
10052 }
10053 }
10054 }
10055 }
10056 return result;
10057}
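
/* Illustrative sketch (not part of builtins.c): a remquo call the folder
   above can evaluate at compile time via MPFR.  */
#include <math.h>

double
example_remquo_fold (int *quo)
{
  /* 5.0 = 2 * 2.0 + 1.0 under round-to-nearest, so this folds to the
     compound expression (*quo = 2, 1.0).  */
  return remquo (5.0, 2.0, quo);
}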
e84da7c1 10058
10059/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10060 resulting value as a tree with type TYPE. The mpfr precision is
10061 set to the precision of TYPE. We assume that this mpfr function
10062 returns zero if the result could be calculated exactly within the
10063 requested precision. In addition, the integer pointer represented
10064 by ARG_SG will be dereferenced and set to the appropriate signgam
10065 (-1,1) value. */
10066
10067static tree
10068do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10069{
10070 tree result = NULL_TREE;
10071
10072 STRIP_NOPS (arg);
48e1416a 10073
e84da7c1 10074 /* To proceed, MPFR must exactly represent the target floating point
10075 format, which only happens when the target base equals two. Also
10076 verify ARG is a constant and that ARG_SG is an int pointer. */
10077 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10078 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10079 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10080 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10081 {
10082 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10083
10084 /* In addition to NaN and Inf, the argument cannot be zero or a
10085 negative integer. */
776a7bab 10086 if (real_isfinite (ra)
e84da7c1 10087 && ra->cl != rvc_zero
9af5ce0c 10088 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 10089 {
e2eb2b7f 10090 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10091 const int prec = fmt->p;
 10092	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
e84da7c1 10093 int inexact, sg;
10094 mpfr_t m;
10095 tree result_lg;
10096
10097 mpfr_init2 (m, prec);
10098 mpfr_from_real (m, ra, GMP_RNDN);
10099 mpfr_clear_flags ();
e2eb2b7f 10100 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 10101 result_lg = do_mpfr_ckconv (m, type, inexact);
10102 mpfr_clear (m);
10103 if (result_lg)
10104 {
10105 tree result_sg;
10106
10107 /* Dereference the arg_sg pointer argument. */
10108 arg_sg = build_fold_indirect_ref (arg_sg);
10109 /* Assign the signgam value into *arg_sg. */
10110 result_sg = fold_build2 (MODIFY_EXPR,
10111 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 10112 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 10113 TREE_SIDE_EFFECTS (result_sg) = 1;
10114 /* Combine the signgam assignment with the lgamma result. */
10115 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10116 result_sg, result_lg));
10117 }
10118 }
10119 }
10120
10121 return result;
10122}
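
/* Illustrative sketch (not part of builtins.c): an lgamma_r call the
   folder above can evaluate.  lgamma_r is POSIX rather than ISO C, so it
   is declared by hand here.  */
extern double lgamma_r (double, int *);

double
example_lgamma_r_fold (int *sg)
{
  /* Gamma (0.5) = sqrt (pi) > 0, so this folds to the compound
     expression (*sg = 1, log (sqrt (pi))), roughly (*sg = 1, 0.5724).  */
  return lgamma_r (0.5, sg);
}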
75a70cf9 10123
c699fab8 10124/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
 10125   mpc function FUNC on them and return the resulting value as a tree
10126 with type TYPE. The mpfr precision is set to the precision of
10127 TYPE. We assume that function FUNC returns zero if the result
652d9409 10128 could be calculated exactly within the requested precision. If
10129 DO_NONFINITE is true, then fold expressions containing Inf or NaN
10130 in the arguments and/or results. */
c699fab8 10131
63e89698 10132tree
652d9409 10133do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 10134 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10135{
10136 tree result = NULL_TREE;
48e1416a 10137
c699fab8 10138 STRIP_NOPS (arg0);
10139 STRIP_NOPS (arg1);
10140
10141 /* To proceed, MPFR must exactly represent the target floating point
10142 format, which only happens when the target base equals two. */
10143 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10144 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10145 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10146 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10147 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10148 {
10149 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10150 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10151 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10152 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10153
652d9409 10154 if (do_nonfinite
10155 || (real_isfinite (re0) && real_isfinite (im0)
10156 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 10157 {
10158 const struct real_format *const fmt =
10159 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10160 const int prec = fmt->p;
10161 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
10162 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10163 int inexact;
10164 mpc_t m0, m1;
48e1416a 10165
c699fab8 10166 mpc_init2 (m0, prec);
10167 mpc_init2 (m1, prec);
9af5ce0c 10168 mpfr_from_real (mpc_realref (m0), re0, rnd);
10169 mpfr_from_real (mpc_imagref (m0), im0, rnd);
10170 mpfr_from_real (mpc_realref (m1), re1, rnd);
10171 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 10172 mpfr_clear_flags ();
10173 inexact = func (m0, m0, m1, crnd);
652d9409 10174 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 10175 mpc_clear (m0);
10176 mpc_clear (m1);
10177 }
10178 }
10179
10180 return result;
10181}
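
/* Illustrative sketch (not part of builtins.c): the kind of source-level
   call a user of do_mpc_arg2 can constant-fold; pairing it with cpow and
   mpc_pow as FUNC is an assumption here.  */
#include <complex.h>

double _Complex
example_cpow_fold (void)
{
  return cpow (2.0 + 0.0 * I, 3.0 + 0.0 * I);	/* folds to 8.0 + 0.0i */
}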
239d491a 10182
75a70cf9 10183/* A wrapper function for builtin folding that prevents warnings for
10184 "statement without effect" and the like, caused by removing the
 10185   call node before the warning is generated.  */
10186
10187tree
1a91d914 10188fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 10189{
10190 tree ret = NULL_TREE;
10191 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 10192 location_t loc = gimple_location (stmt);
75a70cf9 10193 if (fndecl
10194 && TREE_CODE (fndecl) == FUNCTION_DECL
10195 && DECL_BUILT_IN (fndecl)
10196 && !gimple_call_va_arg_pack_p (stmt))
10197 {
10198 int nargs = gimple_call_num_args (stmt);
9845fb99 10199 tree *args = (nargs > 0
10200 ? gimple_call_arg_ptr (stmt, 0)
10201 : &error_mark_node);
75a70cf9 10202
198622c0 10203 if (avoid_folding_inline_builtin (fndecl))
10204 return NULL_TREE;
75a70cf9 10205 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10206 {
9845fb99 10207 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 10208 }
10209 else
10210 {
9d884767 10211 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 10212 if (ret)
10213 {
10214 /* Propagate location information from original call to
10215 expansion of builtin. Otherwise things like
10216 maybe_emit_chk_warning, that operate on the expansion
10217 of a builtin, will use the wrong location information. */
10218 if (gimple_has_location (stmt))
10219 {
10220 tree realret = ret;
10221 if (TREE_CODE (ret) == NOP_EXPR)
10222 realret = TREE_OPERAND (ret, 0);
10223 if (CAN_HAVE_LOCATION_P (realret)
10224 && !EXPR_HAS_LOCATION (realret))
389dd41b 10225 SET_EXPR_LOCATION (realret, loc);
75a70cf9 10226 return realret;
10227 }
10228 return ret;
10229 }
10230 }
10231 }
10232 return NULL_TREE;
10233}
7bfefa9d 10234
b9a16870 10235/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 10236 and set ASMSPEC as its user assembler name. DECL must be a
10237 function decl that declares a builtin. */
10238
10239void
10240set_builtin_user_assembler_name (tree decl, const char *asmspec)
10241{
7bfefa9d 10242 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
10243 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
10244 && asmspec != 0);
10245
61ffc71a 10246 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 10247 set_user_assembler_name (builtin, asmspec);
61ffc71a 10248
10249 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10250 && INT_TYPE_SIZE < BITS_PER_WORD)
7bfefa9d 10251 {
61ffc71a 10252 set_user_assembler_libfunc ("ffs", asmspec);
10253 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
10254 "ffs");
7bfefa9d 10255 }
10256}
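
/* Illustrative sketch (not part of builtins.c): the user-level construct
   that reaches the hook above.  Redeclaring a builtin with an asm name
   makes GCC emit calls to that symbol when a library call is needed.  */
extern int ffs (int) __asm__ ("my_ffs");

int
example_ffs_redirect (int x)
{
  return ffs (x);	/* calls my_ffs unless expanded inline */
}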
a6b74a67 10257
10258/* Return true if DECL is a builtin that expands to a constant or similarly
10259 simple code. */
10260bool
10261is_simple_builtin (tree decl)
10262{
10263 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10264 switch (DECL_FUNCTION_CODE (decl))
10265 {
10266 /* Builtins that expand to constants. */
10267 case BUILT_IN_CONSTANT_P:
10268 case BUILT_IN_EXPECT:
10269 case BUILT_IN_OBJECT_SIZE:
10270 case BUILT_IN_UNREACHABLE:
10271 /* Simple register moves or loads from stack. */
fca0886c 10272 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 10273 case BUILT_IN_RETURN_ADDRESS:
10274 case BUILT_IN_EXTRACT_RETURN_ADDR:
10275 case BUILT_IN_FROB_RETURN_ADDR:
10276 case BUILT_IN_RETURN:
10277 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10278 case BUILT_IN_FRAME_ADDRESS:
10279 case BUILT_IN_VA_END:
10280 case BUILT_IN_STACK_SAVE:
10281 case BUILT_IN_STACK_RESTORE:
10282 /* Exception state returns or moves registers around. */
10283 case BUILT_IN_EH_FILTER:
10284 case BUILT_IN_EH_POINTER:
10285 case BUILT_IN_EH_COPY_VALUES:
10286 return true;
10287
10288 default:
10289 return false;
10290 }
10291
10292 return false;
10293}
10294
 10295/* Return true if DECL is a builtin that is not expensive, i.e. one that
 10296   will most probably be expanded inline into reasonably simple code.  This is a
10297 superset of is_simple_builtin. */
10298bool
10299is_inexpensive_builtin (tree decl)
10300{
10301 if (!decl)
10302 return false;
10303 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10304 return true;
10305 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10306 switch (DECL_FUNCTION_CODE (decl))
10307 {
10308 case BUILT_IN_ABS:
10309 case BUILT_IN_ALLOCA:
581bf1c2 10310 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 10311 case BUILT_IN_BSWAP16:
a6b74a67 10312 case BUILT_IN_BSWAP32:
10313 case BUILT_IN_BSWAP64:
10314 case BUILT_IN_CLZ:
10315 case BUILT_IN_CLZIMAX:
10316 case BUILT_IN_CLZL:
10317 case BUILT_IN_CLZLL:
10318 case BUILT_IN_CTZ:
10319 case BUILT_IN_CTZIMAX:
10320 case BUILT_IN_CTZL:
10321 case BUILT_IN_CTZLL:
10322 case BUILT_IN_FFS:
10323 case BUILT_IN_FFSIMAX:
10324 case BUILT_IN_FFSL:
10325 case BUILT_IN_FFSLL:
10326 case BUILT_IN_IMAXABS:
10327 case BUILT_IN_FINITE:
10328 case BUILT_IN_FINITEF:
10329 case BUILT_IN_FINITEL:
10330 case BUILT_IN_FINITED32:
10331 case BUILT_IN_FINITED64:
10332 case BUILT_IN_FINITED128:
10333 case BUILT_IN_FPCLASSIFY:
10334 case BUILT_IN_ISFINITE:
10335 case BUILT_IN_ISINF_SIGN:
10336 case BUILT_IN_ISINF:
10337 case BUILT_IN_ISINFF:
10338 case BUILT_IN_ISINFL:
10339 case BUILT_IN_ISINFD32:
10340 case BUILT_IN_ISINFD64:
10341 case BUILT_IN_ISINFD128:
10342 case BUILT_IN_ISNAN:
10343 case BUILT_IN_ISNANF:
10344 case BUILT_IN_ISNANL:
10345 case BUILT_IN_ISNAND32:
10346 case BUILT_IN_ISNAND64:
10347 case BUILT_IN_ISNAND128:
10348 case BUILT_IN_ISNORMAL:
10349 case BUILT_IN_ISGREATER:
10350 case BUILT_IN_ISGREATEREQUAL:
10351 case BUILT_IN_ISLESS:
10352 case BUILT_IN_ISLESSEQUAL:
10353 case BUILT_IN_ISLESSGREATER:
10354 case BUILT_IN_ISUNORDERED:
10355 case BUILT_IN_VA_ARG_PACK:
10356 case BUILT_IN_VA_ARG_PACK_LEN:
10357 case BUILT_IN_VA_COPY:
10358 case BUILT_IN_TRAP:
10359 case BUILT_IN_SAVEREGS:
10360 case BUILT_IN_POPCOUNTL:
10361 case BUILT_IN_POPCOUNTLL:
10362 case BUILT_IN_POPCOUNTIMAX:
10363 case BUILT_IN_POPCOUNT:
10364 case BUILT_IN_PARITYL:
10365 case BUILT_IN_PARITYLL:
10366 case BUILT_IN_PARITYIMAX:
10367 case BUILT_IN_PARITY:
10368 case BUILT_IN_LABS:
10369 case BUILT_IN_LLABS:
10370 case BUILT_IN_PREFETCH:
ca4c3545 10371 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 10372 return true;
10373
10374 default:
10375 return is_simple_builtin (decl);
10376 }
10377
10378 return false;
10379}
507a998e 10380
10381/* Return true if T is a constant and the value cast to a target char
10382 can be represented by a host char.
 10383   Store the cast char constant in *P if so.  */
10384
10385bool
10386target_char_cst_p (tree t, char *p)
10387{
10388 if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10389 return false;
10390
10391 *p = (char)tree_to_uhwi (t);
10392 return true;
10393}
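
/* Illustrative sketch (hypothetical helper, not in builtins.c): how a
   folder might use target_char_cst_p to pick off a constant character
   argument before simplifying.  */

static bool
example_is_nul_char (tree arg)
{
  char c;
  return target_char_cst_p (arg, &c) && c == '\0';
}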