/* Expand builtin functions.
   Copyright (C) 1988-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"


struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_ or __atomic_,
   or names one of the Cilk Plus runtime entry points.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */
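
/* For illustration (an example added here, not derived from any specific
   target): if the address of EXP can be shown to be 16 * k + 4 bytes for
   an unknown k, then M is 16 * BITS_PER_UNIT and N is 4 * BITS_PER_UNIT,
   i.e. *ALIGNP becomes 128 and *BITPOSP becomes 32 on a target with
   8-bit units.  */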

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
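
/* An illustrative example (not from the original comment): for the
   literal "hello", c_strlen returns ssize_int (5) for any ONLY_VALUE;
   for i++ ? "foo" : "bar" it returns ssize_int (3) only when ONLY_VALUE
   is nonzero, since both arms have length 3 but the side effect of i++
   could not otherwise be preserved.  */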

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
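
/* For instance (a sketch, assuming 8-bit units): on a little-endian
   target, c_readstr ("abcd", SImode) yields the constant 0x64636261,
   with 'a' (0x61) in the least significant byte; on a big-endian target
   the same call yields 0x61626364.  */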

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}

/* Cast a target constant CST to target CHAR and, if that value fits into
   the host char type, return zero and put that value into the variable
   pointed to by P.  */
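
/* E.g. (illustrative): for CST built by build_int_cst (char_type_node, 65)
   on a target whose char is as wide as the host's, this stores 'A' (in an
   ASCII host charset) in *P and returns zero; a non-INTEGER_CST argument
   makes it return 1 instead.  */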

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
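
/* E.g. (illustrative): __builtin_return_address (0) requires no chain
   walking and yields the current function's return address, while
   __builtin_frame_address (2) follows the dynamic chain twice and yields
   the frame address of the caller's caller.  */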

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */
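
  /* The resulting buffer layout is therefore (a sketch, in Pmode-sized
     words):
       buf[0]   the frame value of the caller
       buf[1]   the address of RECEIVER_LABEL
       buf[2..] a machine-dependent stack save area, filled in below.  */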

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
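
/* A usage sketch (user code, not part of this file):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   after which control re-emerges at the __builtin_setjmp call with
   return value 1.  The second argument must be the constant 1; this is
   enforced by the assertion on VALUE below.  */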

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
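
/* For example (patterns that appear in uses later in this file):

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   accepts one pointer argument followed by any further arguments.  */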

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis; any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink; if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
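
/* E.g. (illustrative): __builtin_prefetch (p, 1, 3) requests a prefetch
   of *p for writing with maximal temporal locality, and a plain
   __builtin_prefetch (p) behaves like __builtin_prefetch (p, 0, 3) per
   the argument defaults applied below.  */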

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ...).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
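
/* The block laid out here looks like this (a sketch):

     offset 0                 incoming arg pointer (Pmode)
     GET_MODE_SIZE (Pmode)    structure value address, present only if
                              it is passed in a register rather than as
                              an "invisible" first argument
     ...                      one slot per argument register, each
                              aligned to its mode's alignment

   apply_result_size below computes the analogous block for the
   value-return registers.  */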

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    mode = targetm.calls.get_raw_arg_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size (void)
{
  static int size = -1;
  int align, regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (targetm.calls.function_value_regno_p (regno))
	  {
	    mode = targetm.calls.get_raw_result_mode (regno);

	    gcc_assert (mode != VOIDmode);

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1427 /* We need the pointer as the caller actually passed them to us, not
9d4b544c 1428 as we might have pretended they were passed. Make sure it's a valid
1429 operand, as emit_move_insn isn't expected to handle a PLUS. */
3764c94e 1430 if (STACK_GROWS_DOWNWARD)
1431 tem
1432 = force_operand (plus_constant (Pmode, tem,
1433 crtl->args.pretend_args_size),
1434 NULL_RTX);
1c7e61a7 1435 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1436
53800dbe 1437 size = GET_MODE_SIZE (Pmode);
1438
1439 /* Save the structure value address unless this is passed as an
1440 "invisible" first argument. */
45550790 1441 if (struct_incoming_value)
53800dbe 1442 {
e513d163 1443 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1444 copy_to_reg (struct_incoming_value));
53800dbe 1445 size += GET_MODE_SIZE (Pmode);
1446 }
1447
1448 /* Return the address of the block. */
1449 return copy_addr_to_reg (XEXP (registers, 0));
1450}
1451
1452/* __builtin_apply_args returns a block of memory allocated on
1453 the stack, into which are stored the arg pointer, the structure
1454 value address, the static chain, and all the registers that might
1455 possibly be used in performing a function call. The code is
1456 moved to the start of the function so the incoming values are
1457 saved. */
27d0c333 1458
53800dbe 1459static rtx
aecda0d6 1460expand_builtin_apply_args (void)
53800dbe 1461{
1462 /* Don't do __builtin_apply_args more than once in a function.
1463 Save the result of the first call and reuse it. */
1464 if (apply_args_value != 0)
1465 return apply_args_value;
1466 {
1467 /* When this function is called, it means that registers must be
1468 saved on entry to this function. So we migrate the
1469 call to the first insn of this function. */
1470 rtx temp;
53800dbe 1471
1472 start_sequence ();
1473 temp = expand_builtin_apply_args_1 ();
9ed997be 1474 rtx_insn *seq = get_insns ();
53800dbe 1475 end_sequence ();
1476
1477 apply_args_value = temp;
1478
31d3e01c 1479 /* Put the insns after the NOTE that starts the function.
1480 If this is inside a start_sequence, make the outer-level insn
53800dbe 1481 chain current, so the code is placed at the start of the
0ef1a651 1482 function. If internal_arg_pointer is a non-virtual pseudo,
1483 it needs to be placed after the insn that initializes
1484 that pseudo. */
53800dbe 1485 push_topmost_sequence ();
0ef1a651 1486 if (REG_P (crtl->args.internal_arg_pointer)
1487 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1488 emit_insn_before (seq, parm_birth_insn);
1489 else
1490 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1491 pop_topmost_sequence ();
1492 return temp;
1493 }
1494}
1495
1496/* Perform an untyped call and save the state required to perform an
1497 untyped return of whatever value was returned by the given function. */
1498
1499static rtx
aecda0d6 1500expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1501{
1502 int size, align, regno;
3754d046 1503 machine_mode mode;
1e0c0b35 1504 rtx incoming_args, result, reg, dest, src;
1505 rtx_call_insn *call_insn;
53800dbe 1506 rtx old_stack_level = 0;
1507 rtx call_fusage = 0;
6812c89e 1508 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1509
85d654dd 1510 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1511
53800dbe 1512 /* Create a block where the return registers can be saved. */
1513 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1514
53800dbe 1515 /* Fetch the arg pointer from the ARGUMENTS block. */
1516 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1517 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1518 if (!STACK_GROWS_DOWNWARD)
1519 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1520 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1521
04a46d40 1522 /* Push a new argument block and copy the arguments. Do not allow
1523 the (potential) memcpy call below to interfere with our stack
1524 manipulations. */
53800dbe 1525 do_pending_stack_adjust ();
04a46d40 1526 NO_DEFER_POP;
53800dbe 1527
2358393e 1528 /* Save the stack, using the nonlocal save variant if the target provides one. */
71512c05 1529 if (targetm.have_save_stack_nonlocal ())
e9c97615 1530 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1531 else
e9c97615 1532 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1533
59647703 1534 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1535 arguments to the outgoing arguments address. We can pass TRUE
1536 as the 4th argument because we just saved the stack pointer
1537 and will restore it right after the call. */
5be42b39 1538 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1539
1540 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1541 may have already set current_function_calls_alloca to true.
1542 current_function_calls_alloca won't be set if argsize is zero,
1543 so we have to guarantee need_drap is true here. */
1544 if (SUPPORTS_STACK_ALIGNMENT)
1545 crtl->need_drap = true;
1546
59647703 1547 dest = virtual_outgoing_args_rtx;
3764c94e 1548 if (!STACK_GROWS_DOWNWARD)
1549 {
1550 if (CONST_INT_P (argsize))
1551 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1552 else
1553 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1554 }
2a631e19 1555 dest = gen_rtx_MEM (BLKmode, dest);
1556 set_mem_align (dest, PARM_BOUNDARY);
1557 src = gen_rtx_MEM (BLKmode, incoming_args);
1558 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1559 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1560
1561 /* Refer to the argument block. */
1562 apply_args_size ();
1563 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1564 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1565
1566 /* Walk past the arg-pointer and structure value address. */
1567 size = GET_MODE_SIZE (Pmode);
45550790 1568 if (struct_value)
53800dbe 1569 size += GET_MODE_SIZE (Pmode);
1570
1571 /* Restore each of the registers previously saved. Make USE insns
1572 for each of these registers for use in making the call. */
1573 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1574 if ((mode = apply_args_mode[regno]) != VOIDmode)
1575 {
1576 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1577 if (size % align != 0)
1578 size = CEIL (size, align) * align;
1579 reg = gen_rtx_REG (mode, regno);
e513d163 1580 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1581 use_reg (&call_fusage, reg);
1582 size += GET_MODE_SIZE (mode);
1583 }
1584
1585 /* Restore the structure value address unless this is passed as an
1586 "invisible" first argument. */
1587 size = GET_MODE_SIZE (Pmode);
45550790 1588 if (struct_value)
53800dbe 1589 {
1590 rtx value = gen_reg_rtx (Pmode);
e513d163 1591 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1592 emit_move_insn (struct_value, value);
8ad4c111 1593 if (REG_P (struct_value))
45550790 1594 use_reg (&call_fusage, struct_value);
53800dbe 1595 size += GET_MODE_SIZE (Pmode);
1596 }
1597
1598 /* All arguments and registers used for the call are set up by now! */
82c7907c 1599 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1600
1601 /* Ensure the address is valid. A SYMBOL_REF is already valid, so no
1602 work is needed, and we don't want to load it into a register as an
1603 optimization, because prepare_call_address already did that if needed. */
1604 if (GET_CODE (function) != SYMBOL_REF)
1605 function = memory_address (FUNCTION_MODE, function);
1606
1607 /* Generate the actual call instruction and save the return value. */
1d99ab0a 1608 if (targetm.have_untyped_call ())
1609 {
1610 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1611 emit_call_insn (targetm.gen_untyped_call (mem, result,
1612 result_vector (1, result)));
1613 }
7f265a08 1614 else if (targetm.have_call_value ())
53800dbe 1615 {
1616 rtx valreg = 0;
1617
1618 /* Locate the unique return register. It is not possible to
1619 express a call that sets more than one return register using
1620 call_value; use untyped_call for that. In fact, untyped_call
1621 only needs to save the return registers in the given block. */
1622 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1623 if ((mode = apply_result_mode[regno]) != VOIDmode)
1624 {
7f265a08 1625 gcc_assert (!valreg); /* have_untyped_call required. */
7d3f6cc7 1626
53800dbe 1627 valreg = gen_rtx_REG (mode, regno);
1628 }
1629
7f265a08 1630 emit_insn (targetm.gen_call_value (valreg,
1631 gen_rtx_MEM (FUNCTION_MODE, function),
1632 const0_rtx, NULL_RTX, const0_rtx));
53800dbe 1633
e513d163 1634 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1635 }
1636 else
64db345d 1637 gcc_unreachable ();
53800dbe 1638
d5f9786f 1639 /* Find the CALL insn we just emitted, and attach the register usage
1640 information. */
1641 call_insn = last_call_insn ();
1642 add_function_usage_to (call_insn, call_fusage);
53800dbe 1643
1644 /* Restore the stack. */
71512c05 1645 if (targetm.have_save_stack_nonlocal ())
e9c97615 1646 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1647 else
e9c97615 1648 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1649 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1650
04a46d40 1651 OK_DEFER_POP;
1652
53800dbe 1653 /* Return the address of the result block. */
85d654dd 1654 result = copy_addr_to_reg (XEXP (result, 0));
1655 return convert_memory_address (ptr_mode, result);
53800dbe 1656}
1657
1658/* Perform an untyped return. */
1659
1660static void
aecda0d6 1661expand_builtin_return (rtx result)
53800dbe 1662{
1663 int size, align, regno;
3754d046 1664 machine_mode mode;
53800dbe 1665 rtx reg;
57c26b3a 1666 rtx_insn *call_fusage = 0;
53800dbe 1667
85d654dd 1668 result = convert_memory_address (Pmode, result);
726ec87c 1669
53800dbe 1670 apply_result_size ();
1671 result = gen_rtx_MEM (BLKmode, result);
1672
1d99ab0a 1673 if (targetm.have_untyped_return ())
53800dbe 1674 {
1d99ab0a 1675 rtx vector = result_vector (0, result);
1676 emit_jump_insn (targetm.gen_untyped_return (result, vector));
53800dbe 1677 emit_barrier ();
1678 return;
1679 }
53800dbe 1680
1681 /* Restore the return value and note that each value is used. */
1682 size = 0;
1683 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1684 if ((mode = apply_result_mode[regno]) != VOIDmode)
1685 {
1686 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1687 if (size % align != 0)
1688 size = CEIL (size, align) * align;
1689 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1690 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1691
1692 push_to_sequence (call_fusage);
18b42941 1693 emit_use (reg);
53800dbe 1694 call_fusage = get_insns ();
1695 end_sequence ();
1696 size += GET_MODE_SIZE (mode);
1697 }
1698
1699 /* Put the USE insns before the return. */
31d3e01c 1700 emit_insn (call_fusage);
53800dbe 1701
1702 /* Return whatever value was restored by jumping directly to the end
1703 of the function. */
62380d2d 1704 expand_naked_return ();
53800dbe 1705}
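/* Taken together, the three expanders above implement GCC's untyped
   call extension.  A minimal user-level sketch (the argument-block
   size 128 is an arbitrary illustrative bound, not a required value):

     void *
     forward_call (void (*fn) ())
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply (fn, args, 128);
       __builtin_return (result);
     }

   __builtin_apply_args captures the incoming argument registers,
   __builtin_apply replays them for FN and saves FN's return
   registers, and __builtin_return performs an untyped return of the
   saved value.  */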
1706
539a3a92 1707/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1708
539a3a92 1709static enum type_class
aecda0d6 1710type_to_class (tree type)
539a3a92 1711{
1712 switch (TREE_CODE (type))
1713 {
1714 case VOID_TYPE: return void_type_class;
1715 case INTEGER_TYPE: return integer_type_class;
539a3a92 1716 case ENUMERAL_TYPE: return enumeral_type_class;
1717 case BOOLEAN_TYPE: return boolean_type_class;
1718 case POINTER_TYPE: return pointer_type_class;
1719 case REFERENCE_TYPE: return reference_type_class;
1720 case OFFSET_TYPE: return offset_type_class;
1721 case REAL_TYPE: return real_type_class;
1722 case COMPLEX_TYPE: return complex_type_class;
1723 case FUNCTION_TYPE: return function_type_class;
1724 case METHOD_TYPE: return method_type_class;
1725 case RECORD_TYPE: return record_type_class;
1726 case UNION_TYPE:
1727 case QUAL_UNION_TYPE: return union_type_class;
1728 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1729 ? string_type_class : array_type_class);
539a3a92 1730 case LANG_TYPE: return lang_type_class;
1731 default: return no_type_class;
1732 }
1733}
bf8e3599 1734
c2f47e15 1735/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1736
53800dbe 1737static rtx
c2f47e15 1738expand_builtin_classify_type (tree exp)
53800dbe 1739{
c2f47e15 1740 if (call_expr_nargs (exp))
1741 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1742 return GEN_INT (no_type_class);
1743}
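/* E.g. __builtin_classify_type applied to a `char *' expression folds
   to the constant pointer_type_class from the enum above, while a
   call with no arguments yields no_type_class.  */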
1744
07976da7 1745/* This helper macro, meant to be used in mathfn_built_in_2 below,
1746 determines which among a set of three builtin math functions is
1747 appropriate for a given type mode. The `F' and `L' cases are
1748 automatically generated from the `double' case. */
e3240774 1749#define CASE_MATHFN(MATHFN) \
1750 CASE_CFN_##MATHFN: \
1751 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1752 fcodel = BUILT_IN_##MATHFN##L ; break;
cd2656b0 1753/* Similar to above, but appends _R after any F/L suffix. */
e3240774 1754#define CASE_MATHFN_REENT(MATHFN) \
1755 case CFN_BUILT_IN_##MATHFN##_R: \
1756 case CFN_BUILT_IN_##MATHFN##F_R: \
1757 case CFN_BUILT_IN_##MATHFN##L_R: \
1758 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1759 fcodel = BUILT_IN_##MATHFN##L_R ; break;
07976da7 1760
6c21be92 1761/* Return a function equivalent to FN but operating on floating-point
1762 values of type TYPE, or END_BUILTINS if no such function exists.
e3240774 1763 This is purely an operation on function codes; it does not guarantee
1764 that the target actually has an implementation of the function. */
c319d56a 1765
6c21be92 1766static built_in_function
e3240774 1767mathfn_built_in_2 (tree type, combined_fn fn)
0a68165a 1768{
6c21be92 1769 built_in_function fcode, fcodef, fcodel;
07976da7 1770
1771 switch (fn)
1772 {
e3240774 1773 CASE_MATHFN (ACOS)
1774 CASE_MATHFN (ACOSH)
1775 CASE_MATHFN (ASIN)
1776 CASE_MATHFN (ASINH)
1777 CASE_MATHFN (ATAN)
1778 CASE_MATHFN (ATAN2)
1779 CASE_MATHFN (ATANH)
1780 CASE_MATHFN (CBRT)
1781 CASE_MATHFN (CEIL)
1782 CASE_MATHFN (CEXPI)
1783 CASE_MATHFN (COPYSIGN)
1784 CASE_MATHFN (COS)
1785 CASE_MATHFN (COSH)
1786 CASE_MATHFN (DREM)
1787 CASE_MATHFN (ERF)
1788 CASE_MATHFN (ERFC)
1789 CASE_MATHFN (EXP)
1790 CASE_MATHFN (EXP10)
1791 CASE_MATHFN (EXP2)
1792 CASE_MATHFN (EXPM1)
1793 CASE_MATHFN (FABS)
1794 CASE_MATHFN (FDIM)
1795 CASE_MATHFN (FLOOR)
1796 CASE_MATHFN (FMA)
1797 CASE_MATHFN (FMAX)
1798 CASE_MATHFN (FMIN)
1799 CASE_MATHFN (FMOD)
1800 CASE_MATHFN (FREXP)
1801 CASE_MATHFN (GAMMA)
1802 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1803 CASE_MATHFN (HUGE_VAL)
1804 CASE_MATHFN (HYPOT)
1805 CASE_MATHFN (ILOGB)
1806 CASE_MATHFN (ICEIL)
1807 CASE_MATHFN (IFLOOR)
1808 CASE_MATHFN (INF)
1809 CASE_MATHFN (IRINT)
1810 CASE_MATHFN (IROUND)
1811 CASE_MATHFN (ISINF)
1812 CASE_MATHFN (J0)
1813 CASE_MATHFN (J1)
1814 CASE_MATHFN (JN)
1815 CASE_MATHFN (LCEIL)
1816 CASE_MATHFN (LDEXP)
1817 CASE_MATHFN (LFLOOR)
1818 CASE_MATHFN (LGAMMA)
1819 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1820 CASE_MATHFN (LLCEIL)
1821 CASE_MATHFN (LLFLOOR)
1822 CASE_MATHFN (LLRINT)
1823 CASE_MATHFN (LLROUND)
1824 CASE_MATHFN (LOG)
1825 CASE_MATHFN (LOG10)
1826 CASE_MATHFN (LOG1P)
1827 CASE_MATHFN (LOG2)
1828 CASE_MATHFN (LOGB)
1829 CASE_MATHFN (LRINT)
1830 CASE_MATHFN (LROUND)
1831 CASE_MATHFN (MODF)
1832 CASE_MATHFN (NAN)
1833 CASE_MATHFN (NANS)
1834 CASE_MATHFN (NEARBYINT)
1835 CASE_MATHFN (NEXTAFTER)
1836 CASE_MATHFN (NEXTTOWARD)
1837 CASE_MATHFN (POW)
1838 CASE_MATHFN (POWI)
1839 CASE_MATHFN (POW10)
1840 CASE_MATHFN (REMAINDER)
1841 CASE_MATHFN (REMQUO)
1842 CASE_MATHFN (RINT)
1843 CASE_MATHFN (ROUND)
1844 CASE_MATHFN (SCALB)
1845 CASE_MATHFN (SCALBLN)
1846 CASE_MATHFN (SCALBN)
1847 CASE_MATHFN (SIGNBIT)
1848 CASE_MATHFN (SIGNIFICAND)
1849 CASE_MATHFN (SIN)
1850 CASE_MATHFN (SINCOS)
1851 CASE_MATHFN (SINH)
1852 CASE_MATHFN (SQRT)
1853 CASE_MATHFN (TAN)
1854 CASE_MATHFN (TANH)
1855 CASE_MATHFN (TGAMMA)
1856 CASE_MATHFN (TRUNC)
1857 CASE_MATHFN (Y0)
1858 CASE_MATHFN (Y1)
1859 CASE_MATHFN (YN)
07976da7 1860
e3240774 1861 default:
1862 return END_BUILTINS;
1863 }
07976da7 1864
96b9f485 1865 if (TYPE_MAIN_VARIANT (type) == double_type_node)
6c21be92 1866 return fcode;
96b9f485 1867 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
6c21be92 1868 return fcodef;
96b9f485 1869 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
6c21be92 1870 return fcodel;
07976da7 1871 else
6c21be92 1872 return END_BUILTINS;
1873}
1874
1875/* Return the mathematical function equivalent to FN but operating directly on TYPE,
1876 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1877 otherwise use the explicit declaration. If we can't do the conversion,
1878 return null. */
1879
1880static tree
e3240774 1881mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
6c21be92 1882{
1883 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1884 if (fcode2 == END_BUILTINS)
c2f47e15 1885 return NULL_TREE;
b9a16870 1886
1887 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1888 return NULL_TREE;
1889
1890 return builtin_decl_explicit (fcode2);
0a68165a 1891}
1892
e3240774 1893/* Like mathfn_built_in_1, but always use the implicit builtin declarations. */
c319d56a 1894
1895tree
e3240774 1896mathfn_built_in (tree type, combined_fn fn)
c319d56a 1897{
1898 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1899}
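/* For example, mathfn_built_in (float_type_node, CFN_BUILT_IN_SQRT)
   yields the decl of BUILT_IN_SQRTF, provided the front end has made
   that builtin implicitly available.  */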
1900
e3240774 1901/* Like mathfn_built_in_1, but take a built_in_function and
1902 always use the implicit builtin declarations. */
1903
1904tree
1905mathfn_built_in (tree type, enum built_in_function fn)
1906{
1907 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1908}
1909
1f24b8e9 1910/* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1911 return its code, otherwise return IFN_LAST. Note that this function
1912 only tests whether the function is defined in internal-fn.def, not whether
1913 it is actually available on the target. */
1914
1915internal_fn
1916associated_internal_fn (tree fndecl)
1917{
1918 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1919 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1920 switch (DECL_FUNCTION_CODE (fndecl))
1921 {
1922#define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1923 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
c9452b7c 1924#define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1925 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1f24b8e9 1926#include "internal-fn.def"
1927
1928 CASE_FLT_FN (BUILT_IN_POW10):
1929 return IFN_EXP10;
1930
1931 CASE_FLT_FN (BUILT_IN_DREM):
1932 return IFN_REMAINDER;
1933
1934 CASE_FLT_FN (BUILT_IN_SCALBN):
1935 CASE_FLT_FN (BUILT_IN_SCALBLN):
1936 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1937 return IFN_LDEXP;
1938 return IFN_LAST;
1939
1940 default:
1941 return IFN_LAST;
1942 }
1943}
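/* E.g. the decl of BUILT_IN_SQRTF maps to IFN_SQRT through the
   internal-fn.def expansion above, and BUILT_IN_POW10 maps to
   IFN_EXP10 via the explicit case.  */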
1944
1945/* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1946 on the current target by a call to an internal function, return the
1947 code of that internal function, otherwise return IFN_LAST. The caller
1948 is responsible for ensuring that any side-effects of the built-in
1949 call are dealt with correctly. E.g. if CALL sets errno, the caller
1950 must decide that the errno result isn't needed or make it available
1951 in some other way. */
1952
1953internal_fn
1954replacement_internal_fn (gcall *call)
1955{
1956 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1957 {
1958 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1959 if (ifn != IFN_LAST)
1960 {
1961 tree_pair types = direct_internal_fn_types (ifn, call);
acdfe9e0 1962 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1963 if (direct_internal_fn_supported_p (ifn, types, opt_type))
1f24b8e9 1964 return ifn;
1965 }
1966 }
1967 return IFN_LAST;
1968}
1969
7e0713b1 1970/* Expand a call to the builtin ternary math functions (fma).
1971 Return NULL_RTX if a normal call should be emitted rather than expanding the
1972 function in-line. EXP is the expression that is a call to the builtin
1973 function; if convenient, the result should be placed in TARGET.
1974 SUBTARGET may be used as the target for computing one of EXP's
1975 operands. */
1976
1977static rtx
1978expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1979{
1980 optab builtin_optab;
1e0c0b35 1981 rtx op0, op1, op2, result;
1982 rtx_insn *insns;
7e0713b1 1983 tree fndecl = get_callee_fndecl (exp);
1984 tree arg0, arg1, arg2;
3754d046 1985 machine_mode mode;
7e0713b1 1986
1987 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1988 return NULL_RTX;
1989
1990 arg0 = CALL_EXPR_ARG (exp, 0);
1991 arg1 = CALL_EXPR_ARG (exp, 1);
1992 arg2 = CALL_EXPR_ARG (exp, 2);
1993
1994 switch (DECL_FUNCTION_CODE (fndecl))
1995 {
1996 CASE_FLT_FN (BUILT_IN_FMA):
1997 builtin_optab = fma_optab; break;
1998 default:
1999 gcc_unreachable ();
2000 }
2001
2002 /* Make a suitable register to place result in. */
2003 mode = TYPE_MODE (TREE_TYPE (exp));
2004
2005 /* Before working hard, check whether the instruction is available. */
2006 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2007 return NULL_RTX;
2008
de2e453e 2009 result = gen_reg_rtx (mode);
7e0713b1 2010
2011 /* Always stabilize the argument list. */
2012 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2013 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2014 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2015
2016 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2017 op1 = expand_normal (arg1);
2018 op2 = expand_normal (arg2);
2019
2020 start_sequence ();
2021
de2e453e 2022 /* Compute into RESULT.
2023 Set RESULT to wherever the result comes back. */
2024 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2025 result, 0);
7e0713b1 2026
2027 /* If we were unable to expand via the builtin, stop the sequence
2028 (without outputting the insns) and call the library function
2029 with the stabilized argument list. */
de2e453e 2030 if (result == 0)
7e0713b1 2031 {
2032 end_sequence ();
2033 return expand_call (exp, target, target == const0_rtx);
2034 }
2035
2036 /* Output the entire sequence. */
2037 insns = get_insns ();
2038 end_sequence ();
2039 emit_insn (insns);
2040
de2e453e 2041 return result;
7e0713b1 2042}
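/* So when the target provides an fma<mode>4 pattern, fma (a, b, c)
   expands to that single insn, with the single rounding the library
   function guarantees; otherwise the sequence is abandoned and a
   normal libcall is emitted with the SAVE_EXPR-stabilized
   arguments.  */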
2043
6b43bae4 2044/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2045 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2046 function in-line. EXP is the expression that is a call to the builtin
2047 function; if convenient, the result should be placed in TARGET.
2048 SUBTARGET may be used as the target for computing one of EXP's
2049 operands. */
2050
2051static rtx
2052expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2053{
2054 optab builtin_optab;
1e0c0b35 2055 rtx op0;
2056 rtx_insn *insns;
6b43bae4 2057 tree fndecl = get_callee_fndecl (exp);
3754d046 2058 machine_mode mode;
abfea505 2059 tree arg;
6b43bae4 2060
c2f47e15 2061 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2062 return NULL_RTX;
6b43bae4 2063
c2f47e15 2064 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2065
2066 switch (DECL_FUNCTION_CODE (fndecl))
2067 {
4f35b1fc 2068 CASE_FLT_FN (BUILT_IN_SIN):
2069 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2070 builtin_optab = sincos_optab; break;
2071 default:
64db345d 2072 gcc_unreachable ();
6b43bae4 2073 }
2074
2075 /* Make a suitable register to place result in. */
2076 mode = TYPE_MODE (TREE_TYPE (exp));
2077
6b43bae4 2078 /* Check if the sincos insn is available; otherwise fall back
0bed3869 2079 to the sin or cos insn. */
d6bf3b14 2080 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2081 switch (DECL_FUNCTION_CODE (fndecl))
2082 {
4f35b1fc 2083 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2084 builtin_optab = sin_optab; break;
4f35b1fc 2085 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2086 builtin_optab = cos_optab; break;
2087 default:
64db345d 2088 gcc_unreachable ();
6b43bae4 2089 }
6b43bae4 2090
2091 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2092 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2093 {
de2e453e 2094 rtx result = gen_reg_rtx (mode);
6b43bae4 2095
2096 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2097 need to expand the argument again. This way, we will not perform
2098 side-effects more than once. */
abfea505 2099 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2100
1db6d067 2101 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2102
6b43bae4 2103 start_sequence ();
2104
de2e453e 2105 /* Compute into RESULT.
2106 Set RESULT to wherever the result comes back. */
6b43bae4 2107 if (builtin_optab == sincos_optab)
2108 {
de2e453e 2109 int ok;
7d3f6cc7 2110
6b43bae4 2111 switch (DECL_FUNCTION_CODE (fndecl))
2112 {
4f35b1fc 2113 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2114 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2115 break;
4f35b1fc 2116 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2117 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2118 break;
2119 default:
64db345d 2120 gcc_unreachable ();
6b43bae4 2121 }
de2e453e 2122 gcc_assert (ok);
6b43bae4 2123 }
2124 else
de2e453e 2125 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2126
de2e453e 2127 if (result != 0)
6b43bae4 2128 {
6b43bae4 2129 /* Output the entire sequence. */
2130 insns = get_insns ();
2131 end_sequence ();
2132 emit_insn (insns);
de2e453e 2133 return result;
6b43bae4 2134 }
2135
2136 /* If we were unable to expand via the builtin, stop the sequence
2137 (without outputting the insns) and call the library function
2138 with the stabilized argument list. */
2139 end_sequence ();
2140 }
2141
de2e453e 2142 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2143}
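/* Note that when only sin or cos is wanted but the target provides
   only a sincos insn, the unwanted output above is passed as 0, so
   expand_twoval_unop computes the extra value into a fresh pseudo
   that is simply never used.  */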
2144
a65c4d64 2145/* Given an interclass math builtin decl FNDECL and its argument ARG,
2146 return an RTL instruction code that implements the functionality.
2147 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2148
a65c4d64 2149static enum insn_code
2150interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2151{
a65c4d64 2152 bool errno_set = false;
6cdd383a 2153 optab builtin_optab = unknown_optab;
3754d046 2154 machine_mode mode;
a67a90e5 2155
2156 switch (DECL_FUNCTION_CODE (fndecl))
2157 {
2158 CASE_FLT_FN (BUILT_IN_ILOGB):
2159 errno_set = true; builtin_optab = ilogb_optab; break;
69b779ea 2160 CASE_FLT_FN (BUILT_IN_ISINF):
2161 builtin_optab = isinf_optab; break;
8a1a9cb7 2162 case BUILT_IN_ISNORMAL:
cde061c1 2163 case BUILT_IN_ISFINITE:
2164 CASE_FLT_FN (BUILT_IN_FINITE):
a65c4d64 2165 case BUILT_IN_FINITED32:
2166 case BUILT_IN_FINITED64:
2167 case BUILT_IN_FINITED128:
2168 case BUILT_IN_ISINFD32:
2169 case BUILT_IN_ISINFD64:
2170 case BUILT_IN_ISINFD128:
cde061c1 2171 /* These builtins have no optabs (yet). */
2172 break;
a67a90e5 2173 default:
2174 gcc_unreachable ();
2175 }
2176
2177 /* There's no easy way to detect the case we need to set EDOM. */
2178 if (flag_errno_math && errno_set)
a65c4d64 2179 return CODE_FOR_nothing;
a67a90e5 2180
2181 /* Optab mode depends on the mode of the input argument. */
2182 mode = TYPE_MODE (TREE_TYPE (arg));
2183
cde061c1 2184 if (builtin_optab)
d6bf3b14 2185 return optab_handler (builtin_optab, mode);
a65c4d64 2186 return CODE_FOR_nothing;
2187}
2188
2189/* Expand a call to one of the builtin math functions that operate on
2190 floating point argument and output an integer result (ilogb, isinf,
2191 isnan, etc).
2192 Return 0 if a normal call should be emitted rather than expanding the
2193 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2194 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2195
2196static rtx
f97eea22 2197expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2198{
2199 enum insn_code icode = CODE_FOR_nothing;
2200 rtx op0;
2201 tree fndecl = get_callee_fndecl (exp);
3754d046 2202 machine_mode mode;
a65c4d64 2203 tree arg;
2204
2205 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2206 return NULL_RTX;
2207
2208 arg = CALL_EXPR_ARG (exp, 0);
2209 icode = interclass_mathfn_icode (arg, fndecl);
2210 mode = TYPE_MODE (TREE_TYPE (arg));
2211
a67a90e5 2212 if (icode != CODE_FOR_nothing)
2213 {
8786db1e 2214 struct expand_operand ops[1];
1e0c0b35 2215 rtx_insn *last = get_last_insn ();
4e2a2fb4 2216 tree orig_arg = arg;
a67a90e5 2217
2218 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2219 need to expand the argument again. This way, we will not perform
2220 side-effects more than once. */
abfea505 2221 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2222
f97eea22 2223 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2224
2225 if (mode != GET_MODE (op0))
2226 op0 = convert_to_mode (mode, op0, 0);
2227
8786db1e 2228 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2229 if (maybe_legitimize_operands (icode, 0, 1, ops)
2230 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2231 return ops[0].value;
2232
4e2a2fb4 2233 delete_insns_since (last);
2234 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2235 }
2236
a65c4d64 2237 return NULL_RTX;
a67a90e5 2238}
2239
c3147c1a 2240/* Expand a call to the builtin sincos math function.
c2f47e15 2241 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2242 function in-line. EXP is the expression that is a call to the builtin
2243 function. */
2244
2245static rtx
2246expand_builtin_sincos (tree exp)
2247{
2248 rtx op0, op1, op2, target1, target2;
3754d046 2249 machine_mode mode;
c3147c1a 2250 tree arg, sinp, cosp;
2251 int result;
389dd41b 2252 location_t loc = EXPR_LOCATION (exp);
be5575b2 2253 tree alias_type, alias_off;
c3147c1a 2254
c2f47e15 2255 if (!validate_arglist (exp, REAL_TYPE,
2256 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2257 return NULL_RTX;
c3147c1a 2258
c2f47e15 2259 arg = CALL_EXPR_ARG (exp, 0);
2260 sinp = CALL_EXPR_ARG (exp, 1);
2261 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2262
2263 /* Make a suitable register to place result in. */
2264 mode = TYPE_MODE (TREE_TYPE (arg));
2265
2266 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2267 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2268 return NULL_RTX;
2269
2270 target1 = gen_reg_rtx (mode);
2271 target2 = gen_reg_rtx (mode);
2272
8ec3c5c2 2273 op0 = expand_normal (arg);
be5575b2 2274 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2275 alias_off = build_int_cst (alias_type, 0);
2276 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2277 sinp, alias_off));
2278 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2279 cosp, alias_off));
c3147c1a 2280
2281 /* Compute into target1 and target2.
2282 expand_twoval_unop leaves the results in those registers. */
2283 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2284 gcc_assert (result);
2285
2286 /* Move target1 and target2 to the memory locations indicated
2287 by op1 and op2. */
2288 emit_move_insn (op1, target1);
2289 emit_move_insn (op2, target2);
2290
2291 return const0_rtx;
2292}
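/* So a source-level call

     sincos (x, &s, &c);

   becomes, when the target has a sincos insn, a single instruction
   computing both values plus two stores through the given pointers;
   without the optab, NULL_RTX is returned and the caller emits a
   normal library call.  */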
2293
d735c391 2294/* Expand a call to the internal cexpi builtin to the sincos math function.
2295 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2296 the result should be placed in TARGET. */
d735c391 2297
2298static rtx
f97eea22 2299expand_builtin_cexpi (tree exp, rtx target)
d735c391 2300{
2301 tree fndecl = get_callee_fndecl (exp);
d735c391 2302 tree arg, type;
3754d046 2303 machine_mode mode;
d735c391 2304 rtx op0, op1, op2;
389dd41b 2305 location_t loc = EXPR_LOCATION (exp);
d735c391 2306
c2f47e15 2307 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2308 return NULL_RTX;
d735c391 2309
c2f47e15 2310 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2311 type = TREE_TYPE (arg);
2312 mode = TYPE_MODE (TREE_TYPE (arg));
2313
2314 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2315 to sincos or cexp. We can be sure one of them exists, because cexpi
2316 is only generated from sincos or cexp, or when either is known to be available. */
d6bf3b14 2317 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2318 {
2319 op1 = gen_reg_rtx (mode);
2320 op2 = gen_reg_rtx (mode);
2321
f97eea22 2322 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2323
2324 /* Compute into op1 and op2. */
2325 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2326 }
30f690e0 2327 else if (targetm.libc_has_function (function_sincos))
d735c391 2328 {
c2f47e15 2329 tree call, fn = NULL_TREE;
d735c391 2330 tree top1, top2;
2331 rtx op1a, op2a;
2332
2333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2334 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2335 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2336 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2337 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2338 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2339 else
2340 gcc_unreachable ();
48e1416a 2341
0ab48139 2342 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2343 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2344 op1a = copy_addr_to_reg (XEXP (op1, 0));
2345 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2346 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2347 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2348
d735c391 2349 /* Make sure not to fold the sincos call again. */
2350 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2351 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2352 call, 3, arg, top1, top2));
d735c391 2353 }
18b8d8ae 2354 else
2355 {
0ecbc158 2356 tree call, fn = NULL_TREE, narg;
18b8d8ae 2357 tree ctype = build_complex_type (type);
2358
0ecbc158 2359 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2360 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2361 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2362 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2363 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2364 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2365 else
2366 gcc_unreachable ();
fc0dfa6e 2367
2368 /* If we don't have a decl for cexp, create one. This is the
2369 friendliest fallback if the user calls __builtin_cexpi
2370 without full target C99 function support. */
2371 if (fn == NULL_TREE)
2372 {
2373 tree fntype;
2374 const char *name = NULL;
2375
2376 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2377 name = "cexpf";
2378 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2379 name = "cexp";
2380 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2381 name = "cexpl";
2382
2383 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2384 fn = build_fn_decl (name, fntype);
2385 }
2386
389dd41b 2387 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2388 build_real (type, dconst0), arg);
2389
2390 /* Make sure not to fold the cexp call again. */
2391 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2392 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2393 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2394 }
d735c391 2395
2396 /* Now build the proper return type. */
2397 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2398 make_tree (TREE_TYPE (arg), op2),
2399 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2400 target, VOIDmode, EXPAND_NORMAL);
d735c391 2401}
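/* In short, __builtin_cexpi (x) computes cos (x) + i*sin (x), and the
   three strategies above lower it roughly as follows:

     sincos optab:    op1 = sin (x), op2 = cos (x) in one insn
     sincos libcall:  sincos (x, &op1, &op2)
     cexp fallback:   cexp (0.0 + x*i)

   after which the complex result is rebuilt from the two parts.  */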
2402
a65c4d64 2403/* Conveniently construct a function call expression. FNDECL names the
2404 function to be called, N is the number of arguments, and the "..."
2405 parameters are the argument expressions. Unlike build_call_expr
2406 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2407
2408static tree
2409build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2410{
2411 va_list ap;
2412 tree fntype = TREE_TYPE (fndecl);
2413 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2414
2415 va_start (ap, n);
2416 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2417 va_end (ap);
2418 SET_EXPR_LOCATION (fn, loc);
2419 return fn;
2420}
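/* E.g. build_call_nofold_loc (loc, fndecl, 2, dest, src) yields the
   unfolded CALL_EXPR fndecl (dest, src); the mempcpy-to-memcpy
   transformation further below uses this to re-emit a call without
   folding it again.  */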
a65c4d64 2421
7d3afc77 2422/* Expand a call to one of the builtin rounding functions gcc defines
2423 as an extension (lfloor and lceil). As these are gcc extensions we
2424 do not need to worry about setting errno to EDOM.
ad52b9b7 2425 If expanding via the optab fails, lower the expression to (int)(floor(x)).
2426 EXP is the expression that is a call to the builtin function;
ff1b14e4 2427 if convenient, the result should be placed in TARGET. */
ad52b9b7 2428
2429static rtx
ff1b14e4 2430expand_builtin_int_roundingfn (tree exp, rtx target)
ad52b9b7 2431{
9c42dd28 2432 convert_optab builtin_optab;
1e0c0b35 2433 rtx op0, tmp;
2434 rtx_insn *insns;
ad52b9b7 2435 tree fndecl = get_callee_fndecl (exp);
ad52b9b7 2436 enum built_in_function fallback_fn;
2437 tree fallback_fndecl;
3754d046 2438 machine_mode mode;
4de0924f 2439 tree arg;
ad52b9b7 2440
c2f47e15 2441 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
ad52b9b7 2442 gcc_unreachable ();
2443
c2f47e15 2444 arg = CALL_EXPR_ARG (exp, 0);
ad52b9b7 2445
2446 switch (DECL_FUNCTION_CODE (fndecl))
2447 {
80ff6494 2448 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 2449 CASE_FLT_FN (BUILT_IN_LCEIL):
2450 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 2451 builtin_optab = lceil_optab;
2452 fallback_fn = BUILT_IN_CEIL;
2453 break;
2454
80ff6494 2455 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 2456 CASE_FLT_FN (BUILT_IN_LFLOOR):
2457 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 2458 builtin_optab = lfloor_optab;
2459 fallback_fn = BUILT_IN_FLOOR;
2460 break;
2461
2462 default:
2463 gcc_unreachable ();
2464 }
2465
2466 /* Make a suitable register to place result in. */
2467 mode = TYPE_MODE (TREE_TYPE (exp));
2468
9c42dd28 2469 target = gen_reg_rtx (mode);
ad52b9b7 2470
9c42dd28 2471 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 need to expand the argument again. This way, we will not perform
2473 side-effects more than once. */
abfea505 2474 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
ad52b9b7 2475
ff1b14e4 2476 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
ad52b9b7 2477
9c42dd28 2478 start_sequence ();
ad52b9b7 2479
9c42dd28 2480 /* Compute into TARGET. */
2481 if (expand_sfix_optab (target, op0, builtin_optab))
2482 {
2483 /* Output the entire sequence. */
2484 insns = get_insns ();
ad52b9b7 2485 end_sequence ();
9c42dd28 2486 emit_insn (insns);
2487 return target;
ad52b9b7 2488 }
2489
9c42dd28 2490 /* If we were unable to expand via the builtin, stop the sequence
2491 (without outputting the insns). */
2492 end_sequence ();
2493
ad52b9b7 2494 /* Fall back to floating point rounding optab. */
2495 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
fc0dfa6e 2496
2497 /* For non-C99 targets we may end up without a fallback fndecl here
2498 if the user called __builtin_lfloor directly. In this case emit
2499 a call to the floor/ceil variants nevertheless. This should give
2500 the best user experience on targets without full C99 support. */
2501 if (fallback_fndecl == NULL_TREE)
2502 {
2503 tree fntype;
2504 const char *name = NULL;
2505
2506 switch (DECL_FUNCTION_CODE (fndecl))
2507 {
80ff6494 2508 case BUILT_IN_ICEIL:
fc0dfa6e 2509 case BUILT_IN_LCEIL:
2510 case BUILT_IN_LLCEIL:
2511 name = "ceil";
2512 break;
80ff6494 2513 case BUILT_IN_ICEILF:
fc0dfa6e 2514 case BUILT_IN_LCEILF:
2515 case BUILT_IN_LLCEILF:
2516 name = "ceilf";
2517 break;
80ff6494 2518 case BUILT_IN_ICEILL:
fc0dfa6e 2519 case BUILT_IN_LCEILL:
2520 case BUILT_IN_LLCEILL:
2521 name = "ceill";
2522 break;
80ff6494 2523 case BUILT_IN_IFLOOR:
fc0dfa6e 2524 case BUILT_IN_LFLOOR:
2525 case BUILT_IN_LLFLOOR:
2526 name = "floor";
2527 break;
80ff6494 2528 case BUILT_IN_IFLOORF:
fc0dfa6e 2529 case BUILT_IN_LFLOORF:
2530 case BUILT_IN_LLFLOORF:
2531 name = "floorf";
2532 break;
80ff6494 2533 case BUILT_IN_IFLOORL:
fc0dfa6e 2534 case BUILT_IN_LFLOORL:
2535 case BUILT_IN_LLFLOORL:
2536 name = "floorl";
2537 break;
2538 default:
2539 gcc_unreachable ();
2540 }
2541
2542 fntype = build_function_type_list (TREE_TYPE (arg),
2543 TREE_TYPE (arg), NULL_TREE);
2544 fallback_fndecl = build_fn_decl (name, fntype);
2545 }
2546
0568e9c1 2547 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
ad52b9b7 2548
d4c690af 2549 tmp = expand_normal (exp);
933eb13a 2550 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
ad52b9b7 2551
2552 /* Truncate the result of the floating-point optab to integer
2553 via expand_fix (). */
2554 target = gen_reg_rtx (mode);
2555 expand_fix (target, tmp, 0);
2556
2557 return target;
2558}
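/* Thus on a target without an lfloor optab, a call such as
   lfloor (x) is lowered to the equivalent of (long) floor (x): a call
   to the floating-point fallback followed by an expand_fix
   truncation.  */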
2559
7d3afc77 2560/* Expand a call to one of the builtin math functions doing integer
2561 conversion (lrint, lround).
2562 Return 0 if a normal call should be emitted rather than expanding the
2563 function in-line. EXP is the expression that is a call to the builtin
ff1b14e4 2564 function; if convenient, the result should be placed in TARGET. */
7d3afc77 2565
2566static rtx
ff1b14e4 2567expand_builtin_int_roundingfn_2 (tree exp, rtx target)
7d3afc77 2568{
5f51ee59 2569 convert_optab builtin_optab;
1e0c0b35 2570 rtx op0;
2571 rtx_insn *insns;
7d3afc77 2572 tree fndecl = get_callee_fndecl (exp);
4de0924f 2573 tree arg;
3754d046 2574 machine_mode mode;
e951f9a4 2575 enum built_in_function fallback_fn = BUILT_IN_NONE;
7d3afc77 2576
c2f47e15 2577 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2578 gcc_unreachable ();
48e1416a 2579
c2f47e15 2580 arg = CALL_EXPR_ARG (exp, 0);
7d3afc77 2581
2582 switch (DECL_FUNCTION_CODE (fndecl))
2583 {
80ff6494 2584 CASE_FLT_FN (BUILT_IN_IRINT):
e951f9a4 2585 fallback_fn = BUILT_IN_LRINT;
3c77f69c 2586 gcc_fallthrough ();
7d3afc77 2587 CASE_FLT_FN (BUILT_IN_LRINT):
2588 CASE_FLT_FN (BUILT_IN_LLRINT):
e951f9a4 2589 builtin_optab = lrint_optab;
2590 break;
80ff6494 2591
2592 CASE_FLT_FN (BUILT_IN_IROUND):
e951f9a4 2593 fallback_fn = BUILT_IN_LROUND;
3c77f69c 2594 gcc_fallthrough ();
ef2f1a10 2595 CASE_FLT_FN (BUILT_IN_LROUND):
2596 CASE_FLT_FN (BUILT_IN_LLROUND):
e951f9a4 2597 builtin_optab = lround_optab;
2598 break;
80ff6494 2599
7d3afc77 2600 default:
2601 gcc_unreachable ();
2602 }
2603
e951f9a4 2604 /* There's no easy way to detect the case we need to set EDOM. */
2605 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2606 return NULL_RTX;
2607
7d3afc77 2608 /* Make a suitable register to place result in. */
2609 mode = TYPE_MODE (TREE_TYPE (exp));
2610
e951f9a4 2611 /* When errno does not need to be set, try expanding directly via the optab. */
2612 if (!flag_errno_math)
2613 {
de2e453e 2614 rtx result = gen_reg_rtx (mode);
7d3afc77 2615
e951f9a4 2616 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2617 need to expand the argument again. This way, we will not perform
2618 side-effects more than once. */
2619 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7d3afc77 2620
e951f9a4 2621 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
7d3afc77 2622
e951f9a4 2623 start_sequence ();
7d3afc77 2624
de2e453e 2625 if (expand_sfix_optab (result, op0, builtin_optab))
e951f9a4 2626 {
2627 /* Output the entire sequence. */
2628 insns = get_insns ();
2629 end_sequence ();
2630 emit_insn (insns);
de2e453e 2631 return result;
e951f9a4 2632 }
2633
2634 /* If we were unable to expand via the builtin, stop the sequence
2635 (without outputting the insns) and call the library function
2636 with the stabilized argument list. */
7d3afc77 2637 end_sequence ();
2638 }
2639
e951f9a4 2640 if (fallback_fn != BUILT_IN_NONE)
2641 {
2642 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2643 targets, (int) round (x) should never be transformed into
2644 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2645 a call to lround in the hope that the target provides at least some
2646 C99 functions. This should give the best user experience on
2647 targets without full C99 support. */
e3240774 2648 tree fallback_fndecl = mathfn_built_in_1
2649 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
e951f9a4 2650
2651 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2652 fallback_fndecl, 1, arg);
2653
2654 target = expand_call (exp, NULL_RTX, target == const0_rtx);
933eb13a 2655 target = maybe_emit_group_store (target, TREE_TYPE (exp));
e951f9a4 2656 return convert_to_mode (mode, target, 0);
2657 }
5f51ee59 2658
de2e453e 2659 return expand_call (exp, target, target == const0_rtx);
7d3afc77 2660}
2661
c2f47e15 2662/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2663 a normal call should be emitted rather than expanding the function
2664 in-line. EXP is the expression that is a call to the builtin
2665 function; if convenient, the result should be placed in TARGET. */
2666
2667static rtx
f97eea22 2668expand_builtin_powi (tree exp, rtx target)
757c219d 2669{
757c219d 2670 tree arg0, arg1;
2671 rtx op0, op1;
3754d046 2672 machine_mode mode;
2673 machine_mode mode2;
757c219d 2674
c2f47e15 2675 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2676 return NULL_RTX;
757c219d 2677
c2f47e15 2678 arg0 = CALL_EXPR_ARG (exp, 0);
2679 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2680 mode = TYPE_MODE (TREE_TYPE (exp));
2681
757c219d 2682 /* Emit a libcall to libgcc. */
2683
c2f47e15 2684 /* Mode of the 2nd argument must match that of an int. */
d0405f40 2685 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2686
757c219d 2687 if (target == NULL_RTX)
2688 target = gen_reg_rtx (mode);
2689
f97eea22 2690 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2691 if (GET_MODE (op0) != mode)
2692 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2693 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2694 if (GET_MODE (op1) != mode2)
2695 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2696
f36b9f69 2697 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2dd6f9ed 2698 target, LCT_CONST, mode, 2,
d0405f40 2699 op0, mode, op1, mode2);
757c219d 2700
2701 return target;
2702}
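/* E.g. a DFmode __builtin_powi (x, n) becomes a call to libgcc's
   __powidf2, the libfunc returned by optab_libfunc (powi_optab,
   DFmode).  */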
2703
48e1416a 2704/* Expand expression EXP which is a call to the strlen builtin. Return
c2f47e15 2705 NULL_RTX if we failed; the caller should emit a normal call, otherwise
aed0bd19 2706 try to get the result in TARGET, if convenient. */
f7c44134 2707
53800dbe 2708static rtx
c2f47e15 2709expand_builtin_strlen (tree exp, rtx target,
3754d046 2710 machine_mode target_mode)
53800dbe 2711{
c2f47e15 2712 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2713 return NULL_RTX;
53800dbe 2714 else
2715 {
8786db1e 2716 struct expand_operand ops[4];
911c0150 2717 rtx pat;
c2f47e15 2718 tree len;
2719 tree src = CALL_EXPR_ARG (exp, 0);
1e0c0b35 2720 rtx src_reg;
2721 rtx_insn *before_strlen;
3754d046 2722 machine_mode insn_mode = target_mode;
ef2c4a29 2723 enum insn_code icode = CODE_FOR_nothing;
153c3b50 2724 unsigned int align;
6248e345 2725
2726 /* If the length can be computed at compile-time, return it. */
681fab1e 2727 len = c_strlen (src, 0);
6248e345 2728 if (len)
80cd7a5e 2729 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
6248e345 2730
681fab1e 2731 /* If the length can be computed at compile-time and is constant
2732 integer, but there are side-effects in src, evaluate
2733 src for side-effects, then return len.
2734 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2735 can be optimized into: i++; x = 3; */
2736 len = c_strlen (src, 1);
2737 if (len && TREE_CODE (len) == INTEGER_CST)
2738 {
2739 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2740 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2741 }
2742
957d0361 2743 align = get_pointer_alignment (src) / BITS_PER_UNIT;
53800dbe 2744
53800dbe 2745 /* If SRC is not a pointer type, don't do this operation inline. */
2746 if (align == 0)
c2f47e15 2747 return NULL_RTX;
53800dbe 2748
911c0150 2749 /* Bail out if we can't compute strlen in the right mode. */
53800dbe 2750 while (insn_mode != VOIDmode)
2751 {
d6bf3b14 2752 icode = optab_handler (strlen_optab, insn_mode);
53800dbe 2753 if (icode != CODE_FOR_nothing)
c28ae87f 2754 break;
53800dbe 2755
2756 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2757 }
2758 if (insn_mode == VOIDmode)
c2f47e15 2759 return NULL_RTX;
53800dbe 2760
911c0150 2761 /* Make a place to hold the source address. We will not expand
2762 the actual source until we are sure that the expansion will
2763 not fail -- there are trees that cannot be expanded twice. */
2764 src_reg = gen_reg_rtx (Pmode);
53800dbe 2765
911c0150 2766 /* Mark the beginning of the strlen sequence so we can emit the
2767 source operand later. */
f0ce3b1f 2768 before_strlen = get_last_insn ();
53800dbe 2769
8786db1e 2770 create_output_operand (&ops[0], target, insn_mode);
2771 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2772 create_integer_operand (&ops[2], 0);
2773 create_integer_operand (&ops[3], align);
2774 if (!maybe_expand_insn (icode, 4, ops))
c2f47e15 2775 return NULL_RTX;
911c0150 2776
2777 /* Now that we are assured of success, expand the source. */
2778 start_sequence ();
499eee58 2779 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
911c0150 2780 if (pat != src_reg)
499eee58 2781 {
2782#ifdef POINTERS_EXTEND_UNSIGNED
2783 if (GET_MODE (pat) != Pmode)
2784 pat = convert_to_mode (Pmode, pat,
2785 POINTERS_EXTEND_UNSIGNED);
2786#endif
2787 emit_move_insn (src_reg, pat);
2788 }
31d3e01c 2789 pat = get_insns ();
911c0150 2790 end_sequence ();
bceb0d1f 2791
2792 if (before_strlen)
2793 emit_insn_after (pat, before_strlen);
2794 else
2795 emit_insn_before (pat, get_insns ());
53800dbe 2796
2797 /* Return the value in the proper mode for this function. */
8786db1e 2798 if (GET_MODE (ops[0].value) == target_mode)
2799 target = ops[0].value;
53800dbe 2800 else if (target != 0)
8786db1e 2801 convert_move (target, ops[0].value, 0);
53800dbe 2802 else
8786db1e 2803 target = convert_to_mode (target_mode, ops[0].value, 0);
911c0150 2804
2805 return target;
53800dbe 2806 }
2807}
2808
6840589f 2809/* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
2810 bytes from constant string DATA + OFFSET and return it as target
2811 constant. */
2812
2813static rtx
aecda0d6 2814builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 2815 machine_mode mode)
6840589f 2816{
2817 const char *str = (const char *) data;
2818
64db345d 2819 gcc_assert (offset >= 0
2820 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2821 <= strlen (str) + 1));
6840589f 2822
2823 return c_readstr (str + offset, mode);
2824}
2825
36d63243 2826/* LEN specifies the length of the block for the memcpy/memset operation.
9db0f34d 2827 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2828 In some cases we can make a very likely guess at the maximum size,
2829 which we then record in PROBABLE_MAX_SIZE. */
36d63243 2830
2831static void
2832determine_block_size (tree len, rtx len_rtx,
2833 unsigned HOST_WIDE_INT *min_size,
9db0f34d 2834 unsigned HOST_WIDE_INT *max_size,
2835 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 2836{
2837 if (CONST_INT_P (len_rtx))
2838 {
4e140a5c 2839 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 2840 return;
2841 }
2842 else
2843 {
9c1be15e 2844 wide_int min, max;
9db0f34d 2845 enum value_range_type range_type = VR_UNDEFINED;
2846
2847 /* Determine bounds from the type. */
2848 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2849 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2850 else
2851 *min_size = 0;
2852 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 2853 *probable_max_size = *max_size
2854 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 2855 else
2856 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2857
2858 if (TREE_CODE (len) == SSA_NAME)
2859 range_type = get_range_info (len, &min, &max);
2860 if (range_type == VR_RANGE)
36d63243 2861 {
fe5ad926 2862 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 2863 *min_size = min.to_uhwi ();
fe5ad926 2864 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 2865 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 2866 }
9db0f34d 2867 else if (range_type == VR_ANTI_RANGE)
36d63243 2868 {
4a474a5a 2869 /* An anti-range 0...N lets us determine that the minimal size is N+1. */
fe5ad926 2870 if (min == 0)
9db0f34d 2871 {
9c1be15e 2872 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2873 *min_size = max.to_uhwi () + 1;
9db0f34d 2874 }
2875 /* Code like
2876
2877 int n;
2878 if (n < 100)
4a474a5a 2879 memcpy (a, b, n)
9db0f34d 2880
2881 produces an anti-range allowing negative values of N. We can
2882 still use that information and guess that N is not negative.
2883 */
fe5ad926 2884 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2885 *probable_max_size = min.to_uhwi () - 1;
36d63243 2886 }
2887 }
2888 gcc_checking_assert (*max_size <=
2889 (unsigned HOST_WIDE_INT)
2890 GET_MODE_MASK (GET_MODE (len_rtx)));
2891}
2892
f21337ef 2893/* Helper function to do the actual work for expand_builtin_memcpy. */
2894
2895static rtx
2896expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2897{
2898 const char *src_str;
2899 unsigned int src_align = get_pointer_alignment (src);
2900 unsigned int dest_align = get_pointer_alignment (dest);
2901 rtx dest_mem, src_mem, dest_addr, len_rtx;
2902 HOST_WIDE_INT expected_size = -1;
2903 unsigned int expected_align = 0;
2904 unsigned HOST_WIDE_INT min_size;
2905 unsigned HOST_WIDE_INT max_size;
2906 unsigned HOST_WIDE_INT probable_max_size;
2907
2908 /* If DEST is not a pointer type, call the normal function. */
2909 if (dest_align == 0)
2910 return NULL_RTX;
2911
2912 /* Likewise, if SRC is not a pointer type, don't do this
2913 operation in-line. */
2914 if (src_align == 0)
2915 return NULL_RTX;
2916
2917 if (currently_expanding_gimple_stmt)
2918 stringop_block_profile (currently_expanding_gimple_stmt,
2919 &expected_align, &expected_size);
2920
2921 if (expected_align < dest_align)
2922 expected_align = dest_align;
2923 dest_mem = get_memory_rtx (dest, len);
2924 set_mem_align (dest_mem, dest_align);
2925 len_rtx = expand_normal (len);
2926 determine_block_size (len, len_rtx, &min_size, &max_size,
2927 &probable_max_size);
2928 src_str = c_getstr (src);
2929
2930 /* If SRC is a string constant and block move would be done
2931 by pieces, we can avoid loading the string from memory
2932 and only store the computed constants. */
2933 if (src_str
2934 && CONST_INT_P (len_rtx)
2935 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2936 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2937 CONST_CAST (char *, src_str),
2938 dest_align, false))
2939 {
2940 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2941 builtin_memcpy_read_str,
2942 CONST_CAST (char *, src_str),
2943 dest_align, false, 0);
2944 dest_mem = force_operand (XEXP (dest_mem, 0), target);
2945 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2946 return dest_mem;
2947 }
2948
2949 src_mem = get_memory_rtx (src, len);
2950 set_mem_align (src_mem, src_align);
2951
2952 /* Copy word part most expediently. */
2953 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2954 CALL_EXPR_TAILCALL (exp)
2955 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2956 expected_align, expected_size,
2957 min_size, max_size, probable_max_size);
2958
2959 if (dest_addr == 0)
2960 {
2961 dest_addr = force_operand (XEXP (dest_mem, 0), target);
2962 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2963 }
2964
2965 return dest_addr;
2966}
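/* E.g. memcpy (buf, "hi!", 4) with a suitably aligned BUF is handled
   by the store_by_pieces path above: the four bytes are emitted as
   immediate stores, with no load from the string constant and no
   library call.  */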
2967
c2f47e15 2968/* Expand a call EXP to the memcpy builtin.
2969 Return NULL_RTX if we failed; the caller should emit a normal call,
3b824fa6 2970 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 2971 mode MODE if that's convenient). */
c2f47e15 2972
53800dbe 2973static rtx
a65c4d64 2974expand_builtin_memcpy (tree exp, rtx target)
53800dbe 2975{
c2f47e15 2976 if (!validate_arglist (exp,
2977 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2978 return NULL_RTX;
53800dbe 2979 else
2980 {
c2f47e15 2981 tree dest = CALL_EXPR_ARG (exp, 0);
2982 tree src = CALL_EXPR_ARG (exp, 1);
2983 tree len = CALL_EXPR_ARG (exp, 2);
f21337ef 2984 return expand_builtin_memcpy_args (dest, src, len, target, exp);
2985 }
2986}
6840589f 2987
f21337ef 2988/* Expand an instrumented call EXP to the memcpy builtin.
2989 Return NULL_RTX if we failed; the caller should emit a normal call,
2990 otherwise try to get the result in TARGET, if convenient (and in
2991 mode MODE if that's convenient). */
53800dbe 2992
f21337ef 2993static rtx
2994expand_builtin_memcpy_with_bounds (tree exp, rtx target)
2995{
2996 if (!validate_arglist (exp,
2997 POINTER_TYPE, POINTER_BOUNDS_TYPE,
2998 POINTER_TYPE, POINTER_BOUNDS_TYPE,
2999 INTEGER_TYPE, VOID_TYPE))
3000 return NULL_RTX;
3001 else
3002 {
3003 tree dest = CALL_EXPR_ARG (exp, 0);
3004 tree src = CALL_EXPR_ARG (exp, 2);
3005 tree len = CALL_EXPR_ARG (exp, 4);
3006 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
53800dbe 3007
f21337ef 3008 /* Return src bounds with the result. */
3009 if (res)
e5716f7e 3010 {
17d388d8 3011 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3012 expand_normal (CALL_EXPR_ARG (exp, 1)));
3013 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3014 }
f21337ef 3015 return res;
53800dbe 3016 }
3017}
3018
c2f47e15 3019/* Expand a call EXP to the mempcpy builtin.
3020 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3021 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3022 mode MODE if that's convenient). If ENDP is 0 return the
3023 destination pointer, if ENDP is 1 return the end pointer ala
3024 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3025 stpcpy. */
647661c6 3026
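/* Concretely, for char buf[8] the ENDP convention corresponds to:

     memcpy (buf, "abc", 4) returns buf (ENDP == 0)
     mempcpy (buf, "abc", 4) returns buf + 4 (ENDP == 1)
     stpcpy (buf, "abc") returns buf + 3, the address of the
       terminating NUL (ENDP == 2). */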
3027static rtx
3754d046 3028expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3029{
c2f47e15 3030 if (!validate_arglist (exp,
3031 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3032 return NULL_RTX;
3033 else
3034 {
3035 tree dest = CALL_EXPR_ARG (exp, 0);
3036 tree src = CALL_EXPR_ARG (exp, 1);
3037 tree len = CALL_EXPR_ARG (exp, 2);
3038 return expand_builtin_mempcpy_args (dest, src, len,
f21337ef 3039 target, mode, /*endp=*/1,
3040 exp);
3041 }
3042}
3043
3044/* Expand an instrumented call EXP to the mempcpy builtin.
 3045 Return NULL_RTX if we failed; the caller should emit a normal call,
3046 otherwise try to get the result in TARGET, if convenient (and in
3047 mode MODE if that's convenient). */
3048
3049static rtx
3050expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3051{
3052 if (!validate_arglist (exp,
3053 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3054 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3055 INTEGER_TYPE, VOID_TYPE))
3056 return NULL_RTX;
3057 else
3058 {
3059 tree dest = CALL_EXPR_ARG (exp, 0);
3060 tree src = CALL_EXPR_ARG (exp, 2);
3061 tree len = CALL_EXPR_ARG (exp, 4);
3062 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3063 mode, 1, exp);
3064
 3065 /* Return DEST bounds with the result. */
3066 if (res)
3067 {
17d388d8 3068 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3069 expand_normal (CALL_EXPR_ARG (exp, 1)));
3070 res = chkp_join_splitted_slot (res, bnd);
3071 }
3072 return res;
c2f47e15 3073 }
3074}
3075
3076/* Helper function to do the actual work for expand_builtin_mempcpy. The
3077 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3078 so that this can also be called without constructing an actual CALL_EXPR.
a65c4d64 3079 The other arguments and return value are the same as for
3080 expand_builtin_mempcpy. */
c2f47e15 3081
3082static rtx
a65c4d64 3083expand_builtin_mempcpy_args (tree dest, tree src, tree len,
f21337ef 3084 rtx target, machine_mode mode, int endp,
3085 tree orig_exp)
c2f47e15 3086{
f21337ef 3087 tree fndecl = get_callee_fndecl (orig_exp);
3088
c2f47e15 3089 /* If return value is ignored, transform mempcpy into memcpy. */
f21337ef 3090 if (target == const0_rtx
3091 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3092 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3093 {
3094 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3095 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3096 dest, src, len);
3097 return expand_expr (result, target, mode, EXPAND_NORMAL);
3098 }
3099 else if (target == const0_rtx
3100 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
9fe0e1b8 3101 {
b9a16870 3102 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
0568e9c1 3103 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3104 dest, src, len);
c8b17b2e 3105 return expand_expr (result, target, mode, EXPAND_NORMAL);
9fe0e1b8 3106 }
647661c6 3107 else
3108 {
9fe0e1b8 3109 const char *src_str;
957d0361 3110 unsigned int src_align = get_pointer_alignment (src);
3111 unsigned int dest_align = get_pointer_alignment (dest);
9fe0e1b8 3112 rtx dest_mem, src_mem, len_rtx;
a0c938f0 3113
7da1412b 3114 /* If either SRC or DEST is not a pointer type, don't do this
a0c938f0 3115 operation in-line. */
7da1412b 3116 if (dest_align == 0 || src_align == 0)
c2f47e15 3117 return NULL_RTX;
9fe0e1b8 3118
6217c238 3119 /* If LEN is not constant, call the normal function. */
e913b5cd 3120 if (! tree_fits_uhwi_p (len))
c2f47e15 3121 return NULL_RTX;
0862b7e9 3122
8ec3c5c2 3123 len_rtx = expand_normal (len);
9fe0e1b8 3124 src_str = c_getstr (src);
647661c6 3125
9fe0e1b8 3126 /* If SRC is a string constant and block move would be done
3127 by pieces, we can avoid loading the string from memory
 3128 and need only store the computed constants. */
3129 if (src_str
971ba038 3130 && CONST_INT_P (len_rtx)
9fe0e1b8 3131 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3132 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
364c0c59 3133 CONST_CAST (char *, src_str),
3134 dest_align, false))
9fe0e1b8 3135 {
d8ae1baa 3136 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3137 set_mem_align (dest_mem, dest_align);
3138 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3139 builtin_memcpy_read_str,
364c0c59 3140 CONST_CAST (char *, src_str),
3141 dest_align, false, endp);
9fe0e1b8 3142 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3143 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3144 return dest_mem;
647661c6 3145 }
3146
971ba038 3147 if (CONST_INT_P (len_rtx)
9fe0e1b8 3148 && can_move_by_pieces (INTVAL (len_rtx),
3149 MIN (dest_align, src_align)))
3150 {
d8ae1baa 3151 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3152 set_mem_align (dest_mem, dest_align);
d8ae1baa 3153 src_mem = get_memory_rtx (src, len);
9fe0e1b8 3154 set_mem_align (src_mem, src_align);
3155 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3156 MIN (dest_align, src_align), endp);
3157 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3158 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3159 return dest_mem;
3160 }
3161
c2f47e15 3162 return NULL_RTX;
647661c6 3163 }
3164}
3165
c2f47e15 3166/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3167 we failed, the caller should emit a normal call, otherwise try to
3168 get the result in TARGET, if convenient. If ENDP is 0 return the
3169 destination pointer, if ENDP is 1 return the end pointer ala
3170 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3171 stpcpy. */
3172
3173static rtx
3174expand_movstr (tree dest, tree src, rtx target, int endp)
3175{
8786db1e 3176 struct expand_operand ops[3];
727c62dd 3177 rtx dest_mem;
3178 rtx src_mem;
727c62dd 3179
8d74dc42 3180 if (!targetm.have_movstr ())
c2f47e15 3181 return NULL_RTX;
727c62dd 3182
d8ae1baa 3183 dest_mem = get_memory_rtx (dest, NULL);
3184 src_mem = get_memory_rtx (src, NULL);
727c62dd 3185 if (!endp)
3186 {
3187 target = force_reg (Pmode, XEXP (dest_mem, 0));
3188 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3189 }
3190
8786db1e 3191 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3192 create_fixed_operand (&ops[1], dest_mem);
3193 create_fixed_operand (&ops[2], src_mem);
8d74dc42 3194 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
1e1d5623 3195 return NULL_RTX;
727c62dd 3196
8786db1e 3197 if (endp && target != const0_rtx)
c5aba89c 3198 {
8786db1e 3199 target = ops[0].value;
3200 /* movstr is supposed to set end to the address of the NUL
3201 terminator. If the caller requested a mempcpy-like return value,
3202 adjust it. */
3203 if (endp == 1)
3204 {
29c05e22 3205 rtx tem = plus_constant (GET_MODE (target),
3206 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3207 emit_move_insn (target, force_operand (tem, NULL_RTX));
3208 }
c5aba89c 3209 }
727c62dd 3210 return target;
3211}
3212
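/* E.g. for SRC == "abc", movstr leaves the output pointing at
   DST + 3, the NUL terminator; a mempcpy-style caller (ENDP == 1)
   expects DST + 4, one past the NUL, hence the "+ 1" adjustment
   above. */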
48e1416a 3213/* Expand expression EXP, which is a call to the strcpy builtin. Return
 3214 NULL_RTX if we failed; the caller should emit a normal call, otherwise
c2f47e15 3215 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3216 convenient). */
902de8ed 3217
53800dbe 3218static rtx
a65c4d64 3219expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3220{
c2f47e15 3221 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3222 {
3223 tree dest = CALL_EXPR_ARG (exp, 0);
3224 tree src = CALL_EXPR_ARG (exp, 1);
a65c4d64 3225 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3226 }
3227 return NULL_RTX;
3228}
3229
3230/* Helper function to do the actual work for expand_builtin_strcpy. The
3231 arguments to the builtin_strcpy call DEST and SRC are broken out
3232 so that this can also be called without constructing an actual CALL_EXPR.
3233 The other arguments and return value are the same as for
3234 expand_builtin_strcpy. */
3235
3236static rtx
a65c4d64 3237expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3238{
c2f47e15 3239 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3240}
3241
c2f47e15 3242/* Expand a call EXP to the stpcpy builtin.
 3243 Return NULL_RTX if we failed; the caller should emit a normal call,
3b824fa6 3244 otherwise try to get the result in TARGET, if convenient (and in
3245 mode MODE if that's convenient). */
3246
3247static rtx
3754d046 3248expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3249{
c2f47e15 3250 tree dst, src;
389dd41b 3251 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3252
3253 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3254 return NULL_RTX;
3255
3256 dst = CALL_EXPR_ARG (exp, 0);
3257 src = CALL_EXPR_ARG (exp, 1);
3258
727c62dd 3259 /* If return value is ignored, transform stpcpy into strcpy. */
b9a16870 3260 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3261 {
b9a16870 3262 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3263 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3264 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3265 }
3b824fa6 3266 else
3267 {
c2f47e15 3268 tree len, lenp1;
727c62dd 3269 rtx ret;
647661c6 3270
9fe0e1b8 3271 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3272 compile-time, not an expression containing a string. This is
3273 because the latter will potentially produce pessimized code
3274 when used to produce the return value. */
681fab1e 3275 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3276 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3277
389dd41b 3278 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3279 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
f21337ef 3280 target, mode, /*endp=*/2,
3281 exp);
727c62dd 3282
3283 if (ret)
3284 return ret;
3285
3286 if (TREE_CODE (len) == INTEGER_CST)
3287 {
8ec3c5c2 3288 rtx len_rtx = expand_normal (len);
727c62dd 3289
971ba038 3290 if (CONST_INT_P (len_rtx))
727c62dd 3291 {
a65c4d64 3292 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3293
3294 if (ret)
3295 {
3296 if (! target)
7ac87324 3297 {
3298 if (mode != VOIDmode)
3299 target = gen_reg_rtx (mode);
3300 else
3301 target = gen_reg_rtx (GET_MODE (ret));
3302 }
727c62dd 3303 if (GET_MODE (target) != GET_MODE (ret))
3304 ret = gen_lowpart (GET_MODE (target), ret);
3305
29c05e22 3306 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3307 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3308 gcc_assert (ret);
727c62dd 3309
3310 return target;
3311 }
3312 }
3313 }
3314
c2f47e15 3315 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3316 }
3317}
3318
6840589f 3319/* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3320 bytes from constant string DATA + OFFSET and return it as target
3321 constant. */
3322
09879952 3323rtx
aecda0d6 3324builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3325 machine_mode mode)
6840589f 3326{
3327 const char *str = (const char *) data;
3328
3329 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3330 return const0_rtx;
3331
3332 return c_readstr (str + offset, mode);
3333}
3334
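/* E.g. with DATA == "ab" and a 4-byte MODE, OFFSET 0 yields the
   word 'a','b','\0','\0' (c_readstr supplies zeros past the NUL),
   and any OFFSET beyond the NUL yields an all-zero word; this is
   exactly the trailing zero padding that strncpy requires. */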
48e1416a 3335/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 3336 NULL_RTX if we failed; the caller should emit a normal call. */
ed09096d 3337
3338static rtx
a65c4d64 3339expand_builtin_strncpy (tree exp, rtx target)
ed09096d 3340{
389dd41b 3341 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3342
3343 if (validate_arglist (exp,
3344 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 3345 {
c2f47e15 3346 tree dest = CALL_EXPR_ARG (exp, 0);
3347 tree src = CALL_EXPR_ARG (exp, 1);
3348 tree len = CALL_EXPR_ARG (exp, 2);
3349 tree slen = c_strlen (src, 1);
6840589f 3350
8ff6a5cd 3351 /* We must be passed a constant len and src parameter. */
e913b5cd 3352 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 3353 return NULL_RTX;
ed09096d 3354
389dd41b 3355 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 3356
3357 /* We're required to pad with trailing zeros if the requested
a0c938f0 3358 len is greater than strlen(s2)+1. In that case try to
6840589f 3359 use store_by_pieces, if it fails, punt. */
ed09096d 3360 if (tree_int_cst_lt (slen, len))
6840589f 3361 {
957d0361 3362 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 3363 const char *p = c_getstr (src);
6840589f 3364 rtx dest_mem;
3365
e913b5cd 3366 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3367 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 3368 builtin_strncpy_read_str,
364c0c59 3369 CONST_CAST (char *, p),
3370 dest_align, false))
c2f47e15 3371 return NULL_RTX;
6840589f 3372
d8ae1baa 3373 dest_mem = get_memory_rtx (dest, len);
e913b5cd 3374 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 3375 builtin_strncpy_read_str,
364c0c59 3376 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 3377 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 3378 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 3379 return dest_mem;
6840589f 3380 }
ed09096d 3381 }
c2f47e15 3382 return NULL_RTX;
ed09096d 3383}
3384
ecc318ff 3385/* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3386 bytes from constant string DATA + OFFSET and return it as target
3387 constant. */
3388
f656b751 3389rtx
aecda0d6 3390builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3391 machine_mode mode)
ecc318ff 3392{
3393 const char *c = (const char *) data;
364c0c59 3394 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3395
3396 memset (p, *c, GET_MODE_SIZE (mode));
3397
3398 return c_readstr (p, mode);
3399}
3400
a7ec6974 3401/* Callback routine for store_by_pieces. Return the RTL of a register
3402 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3403 char value given in the RTL register data. For example, if mode is
3404 4 bytes wide, return the RTL for 0x01010101*data. */
3405
3406static rtx
aecda0d6 3407builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3408 machine_mode mode)
a7ec6974 3409{
3410 rtx target, coeff;
3411 size_t size;
3412 char *p;
3413
3414 size = GET_MODE_SIZE (mode);
f0ce3b1f 3415 if (size == 1)
3416 return (rtx) data;
a7ec6974 3417
364c0c59 3418 p = XALLOCAVEC (char, size);
a7ec6974 3419 memset (p, 1, size);
3420 coeff = c_readstr (p, mode);
3421
f0ce3b1f 3422 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3423 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3424 return force_reg (mode, target);
3425}
3426
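/* E.g. for a 4-byte MODE the "\1\1\1\1" buffer above reads back as
   COEFF == 0x01010101, so the returned register holds
   0x01010101 * DATA; for DATA == 0xab that is 0xabababab, the byte
   replicated across the word. */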
48e1416a 3427/* Expand expression EXP, which is a call to the memset builtin. Return
 3428 NULL_RTX if we failed; the caller should emit a normal call, otherwise
c2f47e15 3429 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3430 convenient). */
902de8ed 3431
53800dbe 3432static rtx
3754d046 3433expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3434{
c2f47e15 3435 if (!validate_arglist (exp,
3436 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3437 return NULL_RTX;
53800dbe 3438 else
3439 {
c2f47e15 3440 tree dest = CALL_EXPR_ARG (exp, 0);
3441 tree val = CALL_EXPR_ARG (exp, 1);
3442 tree len = CALL_EXPR_ARG (exp, 2);
3443 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3444 }
3445}
53800dbe 3446
f21337ef 3447/* Expand expression EXP, which is an instrumented call to the memset builtin.
 3448 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3449 try to get the result in TARGET, if convenient (and in mode MODE if that's
3450 convenient). */
3451
3452static rtx
3453expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3454{
3455 if (!validate_arglist (exp,
3456 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3457 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3458 return NULL_RTX;
3459 else
3460 {
3461 tree dest = CALL_EXPR_ARG (exp, 0);
3462 tree val = CALL_EXPR_ARG (exp, 2);
3463 tree len = CALL_EXPR_ARG (exp, 3);
3464 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3465
 3466 /* Return DEST bounds with the result. */
3467 if (res)
3468 {
17d388d8 3469 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3470 expand_normal (CALL_EXPR_ARG (exp, 1)));
3471 res = chkp_join_splitted_slot (res, bnd);
3472 }
3473 return res;
3474 }
3475}
3476
c2f47e15 3477/* Helper function to do the actual work for expand_builtin_memset. The
3478 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3479 so that this can also be called without constructing an actual CALL_EXPR.
3480 The other arguments and return value are the same as for
3481 expand_builtin_memset. */
6b961939 3482
c2f47e15 3483static rtx
3484expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 3485 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 3486{
3487 tree fndecl, fn;
3488 enum built_in_function fcode;
3754d046 3489 machine_mode val_mode;
c2f47e15 3490 char c;
3491 unsigned int dest_align;
3492 rtx dest_mem, dest_addr, len_rtx;
3493 HOST_WIDE_INT expected_size = -1;
3494 unsigned int expected_align = 0;
36d63243 3495 unsigned HOST_WIDE_INT min_size;
3496 unsigned HOST_WIDE_INT max_size;
9db0f34d 3497 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 3498
957d0361 3499 dest_align = get_pointer_alignment (dest);
162719b3 3500
c2f47e15 3501 /* If DEST is not a pointer type, don't do this operation in-line. */
3502 if (dest_align == 0)
3503 return NULL_RTX;
6f428e8b 3504
8cee8dc0 3505 if (currently_expanding_gimple_stmt)
3506 stringop_block_profile (currently_expanding_gimple_stmt,
3507 &expected_align, &expected_size);
75a70cf9 3508
c2f47e15 3509 if (expected_align < dest_align)
3510 expected_align = dest_align;
6b961939 3511
c2f47e15 3512 /* If the LEN parameter is zero, return DEST. */
3513 if (integer_zerop (len))
3514 {
3515 /* Evaluate and ignore VAL in case it has side-effects. */
3516 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3517 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3518 }
7a3e5564 3519
c2f47e15 3520 /* Stabilize the arguments in case we fail. */
3521 dest = builtin_save_expr (dest);
3522 val = builtin_save_expr (val);
3523 len = builtin_save_expr (len);
a7ec6974 3524
c2f47e15 3525 len_rtx = expand_normal (len);
9db0f34d 3526 determine_block_size (len, len_rtx, &min_size, &max_size,
3527 &probable_max_size);
c2f47e15 3528 dest_mem = get_memory_rtx (dest, len);
03a5dda9 3529 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 3530
c2f47e15 3531 if (TREE_CODE (val) != INTEGER_CST)
3532 {
3533 rtx val_rtx;
a7ec6974 3534
c2f47e15 3535 val_rtx = expand_normal (val);
03a5dda9 3536 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 3537
c2f47e15 3538 /* Assume that we can memset by pieces if we can store
 3539 the coefficients by pieces (in the required modes).
 3540 We can't pass builtin_memset_gen_str as that emits RTL. */
3541 c = 1;
e913b5cd 3542 if (tree_fits_uhwi_p (len)
3543 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3544 builtin_memset_read_str, &c, dest_align,
3545 true))
c2f47e15 3546 {
03a5dda9 3547 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 3548 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3549 builtin_memset_gen_str, val_rtx, dest_align,
3550 true, 0);
c2f47e15 3551 }
3552 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3553 dest_align, expected_align,
9db0f34d 3554 expected_size, min_size, max_size,
3555 probable_max_size))
6b961939 3556 goto do_libcall;
48e1416a 3557
c2f47e15 3558 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3559 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3560 return dest_mem;
3561 }
53800dbe 3562
c2f47e15 3563 if (target_char_cast (val, &c))
3564 goto do_libcall;
ecc318ff 3565
c2f47e15 3566 if (c)
3567 {
e913b5cd 3568 if (tree_fits_uhwi_p (len)
3569 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3570 builtin_memset_read_str, &c, dest_align,
3571 true))
e913b5cd 3572 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3573 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 3574 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3575 gen_int_mode (c, val_mode),
c2f47e15 3576 dest_align, expected_align,
9db0f34d 3577 expected_size, min_size, max_size,
3578 probable_max_size))
c2f47e15 3579 goto do_libcall;
48e1416a 3580
c2f47e15 3581 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3582 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3583 return dest_mem;
3584 }
ecc318ff 3585
c2f47e15 3586 set_mem_align (dest_mem, dest_align);
3587 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3588 CALL_EXPR_TAILCALL (orig_exp)
3589 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 3590 expected_align, expected_size,
9db0f34d 3591 min_size, max_size,
3592 probable_max_size);
53800dbe 3593
c2f47e15 3594 if (dest_addr == 0)
3595 {
3596 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3597 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3598 }
53800dbe 3599
c2f47e15 3600 return dest_addr;
6b961939 3601
c2f47e15 3602 do_libcall:
3603 fndecl = get_callee_fndecl (orig_exp);
3604 fcode = DECL_FUNCTION_CODE (fndecl);
f21337ef 3605 if (fcode == BUILT_IN_MEMSET
3606 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
0568e9c1 3607 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3608 dest, val, len);
c2f47e15 3609 else if (fcode == BUILT_IN_BZERO)
0568e9c1 3610 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3611 dest, len);
c2f47e15 3612 else
3613 gcc_unreachable ();
a65c4d64 3614 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3615 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 3616 return expand_call (fn, target, target == const0_rtx);
53800dbe 3617}
3618
48e1416a 3619/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 3620 NULL_RTX if we failed; the caller should emit a normal call. */
27d0c333 3621
ffc83088 3622static rtx
0b25db21 3623expand_builtin_bzero (tree exp)
ffc83088 3624{
c2f47e15 3625 tree dest, size;
389dd41b 3626 location_t loc = EXPR_LOCATION (exp);
ffc83088 3627
c2f47e15 3628 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 3629 return NULL_RTX;
ffc83088 3630
c2f47e15 3631 dest = CALL_EXPR_ARG (exp, 0);
3632 size = CALL_EXPR_ARG (exp, 1);
bf8e3599 3633
7369e7ba 3634 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 3635 memset(ptr x, int 0, size_t y). This is done this way
 3636 so that if it isn't expanded inline, we fall back to
3637 calling bzero instead of memset. */
bf8e3599 3638
c2f47e15 3639 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 3640 fold_convert_loc (loc,
3641 size_type_node, size),
c2f47e15 3642 const0_rtx, VOIDmode, exp);
ffc83088 3643}
3644
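/* So bzero (p, n) is expanded exactly as memset (p, 0, (size_t) n)
   would be, except that a failed inline expansion still ends up
   calling bzero. */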
d6f01a40 3645/* Try to expand cmpstr operation ICODE with the given operands.
3646 Return the result rtx on success, otherwise return null. */
3647
3648static rtx
3649expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3650 HOST_WIDE_INT align)
3651{
3652 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3653
3654 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3655 target = NULL_RTX;
3656
3657 struct expand_operand ops[4];
3658 create_output_operand (&ops[0], target, insn_mode);
3659 create_fixed_operand (&ops[1], arg1_rtx);
3660 create_fixed_operand (&ops[2], arg2_rtx);
3661 create_integer_operand (&ops[3], align);
3662 if (maybe_expand_insn (icode, 4, ops))
3663 return ops[0].value;
3664 return NULL_RTX;
3665}
3666
7a3f89b5 3667/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 3668 Return NULL_RTX if we failed and the caller should emit a normal call,
3e346f54 3669 otherwise try to get the result in TARGET, if convenient.
3670 RESULT_EQ is true if we can relax the returned value to be either zero
3671 or nonzero, without caring about the sign. */
27d0c333 3672
53800dbe 3673static rtx
3e346f54 3674expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
53800dbe 3675{
c2f47e15 3676 if (!validate_arglist (exp,
3677 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3678 return NULL_RTX;
6f428e8b 3679
ea368aac 3680 tree arg1 = CALL_EXPR_ARG (exp, 0);
3681 tree arg2 = CALL_EXPR_ARG (exp, 1);
3682 tree len = CALL_EXPR_ARG (exp, 2);
3e346f54 3683 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3684 location_t loc = EXPR_LOCATION (exp);
b428c0a5 3685
ea368aac 3686 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3687 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
53800dbe 3688
ea368aac 3689 /* If either argument is not a pointer, call the function. */
3690 if (arg1_align == 0 || arg2_align == 0)
3691 return NULL_RTX;
53800dbe 3692
ea368aac 3693 rtx arg1_rtx = get_memory_rtx (arg1, len);
3694 rtx arg2_rtx = get_memory_rtx (arg2, len);
3e346f54 3695 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
53800dbe 3696
ea368aac 3697 /* Set MEM_SIZE as appropriate. */
3e346f54 3698 if (CONST_INT_P (len_rtx))
ea368aac 3699 {
3e346f54 3700 set_mem_size (arg1_rtx, INTVAL (len_rtx));
3701 set_mem_size (arg2_rtx, INTVAL (len_rtx));
ea368aac 3702 }
83f88f8e 3703
3e346f54 3704 by_pieces_constfn constfn = NULL;
3705
719f3058 3706 const char *src_str = c_getstr (arg2);
3707 if (result_eq && src_str == NULL)
3708 {
3709 src_str = c_getstr (arg1);
3710 if (src_str != NULL)
3711 std::swap (arg1_rtx, arg2_rtx);
3712 }
3e346f54 3713
 3714 /* If SRC is a string constant and the block comparison would
 3715 be done by pieces, we can avoid loading the string from memory
 3716 and need only use the computed constants. */
3717 if (src_str
3718 && CONST_INT_P (len_rtx)
3719 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
3720 constfn = builtin_memcpy_read_str;
3721
3722 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
3723 TREE_TYPE (len), target,
3724 result_eq, constfn,
3725 CONST_CAST (char *, src_str));
3726
ea368aac 3727 if (result)
3728 {
3729 /* Return the value in the proper mode for this function. */
3730 if (GET_MODE (result) == mode)
3731 return result;
83f88f8e 3732
ea368aac 3733 if (target != 0)
3734 {
3735 convert_move (target, result, 0);
3736 return target;
3737 }
0cd832f0 3738
53800dbe 3739 return convert_to_mode (mode, result, 0);
ea368aac 3740 }
53800dbe 3741
61ffc71a 3742 return NULL_RTX;
6f428e8b 3743}
3744
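/* The RESULT_EQ relaxation covers the common boolean use

     if (memcmp (a, b, n) == 0)
       ...

   where only zero versus nonzero matters, so the backend may use a
   cheaper sequence than one producing a signed three-way result. */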
c2f47e15 3745/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
53800dbe 3746 if we failed; the caller should emit a normal call, otherwise try to get
3747 the result in TARGET, if convenient. */
902de8ed 3748
53800dbe 3749static rtx
a65c4d64 3750expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 3751{
c2f47e15 3752 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3753 return NULL_RTX;
bf8e3599 3754
d6f01a40 3755 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3756 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3757 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 3758 {
3759 rtx arg1_rtx, arg2_rtx;
6ac5504b 3760 tree fndecl, fn;
c2f47e15 3761 tree arg1 = CALL_EXPR_ARG (exp, 0);
3762 tree arg2 = CALL_EXPR_ARG (exp, 1);
d6f01a40 3763 rtx result = NULL_RTX;
a0c938f0 3764
957d0361 3765 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3766 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6ac5504b 3767
3768 /* If we don't have POINTER_TYPE, call the function. */
3769 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 3770 return NULL_RTX;
7a3f89b5 3771
6ac5504b 3772 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3773 arg1 = builtin_save_expr (arg1);
3774 arg2 = builtin_save_expr (arg2);
7a3f89b5 3775
d8ae1baa 3776 arg1_rtx = get_memory_rtx (arg1, NULL);
3777 arg2_rtx = get_memory_rtx (arg2, NULL);
53800dbe 3778
6ac5504b 3779 /* Try to call cmpstrsi. */
d6f01a40 3780 if (cmpstr_icode != CODE_FOR_nothing)
3781 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3782 MIN (arg1_align, arg2_align));
3783
6ac5504b 3784 /* Try to determine at least one length and call cmpstrnsi. */
d6f01a40 3785 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 3786 {
3787 tree len;
3788 rtx arg3_rtx;
3789
6ac5504b 3790 tree len1 = c_strlen (arg1, 1);
3791 tree len2 = c_strlen (arg2, 1);
3792
3793 if (len1)
3794 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3795 if (len2)
3796 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3797
3798 /* If we don't have a constant length for the first, use the length
3799 of the second, if we know it. We don't require a constant for
3800 this case; some cost analysis could be done if both are available
3801 but neither is constant. For now, assume they're equally cheap,
3802 unless one has side effects. If both strings have constant lengths,
3803 use the smaller. */
3804
3805 if (!len1)
3806 len = len2;
3807 else if (!len2)
3808 len = len1;
3809 else if (TREE_SIDE_EFFECTS (len1))
3810 len = len2;
3811 else if (TREE_SIDE_EFFECTS (len2))
3812 len = len1;
3813 else if (TREE_CODE (len1) != INTEGER_CST)
3814 len = len2;
3815 else if (TREE_CODE (len2) != INTEGER_CST)
3816 len = len1;
3817 else if (tree_int_cst_lt (len1, len2))
3818 len = len1;
3819 else
3820 len = len2;
3821
3822 /* If both arguments have side effects, we cannot optimize. */
d6f01a40 3823 if (len && !TREE_SIDE_EFFECTS (len))
3824 {
3825 arg3_rtx = expand_normal (len);
ea368aac 3826 result = expand_cmpstrn_or_cmpmem
3827 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3828 arg3_rtx, MIN (arg1_align, arg2_align));
d6f01a40 3829 }
6ac5504b 3830 }
3f8aefe2 3831
d6f01a40 3832 if (result)
6ac5504b 3833 {
6ac5504b 3834 /* Return the value in the proper mode for this function. */
d6f01a40 3835 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6ac5504b 3836 if (GET_MODE (result) == mode)
3837 return result;
3838 if (target == 0)
3839 return convert_to_mode (mode, result, 0);
3840 convert_move (target, result, 0);
3841 return target;
3842 }
902de8ed 3843
6ac5504b 3844 /* Expand the library call ourselves using a stabilized argument
3845 list to avoid re-evaluating the function's arguments twice. */
6ac5504b 3846 fndecl = get_callee_fndecl (exp);
0568e9c1 3847 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
a65c4d64 3848 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3849 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6ac5504b 3850 return expand_call (fn, target, target == const0_rtx);
3851 }
c2f47e15 3852 return NULL_RTX;
83d79705 3853}
53800dbe 3854
48e1416a 3855/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 3856 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
ed09096d 3857 the result in TARGET, if convenient. */
27d0c333 3858
ed09096d 3859static rtx
a65c4d64 3860expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 3861 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 3862{
a65c4d64 3863 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 3864
c2f47e15 3865 if (!validate_arglist (exp,
3866 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3867 return NULL_RTX;
ed09096d 3868
6e34e617 3869 /* If c_strlen can determine an expression for one of the string
6ac5504b 3870 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 3871 using length MIN(strlen(string)+1, arg3). */
d6f01a40 3872 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3873 if (cmpstrn_icode != CODE_FOR_nothing)
7a3f89b5 3874 {
3875 tree len, len1, len2;
3876 rtx arg1_rtx, arg2_rtx, arg3_rtx;
d6f01a40 3877 rtx result;
0b25db21 3878 tree fndecl, fn;
c2f47e15 3879 tree arg1 = CALL_EXPR_ARG (exp, 0);
3880 tree arg2 = CALL_EXPR_ARG (exp, 1);
3881 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 3882
957d0361 3883 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3884 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
bf8e3599 3885
681fab1e 3886 len1 = c_strlen (arg1, 1);
3887 len2 = c_strlen (arg2, 1);
7a3f89b5 3888
3889 if (len1)
389dd41b 3890 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 3891 if (len2)
389dd41b 3892 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 3893
3894 /* If we don't have a constant length for the first, use the length
3895 of the second, if we know it. We don't require a constant for
3896 this case; some cost analysis could be done if both are available
3897 but neither is constant. For now, assume they're equally cheap,
3898 unless one has side effects. If both strings have constant lengths,
3899 use the smaller. */
3900
3901 if (!len1)
3902 len = len2;
3903 else if (!len2)
3904 len = len1;
3905 else if (TREE_SIDE_EFFECTS (len1))
3906 len = len2;
3907 else if (TREE_SIDE_EFFECTS (len2))
3908 len = len1;
3909 else if (TREE_CODE (len1) != INTEGER_CST)
3910 len = len2;
3911 else if (TREE_CODE (len2) != INTEGER_CST)
3912 len = len1;
3913 else if (tree_int_cst_lt (len1, len2))
3914 len = len1;
3915 else
3916 len = len2;
6e34e617 3917
7a3f89b5 3918 /* If both arguments have side effects, we cannot optimize. */
3919 if (!len || TREE_SIDE_EFFECTS (len))
c2f47e15 3920 return NULL_RTX;
bf8e3599 3921
7a3f89b5 3922 /* The actual new length parameter is MIN(len,arg3). */
389dd41b 3923 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3924 fold_convert_loc (loc, TREE_TYPE (len), arg3));
7a3f89b5 3925
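/* E.g. for strncmp ("hello", "help", 10): LEN1 == 6 and LEN2 == 5
   (each strlen + 1), the smaller constant 5 is chosen, and the
   comparison length becomes MIN (5, 10) == 5, enough to reach the
   first differing byte. */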
 3926 /* If either argument is not a pointer, call the function. */
3927 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 3928 return NULL_RTX;
7a3f89b5 3929
a65c4d64 3930 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
3931 arg1 = builtin_save_expr (arg1);
3932 arg2 = builtin_save_expr (arg2);
3933 len = builtin_save_expr (len);
27d0c333 3934
a65c4d64 3935 arg1_rtx = get_memory_rtx (arg1, len);
3936 arg2_rtx = get_memory_rtx (arg2, len);
3937 arg3_rtx = expand_normal (len);
ea368aac 3938 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3939 arg2_rtx, TREE_TYPE (len), arg3_rtx,
3940 MIN (arg1_align, arg2_align));
d6f01a40 3941 if (result)
a65c4d64 3942 {
a65c4d64 3943 /* Return the value in the proper mode for this function. */
3944 mode = TYPE_MODE (TREE_TYPE (exp));
3945 if (GET_MODE (result) == mode)
3946 return result;
3947 if (target == 0)
3948 return convert_to_mode (mode, result, 0);
3949 convert_move (target, result, 0);
3950 return target;
3951 }
27d0c333 3952
a65c4d64 3953 /* Expand the library call ourselves using a stabilized argument
3954 list to avoid re-evaluating the function's arguments twice. */
3955 fndecl = get_callee_fndecl (exp);
0568e9c1 3956 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3957 arg1, arg2, len);
a65c4d64 3958 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3959 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3960 return expand_call (fn, target, target == const0_rtx);
3961 }
c2f47e15 3962 return NULL_RTX;
49f0327b 3963}
3964
a66c9326 3965/* Expand a call to __builtin_saveregs, generating the result in TARGET,
3966 if that's convenient. */
902de8ed 3967
a66c9326 3968rtx
aecda0d6 3969expand_builtin_saveregs (void)
53800dbe 3970{
1e0c0b35 3971 rtx val;
3972 rtx_insn *seq;
53800dbe 3973
3974 /* Don't do __builtin_saveregs more than once in a function.
3975 Save the result of the first call and reuse it. */
3976 if (saveregs_value != 0)
3977 return saveregs_value;
53800dbe 3978
a66c9326 3979 /* When this function is called, it means that registers must be
3980 saved on entry to this function. So we migrate the call to the
3981 first insn of this function. */
3982
3983 start_sequence ();
53800dbe 3984
a66c9326 3985 /* Do whatever the machine needs done in this case. */
45550790 3986 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 3987
a66c9326 3988 seq = get_insns ();
3989 end_sequence ();
53800dbe 3990
a66c9326 3991 saveregs_value = val;
53800dbe 3992
31d3e01c 3993 /* Put the insns after the NOTE that starts the function. If this
3994 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 3995 the code is placed at the start of the function. */
3996 push_topmost_sequence ();
0ec80471 3997 emit_insn_after (seq, entry_of_function ());
a66c9326 3998 pop_topmost_sequence ();
3999
4000 return val;
53800dbe 4001}
4002
79012a9d 4003/* Expand a call to __builtin_next_arg. */
27d0c333 4004
53800dbe 4005static rtx
79012a9d 4006expand_builtin_next_arg (void)
53800dbe 4007{
79012a9d 4008 /* Checking arguments is already done in fold_builtin_next_arg
4009 that must be called before this function. */
940ddc5c 4010 return expand_binop (ptr_mode, add_optab,
abe32cce 4011 crtl->args.internal_arg_pointer,
4012 crtl->args.arg_offset_rtx,
53800dbe 4013 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4014}
4015
a66c9326 4016/* Make it easier for the backends by protecting the valist argument
4017 from multiple evaluations. */
4018
4019static tree
389dd41b 4020stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4021{
5f57a8b1 4022 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4023
182cf5a9 4024 /* The current way of determining the type of valist is completely
4025 bogus. We should have the information on the va builtin instead. */
4026 if (!vatype)
4027 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4028
4029 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4030 {
2d47cc32 4031 if (TREE_SIDE_EFFECTS (valist))
4032 valist = save_expr (valist);
11a61dea 4033
2d47cc32 4034 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4035 vatype, but it's possible we've actually been given an array
4036 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4037 So fix it. */
4038 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4039 {
5f57a8b1 4040 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4041 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4042 }
a66c9326 4043 }
11a61dea 4044 else
a66c9326 4045 {
182cf5a9 4046 tree pt = build_pointer_type (vatype);
11a61dea 4047
2d47cc32 4048 if (! needs_lvalue)
4049 {
11a61dea 4050 if (! TREE_SIDE_EFFECTS (valist))
4051 return valist;
bf8e3599 4052
389dd41b 4053 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4054 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4055 }
2d47cc32 4056
11a61dea 4057 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4058 valist = save_expr (valist);
182cf5a9 4059 valist = fold_build2_loc (loc, MEM_REF,
4060 vatype, valist, build_int_cst (pt, 0));
a66c9326 4061 }
4062
4063 return valist;
4064}
4065
2e15d750 4066/* The "standard" definition of va_list is void*. */
4067
4068tree
4069std_build_builtin_va_list (void)
4070{
4071 return ptr_type_node;
4072}
4073
5f57a8b1 4074/* The "standard" abi va_list is va_list_type_node. */
4075
4076tree
4077std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4078{
4079 return va_list_type_node;
4080}
4081
4082/* The "standard" type of va_list is va_list_type_node. */
4083
4084tree
4085std_canonical_va_list_type (tree type)
4086{
4087 tree wtype, htype;
4088
5f57a8b1 4089 wtype = va_list_type_node;
4090 htype = type;
b6da2e41 4091
4092 if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4093 {
4094 /* If va_list is an array type, the argument may have decayed
4095 to a pointer type, e.g. by being passed to another function.
4096 In that case, unwrap both types so that we can compare the
4097 underlying records. */
4098 if (TREE_CODE (htype) == ARRAY_TYPE
4099 || POINTER_TYPE_P (htype))
4100 {
4101 wtype = TREE_TYPE (wtype);
4102 htype = TREE_TYPE (htype);
4103 }
4104 }
4105 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4106 return va_list_type_node;
4107
4108 return NULL_TREE;
4109}
4110
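/* E.g. on targets where va_list is an array type, such as
   __va_list_tag[1] on x86-64, a va_list object passed to another
   function decays to __va_list_tag *; unwrapping both types above
   lets that pointer still match the canonical record. */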
a66c9326 4111/* The "standard" implementation of va_start: just assign `nextarg' to
4112 the variable. */
27d0c333 4113
a66c9326 4114void
aecda0d6 4115std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4116{
f03c17bc 4117 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4118 convert_move (va_r, nextarg, 0);
058a1b7a 4119
4120 /* We do not have any valid bounds for the pointer, so
4121 just store zero bounds for it. */
4122 if (chkp_function_instrumented_p (current_function_decl))
4123 chkp_expand_bounds_reset_for_mem (valist,
4124 make_tree (TREE_TYPE (valist),
4125 nextarg));
a66c9326 4126}
4127
c2f47e15 4128/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4129
a66c9326 4130static rtx
c2f47e15 4131expand_builtin_va_start (tree exp)
a66c9326 4132{
4133 rtx nextarg;
c2f47e15 4134 tree valist;
389dd41b 4135 location_t loc = EXPR_LOCATION (exp);
a66c9326 4136
c2f47e15 4137 if (call_expr_nargs (exp) < 2)
cb166087 4138 {
389dd41b 4139 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4140 return const0_rtx;
4141 }
a66c9326 4142
c2f47e15 4143 if (fold_builtin_next_arg (exp, true))
79012a9d 4144 return const0_rtx;
7c2f0500 4145
79012a9d 4146 nextarg = expand_builtin_next_arg ();
389dd41b 4147 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4148
8a58ed0a 4149 if (targetm.expand_builtin_va_start)
4150 targetm.expand_builtin_va_start (valist, nextarg);
4151 else
4152 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4153
4154 return const0_rtx;
4155}
4156
c2f47e15 4157/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4158
a66c9326 4159static rtx
c2f47e15 4160expand_builtin_va_end (tree exp)
a66c9326 4161{
c2f47e15 4162 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4163
8a15c04a 4164 /* Evaluate for side effects, if needed. I hate macros that don't
4165 do that. */
4166 if (TREE_SIDE_EFFECTS (valist))
4167 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4168
4169 return const0_rtx;
4170}
4171
c2f47e15 4172/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4173 builtin rather than just as an assignment in stdarg.h because of the
4174 nastiness of array-type va_list types. */
f7c44134 4175
a66c9326 4176static rtx
c2f47e15 4177expand_builtin_va_copy (tree exp)
a66c9326 4178{
4179 tree dst, src, t;
389dd41b 4180 location_t loc = EXPR_LOCATION (exp);
a66c9326 4181
c2f47e15 4182 dst = CALL_EXPR_ARG (exp, 0);
4183 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4184
389dd41b 4185 dst = stabilize_va_list_loc (loc, dst, 1);
4186 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4187
5f57a8b1 4188 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4189
4190 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4191 {
5f57a8b1 4192 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4193 TREE_SIDE_EFFECTS (t) = 1;
4194 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4195 }
4196 else
4197 {
11a61dea 4198 rtx dstb, srcb, size;
4199
4200 /* Evaluate to pointers. */
4201 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4202 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4203 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4204 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4205
85d654dd 4206 dstb = convert_memory_address (Pmode, dstb);
4207 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4208
11a61dea 4209 /* "Dereference" to BLKmode memories. */
4210 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4211 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4212 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4213 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4214 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4215 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4216
4217 /* Copy. */
0378dbdc 4218 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4219 }
4220
4221 return const0_rtx;
4222}
4223
53800dbe 4224/* Expand a call to one of the builtin functions __builtin_frame_address or
4225 __builtin_return_address. */
27d0c333 4226
53800dbe 4227static rtx
c2f47e15 4228expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4229{
53800dbe 4230 /* The argument must be a nonnegative integer constant.
4231 It counts the number of frames to scan up the stack.
5b252e95 4232 The value is either the frame pointer value or the return
4233 address saved in that frame. */
c2f47e15 4234 if (call_expr_nargs (exp) == 0)
53800dbe 4235 /* Warning about missing arg was already issued. */
4236 return const0_rtx;
e913b5cd 4237 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4238 {
5b252e95 4239 error ("invalid argument to %qD", fndecl);
53800dbe 4240 return const0_rtx;
4241 }
4242 else
4243 {
5b252e95 4244 /* Number of frames to scan up the stack. */
4245 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4246
4247 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 4248
4249 /* Some ports cannot access arbitrary stack frames. */
4250 if (tem == NULL)
4251 {
5b252e95 4252 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 4253 return const0_rtx;
4254 }
4255
5b252e95 4256 if (count)
4257 {
4258 /* Warn since no effort is made to ensure that any frame
4259 beyond the current one exists or can be safely reached. */
4260 warning (OPT_Wframe_address, "calling %qD with "
4261 "a nonzero argument is unsafe", fndecl);
4262 }
4263
53800dbe 4264 /* For __builtin_frame_address, return what we've got. */
4265 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4266 return tem;
4267
8ad4c111 4268 if (!REG_P (tem)
53800dbe 4269 && ! CONSTANT_P (tem))
99182918 4270 tem = copy_addr_to_reg (tem);
53800dbe 4271 return tem;
4272 }
4273}
4274
990495a7 4275/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5be42b39 4276 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4277 is the same as for allocate_dynamic_stack_space. */
15c6cf6b 4278
53800dbe 4279static rtx
5be42b39 4280expand_builtin_alloca (tree exp, bool cannot_accumulate)
53800dbe 4281{
4282 rtx op0;
15c6cf6b 4283 rtx result;
581bf1c2 4284 bool valid_arglist;
4285 unsigned int align;
4286 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4287 == BUILT_IN_ALLOCA_WITH_ALIGN);
53800dbe 4288
581bf1c2 4289 valid_arglist
4290 = (alloca_with_align
4291 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4292 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4293
4294 if (!valid_arglist)
c2f47e15 4295 return NULL_RTX;
53800dbe 4296
4297 /* Compute the argument. */
c2f47e15 4298 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4299
581bf1c2 4300 /* Compute the alignment. */
4301 align = (alloca_with_align
f9ae6f95 4302 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4303 : BIGGEST_ALIGNMENT);
4304
53800dbe 4305 /* Allocate the desired space. */
581bf1c2 4306 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
85d654dd 4307 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4308
4309 return result;
53800dbe 4310}
4311
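/* E.g. __builtin_alloca_with_align (n, 128) allocates N bytes with
   the alignment argument taken in bits, here 128 bits (16 bytes);
   plain alloca instead uses BIGGEST_ALIGNMENT. */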
74bdbe96 4312/* Expand a call to bswap builtin in EXP.
4313 Return NULL_RTX if a normal call should be emitted rather than expanding the
4314 function in-line. If convenient, the result should be placed in TARGET.
4315 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4316
4317static rtx
3754d046 4318expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4319 rtx subtarget)
42791117 4320{
42791117 4321 tree arg;
4322 rtx op0;
4323
c2f47e15 4324 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4325 return NULL_RTX;
42791117 4326
c2f47e15 4327 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4328 op0 = expand_expr (arg,
4329 subtarget && GET_MODE (subtarget) == target_mode
4330 ? subtarget : NULL_RTX,
4331 target_mode, EXPAND_NORMAL);
4332 if (GET_MODE (op0) != target_mode)
4333 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4334
74bdbe96 4335 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4336
4337 gcc_assert (target);
4338
74bdbe96 4339 return convert_to_mode (target_mode, target, 1);
42791117 4340}
4341
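/* E.g. __builtin_bswap32 (0x12345678) yields 0x78563412, via a
   single byte-swap instruction where the target provides one. */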
c2f47e15 4342/* Expand a call to a unary builtin in EXP.
4343 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4344 function in-line. If convenient, the result should be placed in TARGET.
4345 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4346
53800dbe 4347static rtx
3754d046 4348expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4349 rtx subtarget, optab op_optab)
53800dbe 4350{
4351 rtx op0;
c2f47e15 4352
4353 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4354 return NULL_RTX;
53800dbe 4355
4356 /* Compute the argument. */
f97eea22 4357 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4358 (subtarget
4359 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4360 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4361 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4362 /* Compute op, into TARGET if possible.
53800dbe 4363 Set TARGET to wherever the result comes back. */
c2f47e15 4364 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4365 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4366 gcc_assert (target);
7d3f6cc7 4367
efb070c8 4368 return convert_to_mode (target_mode, target, 0);
53800dbe 4369}
89cfe6e5 4370
48e1416a 4371/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4372 as the builtin_expect semantics should already have been
 4373 applied by the tree branch prediction pass. */
89cfe6e5 4374
4375static rtx
c2f47e15 4376expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4377{
1e4adcfc 4378 tree arg;
89cfe6e5 4379
c2f47e15 4380 if (call_expr_nargs (exp) < 2)
89cfe6e5 4381 return const0_rtx;
c2f47e15 4382 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4383
c2f47e15 4384 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4385 /* When guessing was done, the hints should be already stripped away. */
07311427 4386 gcc_assert (!flag_guess_branch_prob
852f689e 4387 || optimize == 0 || seen_error ());
89cfe6e5 4388 return target;
4389}
689df48e 4390
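/* E.g. in

     if (__builtin_expect (p == NULL, 0))
       handle_rare_case ();

   the hint was already consumed when branch probabilities were
   assigned, so by expansion time only the value of P == NULL is
   left to compute. */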
fca0886c 4391/* Expand a call to __builtin_assume_aligned. We just return our first
 4392 argument as the builtin_assume_aligned semantics should already
 4393 have been applied by CCP. */
4394
4395static rtx
4396expand_builtin_assume_aligned (tree exp, rtx target)
4397{
4398 if (call_expr_nargs (exp) < 2)
4399 return const0_rtx;
4400 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4401 EXPAND_NORMAL);
4402 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4403 && (call_expr_nargs (exp) < 3
4404 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4405 return target;
4406}
4407
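/* E.g. for

     double *q = (double *) __builtin_assume_aligned (p, 32);

   CCP has already propagated the 32-byte alignment fact, so here we
   merely evaluate and return the first argument. */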
c22de3f0 4408void
aecda0d6 4409expand_builtin_trap (void)
a0ef1725 4410{
4db8dd0c 4411 if (targetm.have_trap ())
f73960eb 4412 {
4db8dd0c 4413 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 4414 /* For trap insns when not accumulating outgoing args force
4415 REG_ARGS_SIZE note to prevent crossjumping of calls with
4416 different args sizes. */
4417 if (!ACCUMULATE_OUTGOING_ARGS)
4418 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4419 }
a0ef1725 4420 else
61ffc71a 4421 {
4422 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
4423 tree call_expr = build_call_expr (fn, 0);
4424 expand_call (call_expr, NULL_RTX, false);
4425 }
4426
a0ef1725 4427 emit_barrier ();
4428}
78a74442 4429
d2b48f0c 4430/* Expand a call to __builtin_unreachable. We do nothing except emit
4431 a barrier saying that control flow will not pass here.
4432
4433 It is the responsibility of the program being compiled to ensure
 4434 that control flow never reaches __builtin_unreachable. */
4435static void
4436expand_builtin_unreachable (void)
4437{
4438 emit_barrier ();
4439}
4440
c2f47e15 4441/* Expand EXP, a call to fabs, fabsf or fabsl.
4442 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 4443 the function inline. If convenient, the result should be placed
4444 in TARGET. SUBTARGET may be used as the target for computing
4445 the operand. */
4446
4447static rtx
c2f47e15 4448expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 4449{
3754d046 4450 machine_mode mode;
78a74442 4451 tree arg;
4452 rtx op0;
4453
c2f47e15 4454 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4455 return NULL_RTX;
78a74442 4456
c2f47e15 4457 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 4458 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 4459 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 4460 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 4461 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4462}
4463
c2f47e15 4464/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 4465 Return NULL if a normal call should be emitted rather than expanding the
4466 function inline. If convenient, the result should be placed in TARGET.
4467 SUBTARGET may be used as the target for computing the operand. */
4468
4469static rtx
c2f47e15 4470expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 4471{
4472 rtx op0, op1;
4473 tree arg;
4474
c2f47e15 4475 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4476 return NULL_RTX;
270436f3 4477
c2f47e15 4478 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 4479 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 4480
c2f47e15 4481 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 4482 op1 = expand_normal (arg);
270436f3 4483
4484 return expand_copysign (op0, op1, target);
4485}
4486
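/* E.g. copysign (3.0, -0.5) == -3.0 and copysign (-2.0, 1.0) == 2.0:
   the magnitude of OP0 is combined with the sign bit of OP1. */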
ac8fb6db 4487/* Expand a call to __builtin___clear_cache. */
4488
4489static rtx
32e17df0 4490expand_builtin___clear_cache (tree exp)
ac8fb6db 4491{
32e17df0 4492 if (!targetm.code_for_clear_cache)
4493 {
ac8fb6db 4494#ifdef CLEAR_INSN_CACHE
32e17df0 4495 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4496 does something. Just do the default expansion to a call to
4497 __clear_cache(). */
4498 return NULL_RTX;
ac8fb6db 4499#else
32e17df0 4500 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4501 does nothing. There is no need to call it. Do nothing. */
4502 return const0_rtx;
ac8fb6db 4503#endif /* CLEAR_INSN_CACHE */
32e17df0 4504 }
4505
ac8fb6db 4506 /* We have a "clear_cache" insn, and it will handle everything. */
4507 tree begin, end;
4508 rtx begin_rtx, end_rtx;
ac8fb6db 4509
4510 /* We must not expand to a library call. If we did, any
4511 fallback library function in libgcc that might contain a call to
4512 __builtin___clear_cache() would recurse infinitely. */
4513 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4514 {
4515 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4516 return const0_rtx;
4517 }
4518
32e17df0 4519 if (targetm.have_clear_cache ())
ac8fb6db 4520 {
8786db1e 4521 struct expand_operand ops[2];
ac8fb6db 4522
4523 begin = CALL_EXPR_ARG (exp, 0);
4524 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4525
4526 end = CALL_EXPR_ARG (exp, 1);
4527 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4528
8786db1e 4529 create_address_operand (&ops[0], begin_rtx);
4530 create_address_operand (&ops[1], end_rtx);
32e17df0 4531 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 4532 return const0_rtx;
ac8fb6db 4533 }
4534 return const0_rtx;
ac8fb6db 4535}
4536
4ee9c684 4537/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4538
4539static rtx
4540round_trampoline_addr (rtx tramp)
4541{
4542 rtx temp, addend, mask;
4543
4544 /* If we don't need too much alignment, we'll have been guaranteed
4545 proper alignment by get_trampoline_type. */
4546 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4547 return tramp;
4548
4549 /* Round address up to desired boundary. */
4550 temp = gen_reg_rtx (Pmode);
0359f9f5 4551 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4552 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 4553
4554 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4555 temp, 0, OPTAB_LIB_WIDEN);
4556 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4557 temp, 0, OPTAB_LIB_WIDEN);
4558
4559 return tramp;
4560}
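
/* Illustrative sketch, not part of GCC: the two expand_simple_binop calls
   above implement the usual align-up idiom.  In plain C, for an assumed
   alignment of 8 bytes, the computation is

     #include <stdint.h>

     static uintptr_t
     align_up_8 (uintptr_t addr)
     {
       return (addr + 7) & (uintptr_t) -8;
     }

   e.g. align_up_8 (0x1003) yields 0x1008, while an already aligned
   address such as 0x1008 is returned unchanged.  */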
4561
4562static rtx
c307f106 4563expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 4564{
4565 tree t_tramp, t_func, t_chain;
82c7907c 4566 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 4567
c2f47e15 4568 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 4569 POINTER_TYPE, VOID_TYPE))
4570 return NULL_RTX;
4571
c2f47e15 4572 t_tramp = CALL_EXPR_ARG (exp, 0);
4573 t_func = CALL_EXPR_ARG (exp, 1);
4574 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 4575
8ec3c5c2 4576 r_tramp = expand_normal (t_tramp);
82c7907c 4577 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4578 MEM_NOTRAP_P (m_tramp) = 1;
4579
c307f106 4580 /* If ONSTACK, the TRAMP argument should be the address of a field
4581 within the local function's FRAME decl. Either way, let's see if
4582 we can fill in the MEM_ATTRs for this memory. */
82c7907c 4583 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 4584 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 4585
c307f106 4586 /* Creator of a heap trampoline is responsible for making sure the
4587 address is aligned to at least STACK_BOUNDARY. Normally malloc
4588 will ensure this anyhow. */
82c7907c 4589 tmp = round_trampoline_addr (r_tramp);
4590 if (tmp != r_tramp)
4591 {
4592 m_tramp = change_address (m_tramp, BLKmode, tmp);
4593 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 4594 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 4595 }
4596
4597 /* The FUNC argument should be the address of the nested function.
4598 Extract the actual function decl to pass to the hook. */
4599 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4600 t_func = TREE_OPERAND (t_func, 0);
4601 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4602
8ec3c5c2 4603 r_chain = expand_normal (t_chain);
4ee9c684 4604
4605 /* Generate insns to initialize the trampoline. */
82c7907c 4606 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 4607
c307f106 4608 if (onstack)
4609 {
4610 trampolines_created = 1;
8bc8a8f4 4611
c307f106 4612 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4613 "trampoline generated for nested function %qD", t_func);
4614 }
8bc8a8f4 4615
4ee9c684 4616 return const0_rtx;
4617}
4618
4619static rtx
c2f47e15 4620expand_builtin_adjust_trampoline (tree exp)
4ee9c684 4621{
4622 rtx tramp;
4623
c2f47e15 4624 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 4625 return NULL_RTX;
4626
c2f47e15 4627 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 4628 tramp = round_trampoline_addr (tramp);
82c7907c 4629 if (targetm.calls.trampoline_adjust_address)
4630 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 4631
4632 return tramp;
4633}
4634
93f564d6 4635/* Expand the call EXP to the built-in signbit, signbitf or signbitl
4636 function. The function first checks whether the back end provides
4637 an insn to implement signbit for the respective mode. If not, it
4638 checks whether the floating point format of the value is such that
10902624 4639 the sign bit can be extracted. If that is not the case, error out.
4640 EXP is the expression that is a call to the builtin function; if
4641 convenient, the result should be placed in TARGET. */
27f261ef 4642static rtx
4643expand_builtin_signbit (tree exp, rtx target)
4644{
4645 const struct real_format *fmt;
3754d046 4646 machine_mode fmode, imode, rmode;
c2f47e15 4647 tree arg;
ca4f1f5b 4648 int word, bitpos;
27eda240 4649 enum insn_code icode;
27f261ef 4650 rtx temp;
389dd41b 4651 location_t loc = EXPR_LOCATION (exp);
27f261ef 4652
c2f47e15 4653 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4654 return NULL_RTX;
27f261ef 4655
c2f47e15 4656 arg = CALL_EXPR_ARG (exp, 0);
27f261ef 4657 fmode = TYPE_MODE (TREE_TYPE (arg));
4658 rmode = TYPE_MODE (TREE_TYPE (exp));
4659 fmt = REAL_MODE_FORMAT (fmode);
4660
93f564d6 4661 arg = builtin_save_expr (arg);
4662
4663 /* Expand the argument yielding a RTX expression. */
4664 temp = expand_normal (arg);
4665
4666 /* Check if the back end provides an insn that handles signbit for the
4667 argument's mode. */
d6bf3b14 4668 icode = optab_handler (signbit_optab, fmode);
27eda240 4669 if (icode != CODE_FOR_nothing)
93f564d6 4670 {
1e0c0b35 4671 rtx_insn *last = get_last_insn ();
93f564d6 4672 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 4673 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4674 return target;
4675 delete_insns_since (last);
93f564d6 4676 }
4677
27f261ef 4678 /* For floating point formats without a sign bit, implement signbit
4679 as "ARG < 0.0". */
8d564692 4680 bitpos = fmt->signbit_ro;
ca4f1f5b 4681 if (bitpos < 0)
27f261ef 4682 {
4683 /* But we can't do this if the format supports signed zero. */
10902624 4684 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
27f261ef 4685
389dd41b 4686 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 4687 build_real (TREE_TYPE (arg), dconst0));
27f261ef 4688 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4689 }
4690
ca4f1f5b 4691 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 4692 {
ca4f1f5b 4693 imode = int_mode_for_mode (fmode);
10902624 4694 gcc_assert (imode != BLKmode);
ca4f1f5b 4695 temp = gen_lowpart (imode, temp);
24fd4260 4696 }
4697 else
4698 {
ca4f1f5b 4699 imode = word_mode;
4700 /* Handle targets with different FP word orders. */
4701 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 4702 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 4703 else
a0c938f0 4704 word = bitpos / BITS_PER_WORD;
ca4f1f5b 4705 temp = operand_subword_force (temp, word, fmode);
4706 bitpos = bitpos % BITS_PER_WORD;
4707 }
4708
44b0f1d0 4709 /* Force the intermediate word_mode (or narrower) result into a
4710 register. This avoids attempting to create paradoxical SUBREGs
4711 of floating point modes below. */
4712 temp = force_reg (imode, temp);
4713
ca4f1f5b 4714 /* If the bitpos is within the "result mode" lowpart, the operation
4715 can be implemented with a single bitwise AND. Otherwise, we need
4716 a right shift and an AND. */
4717
4718 if (bitpos < GET_MODE_BITSIZE (rmode))
4719 {
796b6678 4720 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 4721
4a46f016 4722 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 4723 temp = gen_lowpart (rmode, temp);
24fd4260 4724 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 4725 immed_wide_int_const (mask, rmode),
ca4f1f5b 4726 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 4727 }
ca4f1f5b 4728 else
4729 {
4730 /* Perform a logical right shift to place the signbit in the least
a0c938f0 4731 significant bit, then truncate the result to the desired mode
ca4f1f5b 4732 and mask just this bit. */
f5ff0b21 4733 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 4734 temp = gen_lowpart (rmode, temp);
4735 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4736 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4737 }
4738
27f261ef 4739 return temp;
4740}
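
/* Illustrative sketch, not part of GCC: for IEEE single precision
   fmt->signbit_ro is 31, so the fallback path above amounts to the
   following type-punned extraction (memcpy keeps it within the
   strict-aliasing rules):

     #include <stdint.h>
     #include <string.h>

     static int
     float_signbit (float f)
     {
       uint32_t bits;
       memcpy (&bits, &f, sizeof bits);
       return (bits >> 31) & 1;
     }

   float_signbit (-0.0f) is 1 even though -0.0f == 0.0f, which is why
   signbit cannot simply be expanded as "ARG < 0.0" for formats that
   honor signed zeros.  */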
73673831 4741
4742/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 4743 call. EXP is the call. FN is the
73673831 4744 identifier of the actual function. IGNORE is nonzero if the
4745 value is to be ignored. */
4746
4747static rtx
c2f47e15 4748expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 4749{
4750 tree id, decl;
4751 tree call;
4752
4753 /* If we are not profiling, just call the function. */
4754 if (!profile_arc_flag)
4755 return NULL_RTX;
4756
4757 /* Otherwise call the wrapper. This should be equivalent for the rest of
4758 the compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 4759 code necessary for keeping the profiling sane. */
73673831 4760
4761 switch (DECL_FUNCTION_CODE (fn))
4762 {
4763 case BUILT_IN_FORK:
4764 id = get_identifier ("__gcov_fork");
4765 break;
4766
4767 case BUILT_IN_EXECL:
4768 id = get_identifier ("__gcov_execl");
4769 break;
4770
4771 case BUILT_IN_EXECV:
4772 id = get_identifier ("__gcov_execv");
4773 break;
4774
4775 case BUILT_IN_EXECLP:
4776 id = get_identifier ("__gcov_execlp");
4777 break;
4778
4779 case BUILT_IN_EXECLE:
4780 id = get_identifier ("__gcov_execle");
4781 break;
4782
4783 case BUILT_IN_EXECVP:
4784 id = get_identifier ("__gcov_execvp");
4785 break;
4786
4787 case BUILT_IN_EXECVE:
4788 id = get_identifier ("__gcov_execve");
4789 break;
4790
4791 default:
64db345d 4792 gcc_unreachable ();
73673831 4793 }
4794
e60a6f7b 4795 decl = build_decl (DECL_SOURCE_LOCATION (fn),
4796 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 4797 DECL_EXTERNAL (decl) = 1;
4798 TREE_PUBLIC (decl) = 1;
4799 DECL_ARTIFICIAL (decl) = 1;
4800 TREE_NOTHROW (decl) = 1;
e82d310b 4801 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4802 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 4803 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 4804 return expand_call (call, target, ignore);
c2f47e15 4805 }
48e1416a 4806
b6a5fc45 4807
4808\f
3e272de8 4809/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
4810 the pointer in these functions is void*, the tree optimizers may remove
4811 casts. The mode computed in expand_builtin isn't reliable either, due
4812 to __sync_bool_compare_and_swap.
4813
4814 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4815 group of builtins. This gives us log2 of the mode size. */
4816
3754d046 4817static inline machine_mode
3e272de8 4818get_builtin_sync_mode (int fcode_diff)
4819{
ad3a13b5 4820 /* The size is not negotiable, so ask not to get BLKmode in return
4821 if the target indicates that a smaller size would be better. */
4822 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 4823}
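
/* For instance, BUILT_IN_SYNC_FETCH_AND_ADD_4 sits two entries after
   BUILT_IN_SYNC_FETCH_AND_ADD_1 in the builtin enumeration, so
   FCODE_DIFF is 2 and the mode returned holds
   BITS_PER_UNIT << 2 = 32 bits (SImode on typical targets).  */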
4824
041e0215 4825/* Expand the memory expression LOC and return the appropriate memory operand
4826 for the builtin_sync operations. */
4827
4828static rtx
3754d046 4829get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 4830{
4831 rtx addr, mem;
4832
7f4d56ad 4833 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4834 addr = convert_memory_address (Pmode, addr);
041e0215 4835
4836 /* Note that we explicitly do not want any alias information for this
4837 memory, so that we kill all other live memories. Otherwise we don't
4838 satisfy the full barrier semantics of the intrinsic. */
4839 mem = validize_mem (gen_rtx_MEM (mode, addr));
4840
153c3b50 4841 /* The alignment needs to be at least that of the mode. */
4842 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 4843 get_pointer_alignment (loc)));
c94cfd1c 4844 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 4845 MEM_VOLATILE_P (mem) = 1;
4846
4847 return mem;
4848}
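
/* For example, for __sync_fetch_and_add (&x, 1) on a 4-byte int, LOC is
   &x and MODE is SImode; the resulting MEM is volatile and carries
   ALIAS_SET_MEMORY_BARRIER, so no other memory access is reordered or
   CSEd across the atomic operation.  */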
4849
1cd6e20d 4850/* Make sure an argument is in the right mode.
4851 EXP is the tree argument.
4852 MODE is the mode it should be in. */
4853
4854static rtx
3754d046 4855expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 4856{
4857 rtx val;
3754d046 4858 machine_mode old_mode;
1cd6e20d 4859
4860 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4861 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
4862 of CONST_INTs, where we know the old_mode only from the call argument. */
4863
4864 old_mode = GET_MODE (val);
4865 if (old_mode == VOIDmode)
4866 old_mode = TYPE_MODE (TREE_TYPE (exp));
4867 val = convert_modes (mode, old_mode, val, 1);
4868 return val;
4869}
4870
4871
b6a5fc45 4872/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 4873 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 4874 that corresponds to the arithmetic or logical operation from the name;
4875 an exception here is that NOT actually means NAND. TARGET is an optional
4876 place for us to store the results; AFTER is true if this is the
1cd6e20d 4877 xxx_and_fetch form, which returns the result of the operation. */
b6a5fc45 4878
4879static rtx
3754d046 4880expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 4881 enum rtx_code code, bool after,
1cd6e20d 4882 rtx target)
b6a5fc45 4883{
041e0215 4884 rtx val, mem;
e60a6f7b 4885 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 4886
cf73e559 4887 if (code == NOT && warn_sync_nand)
4888 {
4889 tree fndecl = get_callee_fndecl (exp);
4890 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4891
4892 static bool warned_f_a_n, warned_n_a_f;
4893
4894 switch (fcode)
4895 {
2797f13a 4896 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4897 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4898 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4899 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4900 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 4901 if (warned_f_a_n)
4902 break;
4903
b9a16870 4904 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 4905 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 4906 warned_f_a_n = true;
4907 break;
4908
2797f13a 4909 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4910 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4911 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4912 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4913 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 4914 if (warned_n_a_f)
4915 break;
4916
b9a16870 4917 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 4918 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 4919 warned_n_a_f = true;
4920 break;
4921
4922 default:
4923 gcc_unreachable ();
4924 }
4925 }
4926
b6a5fc45 4927 /* Expand the operands. */
c2f47e15 4928 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 4929 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 4930
a372f7ca 4931 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 4932 after);
b6a5fc45 4933}
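
/* Illustrative sketch, not part of GCC: a typical source-level use that
   reaches this expander is

     static int counter;

     static int
     bump (void)
     {
       return __sync_fetch_and_add (&counter, 1);
     }

   Here CODE is PLUS and AFTER is false, since the fetch_and_add form
   returns the old value; the whole operation acts as a full barrier
   (MEMMODEL_SYNC_SEQ_CST).  */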
4934
4935/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 4936 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 4937 true if this is the boolean form. TARGET is a place for us to store the
4938 results; this is NOT optional if IS_BOOL is true. */
4939
4940static rtx
3754d046 4941expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 4942 bool is_bool, rtx target)
b6a5fc45 4943{
041e0215 4944 rtx old_val, new_val, mem;
ba885f6a 4945 rtx *pbool, *poval;
b6a5fc45 4946
4947 /* Expand the operands. */
c2f47e15 4948 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 4949 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4950 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 4951
ba885f6a 4952 pbool = poval = NULL;
4953 if (target != const0_rtx)
4954 {
4955 if (is_bool)
4956 pbool = &target;
4957 else
4958 poval = &target;
4959 }
4960 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 4961 false, MEMMODEL_SYNC_SEQ_CST,
4962 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 4963 return NULL_RTX;
c2f47e15 4964
1cd6e20d 4965 return target;
b6a5fc45 4966}
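
/* Illustrative sketch, not part of GCC: the two user-visible forms
   handled above are

     static long v;

     static int
     try_update (long old_val, long new_val)
     {
       return __sync_bool_compare_and_swap (&v, old_val, new_val);
     }

     static long
     prior_value (long old_val, long new_val)
     {
       return __sync_val_compare_and_swap (&v, old_val, new_val);
     }

   For the boolean form TARGET receives the success flag (PBOOL); for
   the value form it receives the previous contents of v (POVAL).  */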
4967
4968/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
4969 general form is actually an atomic exchange, and some targets only
4970 support a reduced form with the second argument being a constant 1.
48e1416a 4971 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 4972 the results. */
b6a5fc45 4973
4974static rtx
3754d046 4975expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 4976 rtx target)
b6a5fc45 4977{
041e0215 4978 rtx val, mem;
b6a5fc45 4979
4980 /* Expand the operands. */
c2f47e15 4981 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 4982 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4983
7821cde1 4984 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 4985}
4986
4987/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
4988
4989static void
3754d046 4990expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 4991{
4992 rtx mem;
4993
4994 /* Expand the operands. */
4995 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4996
a372f7ca 4997 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 4998}
4999
5000/* Given an integer representing an ``enum memmodel'', verify its
5001 correctness and return the memory model enum. */
5002
5003static enum memmodel
5004get_memmodel (tree exp)
5005{
5006 rtx op;
7f738025 5007 unsigned HOST_WIDE_INT val;
2cb724f9 5008 source_location loc
5009 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5010
5011 /* If the parameter is not a constant, it's a run time value so we'll just
5012 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5013 if (TREE_CODE (exp) != INTEGER_CST)
5014 return MEMMODEL_SEQ_CST;
5015
5016 op = expand_normal (exp);
7f738025 5017
5018 val = INTVAL (op);
5019 if (targetm.memmodel_check)
5020 val = targetm.memmodel_check (val);
5021 else if (val & ~MEMMODEL_MASK)
5022 {
2cb724f9 5023 warning_at (loc, OPT_Winvalid_memory_model,
5024 "unknown architecture specifier in memory model to builtin");
7f738025 5025 return MEMMODEL_SEQ_CST;
5026 }
5027
a372f7ca 5028 /* We should never see a user-explicit SYNC memory model, so >= LAST works. */
5029 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5030 {
2cb724f9 5031 warning_at (loc, OPT_Winvalid_memory_model,
5032 "invalid memory model argument to builtin");
1cd6e20d 5033 return MEMMODEL_SEQ_CST;
5034 }
7f738025 5035
3070f133 5036 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5037 be conservative and promote consume to acquire. */
5038 if (val == MEMMODEL_CONSUME)
5039 val = MEMMODEL_ACQUIRE;
5040
7f738025 5041 return (enum memmodel) val;
1cd6e20d 5042}
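
/* For example, __atomic_load_n (p, __ATOMIC_ACQUIRE) passes the integer
   constant __ATOMIC_ACQUIRE (2) here, which maps to MEMMODEL_ACQUIRE,
   whereas a model only known at run time, as in __atomic_load_n (p, m),
   is conservatively treated as MEMMODEL_SEQ_CST.  */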
5043
5044/* Expand the __atomic_exchange intrinsic:
5045 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5046 EXP is the CALL_EXPR.
5047 TARGET is an optional place for us to store the results. */
5048
5049static rtx
3754d046 5050expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5051{
5052 rtx val, mem;
5053 enum memmodel model;
5054
5055 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5056
5057 if (!flag_inline_atomics)
5058 return NULL_RTX;
5059
5060 /* Expand the operands. */
5061 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5062 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5063
7821cde1 5064 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5065}
5066
5067/* Expand the __atomic_compare_exchange intrinsic:
5068 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5069 TYPE desired, BOOL weak,
5070 enum memmodel success,
5071 enum memmodel failure)
5072 EXP is the CALL_EXPR.
5073 TARGET is an optional place for us to store the results. */
5074
5075static rtx
3754d046 5076expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5077 rtx target)
5078{
1e0c0b35 5079 rtx expect, desired, mem, oldval;
5080 rtx_code_label *label;
1cd6e20d 5081 enum memmodel success, failure;
5082 tree weak;
5083 bool is_weak;
2cb724f9 5084 source_location loc
5085 = expansion_point_location_if_in_system_header (input_location);
1cd6e20d 5086
5087 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5088 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5089
086f4e33 5090 if (failure > success)
5091 {
2cb724f9 5092 warning_at (loc, OPT_Winvalid_memory_model,
5093 "failure memory model cannot be stronger than success "
5094 "memory model for %<__atomic_compare_exchange%>");
086f4e33 5095 success = MEMMODEL_SEQ_CST;
5096 }
5097
a372f7ca 5098 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5099 {
2cb724f9 5100 warning_at (loc, OPT_Winvalid_memory_model,
5101 "invalid failure memory model for "
5102 "%<__atomic_compare_exchange%>");
086f4e33 5103 failure = MEMMODEL_SEQ_CST;
5104 success = MEMMODEL_SEQ_CST;
1cd6e20d 5105 }
5106
086f4e33 5107
1cd6e20d 5108 if (!flag_inline_atomics)
5109 return NULL_RTX;
5110
5111 /* Expand the operands. */
5112 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5113
5114 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5115 expect = convert_memory_address (Pmode, expect);
c401b131 5116 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5117 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5118
5119 weak = CALL_EXPR_ARG (exp, 3);
5120 is_weak = false;
e913b5cd 5121 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5122 is_weak = true;
5123
d86e3752 5124 if (target == const0_rtx)
5125 target = NULL;
d86e3752 5126
3c29a9ea 5127 /* Lest the rtl backend create a race condition with an improper store
5128 to memory, always create a new pseudo for OLDVAL. */
5129 oldval = NULL;
5130
5131 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5132 is_weak, success, failure))
1cd6e20d 5133 return NULL_RTX;
5134
d86e3752 5135 /* Conditionally store back to EXPECT, lest we create a race condition
5136 with an improper store to memory. */
5137 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5138 the normal case where EXPECT is totally private, i.e. a register. At
5139 which point the store can be unconditional. */
5140 label = gen_label_rtx ();
62589f76 5141 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5142 GET_MODE (target), 1, label);
d86e3752 5143 emit_move_insn (expect, oldval);
5144 emit_label (label);
c401b131 5145
1cd6e20d 5146 return target;
5147}
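
/* Illustrative sketch, not part of GCC: the conditional store back to
   EXPECT above is what makes the usual compare-exchange retry loop work:

     static int v;

     static void
     atomic_or_in (int bits)
     {
       int expected = __atomic_load_n (&v, __ATOMIC_RELAXED);
       while (!__atomic_compare_exchange_n (&v, &expected, expected | bits,
                                            0, __ATOMIC_SEQ_CST,
                                            __ATOMIC_RELAXED))
         ;
     }

   On failure the current contents of v are written back into EXPECTED,
   so the next iteration retries with an up-to-date operand.  */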
5148
5a5ef659 5149/* Helper function for expand_ifn_atomic_compare_exchange - expand
5150 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5151 call. The weak parameter must be dropped to match the expected parameter
5152 list and the expected argument changed from value to pointer to memory
5153 slot. */
5154
5155static void
5156expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5157{
5158 unsigned int z;
5159 vec<tree, va_gc> *vec;
5160
5161 vec_alloc (vec, 5);
5162 vec->quick_push (gimple_call_arg (call, 0));
5163 tree expected = gimple_call_arg (call, 1);
5164 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5165 TREE_TYPE (expected));
5166 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5167 if (expd != x)
5168 emit_move_insn (x, expd);
5169 tree v = make_tree (TREE_TYPE (expected), x);
5170 vec->quick_push (build1 (ADDR_EXPR,
5171 build_pointer_type (TREE_TYPE (expected)), v));
5172 vec->quick_push (gimple_call_arg (call, 2));
5173 /* Skip the boolean weak parameter. */
5174 for (z = 4; z < 6; z++)
5175 vec->quick_push (gimple_call_arg (call, z));
5176 built_in_function fncode
5177 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5178 + exact_log2 (GET_MODE_SIZE (mode)));
5179 tree fndecl = builtin_decl_explicit (fncode);
5180 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5181 fndecl);
5182 tree exp = build_call_vec (boolean_type_node, fn, vec);
5183 tree lhs = gimple_call_lhs (call);
5184 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5185 if (lhs)
5186 {
5187 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5188 if (GET_MODE (boolret) != mode)
5189 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5190 x = force_reg (mode, x);
5191 write_complex_part (target, boolret, true);
5192 write_complex_part (target, x, false);
5193 }
5194}
5195
5196/* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
5197
5198void
5199expand_ifn_atomic_compare_exchange (gcall *call)
5200{
5201 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5202 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5203 machine_mode mode = mode_for_size (BITS_PER_UNIT * size, MODE_INT, 0);
5204 rtx expect, desired, mem, oldval, boolret;
5205 enum memmodel success, failure;
5206 tree lhs;
5207 bool is_weak;
5208 source_location loc
5209 = expansion_point_location_if_in_system_header (gimple_location (call));
5210
5211 success = get_memmodel (gimple_call_arg (call, 4));
5212 failure = get_memmodel (gimple_call_arg (call, 5));
5213
5214 if (failure > success)
5215 {
5216 warning_at (loc, OPT_Winvalid_memory_model,
5217 "failure memory model cannot be stronger than success "
5218 "memory model for %<__atomic_compare_exchange%>");
5219 success = MEMMODEL_SEQ_CST;
5220 }
5221
5222 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5223 {
5224 warning_at (loc, OPT_Winvalid_memory_model,
5225 "invalid failure memory model for "
5226 "%<__atomic_compare_exchange%>");
5227 failure = MEMMODEL_SEQ_CST;
5228 success = MEMMODEL_SEQ_CST;
5229 }
5230
5231 if (!flag_inline_atomics)
5232 {
5233 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5234 return;
5235 }
5236
5237 /* Expand the operands. */
5238 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
5239
5240 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
5241 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
5242
5243 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
5244
5245 boolret = NULL;
5246 oldval = NULL;
5247
5248 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
5249 is_weak, success, failure))
5250 {
5251 expand_ifn_atomic_compare_exchange_into_call (call, mode);
5252 return;
5253 }
5254
5255 lhs = gimple_call_lhs (call);
5256 if (lhs)
5257 {
5258 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5259 if (GET_MODE (boolret) != mode)
5260 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5261 write_complex_part (target, boolret, true);
5262 write_complex_part (target, oldval, false);
5263 }
5264}
5265
1cd6e20d 5266/* Expand the __atomic_load intrinsic:
5267 TYPE __atomic_load (TYPE *object, enum memmodel)
5268 EXP is the CALL_EXPR.
5269 TARGET is an optional place for us to store the results. */
5270
5271static rtx
3754d046 5272expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5273{
5274 rtx mem;
5275 enum memmodel model;
5276
5277 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5278 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5279 {
2cb724f9 5280 source_location loc
5281 = expansion_point_location_if_in_system_header (input_location);
5282 warning_at (loc, OPT_Winvalid_memory_model,
5283 "invalid memory model for %<__atomic_load%>");
086f4e33 5284 model = MEMMODEL_SEQ_CST;
1cd6e20d 5285 }
5286
5287 if (!flag_inline_atomics)
5288 return NULL_RTX;
5289
5290 /* Expand the operand. */
5291 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5292
5293 return expand_atomic_load (target, mem, model);
5294}
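
/* For example, __atomic_load_n (p, __ATOMIC_RELEASE) is diagnosed above,
   since release ordering is meaningless for a pure load, and the model is
   downgraded to MEMMODEL_SEQ_CST; the dual restriction for stores is
   enforced in expand_builtin_atomic_store below.  */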
5295
5296
5297/* Expand the __atomic_store intrinsic:
5298 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5299 EXP is the CALL_EXPR.
5300 TARGET is an optional place for us to store the results. */
5301
5302static rtx
3754d046 5303expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5304{
5305 rtx mem, val;
5306 enum memmodel model;
5307
5308 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 5309 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5310 || is_mm_release (model)))
1cd6e20d 5311 {
2cb724f9 5312 source_location loc
5313 = expansion_point_location_if_in_system_header (input_location);
5314 warning_at (loc, OPT_Winvalid_memory_model,
5315 "invalid memory model for %<__atomic_store%>");
086f4e33 5316 model = MEMMODEL_SEQ_CST;
1cd6e20d 5317 }
5318
5319 if (!flag_inline_atomics)
5320 return NULL_RTX;
5321
5322 /* Expand the operands. */
5323 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5324 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5325
8808bf16 5326 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5327}
5328
5329/* Expand the __atomic_fetch_XXX intrinsic:
5330 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5331 EXP is the CALL_EXPR.
5332 TARGET is an optional place for us to store the results.
5333 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR; NOT means NAND.
5334 FETCH_AFTER is true if returning the result of the operation.
5335 FETCH_AFTER is false if returning the value before the operation.
5336 IGNORE is true if the result is not used.
5337 EXT_CALL is the correct builtin for an external call if this cannot be
5338 resolved to an instruction sequence. */
5339
5340static rtx
3754d046 5341expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 5342 enum rtx_code code, bool fetch_after,
5343 bool ignore, enum built_in_function ext_call)
5344{
5345 rtx val, mem, ret;
5346 enum memmodel model;
5347 tree fndecl;
5348 tree addr;
5349
5350 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5351
5352 /* Expand the operands. */
5353 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5354 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5355
5356 /* Only try generating instructions if inlining is turned on. */
5357 if (flag_inline_atomics)
5358 {
5359 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5360 if (ret)
5361 return ret;
5362 }
5363
5364 /* Return NULL if there is no library fallback routine to redirect to. */
5365 if (ext_call == BUILT_IN_NONE)
5366 return NULL_RTX;
5367
5368 /* Change the call to the specified function. */
5369 fndecl = get_callee_fndecl (exp);
5370 addr = CALL_EXPR_FN (exp);
5371 STRIP_NOPS (addr);
5372
5373 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 5374 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 5375
5376 /* Expand the call here so we can emit trailing code. */
5377 ret = expand_call (exp, target, ignore);
5378
5379 /* Replace the original function just in case it matters. */
5380 TREE_OPERAND (addr, 0) = fndecl;
5381
5382 /* Then issue the arithmetic correction to return the right result. */
5383 if (!ignore)
c449f851 5384 {
5385 if (code == NOT)
5386 {
5387 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5388 OPTAB_LIB_WIDEN);
5389 ret = expand_simple_unop (mode, NOT, ret, target, true);
5390 }
5391 else
5392 ret = expand_simple_binop (mode, code, ret, val, target, true,
5393 OPTAB_LIB_WIDEN);
5394 }
1cd6e20d 5395 return ret;
5396}
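
/* Illustrative sketch, not part of GCC: the trailing correction turns
   the old value returned by the __atomic_fetch_OP library call into the
   OP_fetch result the caller asked for.  For __atomic_nand_fetch the
   code above computes

     new_val = ~(old_val & val);

   matching the NAND special case, while e.g. __atomic_add_fetch simply
   recomputes old_val + val.  */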
5397
9c1a31e4 5398/* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
5399
5400void
5401expand_ifn_atomic_bit_test_and (gcall *call)
5402{
5403 tree ptr = gimple_call_arg (call, 0);
5404 tree bit = gimple_call_arg (call, 1);
5405 tree flag = gimple_call_arg (call, 2);
5406 tree lhs = gimple_call_lhs (call);
5407 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
5408 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
5409 enum rtx_code code;
5410 optab optab;
5411 struct expand_operand ops[5];
5412
5413 gcc_assert (flag_inline_atomics);
5414
5415 if (gimple_call_num_args (call) == 4)
5416 model = get_memmodel (gimple_call_arg (call, 3));
5417
5418 rtx mem = get_builtin_sync_mem (ptr, mode);
5419 rtx val = expand_expr_force_mode (bit, mode);
5420
5421 switch (gimple_call_internal_fn (call))
5422 {
5423 case IFN_ATOMIC_BIT_TEST_AND_SET:
5424 code = IOR;
5425 optab = atomic_bit_test_and_set_optab;
5426 break;
5427 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
5428 code = XOR;
5429 optab = atomic_bit_test_and_complement_optab;
5430 break;
5431 case IFN_ATOMIC_BIT_TEST_AND_RESET:
5432 code = AND;
5433 optab = atomic_bit_test_and_reset_optab;
5434 break;
5435 default:
5436 gcc_unreachable ();
5437 }
5438
5439 if (lhs == NULL_TREE)
5440 {
5441 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5442 val, NULL_RTX, true, OPTAB_DIRECT);
5443 if (code == AND)
5444 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5445 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
5446 return;
5447 }
5448
5449 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5450 enum insn_code icode = direct_optab_handler (optab, mode);
5451 gcc_assert (icode != CODE_FOR_nothing);
5452 create_output_operand (&ops[0], target, mode);
5453 create_fixed_operand (&ops[1], mem);
5454 create_convert_operand_to (&ops[2], val, mode, true);
5455 create_integer_operand (&ops[3], model);
5456 create_integer_operand (&ops[4], integer_onep (flag));
5457 if (maybe_expand_insn (icode, 5, ops))
5458 return;
5459
5460 rtx bitval = val;
5461 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
5462 val, NULL_RTX, true, OPTAB_DIRECT);
5463 rtx maskval = val;
5464 if (code == AND)
5465 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
5466 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
5467 code, model, false);
5468 if (integer_onep (flag))
5469 {
5470 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
5471 NULL_RTX, true, OPTAB_DIRECT);
5472 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
5473 true, OPTAB_DIRECT);
5474 }
5475 else
5476 result = expand_simple_binop (mode, AND, result, maskval, target, true,
5477 OPTAB_DIRECT);
5478 if (result != target)
5479 emit_move_insn (target, result);
5480}
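
/* Illustrative sketch, not part of GCC: this internal function is created
   when a source pattern such as

     static unsigned int flags;

     static int
     test_and_set_bit (int bit)
     {
       unsigned int mask = 1u << bit;
       return (__atomic_fetch_or (&flags, mask, __ATOMIC_SEQ_CST)
               & mask) != 0;
     }

   is recognized during tree optimization, letting targets with an atomic
   bit-test-and-set instruction avoid computing the full old value.  */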
5481
10b744a3 5482/* Expand an atomic clear operation.
5483 void __atomic_clear (BOOL *obj, enum memmodel)
5484 EXP is the call expression. */
5485
5486static rtx
5487expand_builtin_atomic_clear (tree exp)
5488{
3754d046 5489 machine_mode mode;
10b744a3 5490 rtx mem, ret;
5491 enum memmodel model;
5492
5493 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5494 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5495 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5496
a372f7ca 5497 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 5498 {
2cb724f9 5499 source_location loc
5500 = expansion_point_location_if_in_system_header (input_location);
5501 warning_at (loc, OPT_Winvalid_memory_model,
5502 "invalid memory model for %<__atomic_store%>");
086f4e33 5503 model = MEMMODEL_SEQ_CST;
10b744a3 5504 }
5505
5506 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5507 Failing that, a store is issued by __atomic_store. The only way this can
5508 fail is if the bool type is larger than a word size. Unlikely, but
5509 handle it anyway for completeness. Assume a single threaded model since
5510 there is no atomic support in this case, and no barriers are required. */
5511 ret = expand_atomic_store (mem, const0_rtx, model, true);
5512 if (!ret)
5513 emit_move_insn (mem, const0_rtx);
5514 return const0_rtx;
5515}
5516
5517/* Expand an atomic test_and_set operation.
5518 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5519 EXP is the call expression. */
5520
5521static rtx
7821cde1 5522expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 5523{
7821cde1 5524 rtx mem;
10b744a3 5525 enum memmodel model;
3754d046 5526 machine_mode mode;
10b744a3 5527
5528 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5529 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5530 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5531
7821cde1 5532 return expand_atomic_test_and_set (target, mem, model);
10b744a3 5533}
5534
5535
1cd6e20d 5536/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5537 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5538
5539static tree
5540fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5541{
5542 int size;
3754d046 5543 machine_mode mode;
1cd6e20d 5544 unsigned int mode_align, type_align;
5545
5546 if (TREE_CODE (arg0) != INTEGER_CST)
5547 return NULL_TREE;
b6a5fc45 5548
1cd6e20d 5549 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5550 mode = mode_for_size (size, MODE_INT, 0);
5551 mode_align = GET_MODE_ALIGNMENT (mode);
5552
4ca99588 5553 if (TREE_CODE (arg1) == INTEGER_CST)
5554 {
5555 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5556
5557 /* Either this argument is null, or it's a fake pointer encoding
5558 the alignment of the object. */
ac29ece2 5559 val = least_bit_hwi (val);
4ca99588 5560 val *= BITS_PER_UNIT;
5561
5562 if (val == 0 || mode_align < val)
5563 type_align = mode_align;
5564 else
5565 type_align = val;
5566 }
1cd6e20d 5567 else
5568 {
5569 tree ttype = TREE_TYPE (arg1);
5570
5571 /* This function is usually invoked and folded immediately by the front
5572 end before anything else has a chance to look at it. The pointer
5573 parameter at this point is usually cast to a void *, so check for that
5574 and look past the cast. */
2f8a2ead 5575 if (CONVERT_EXPR_P (arg1)
5576 && POINTER_TYPE_P (ttype)
5577 && VOID_TYPE_P (TREE_TYPE (ttype))
5578 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
1cd6e20d 5579 arg1 = TREE_OPERAND (arg1, 0);
5580
5581 ttype = TREE_TYPE (arg1);
5582 gcc_assert (POINTER_TYPE_P (ttype));
5583
5584 /* Get the underlying type of the object. */
5585 ttype = TREE_TYPE (ttype);
5586 type_align = TYPE_ALIGN (ttype);
5587 }
5588
47ae02b7 5589 /* If the object has smaller alignment, the lock free routines cannot
1cd6e20d 5590 be used. */
5591 if (type_align < mode_align)
06308d2a 5592 return boolean_false_node;
1cd6e20d 5593
5594 /* Check if a compare_and_swap pattern exists for the mode which represents
5595 the required size. The pattern is not allowed to fail, so the existence
5596 of the pattern indicates support is present. */
29139cdc 5597 if (can_compare_and_swap_p (mode, true))
06308d2a 5598 return boolean_true_node;
1cd6e20d 5599 else
06308d2a 5600 return boolean_false_node;
1cd6e20d 5601}
5602
5603/* Return true if the parameters to call EXP represent an object which will
5604 always generate lock free instructions. The first argument represents the
5605 size of the object, and the second parameter is a pointer to the object
5606 itself. If NULL is passed for the object, then the result is based on
5607 typical alignment for an object of the specified size. Otherwise return
5608 false. */
5609
5610static rtx
5611expand_builtin_atomic_always_lock_free (tree exp)
5612{
5613 tree size;
5614 tree arg0 = CALL_EXPR_ARG (exp, 0);
5615 tree arg1 = CALL_EXPR_ARG (exp, 1);
5616
5617 if (TREE_CODE (arg0) != INTEGER_CST)
5618 {
5619 error ("non-constant argument 1 to __atomic_always_lock_free");
5620 return const0_rtx;
5621 }
5622
5623 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 5624 if (size == boolean_true_node)
1cd6e20d 5625 return const1_rtx;
5626 return const0_rtx;
5627}
5628
5629/* Return one or zero if it can be determined that object ARG1 of size ARG0
5630 is lock free on this architecture. */
5631
5632static tree
5633fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5634{
5635 if (!flag_inline_atomics)
5636 return NULL_TREE;
5637
5638 /* If it isn't always lock free, don't generate a result. */
06308d2a 5639 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5640 return boolean_true_node;
1cd6e20d 5641
5642 return NULL_TREE;
5643}
5644
5645/* Return true if the parameters to call EXP represent an object which will
5646 always generate lock free instructions. The first argument represents the
5647 size of the object, and the second parameter is a pointer to the object
5648 itself. If NULL is passed for the object, then the result is based on
5649 typical alignment for an object of the specified size. Otherwise return
5650 NULL. */
5651
5652static rtx
5653expand_builtin_atomic_is_lock_free (tree exp)
5654{
5655 tree size;
5656 tree arg0 = CALL_EXPR_ARG (exp, 0);
5657 tree arg1 = CALL_EXPR_ARG (exp, 1);
5658
5659 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5660 {
5661 error ("non-integer argument 1 to __atomic_is_lock_free");
5662 return NULL_RTX;
5663 }
5664
5665 if (!flag_inline_atomics)
5666 return NULL_RTX;
5667
5668 /* If the value is known at compile time, return the RTX for it. */
5669 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 5670 if (size == boolean_true_node)
1cd6e20d 5671 return const1_rtx;
5672
5673 return NULL_RTX;
5674}
5675
1cd6e20d 5676/* Expand the __atomic_thread_fence intrinsic:
5677 void __atomic_thread_fence (enum memmodel)
5678 EXP is the CALL_EXPR. */
5679
5680static void
5681expand_builtin_atomic_thread_fence (tree exp)
5682{
fe54c06b 5683 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5684 expand_mem_thread_fence (model);
1cd6e20d 5685}
5686
5687/* Expand the __atomic_signal_fence intrinsic:
5688 void __atomic_signal_fence (enum memmodel)
5689 EXP is the CALL_EXPR. */
5690
5691static void
5692expand_builtin_atomic_signal_fence (tree exp)
5693{
fe54c06b 5694 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5695 expand_mem_signal_fence (model);
b6a5fc45 5696}
5697
5698/* Expand the __sync_synchronize intrinsic. */
5699
5700static void
2797f13a 5701expand_builtin_sync_synchronize (void)
b6a5fc45 5702{
a372f7ca 5703 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 5704}
5705
badaa04c 5706static rtx
5707expand_builtin_thread_pointer (tree exp, rtx target)
5708{
5709 enum insn_code icode;
5710 if (!validate_arglist (exp, VOID_TYPE))
5711 return const0_rtx;
5712 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5713 if (icode != CODE_FOR_nothing)
5714 {
5715 struct expand_operand op;
3ed779c3 5716 /* If the target is not suitable then create a new target. */
5717 if (target == NULL_RTX
5718 || !REG_P (target)
5719 || GET_MODE (target) != Pmode)
badaa04c 5720 target = gen_reg_rtx (Pmode);
5721 create_output_operand (&op, target, Pmode);
5722 expand_insn (icode, 1, &op);
5723 return target;
5724 }
5725 error ("__builtin_thread_pointer is not supported on this target");
5726 return const0_rtx;
5727}
5728
5729static void
5730expand_builtin_set_thread_pointer (tree exp)
5731{
5732 enum insn_code icode;
5733 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5734 return;
5735 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5736 if (icode != CODE_FOR_nothing)
5737 {
5738 struct expand_operand op;
5739 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5740 Pmode, EXPAND_NORMAL);
6f343c10 5741 create_input_operand (&op, val, Pmode);
badaa04c 5742 expand_insn (icode, 1, &op);
5743 return;
5744 }
5745 error ("__builtin_set_thread_pointer is not supported on this target");
5746}
5747
53800dbe 5748\f
0e80b01d 5749/* Emit code to restore the current value of the stack. */
5750
5751static void
5752expand_stack_restore (tree var)
5753{
1e0c0b35 5754 rtx_insn *prev;
5755 rtx sa = expand_normal (var);
0e80b01d 5756
5757 sa = convert_memory_address (Pmode, sa);
5758
5759 prev = get_last_insn ();
5760 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 5761
5762 record_new_stack_level ();
5763
0e80b01d 5764 fixup_args_size_notes (prev, get_last_insn (), 0);
5765}
5766
0e80b01d 5767/* Emit code to save the current value of the stack. */
5768
5769static rtx
5770expand_stack_save (void)
5771{
5772 rtx ret = NULL_RTX;
5773
0e80b01d 5774 emit_stack_save (SAVE_BLOCK, &ret);
5775 return ret;
5776}
5777
ca4c3545 5778
53800dbe 5779/* Expand an expression EXP that calls a built-in function,
5780 with result going to TARGET if that's convenient
5781 (and in mode MODE if that's convenient).
5782 SUBTARGET may be used as the target for computing one of EXP's operands.
5783 IGNORE is nonzero if the value is to be ignored. */
5784
5785rtx
3754d046 5786expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 5787 int ignore)
53800dbe 5788{
c6e6ecb1 5789 tree fndecl = get_callee_fndecl (exp);
53800dbe 5790 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 5791 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 5792 int flags;
53800dbe 5793
4e2f4ed5 5794 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5795 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5796
f9acf11a 5797 /* When ASan is enabled, we don't want to expand some memory/string
5798 builtins and rely on libsanitizer's hooks. This allows us to avoid
5799 redundant checks and be sure that possible overflow will be detected
5800 by ASan. */
5801
5802 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5803 return expand_call (exp, target, ignore);
5804
53800dbe 5805 /* When not optimizing, generate calls to library functions for a certain
5806 set of builtins. */
cd9ff771 5807 if (!optimize
b6a5fc45 5808 && !called_as_built_in (fndecl)
73037a1e 5809 && fcode != BUILT_IN_FORK
5810 && fcode != BUILT_IN_EXECL
5811 && fcode != BUILT_IN_EXECV
5812 && fcode != BUILT_IN_EXECLP
5813 && fcode != BUILT_IN_EXECLE
5814 && fcode != BUILT_IN_EXECVP
5815 && fcode != BUILT_IN_EXECVE
2c281b15 5816 && fcode != BUILT_IN_ALLOCA
581bf1c2 5817 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 5818 && fcode != BUILT_IN_FREE
5819 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5820 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5821 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5822 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5823 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5824 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5825 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5826 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5827 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5828 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5829 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5830 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 5831 return expand_call (exp, target, ignore);
53800dbe 5832
8d6d7930 5833 /* The built-in function expanders test for target == const0_rtx
5834 to determine whether the function's result will be ignored. */
5835 if (ignore)
5836 target = const0_rtx;
5837
5838 /* If the result of a pure or const built-in function is ignored, and
5839 none of its arguments are volatile, we can avoid expanding the
5840 built-in call and just evaluate the arguments for side-effects. */
5841 if (target == const0_rtx
67fa4078 5842 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5843 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 5844 {
5845 bool volatilep = false;
5846 tree arg;
c2f47e15 5847 call_expr_arg_iterator iter;
8d6d7930 5848
c2f47e15 5849 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5850 if (TREE_THIS_VOLATILE (arg))
8d6d7930 5851 {
5852 volatilep = true;
5853 break;
5854 }
5855
5856 if (! volatilep)
5857 {
c2f47e15 5858 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5859 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 5860 return const0_rtx;
5861 }
5862 }
5863
f21337ef 5864 /* expand_builtin_with_bounds is supposed to be used for
5865 instrumented builtin calls. */
058a1b7a 5866 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5867
53800dbe 5868 switch (fcode)
5869 {
4f35b1fc 5870 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 5871 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 5872 case BUILT_IN_FABSD32:
5873 case BUILT_IN_FABSD64:
5874 case BUILT_IN_FABSD128:
c2f47e15 5875 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 5876 if (target)
a0c938f0 5877 return target;
78a74442 5878 break;
5879
4f35b1fc 5880 CASE_FLT_FN (BUILT_IN_COPYSIGN):
012f068a 5881 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
c2f47e15 5882 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 5883 if (target)
5884 return target;
5885 break;
5886
7d3f6cc7 5887 /* Just do a normal library call if we were unable to fold
5888 the values. */
4f35b1fc 5889 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 5890 break;
53800dbe 5891
7e0713b1 5892 CASE_FLT_FN (BUILT_IN_FMA):
5893 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5894 if (target)
5895 return target;
5896 break;
5897
a67a90e5 5898 CASE_FLT_FN (BUILT_IN_ILOGB):
5899 if (! flag_unsafe_math_optimizations)
5900 break;
3c77f69c 5901 gcc_fallthrough ();
69b779ea 5902 CASE_FLT_FN (BUILT_IN_ISINF):
cde061c1 5903 CASE_FLT_FN (BUILT_IN_FINITE):
5904 case BUILT_IN_ISFINITE:
8a1a9cb7 5905 case BUILT_IN_ISNORMAL:
f97eea22 5906 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 5907 if (target)
5908 return target;
5909 break;
5910
80ff6494 5911 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 5912 CASE_FLT_FN (BUILT_IN_LCEIL):
5913 CASE_FLT_FN (BUILT_IN_LLCEIL):
5914 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 5915 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 5916 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 5917 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 5918 if (target)
5919 return target;
5920 break;
5921
80ff6494 5922 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 5923 CASE_FLT_FN (BUILT_IN_LRINT):
5924 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 5925 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 5926 CASE_FLT_FN (BUILT_IN_LROUND):
5927 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 5928 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 5929 if (target)
5930 return target;
5931 break;
5932
4f35b1fc 5933 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 5934 target = expand_builtin_powi (exp, target);
757c219d 5935 if (target)
5936 return target;
5937 break;
5938
d735c391 5939 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 5940 target = expand_builtin_cexpi (exp, target);
d735c391 5941 gcc_assert (target);
5942 return target;
5943
4f35b1fc 5944 CASE_FLT_FN (BUILT_IN_SIN):
5945 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 5946 if (! flag_unsafe_math_optimizations)
5947 break;
5948 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5949 if (target)
5950 return target;
5951 break;
5952
c3147c1a 5953 CASE_FLT_FN (BUILT_IN_SINCOS):
5954 if (! flag_unsafe_math_optimizations)
5955 break;
5956 target = expand_builtin_sincos (exp);
5957 if (target)
5958 return target;
5959 break;
5960
53800dbe 5961 case BUILT_IN_APPLY_ARGS:
5962 return expand_builtin_apply_args ();
5963
5964 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5965 FUNCTION with a copy of the parameters described by
5966 ARGUMENTS, and ARGSIZE. It returns a block of memory
5967 allocated on the stack into which is stored all the registers
5968 that might possibly be used for returning the result of a
5969 function. ARGUMENTS is the value returned by
5970 __builtin_apply_args. ARGSIZE is the number of bytes of
5971 arguments that must be copied. ??? How should this value be
5972 computed? We'll also need a safe worst case value for varargs
5973 functions. */
5974 case BUILT_IN_APPLY:
c2f47e15 5975 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 5976 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 5977 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 5978 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 5979 return const0_rtx;
5980 else
5981 {
53800dbe 5982 rtx ops[3];
5983
c2f47e15 5984 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5985 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5986 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 5987
5988 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5989 }
5990
5991 /* __builtin_return (RESULT) causes the function to return the
5992 value described by RESULT. RESULT is address of the block of
5993 memory returned by __builtin_apply. */
5994 case BUILT_IN_RETURN:
c2f47e15 5995 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5996 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 5997 return const0_rtx;
5998
5999 case BUILT_IN_SAVEREGS:
a66c9326 6000 return expand_builtin_saveregs ();
53800dbe 6001
48dc2227 6002 case BUILT_IN_VA_ARG_PACK:
6003 /* All valid uses of __builtin_va_arg_pack () are removed during
6004 inlining. */
b8c23db3 6005 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6006 return const0_rtx;
6007
4e1d7ea4 6008 case BUILT_IN_VA_ARG_PACK_LEN:
6009 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6010 inlining. */
b8c23db3 6011 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6012 return const0_rtx;
6013
53800dbe 6014 /* Return the address of the first anonymous stack arg. */
6015 case BUILT_IN_NEXT_ARG:
c2f47e15 6016 if (fold_builtin_next_arg (exp, false))
a0c938f0 6017 return const0_rtx;
79012a9d 6018 return expand_builtin_next_arg ();
53800dbe 6019
ac8fb6db 6020 case BUILT_IN_CLEAR_CACHE:
6021 target = expand_builtin___clear_cache (exp);
6022 if (target)
6023 return target;
6024 break;
6025
53800dbe 6026 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6027 return expand_builtin_classify_type (exp);
53800dbe 6028
6029 case BUILT_IN_CONSTANT_P:
4ee9c684 6030 return const0_rtx;
53800dbe 6031
6032 case BUILT_IN_FRAME_ADDRESS:
6033 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6034 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6035
6036 /* Returns the address of the area where the structure is returned.
6037 0 otherwise. */
6038 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6039 if (call_expr_nargs (exp) != 0
9342ee68 6040 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6041 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6042 return const0_rtx;
53800dbe 6043 else
9342ee68 6044 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6045
6046 case BUILT_IN_ALLOCA:
581bf1c2 6047 case BUILT_IN_ALLOCA_WITH_ALIGN:
990495a7 6048 /* If the allocation stems from the declaration of a variable-sized
6049 object, it cannot accumulate. */
a882d754 6050 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
53800dbe 6051 if (target)
6052 return target;
6053 break;
6054
4ee9c684 6055 case BUILT_IN_STACK_SAVE:
6056 return expand_stack_save ();
6057
6058 case BUILT_IN_STACK_RESTORE:
c2f47e15 6059 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6060 return const0_rtx;
6061
74bdbe96 6062 case BUILT_IN_BSWAP16:
42791117 6063 case BUILT_IN_BSWAP32:
6064 case BUILT_IN_BSWAP64:
74bdbe96 6065 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6066 if (target)
6067 return target;
6068 break;
6069
4f35b1fc 6070 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6071 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6072 subtarget, ffs_optab);
6a08d0ab 6073 if (target)
6074 return target;
6075 break;
6076
4f35b1fc 6077 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6078 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6079 subtarget, clz_optab);
6a08d0ab 6080 if (target)
6081 return target;
6082 break;
6083
4f35b1fc 6084 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6085 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6086 subtarget, ctz_optab);
6a08d0ab 6087 if (target)
6088 return target;
6089 break;
6090
d8492bd3 6091 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6092 target = expand_builtin_unop (target_mode, exp, target,
6093 subtarget, clrsb_optab);
6094 if (target)
6095 return target;
6096 break;
6097
4f35b1fc 6098 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6099 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6100 subtarget, popcount_optab);
6a08d0ab 6101 if (target)
6102 return target;
6103 break;
6104
4f35b1fc 6105 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6106 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6107 subtarget, parity_optab);
53800dbe 6108 if (target)
6109 return target;
6110 break;
6111
6112 case BUILT_IN_STRLEN:
c2f47e15 6113 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6114 if (target)
6115 return target;
6116 break;
6117
6118 case BUILT_IN_STRCPY:
a65c4d64 6119 target = expand_builtin_strcpy (exp, target);
53800dbe 6120 if (target)
6121 return target;
6122 break;
bf8e3599 6123
ed09096d 6124 case BUILT_IN_STRNCPY:
a65c4d64 6125 target = expand_builtin_strncpy (exp, target);
ed09096d 6126 if (target)
6127 return target;
6128 break;
bf8e3599 6129
3b824fa6 6130 case BUILT_IN_STPCPY:
dc369150 6131 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6132 if (target)
6133 return target;
6134 break;
6135
53800dbe 6136 case BUILT_IN_MEMCPY:
a65c4d64 6137 target = expand_builtin_memcpy (exp, target);
3b824fa6 6138 if (target)
6139 return target;
6140 break;
6141
6142 case BUILT_IN_MEMPCPY:
c2f47e15 6143 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6144 if (target)
6145 return target;
6146 break;
6147
6148 case BUILT_IN_MEMSET:
c2f47e15 6149 target = expand_builtin_memset (exp, target, mode);
53800dbe 6150 if (target)
6151 return target;
6152 break;
6153
ffc83088 6154 case BUILT_IN_BZERO:
0b25db21 6155 target = expand_builtin_bzero (exp);
ffc83088 6156 if (target)
6157 return target;
6158 break;
6159
53800dbe 6160 case BUILT_IN_STRCMP:
a65c4d64 6161 target = expand_builtin_strcmp (exp, target);
53800dbe 6162 if (target)
6163 return target;
6164 break;
6165
ed09096d 6166 case BUILT_IN_STRNCMP:
6167 target = expand_builtin_strncmp (exp, target, mode);
6168 if (target)
6169 return target;
6170 break;
6171
071f1696 6172 case BUILT_IN_BCMP:
53800dbe 6173 case BUILT_IN_MEMCMP:
3e346f54 6174 case BUILT_IN_MEMCMP_EQ:
6175 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
53800dbe 6176 if (target)
6177 return target;
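      /* Expansion failed.  There is no library function named
         __builtin_memcmp_eq, so redirect the call at the ordinary memcmp
         decl before falling through to expand_call below; callers of the
         _EQ variant only test the result against zero, for which the
         plain memcmp result is compatible.  */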
3e346f54 6178 if (fcode == BUILT_IN_MEMCMP_EQ)
6179 {
6180 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
6181 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
6182 }
53800dbe 6183 break;
53800dbe 6184
6185 case BUILT_IN_SETJMP:
2c8a1497 6186 /* This should have been lowered to the builtins below. */
6187 gcc_unreachable ();
6188
6189 case BUILT_IN_SETJMP_SETUP:
6190 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6191 and the receiver label. */
c2f47e15 6192 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6193 {
c2f47e15 6194 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6195 VOIDmode, EXPAND_NORMAL);
c2f47e15 6196 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6197 rtx_insn *label_r = label_rtx (label);
2c8a1497 6198
6199 /* This is copied from the handling of non-local gotos. */
6200 expand_builtin_setjmp_setup (buf_addr, label_r);
6201 nonlocal_goto_handler_labels
a4de1c23 6202 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6203 nonlocal_goto_handler_labels);
6204 /* ??? Do not let expand_label treat us as such since we would
6205 not want to be both on the list of non-local labels and on
6206 the list of forced labels. */
6207 FORCED_LABEL (label) = 0;
6208 return const0_rtx;
6209 }
6210 break;
6211
2c8a1497 6212 case BUILT_IN_SETJMP_RECEIVER:
6213 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6214 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6215 {
c2f47e15 6216 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6217 rtx_insn *label_r = label_rtx (label);
2c8a1497 6218
6219 expand_builtin_setjmp_receiver (label_r);
6220 return const0_rtx;
6221 }
6b7f6858 6222 break;
53800dbe 6223
6224 /* __builtin_longjmp is passed a pointer to an array of five words.
6225 It's similar to the C library longjmp function but works with
6226 __builtin_setjmp above. */
6227 case BUILT_IN_LONGJMP:
c2f47e15 6228 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6229 {
c2f47e15 6230 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6231 VOIDmode, EXPAND_NORMAL);
c2f47e15 6232 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6233
6234 if (value != const1_rtx)
6235 {
1e5fcbe2 6236 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6237 return const0_rtx;
6238 }
6239
6240 expand_builtin_longjmp (buf_addr, value);
6241 return const0_rtx;
6242 }
2c8a1497 6243 break;
53800dbe 6244
4ee9c684 6245 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6246 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6247 if (target)
6248 return target;
6249 break;
6250
843d08a9 6251 /* This updates the setjmp buffer that is its argument with the value
6252 of the current stack pointer. */
6253 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6254 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6255 {
6256 rtx buf_addr
c2f47e15 6257 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6258
6259 expand_builtin_update_setjmp_buf (buf_addr);
6260 return const0_rtx;
6261 }
6262 break;
6263
53800dbe 6264 case BUILT_IN_TRAP:
a0ef1725 6265 expand_builtin_trap ();
53800dbe 6266 return const0_rtx;
6267
d2b48f0c 6268 case BUILT_IN_UNREACHABLE:
6269 expand_builtin_unreachable ();
6270 return const0_rtx;
6271
4f35b1fc 6272 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6273 case BUILT_IN_SIGNBITD32:
6274 case BUILT_IN_SIGNBITD64:
6275 case BUILT_IN_SIGNBITD128:
27f261ef 6276 target = expand_builtin_signbit (exp, target);
6277 if (target)
6278 return target;
6279 break;
6280
53800dbe 6281 /* Various hooks for the DWARF 2 __throw routine. */
6282 case BUILT_IN_UNWIND_INIT:
6283 expand_builtin_unwind_init ();
6284 return const0_rtx;
6285 case BUILT_IN_DWARF_CFA:
6286 return virtual_cfa_rtx;
6287#ifdef DWARF2_UNWIND_INFO
f8f023a5 6288 case BUILT_IN_DWARF_SP_COLUMN:
6289 return expand_builtin_dwarf_sp_column ();
695e919b 6290 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6291 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6292 return const0_rtx;
53800dbe 6293#endif
6294 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6295 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6296 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6297 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6298 case BUILT_IN_EH_RETURN:
c2f47e15 6299 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6300 CALL_EXPR_ARG (exp, 1));
53800dbe 6301 return const0_rtx;
df4b504c 6302 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6303 return expand_builtin_eh_return_data_regno (exp);
26093bf4 6304 case BUILT_IN_EXTEND_POINTER:
c2f47e15 6305 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 6306 case BUILT_IN_EH_POINTER:
6307 return expand_builtin_eh_pointer (exp);
6308 case BUILT_IN_EH_FILTER:
6309 return expand_builtin_eh_filter (exp);
6310 case BUILT_IN_EH_COPY_VALUES:
6311 return expand_builtin_eh_copy_values (exp);
26093bf4 6312
7ccc713a 6313 case BUILT_IN_VA_START:
c2f47e15 6314 return expand_builtin_va_start (exp);
a66c9326 6315 case BUILT_IN_VA_END:
c2f47e15 6316 return expand_builtin_va_end (exp);
a66c9326 6317 case BUILT_IN_VA_COPY:
c2f47e15 6318 return expand_builtin_va_copy (exp);
89cfe6e5 6319 case BUILT_IN_EXPECT:
c2f47e15 6320 return expand_builtin_expect (exp, target);
fca0886c 6321 case BUILT_IN_ASSUME_ALIGNED:
6322 return expand_builtin_assume_aligned (exp, target);
5e3608d8 6323 case BUILT_IN_PREFETCH:
c2f47e15 6324 expand_builtin_prefetch (exp);
5e3608d8 6325 return const0_rtx;
6326
4ee9c684 6327 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 6328 return expand_builtin_init_trampoline (exp, true);
6329 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6330 return expand_builtin_init_trampoline (exp, false);
4ee9c684 6331 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 6332 return expand_builtin_adjust_trampoline (exp);
4ee9c684 6333
73673831 6334 case BUILT_IN_FORK:
6335 case BUILT_IN_EXECL:
6336 case BUILT_IN_EXECV:
6337 case BUILT_IN_EXECLP:
6338 case BUILT_IN_EXECLE:
6339 case BUILT_IN_EXECVP:
6340 case BUILT_IN_EXECVE:
c2f47e15 6341 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 6342 if (target)
6343 return target;
6344 break;
53800dbe 6345
2797f13a 6346 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6347 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6348 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6349 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6350 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6351 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 6352 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 6353 if (target)
6354 return target;
6355 break;
6356
2797f13a 6357 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6358 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6359 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6360 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6361 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6362 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 6363 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 6364 if (target)
6365 return target;
6366 break;
6367
2797f13a 6368 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6369 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6370 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6371 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6372 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6373 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 6374 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 6375 if (target)
6376 return target;
6377 break;
6378
2797f13a 6379 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6380 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6381 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6382 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6383 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6384 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 6385 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 6386 if (target)
6387 return target;
6388 break;
6389
2797f13a 6390 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6391 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6392 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6393 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6394 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6395 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 6396 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 6397 if (target)
6398 return target;
6399 break;
6400
2797f13a 6401 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6402 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6403 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6404 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6405 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6406 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
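      /* NOT selects the NAND variant in the sync-operation expander.
         Note that since GCC 4.4 __sync_fetch_and_nand implements
         ~(*ptr & value), matching the corresponding __atomic builtin.  */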
1cd6e20d 6407 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 6408 if (target)
6409 return target;
6410 break;
6411
2797f13a 6412 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6413 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6414 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6415 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6416 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6417 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 6418 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 6419 if (target)
6420 return target;
6421 break;
6422
2797f13a 6423 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6424 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6425 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6426 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6427 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6428 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 6429 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 6430 if (target)
6431 return target;
6432 break;
6433
2797f13a 6434 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6435 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6436 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6437 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6438 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6439 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 6440 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 6441 if (target)
6442 return target;
6443 break;
6444
2797f13a 6445 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6446 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6447 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6448 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6449 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6450 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 6451 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 6452 if (target)
6453 return target;
6454 break;
6455
2797f13a 6456 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6457 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6458 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6459 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6460 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6461 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 6462 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 6463 if (target)
6464 return target;
6465 break;
6466
2797f13a 6467 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6468 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6469 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6470 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6471 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6472 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 6473 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 6474 if (target)
6475 return target;
6476 break;
6477
2797f13a 6478 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6479 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6480 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6481 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6482 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 6483 if (mode == VOIDmode)
6484 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 6485 if (!target || !register_operand (target, mode))
6486 target = gen_reg_rtx (mode);
3e272de8 6487
2797f13a 6488 mode = get_builtin_sync_mode
6489 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 6490 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 6491 if (target)
6492 return target;
6493 break;
6494
2797f13a 6495 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6496 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6497 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6498 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6499 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6500 mode = get_builtin_sync_mode
6501 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 6502 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 6503 if (target)
6504 return target;
6505 break;
6506
2797f13a 6507 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6508 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6509 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6510 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6511 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6512 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6513 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 6514 if (target)
6515 return target;
6516 break;
6517
2797f13a 6518 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6519 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6520 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6521 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6522 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6523 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6524 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 6525 return const0_rtx;
6526
2797f13a 6527 case BUILT_IN_SYNC_SYNCHRONIZE:
6528 expand_builtin_sync_synchronize ();
b6a5fc45 6529 return const0_rtx;
6530
1cd6e20d 6531 case BUILT_IN_ATOMIC_EXCHANGE_1:
6532 case BUILT_IN_ATOMIC_EXCHANGE_2:
6533 case BUILT_IN_ATOMIC_EXCHANGE_4:
6534 case BUILT_IN_ATOMIC_EXCHANGE_8:
6535 case BUILT_IN_ATOMIC_EXCHANGE_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6537 target = expand_builtin_atomic_exchange (mode, exp, target);
6538 if (target)
6539 return target;
6540 break;
6541
6542 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6543 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6544 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6545 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6546 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 6547 {
6548 unsigned int nargs, z;
f1f41a6c 6549 vec<tree, va_gc> *vec;
2c201ad1 6550
6551 mode =
6552 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6553 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6554 if (target)
6555 return target;
6556
6557 /* If this is turned into an external library call, the weak parameter
6558 must be dropped to match the expected parameter list. */
6559 nargs = call_expr_nargs (exp);
f1f41a6c 6560 vec_alloc (vec, nargs - 1);
2c201ad1 6561 for (z = 0; z < 3; z++)
f1f41a6c 6562 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6563 /* Skip the boolean weak parameter. */
6564 for (z = 4; z < 6; z++)
f1f41a6c 6565 vec->quick_push (CALL_EXPR_ARG (exp, z));
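        /* The rebuilt call now matches the external signature, e.g. for
           the 4-byte case (ptr, expected, desired, success_order,
           failure_order), with the weak flag (argument 3) dropped.  */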
2c201ad1 6566 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6567 break;
6568 }
1cd6e20d 6569
6570 case BUILT_IN_ATOMIC_LOAD_1:
6571 case BUILT_IN_ATOMIC_LOAD_2:
6572 case BUILT_IN_ATOMIC_LOAD_4:
6573 case BUILT_IN_ATOMIC_LOAD_8:
6574 case BUILT_IN_ATOMIC_LOAD_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6576 target = expand_builtin_atomic_load (mode, exp, target);
6577 if (target)
6578 return target;
6579 break;
6580
6581 case BUILT_IN_ATOMIC_STORE_1:
6582 case BUILT_IN_ATOMIC_STORE_2:
6583 case BUILT_IN_ATOMIC_STORE_4:
6584 case BUILT_IN_ATOMIC_STORE_8:
6585 case BUILT_IN_ATOMIC_STORE_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6587 target = expand_builtin_atomic_store (mode, exp);
6588 if (target)
6589 return const0_rtx;
6590 break;
6591
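      /* For the __atomic_<op>_fetch family there is no direct library
         equivalent; when inline expansion fails, the corresponding
         __atomic_fetch_<op> library call is used instead and the
         operation is reapplied to its result, e.g. __atomic_add_fetch
         becomes __atomic_fetch_add followed by an addition.  */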
6592 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6593 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6594 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6595 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6596 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6597 {
6598 enum built_in_function lib;
6599 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6600 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6601 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6602 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6603 ignore, lib);
6604 if (target)
6605 return target;
6606 break;
6607 }
6608 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6609 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6610 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6611 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6612 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6613 {
6614 enum built_in_function lib;
6615 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6616 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6617 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6618 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6619 ignore, lib);
6620 if (target)
6621 return target;
6622 break;
6623 }
6624 case BUILT_IN_ATOMIC_AND_FETCH_1:
6625 case BUILT_IN_ATOMIC_AND_FETCH_2:
6626 case BUILT_IN_ATOMIC_AND_FETCH_4:
6627 case BUILT_IN_ATOMIC_AND_FETCH_8:
6628 case BUILT_IN_ATOMIC_AND_FETCH_16:
6629 {
6630 enum built_in_function lib;
6631 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6632 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6633 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6634 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6635 ignore, lib);
6636 if (target)
6637 return target;
6638 break;
6639 }
6640 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6641 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6642 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6643 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6644 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6645 {
6646 enum built_in_function lib;
6647 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6648 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6649 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6650 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6651 ignore, lib);
6652 if (target)
6653 return target;
6654 break;
6655 }
6656 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6657 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6658 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6659 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6660 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6661 {
6662 enum built_in_function lib;
6663 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6664 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6665 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6666 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6667 ignore, lib);
6668 if (target)
6669 return target;
6670 break;
6671 }
6672 case BUILT_IN_ATOMIC_OR_FETCH_1:
6673 case BUILT_IN_ATOMIC_OR_FETCH_2:
6674 case BUILT_IN_ATOMIC_OR_FETCH_4:
6675 case BUILT_IN_ATOMIC_OR_FETCH_8:
6676 case BUILT_IN_ATOMIC_OR_FETCH_16:
6677 {
6678 enum built_in_function lib;
6679 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6680 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6681 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6682 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6683 ignore, lib);
6684 if (target)
6685 return target;
6686 break;
6687 }
6688 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6689 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6690 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6691 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6692 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6693 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6694 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6695 ignore, BUILT_IN_NONE);
6696 if (target)
6697 return target;
6698 break;
6699
6700 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6701 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6702 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6703 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6704 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6706 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6707 ignore, BUILT_IN_NONE);
6708 if (target)
6709 return target;
6710 break;
6711
6712 case BUILT_IN_ATOMIC_FETCH_AND_1:
6713 case BUILT_IN_ATOMIC_FETCH_AND_2:
6714 case BUILT_IN_ATOMIC_FETCH_AND_4:
6715 case BUILT_IN_ATOMIC_FETCH_AND_8:
6716 case BUILT_IN_ATOMIC_FETCH_AND_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6718 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6719 ignore, BUILT_IN_NONE);
6720 if (target)
6721 return target;
6722 break;
6723
6724 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6725 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6726 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6727 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6728 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6729 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6730 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6731 ignore, BUILT_IN_NONE);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6737 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6738 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6739 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6740 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6741 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6742 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6743 ignore, BUILT_IN_NONE);
6744 if (target)
6745 return target;
6746 break;
6747
6748 case BUILT_IN_ATOMIC_FETCH_OR_1:
6749 case BUILT_IN_ATOMIC_FETCH_OR_2:
6750 case BUILT_IN_ATOMIC_FETCH_OR_4:
6751 case BUILT_IN_ATOMIC_FETCH_OR_8:
6752 case BUILT_IN_ATOMIC_FETCH_OR_16:
6753 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6754 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6755 ignore, BUILT_IN_NONE);
6756 if (target)
6757 return target;
6758 break;
10b744a3 6759
6760 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 6761 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 6762
6763 case BUILT_IN_ATOMIC_CLEAR:
6764 return expand_builtin_atomic_clear (exp);
1cd6e20d 6765
6766 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6767 return expand_builtin_atomic_always_lock_free (exp);
6768
6769 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6770 target = expand_builtin_atomic_is_lock_free (exp);
6771 if (target)
6772 return target;
6773 break;
6774
6775 case BUILT_IN_ATOMIC_THREAD_FENCE:
6776 expand_builtin_atomic_thread_fence (exp);
6777 return const0_rtx;
6778
6779 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6780 expand_builtin_atomic_signal_fence (exp);
6781 return const0_rtx;
6782
0a39fd54 6783 case BUILT_IN_OBJECT_SIZE:
6784 return expand_builtin_object_size (exp);
6785
6786 case BUILT_IN_MEMCPY_CHK:
6787 case BUILT_IN_MEMPCPY_CHK:
6788 case BUILT_IN_MEMMOVE_CHK:
6789 case BUILT_IN_MEMSET_CHK:
6790 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6791 if (target)
6792 return target;
6793 break;
6794
6795 case BUILT_IN_STRCPY_CHK:
6796 case BUILT_IN_STPCPY_CHK:
6797 case BUILT_IN_STRNCPY_CHK:
1063acde 6798 case BUILT_IN_STPNCPY_CHK:
0a39fd54 6799 case BUILT_IN_STRCAT_CHK:
b356dfef 6800 case BUILT_IN_STRNCAT_CHK:
0a39fd54 6801 case BUILT_IN_SNPRINTF_CHK:
6802 case BUILT_IN_VSNPRINTF_CHK:
6803 maybe_emit_chk_warning (exp, fcode);
6804 break;
6805
6806 case BUILT_IN_SPRINTF_CHK:
6807 case BUILT_IN_VSPRINTF_CHK:
6808 maybe_emit_sprintf_chk_warning (exp, fcode);
6809 break;
6810
2c281b15 6811 case BUILT_IN_FREE:
f74ea1c2 6812 if (warn_free_nonheap_object)
6813 maybe_emit_free_warning (exp);
2c281b15 6814 break;
6815
badaa04c 6816 case BUILT_IN_THREAD_POINTER:
6817 return expand_builtin_thread_pointer (exp, target);
6818
6819 case BUILT_IN_SET_THREAD_POINTER:
6820 expand_builtin_set_thread_pointer (exp);
6821 return const0_rtx;
6822
d037099f 6823 case BUILT_IN_CILK_DETACH:
6824 expand_builtin_cilk_detach (exp);
6825 return const0_rtx;
6826
6827 case BUILT_IN_CILK_POP_FRAME:
6828 expand_builtin_cilk_pop_frame (exp);
6829 return const0_rtx;
6830
058a1b7a 6831 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6832 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6833 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6834 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6835 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6836 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6837 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6838 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6839 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6840 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6841 case BUILT_IN_CHKP_GET_PTR_UBOUND:
 6842	      /* We allow user CHKP builtins if the Pointer Bounds
 6843	 Checker is off. */
6844 if (!chkp_function_instrumented_p (current_function_decl))
6845 {
6846 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6847 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6848 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6849 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6850 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6851 return expand_normal (CALL_EXPR_ARG (exp, 0));
6852 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6853 return expand_normal (size_zero_node);
6854 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6855 return expand_normal (size_int (-1));
6856 else
6857 return const0_rtx;
6858 }
6859 /* FALLTHROUGH */
6860
6861 case BUILT_IN_CHKP_BNDMK:
6862 case BUILT_IN_CHKP_BNDSTX:
6863 case BUILT_IN_CHKP_BNDCL:
6864 case BUILT_IN_CHKP_BNDCU:
6865 case BUILT_IN_CHKP_BNDLDX:
6866 case BUILT_IN_CHKP_BNDRET:
6867 case BUILT_IN_CHKP_INTERSECT:
6868 case BUILT_IN_CHKP_NARROW:
6869 case BUILT_IN_CHKP_EXTRACT_LOWER:
6870 case BUILT_IN_CHKP_EXTRACT_UPPER:
 6871	      /* A software implementation of the Pointer Bounds Checker is
 6872	 not yet implemented; target support is required. */
 6873	      error ("your target platform does not support %<-fcheck-pointer-bounds%>");
6874 break;
6875
ca4c3545 6876 case BUILT_IN_ACC_ON_DEVICE:
1ae4e7aa 6877	      /* Fall back to a library call if we failed to expand the
 6878	 builtin when folding. */
ca4c3545 6879 break;
6880
92482ee0 6881	    default:	/* Just emit a library call for any unknown builtin. */
146c1b4f 6882 break;
53800dbe 6883 }
6884
6885 /* The switch statement above can drop through to cause the function
6886 to be called normally. */
6887 return expand_call (exp, target, ignore);
6888}
650e4c94 6889
f21337ef 6890/* Similar to expand_builtin but is used for instrumented calls. */
6891
6892rtx
6893expand_builtin_with_bounds (tree exp, rtx target,
6894 rtx subtarget ATTRIBUTE_UNUSED,
6895 machine_mode mode, int ignore)
6896{
6897 tree fndecl = get_callee_fndecl (exp);
6898 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6899
6900 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6901
6902 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6903 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6904
6905 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6906 && fcode < END_CHKP_BUILTINS);
6907
6908 switch (fcode)
6909 {
6910 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6911 target = expand_builtin_memcpy_with_bounds (exp, target);
6912 if (target)
6913 return target;
6914 break;
6915
6916 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6917 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6918 if (target)
6919 return target;
6920 break;
6921
6922 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6923 target = expand_builtin_memset_with_bounds (exp, target, mode);
6924 if (target)
6925 return target;
6926 break;
6927
6928 default:
6929 break;
6930 }
6931
6932 /* The switch statement above can drop through to cause the function
6933 to be called normally. */
6934 return expand_call (exp, target, ignore);
 6935}
6936
805e22b2 6937/* Determine whether a tree node represents a call to a built-in
52203a9d 6938 function. If the tree T is a call to a built-in function with
6939 the right number of arguments of the appropriate types, return
6940 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6941 Otherwise the return value is END_BUILTINS. */
aecda0d6 6942
805e22b2 6943enum built_in_function
b7bf20db 6944builtin_mathfn_code (const_tree t)
805e22b2 6945{
b7bf20db 6946 const_tree fndecl, arg, parmlist;
6947 const_tree argtype, parmtype;
6948 const_call_expr_arg_iterator iter;
805e22b2 6949
6950 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 6951 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 6952 return END_BUILTINS;
6953
c6e6ecb1 6954 fndecl = get_callee_fndecl (t);
6955 if (fndecl == NULL_TREE
52203a9d 6956 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 6957 || ! DECL_BUILT_IN (fndecl)
6958 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6959 return END_BUILTINS;
6960
52203a9d 6961 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 6962 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 6963 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 6964 {
52203a9d 6965 /* If a function doesn't take a variable number of arguments,
6966 the last element in the list will have type `void'. */
6967 parmtype = TREE_VALUE (parmlist);
6968 if (VOID_TYPE_P (parmtype))
6969 {
b7bf20db 6970 if (more_const_call_expr_args_p (&iter))
52203a9d 6971 return END_BUILTINS;
6972 return DECL_FUNCTION_CODE (fndecl);
6973 }
6974
b7bf20db 6975 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 6976 return END_BUILTINS;
48e1416a 6977
b7bf20db 6978 arg = next_const_call_expr_arg (&iter);
c2f47e15 6979 argtype = TREE_TYPE (arg);
52203a9d 6980
6981 if (SCALAR_FLOAT_TYPE_P (parmtype))
6982 {
6983 if (! SCALAR_FLOAT_TYPE_P (argtype))
6984 return END_BUILTINS;
6985 }
6986 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6987 {
6988 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6989 return END_BUILTINS;
6990 }
6991 else if (POINTER_TYPE_P (parmtype))
6992 {
6993 if (! POINTER_TYPE_P (argtype))
6994 return END_BUILTINS;
6995 }
6996 else if (INTEGRAL_TYPE_P (parmtype))
6997 {
6998 if (! INTEGRAL_TYPE_P (argtype))
6999 return END_BUILTINS;
7000 }
7001 else
e9f80ff5 7002 return END_BUILTINS;
e9f80ff5 7003 }
7004
52203a9d 7005 /* Variable-length argument list. */
805e22b2 7006 return DECL_FUNCTION_CODE (fndecl);
7007}
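/* For instance, a well-typed call to sqrt yields BUILT_IN_SQRT here,
   while a call whose arguments do not match the declared parameter types
   (say, a pointer passed where a double is expected) yields
   END_BUILTINS.  */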
7008
c2f47e15 7009/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7010 evaluate to a constant. */
650e4c94 7011
7012static tree
c2f47e15 7013fold_builtin_constant_p (tree arg)
650e4c94 7014{
650e4c94 7015 /* We return 1 for a numeric type that's known to be a constant
7016 value at compile-time or for an aggregate type that's a
7017 literal constant. */
c2f47e15 7018 STRIP_NOPS (arg);
650e4c94 7019
 7020  /* If we know this is a constant, return the constant one. */
c2f47e15 7021 if (CONSTANT_CLASS_P (arg)
7022 || (TREE_CODE (arg) == CONSTRUCTOR
7023 && TREE_CONSTANT (arg)))
650e4c94 7024 return integer_one_node;
c2f47e15 7025 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7026 {
c2f47e15 7027 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7028 if (TREE_CODE (op) == STRING_CST
7029 || (TREE_CODE (op) == ARRAY_REF
7030 && integer_zerop (TREE_OPERAND (op, 1))
7031 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7032 return integer_one_node;
7033 }
650e4c94 7034
1fb4300c 7035 /* If this expression has side effects, show we don't know it to be a
 7036     constant. Likewise if it's a pointer or aggregate type, since in
 7037     those cases we only want literals, as those are only optimized
f97c71a1 7038 when generating RTL, not later.
7039 And finally, if we are compiling an initializer, not code, we
7040 need to return a definite result now; there's not going to be any
7041 more optimization done. */
c2f47e15 7042 if (TREE_SIDE_EFFECTS (arg)
7043 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7044 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7045 || cfun == 0
0b049e15 7046 || folding_initializer
7047 || force_folding_builtin_constant_p)
650e4c94 7048 return integer_zero_node;
7049
c2f47e15 7050 return NULL_TREE;
650e4c94 7051}
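/* Illustrative outcomes (user-level examples, not part of the original
   source):

     __builtin_constant_p (42)      -> 1   (a CONSTANT_CLASS_P node)
     __builtin_constant_p ("abc")   -> 1   (ADDR_EXPR of a STRING_CST)
     __builtin_constant_p (x + y)   -> NULL_TREE, i.e. deferred, unless
                                       an initializer is being folded or
                                       there is no function context, in
                                       which case 0.  */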
7052
76f5a783 7053/* Create builtin_expect with PRED and EXPECTED as its arguments and
7054 return it as a truthvalue. */
4ee9c684 7055
7056static tree
c83059be 7057build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7058 tree predictor)
4ee9c684 7059{
76f5a783 7060 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7061
b9a16870 7062 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7063 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7064 ret_type = TREE_TYPE (TREE_TYPE (fn));
7065 pred_type = TREE_VALUE (arg_types);
7066 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7067
389dd41b 7068 pred = fold_convert_loc (loc, pred_type, pred);
7069 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7070 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7071 predictor);
76f5a783 7072
7073 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7074 build_int_cst (ret_type, 0));
7075}
7076
 7077/* Fold a call to builtin_expect with arguments ARG0, ARG1 and optional
 7078   predictor ARG2. Return NULL_TREE if no simplification is possible. */
7079
c83059be 7080tree
7081fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
76f5a783 7082{
083bada9 7083 tree inner, fndecl, inner_arg0;
76f5a783 7084 enum tree_code code;
7085
083bada9 7086 /* Distribute the expected value over short-circuiting operators.
7087 See through the cast from truthvalue_type_node to long. */
7088 inner_arg0 = arg0;
d09ef31a 7089 while (CONVERT_EXPR_P (inner_arg0)
083bada9 7090 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7091 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7092 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7093
76f5a783 7094 /* If this is a builtin_expect within a builtin_expect keep the
7095 inner one. See through a comparison against a constant. It
 7096     might have been added to create a truthvalue. */
083bada9 7097 inner = inner_arg0;
7098
76f5a783 7099 if (COMPARISON_CLASS_P (inner)
7100 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7101 inner = TREE_OPERAND (inner, 0);
7102
7103 if (TREE_CODE (inner) == CALL_EXPR
7104 && (fndecl = get_callee_fndecl (inner))
7105 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7106 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7107 return arg0;
7108
083bada9 7109 inner = inner_arg0;
76f5a783 7110 code = TREE_CODE (inner);
7111 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7112 {
7113 tree op0 = TREE_OPERAND (inner, 0);
7114 tree op1 = TREE_OPERAND (inner, 1);
7115
c83059be 7116 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7117 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
76f5a783 7118 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7119
389dd41b 7120 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 7121 }
7122
7123 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 7124 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 7125 return NULL_TREE;
4ee9c684 7126
76f5a783 7127 /* If we expect that a comparison against the argument will fold to
 7128     a constant, return the constant. In practice, this means a true
7129 constant or the address of a non-weak symbol. */
083bada9 7130 inner = inner_arg0;
4ee9c684 7131 STRIP_NOPS (inner);
7132 if (TREE_CODE (inner) == ADDR_EXPR)
7133 {
7134 do
7135 {
7136 inner = TREE_OPERAND (inner, 0);
7137 }
7138 while (TREE_CODE (inner) == COMPONENT_REF
7139 || TREE_CODE (inner) == ARRAY_REF);
53e9c5c4 7140 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
c2f47e15 7141 return NULL_TREE;
4ee9c684 7142 }
7143
76f5a783 7144 /* Otherwise, ARG0 already has the proper type for the return value. */
7145 return arg0;
4ee9c684 7146}
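/* A user-level sketch of the distribution above (illustrative only):

     if (__builtin_expect (a && b, 1)) ...

   is folded as if it were written

     if (__builtin_expect (a, 1) && __builtin_expect (b, 1)) ...

   so the prediction reaches both short-circuit arms.  */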
7147
c2f47e15 7148/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7149
539a3a92 7150static tree
c2f47e15 7151fold_builtin_classify_type (tree arg)
539a3a92 7152{
c2f47e15 7153 if (arg == 0)
7002a1c8 7154 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7155
7002a1c8 7156 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7157}
7158
c2f47e15 7159/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7160
7161static tree
c7cbde74 7162fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7163{
c2f47e15 7164 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7165 return NULL_TREE;
7166 else
7167 {
c2f47e15 7168 tree len = c_strlen (arg, 0);
e6e27594 7169
7170 if (len)
c7cbde74 7171 return fold_convert_loc (loc, type, len);
e6e27594 7172
7173 return NULL_TREE;
7174 }
7175}
7176
92c43e3c 7177/* Fold a call to __builtin_inf or __builtin_huge_val. */
7178
7179static tree
389dd41b 7180fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7181{
aa870c1b 7182 REAL_VALUE_TYPE real;
7183
40f4dbd5 7184 /* __builtin_inff is intended to be usable to define INFINITY on all
7185 targets. If an infinity is not available, INFINITY expands "to a
7186 positive constant of type float that overflows at translation
7187 time", footnote "In this case, using INFINITY will violate the
7188 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7189 Thus we pedwarn to ensure this constraint violation is
7190 diagnosed. */
92c43e3c 7191 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7192 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7193
aa870c1b 7194 real_inf (&real);
7195 return build_real (type, real);
92c43e3c 7196}
7197
d735c391 7198/* Fold function call to builtin sincos, sincosf, or sincosl. Return
7199 NULL_TREE if no simplification can be made. */
7200
7201static tree
389dd41b 7202fold_builtin_sincos (location_t loc,
7203 tree arg0, tree arg1, tree arg2)
d735c391 7204{
c2f47e15 7205 tree type;
6c21be92 7206 tree fndecl, call = NULL_TREE;
d735c391 7207
c2f47e15 7208 if (!validate_arg (arg0, REAL_TYPE)
7209 || !validate_arg (arg1, POINTER_TYPE)
7210 || !validate_arg (arg2, POINTER_TYPE))
d735c391 7211 return NULL_TREE;
7212
d735c391 7213 type = TREE_TYPE (arg0);
d735c391 7214
 7215  /* Canonicalize sincos to cexpi; look up the cexpi variant for TYPE. */
e3240774 7216 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
6c21be92 7217 if (fn == END_BUILTINS)
d735c391 7218 return NULL_TREE;
7219
6c21be92 7220  /* Calculate the result directly when the argument is a constant. */
7221 if (TREE_CODE (arg0) == REAL_CST)
7222 {
7223 tree complex_type = build_complex_type (type);
744fe358 7224 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
6c21be92 7225 }
7226 if (!call)
7227 {
7228 if (!targetm.libc_has_function (function_c99_math_complex)
7229 || !builtin_decl_implicit_p (fn))
7230 return NULL_TREE;
7231 fndecl = builtin_decl_explicit (fn);
7232 call = build_call_expr_loc (loc, fndecl, 1, arg0);
7233 call = builtin_save_expr (call);
7234 }
d735c391 7235
a75b1c71 7236 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 7237 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7238 build_fold_indirect_ref_loc (loc, arg1),
6c21be92 7239 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
d735c391 7240 build2 (MODIFY_EXPR, void_type_node,
389dd41b 7241 build_fold_indirect_ref_loc (loc, arg2),
6c21be92 7242 fold_build1_loc (loc, REALPART_EXPR, type, call)));
d735c391 7243}
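/* In effect sincos (x, &s, &c) is rewritten, when cexpi is usable, into
   the pair of stores *s = __imag__ cexpi (x) and *c = __real__ cexpi (x),
   with the cexpi call shared through builtin_save_expr; for a constant
   argument the cexpi value is computed outright by fold_const_call.  */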
7244
7959b13b 7245/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7246 arguments to the call, and TYPE is its return type.
7247 Return NULL_TREE if no simplification can be made. */
7248
7249static tree
389dd41b 7250fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7959b13b 7251{
7252 if (!validate_arg (arg1, POINTER_TYPE)
7253 || !validate_arg (arg2, INTEGER_TYPE)
7254 || !validate_arg (len, INTEGER_TYPE))
7255 return NULL_TREE;
7256 else
7257 {
7258 const char *p1;
7259
7260 if (TREE_CODE (arg2) != INTEGER_CST
e913b5cd 7261 || !tree_fits_uhwi_p (len))
7959b13b 7262 return NULL_TREE;
7263
7264 p1 = c_getstr (arg1);
7265 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7266 {
7267 char c;
7268 const char *r;
7269 tree tem;
7270
7271 if (target_char_cast (arg2, &c))
7272 return NULL_TREE;
7273
e913b5cd 7274 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7959b13b 7275
7276 if (r == NULL)
7277 return build_int_cst (TREE_TYPE (arg1), 0);
7278
2cc66f2a 7279 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
389dd41b 7280 return fold_convert_loc (loc, type, tem);
7959b13b 7281 }
7282 return NULL_TREE;
7283 }
7284}
7285
c2f47e15 7286/* Fold function call to builtin memcmp with arguments ARG1, ARG2 and LEN.
7287 Return NULL_TREE if no simplification can be made. */
9c8a1629 7288
7289static tree
389dd41b 7290fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 7291{
c2f47e15 7292 if (!validate_arg (arg1, POINTER_TYPE)
7293 || !validate_arg (arg2, POINTER_TYPE)
7294 || !validate_arg (len, INTEGER_TYPE))
7295 return NULL_TREE;
9c8a1629 7296
7297 /* If the LEN parameter is zero, return zero. */
7298 if (integer_zerop (len))
389dd41b 7299 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 7300 arg1, arg2);
9c8a1629 7301
7302 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
7303 if (operand_equal_p (arg1, arg2, 0))
389dd41b 7304 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 7305
c4fef134 7306  /* If the LEN parameter is one, return an expression corresponding to
 7307     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
e913b5cd 7308 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 7309 {
7310 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 7311 tree cst_uchar_ptr_node
7312 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7313
389dd41b 7314 tree ind1
7315 = fold_convert_loc (loc, integer_type_node,
7316 build1 (INDIRECT_REF, cst_uchar_node,
7317 fold_convert_loc (loc,
7318 cst_uchar_ptr_node,
c4fef134 7319 arg1)));
389dd41b 7320 tree ind2
7321 = fold_convert_loc (loc, integer_type_node,
7322 build1 (INDIRECT_REF, cst_uchar_node,
7323 fold_convert_loc (loc,
7324 cst_uchar_ptr_node,
c4fef134 7325 arg2)));
389dd41b 7326 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 7327 }
9c8a1629 7328
c2f47e15 7329 return NULL_TREE;
9c8a1629 7330}
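/* For example, memcmp (p, q, 0) folds to 0 while still evaluating p and
   q for their side effects, memcmp (p, p, n) folds to 0, and
   memcmp (p, q, 1) folds to
   *(const unsigned char *) p - *(const unsigned char *) q.  */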
7331
c2f47e15 7332/* Fold a call to builtin isascii with argument ARG. */
d49367d4 7333
7334static tree
389dd41b 7335fold_builtin_isascii (location_t loc, tree arg)
d49367d4 7336{
c2f47e15 7337 if (!validate_arg (arg, INTEGER_TYPE))
7338 return NULL_TREE;
d49367d4 7339 else
7340 {
7341 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 7342 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 7343 build_int_cst (integer_type_node,
c90b5d40 7344 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 7345 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 7346 arg, integer_zero_node);
d49367d4 7347 }
7348}
7349
c2f47e15 7350/* Fold a call to builtin toascii with argument ARG. */
d49367d4 7351
7352static tree
389dd41b 7353fold_builtin_toascii (location_t loc, tree arg)
d49367d4 7354{
c2f47e15 7355 if (!validate_arg (arg, INTEGER_TYPE))
7356 return NULL_TREE;
48e1416a 7357
c2f47e15 7358 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 7359 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 7360 build_int_cst (integer_type_node, 0x7f));
d49367d4 7361}
7362
c2f47e15 7363/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 7364
7365static tree
389dd41b 7366fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 7367{
c2f47e15 7368 if (!validate_arg (arg, INTEGER_TYPE))
7369 return NULL_TREE;
df1cf42e 7370 else
7371 {
7372 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 7373 /* According to the C standard, isdigit is unaffected by locale.
7374 However, it definitely is affected by the target character set. */
624d37a6 7375 unsigned HOST_WIDE_INT target_digit0
7376 = lang_hooks.to_target_charset ('0');
7377
7378 if (target_digit0 == 0)
7379 return NULL_TREE;
7380
389dd41b 7381 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 7382 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7383 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 7384 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 7385 build_int_cst (unsigned_type_node, 9));
df1cf42e 7386 }
7387}
27f261ef 7388
c2f47e15 7389/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 7390
7391static tree
389dd41b 7392fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 7393{
c2f47e15 7394 if (!validate_arg (arg, REAL_TYPE))
7395 return NULL_TREE;
d1aade50 7396
389dd41b 7397 arg = fold_convert_loc (loc, type, arg);
389dd41b 7398 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 7399}
7400
c2f47e15 7401/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 7402
7403static tree
389dd41b 7404fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 7405{
c2f47e15 7406 if (!validate_arg (arg, INTEGER_TYPE))
7407 return NULL_TREE;
d1aade50 7408
389dd41b 7409 arg = fold_convert_loc (loc, type, arg);
389dd41b 7410 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 7411}
7412
b9be572e 7413/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
7414
7415static tree
7416fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7417{
866b3d58 7418 /* ??? Only expand to FMA_EXPR if it's directly supported. */
b9be572e 7419 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 7420 && validate_arg (arg1, REAL_TYPE)
866b3d58 7421 && validate_arg (arg2, REAL_TYPE)
7422 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7423 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
b9be572e 7424
b9be572e 7425 return NULL_TREE;
7426}
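/* That is, fma (a, b, c) is folded to FMA_EXPR <a, b, c> only when the
   target provides an fma pattern for the mode, so this path never turns
   a fused operation into an out-of-line libcall.  */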
7427
abe4dcf6 7428/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
7429
7430static tree
389dd41b 7431fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 7432{
239d491a 7433 if (validate_arg (arg, COMPLEX_TYPE)
7434 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 7435 {
7436 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 7437
abe4dcf6 7438 if (atan2_fn)
7439 {
c2f47e15 7440 tree new_arg = builtin_save_expr (arg);
389dd41b 7441 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7442 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7443 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 7444 }
7445 }
48e1416a 7446
abe4dcf6 7447 return NULL_TREE;
7448}
7449
3838b9ae 7450/* Fold a call to builtin frexp, we can assume the base is 2. */
7451
7452static tree
389dd41b 7453fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 7454{
7455 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7456 return NULL_TREE;
48e1416a 7457
3838b9ae 7458 STRIP_NOPS (arg0);
48e1416a 7459
3838b9ae 7460 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7461 return NULL_TREE;
48e1416a 7462
389dd41b 7463 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 7464
7465 /* Proceed if a valid pointer type was passed in. */
7466 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7467 {
7468 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7469 tree frac, exp;
48e1416a 7470
3838b9ae 7471 switch (value->cl)
7472 {
7473 case rvc_zero:
7474 /* For +-0, return (*exp = 0, +-0). */
7475 exp = integer_zero_node;
7476 frac = arg0;
7477 break;
7478 case rvc_nan:
7479 case rvc_inf:
7480 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 7481 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 7482 case rvc_normal:
7483 {
7484 /* Since the frexp function always expects base 2, and in
7485 GCC normalized significands are already in the range
7486 [0.5, 1.0), we have exactly what frexp wants. */
7487 REAL_VALUE_TYPE frac_rvt = *value;
7488 SET_REAL_EXP (&frac_rvt, 0);
7489 frac = build_real (rettype, frac_rvt);
7002a1c8 7490 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 7491 }
7492 break;
7493 default:
7494 gcc_unreachable ();
7495 }
48e1416a 7496
3838b9ae 7497 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 7498 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 7499 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 7500 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 7501 }
7502
7503 return NULL_TREE;
7504}
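/* For instance frexp (4.0, &e) folds to the pair (*e = 3, 0.5), since
   4.0 is 0.5 * 2**3 under the [0.5, 1.0) normalization used here.  */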
7505
ebf8b4f5 7506/* Fold a call to builtin modf. */
7507
7508static tree
389dd41b 7509fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 7510{
7511 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7512 return NULL_TREE;
48e1416a 7513
ebf8b4f5 7514 STRIP_NOPS (arg0);
48e1416a 7515
ebf8b4f5 7516 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7517 return NULL_TREE;
48e1416a 7518
389dd41b 7519 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 7520
7521 /* Proceed if a valid pointer type was passed in. */
7522 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7523 {
7524 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7525 REAL_VALUE_TYPE trunc, frac;
7526
7527 switch (value->cl)
7528 {
7529 case rvc_nan:
7530 case rvc_zero:
7531 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
7532 trunc = frac = *value;
7533 break;
7534 case rvc_inf:
7535 /* For +-Inf, return (*arg1 = arg0, +-0). */
7536 frac = dconst0;
7537 frac.sign = value->sign;
7538 trunc = *value;
7539 break;
7540 case rvc_normal:
7541 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
7542 real_trunc (&trunc, VOIDmode, value);
7543 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7544 /* If the original number was negative and already
7545 integral, then the fractional part is -0.0. */
7546 if (value->sign && frac.cl == rvc_zero)
7547 frac.sign = value->sign;
7548 break;
7549 }
48e1416a 7550
ebf8b4f5 7551 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 7552 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 7553 build_real (rettype, trunc));
7554 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 7555 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 7556 build_real (rettype, frac));
7557 }
48e1416a 7558
ebf8b4f5 7559 return NULL_TREE;
7560}
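/* For instance modf (-3.25, &i) folds to (*i = -3.0, -0.25), and an
   already-integral negative argument yields a -0.0 fractional part.  */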
7561
a65c4d64 7562/* Given a location LOC, an interclass builtin function decl FNDECL
 7563   and its single argument ARG, return a folded expression computing
 7564   the same, or NULL_TREE if we either couldn't or didn't want to fold
 7565   (the latter happens if there's an RTL instruction available). */
7566
7567static tree
7568fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7569{
3754d046 7570 machine_mode mode;
a65c4d64 7571
7572 if (!validate_arg (arg, REAL_TYPE))
7573 return NULL_TREE;
7574
7575 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7576 return NULL_TREE;
7577
7578 mode = TYPE_MODE (TREE_TYPE (arg));
7579
7f38718f 7580 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7581
a65c4d64 7582 /* If there is no optab, try generic code. */
7583 switch (DECL_FUNCTION_CODE (fndecl))
7584 {
7585 tree result;
7586
7587 CASE_FLT_FN (BUILT_IN_ISINF):
7588 {
7589 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
b9a16870 7590 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7f38718f 7591 tree type = TREE_TYPE (arg);
a65c4d64 7592 REAL_VALUE_TYPE r;
7593 char buf[128];
7594
7f38718f 7595 if (is_ibm_extended)
7596 {
7597 /* NaN and Inf are encoded in the high-order double value
7598 only. The low-order value is not significant. */
7599 type = double_type_node;
7600 mode = DFmode;
7601 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7602 }
a65c4d64 7603 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7604 real_from_string (&r, buf);
7605 result = build_call_expr (isgr_fn, 2,
7606 fold_build1_loc (loc, ABS_EXPR, type, arg),
7607 build_real (type, r));
7608 return result;
7609 }
7610 CASE_FLT_FN (BUILT_IN_FINITE):
7611 case BUILT_IN_ISFINITE:
7612 {
7613 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
b9a16870 7614 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7f38718f 7615 tree type = TREE_TYPE (arg);
a65c4d64 7616 REAL_VALUE_TYPE r;
7617 char buf[128];
7618
7f38718f 7619 if (is_ibm_extended)
7620 {
7621 /* NaN and Inf are encoded in the high-order double value
7622 only. The low-order value is not significant. */
7623 type = double_type_node;
7624 mode = DFmode;
7625 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7626 }
a65c4d64 7627 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7628 real_from_string (&r, buf);
7629 result = build_call_expr (isle_fn, 2,
7630 fold_build1_loc (loc, ABS_EXPR, type, arg),
7631 build_real (type, r));
7632 /*result = fold_build2_loc (loc, UNGT_EXPR,
7633 TREE_TYPE (TREE_TYPE (fndecl)),
7634 fold_build1_loc (loc, ABS_EXPR, type, arg),
7635 build_real (type, r));
7636 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7637 TREE_TYPE (TREE_TYPE (fndecl)),
7638 result);*/
7639 return result;
7640 }
7641 case BUILT_IN_ISNORMAL:
7642 {
7643 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7644 islessequal(fabs(x),DBL_MAX). */
b9a16870 7645 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7f38718f 7646 tree type = TREE_TYPE (arg);
7647 tree orig_arg, max_exp, min_exp;
7648 machine_mode orig_mode = mode;
a65c4d64 7649 REAL_VALUE_TYPE rmax, rmin;
7650 char buf[128];
7651
7f38718f 7652 orig_arg = arg = builtin_save_expr (arg);
7653 if (is_ibm_extended)
7654 {
7655 /* Use double to test the normal range of IBM extended
7656 precision. Emin for IBM extended precision is
7657 different to emin for IEEE double, being 53 higher
7658 since the low double exponent is at least 53 lower
7659 than the high double exponent. */
7660 type = double_type_node;
7661 mode = DFmode;
7662 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7663 }
7664 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7665
a65c4d64 7666 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7667 real_from_string (&rmax, buf);
7f38718f 7668 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
a65c4d64 7669 real_from_string (&rmin, buf);
7f38718f 7670 max_exp = build_real (type, rmax);
7671 min_exp = build_real (type, rmin);
7672
7673 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7674 if (is_ibm_extended)
7675 {
7676 /* Testing the high end of the range is done just using
7677 the high double, using the same test as isfinite().
7678 For the subnormal end of the range we first test the
7679 high double, then if its magnitude is equal to the
7680 limit of 0x1p-969, we test whether the low double is
7681 non-zero and opposite sign to the high double. */
7682 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7683 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7684 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7685 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7686 arg, min_exp);
7687 tree as_complex = build1 (VIEW_CONVERT_EXPR,
7688 complex_double_type_node, orig_arg);
7689 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7690 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7691 tree zero = build_real (type, dconst0);
7692 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7693 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7694 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7695 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7696 fold_build3 (COND_EXPR,
7697 integer_type_node,
7698 hilt, logt, lolt));
7699 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7700 eq_min, ok_lo);
7701 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7702 gt_min, eq_min);
7703 }
7704 else
7705 {
7706 tree const isge_fn
7707 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7708 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7709 }
7710 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7711 max_exp, min_exp);
a65c4d64 7712 return result;
7713 }
7714 default:
7715 break;
7716 }
7717
7718 return NULL_TREE;
7719}
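/* All of the generic expansions above are built from the C99 comparison
   builtins (isgreater, islessequal, ...) applied to fabs of the argument,
   with the DBL_MAX/DBL_MIN style bounds reconstructed from the mode's
   floating-point format via get_max_float and its emin exponent.  */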
7720
726069ba 7721/* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
c2f47e15 7722   ARG is the argument for the call, BUILTIN_INDEX the function folded. */
726069ba 7723
7724static tree
389dd41b 7725fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 7726{
726069ba 7727 tree type = TREE_TYPE (TREE_TYPE (fndecl));
726069ba 7728
c2f47e15 7729 if (!validate_arg (arg, REAL_TYPE))
d43cee80 7730 return NULL_TREE;
726069ba 7731
726069ba 7732 switch (builtin_index)
7733 {
7734 case BUILT_IN_ISINF:
fe994837 7735 if (!HONOR_INFINITIES (arg))
389dd41b 7736 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 7737
726069ba 7738 return NULL_TREE;
7739
c319d56a 7740 case BUILT_IN_ISINF_SIGN:
7741 {
7742 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7743 /* In a boolean context, GCC will fold the inner COND_EXPR to
7744 1. So e.g. "if (isinf_sign(x))" would be folded to just
7745 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
6cfc7001 7746 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
b9a16870 7747 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 7748 tree tmp = NULL_TREE;
7749
7750 arg = builtin_save_expr (arg);
7751
7752 if (signbit_fn && isinf_fn)
7753 {
389dd41b 7754 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7755 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 7756
389dd41b 7757 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 7758 signbit_call, integer_zero_node);
389dd41b 7759 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 7760 isinf_call, integer_zero_node);
48e1416a 7761
389dd41b 7762 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 7763 integer_minus_one_node, integer_one_node);
389dd41b 7764 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7765 isinf_call, tmp,
c319d56a 7766 integer_zero_node);
7767 }
7768
7769 return tmp;
7770 }
7771
cde061c1 7772 case BUILT_IN_ISFINITE:
93633022 7773 if (!HONOR_NANS (arg)
fe994837 7774 && !HONOR_INFINITIES (arg))
389dd41b 7775 return omit_one_operand_loc (loc, type, integer_one_node, arg);
726069ba 7776
726069ba 7777 return NULL_TREE;
7778
7779 case BUILT_IN_ISNAN:
93633022 7780 if (!HONOR_NANS (arg))
389dd41b 7781 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 7782
7f38718f 7783 {
7784 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7785 if (is_ibm_extended)
7786 {
7787 /* NaN and Inf are encoded in the high-order double value
7788 only. The low-order value is not significant. */
7789 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7790 }
7791 }
726069ba 7792 arg = builtin_save_expr (arg);
389dd41b 7793 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
726069ba 7794
7795 default:
64db345d 7796 gcc_unreachable ();
726069ba 7797 }
7798}
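/* A sketch of the tree the BUILT_IN_ISINF_SIGN case above builds,
   written back as C; the wrapper name is hypothetical.  */

int
isinf_sign_expanded_sketch (double x)
{
  return __builtin_isinf (x)
	 ? (__builtin_signbit (x) ? -1 : 1)
	 : 0;
}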
7799
19fbe3a4 7800/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7801 This builtin will generate code to return the appropriate floating
7802 point classification depending on the value of the floating point
7803 number passed in. The possible return values must be supplied as
921b27c0 7804 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
19fbe3a4 7805 FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
 7806 one floating point argument, which is "type generic".  */
7807
7808static tree
9d884767 7809fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
19fbe3a4 7810{
921b27c0 7811 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7812 arg, type, res, tmp;
3754d046 7813 machine_mode mode;
19fbe3a4 7814 REAL_VALUE_TYPE r;
7815 char buf[128];
48e1416a 7816
19fbe3a4 7817 /* Verify the required arguments in the original call. */
9d884767 7818 if (nargs != 6
7819 || !validate_arg (args[0], INTEGER_TYPE)
7820 || !validate_arg (args[1], INTEGER_TYPE)
7821 || !validate_arg (args[2], INTEGER_TYPE)
7822 || !validate_arg (args[3], INTEGER_TYPE)
7823 || !validate_arg (args[4], INTEGER_TYPE)
7824 || !validate_arg (args[5], REAL_TYPE))
19fbe3a4 7825 return NULL_TREE;
48e1416a 7826
9d884767 7827 fp_nan = args[0];
7828 fp_infinite = args[1];
7829 fp_normal = args[2];
7830 fp_subnormal = args[3];
7831 fp_zero = args[4];
7832 arg = args[5];
19fbe3a4 7833 type = TREE_TYPE (arg);
7834 mode = TYPE_MODE (type);
389dd41b 7835 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
19fbe3a4 7836
48e1416a 7837 /* fpclassify(x) ->
19fbe3a4 7838 isnan(x) ? FP_NAN :
921b27c0 7839 (fabs(x) == Inf ? FP_INFINITE :
19fbe3a4 7840 (fabs(x) >= DBL_MIN ? FP_NORMAL :
7841 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
48e1416a 7842
389dd41b 7843 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 7844 build_real (type, dconst0));
389dd41b 7845 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7846 tmp, fp_zero, fp_subnormal);
19fbe3a4 7847
7848 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7849 real_from_string (&r, buf);
389dd41b 7850 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7851 arg, build_real (type, r));
7852 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
48e1416a 7853
19fbe3a4 7854 if (HONOR_INFINITIES (mode))
7855 {
7856 real_inf (&r);
389dd41b 7857 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 7858 build_real (type, r));
389dd41b 7859 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7860 fp_infinite, res);
19fbe3a4 7861 }
7862
7863 if (HONOR_NANS (mode))
7864 {
389dd41b 7865 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7866 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
19fbe3a4 7867 }
48e1416a 7868
19fbe3a4 7869 return res;
7870}
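/* Reading the chain of COND_EXPRs built above back into C gives the
   following sketch (assuming NaNs and infinities are honored).  The
   wrapper name is hypothetical and 0x1p-1022 stands in for the
   mode-dependent 0x1p(emin-1) constant of IEEE double.  */

#include <math.h>

int
fpclassify_expanded_sketch (double x)
{
  double ax = __builtin_fabs (x);
  return __builtin_isnan (x) ? FP_NAN
	 : ax == __builtin_inf () ? FP_INFINITE
	 : ax >= 0x1p-1022 ? FP_NORMAL
	 : x == 0.0 ? FP_ZERO
	 : FP_SUBNORMAL;
}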
7871
9bc9f15f 7872/* Fold a call to an unordered comparison function such as
d5019fe8 7873 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 7874 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 7875 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7876 the opposite of the desired result. UNORDERED_CODE is used
7877 for modes that can hold NaNs and ORDERED_CODE is used for
7878 the rest. */
9bc9f15f 7879
7880static tree
389dd41b 7881fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 7882 enum tree_code unordered_code,
7883 enum tree_code ordered_code)
7884{
859f903a 7885 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 7886 enum tree_code code;
6978db0d 7887 tree type0, type1;
7888 enum tree_code code0, code1;
7889 tree cmp_type = NULL_TREE;
9bc9f15f 7890
6978db0d 7891 type0 = TREE_TYPE (arg0);
7892 type1 = TREE_TYPE (arg1);
a0c938f0 7893
6978db0d 7894 code0 = TREE_CODE (type0);
7895 code1 = TREE_CODE (type1);
a0c938f0 7896
6978db0d 7897 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7898 /* Choose the wider of two real types. */
7899 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7900 ? type0 : type1;
7901 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7902 cmp_type = type0;
7903 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7904 cmp_type = type1;
a0c938f0 7905
389dd41b 7906 arg0 = fold_convert_loc (loc, cmp_type, arg0);
7907 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 7908
7909 if (unordered_code == UNORDERED_EXPR)
7910 {
93633022 7911 if (!HONOR_NANS (arg0))
389dd41b 7912 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7913 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 7914 }
9bc9f15f 7915
93633022 7916 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 7917 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7918 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 7919}
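/* Concretely, with the code pairs passed in by fold_builtin_2 below,
   the folds come out as (sketch):

     isgreater (x, y)   -> !(x UNLE y)  when NaNs are honored,
			   !(x <= y)    otherwise;
     isunordered (x, y) ->  x UNORD y   when NaNs are honored,
			    0           otherwise (the operands are
					still evaluated for side
					effects).  */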
7920
0c93c8a9 7921/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
 7922 arithmetic if it can never overflow, or into internal functions that
 7923 return both the result of the arithmetic and an overflow flag in
732905bb 7924 a complex integer result, or some other check for overflow.
7925 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
7926 checking part of that. */
0c93c8a9 7927
7928static tree
7929fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7930 tree arg0, tree arg1, tree arg2)
7931{
7932 enum internal_fn ifn = IFN_LAST;
732905bb 7933 /* The code of the expression corresponding to the type-generic
7934 built-in, or ERROR_MARK for the type-specific ones. */
7935 enum tree_code opcode = ERROR_MARK;
7936 bool ovf_only = false;
7937
0c93c8a9 7938 switch (fcode)
7939 {
732905bb 7940 case BUILT_IN_ADD_OVERFLOW_P:
7941 ovf_only = true;
7942 /* FALLTHRU */
0c93c8a9 7943 case BUILT_IN_ADD_OVERFLOW:
732905bb 7944 opcode = PLUS_EXPR;
7945 /* FALLTHRU */
0c93c8a9 7946 case BUILT_IN_SADD_OVERFLOW:
7947 case BUILT_IN_SADDL_OVERFLOW:
7948 case BUILT_IN_SADDLL_OVERFLOW:
7949 case BUILT_IN_UADD_OVERFLOW:
7950 case BUILT_IN_UADDL_OVERFLOW:
7951 case BUILT_IN_UADDLL_OVERFLOW:
7952 ifn = IFN_ADD_OVERFLOW;
7953 break;
732905bb 7954 case BUILT_IN_SUB_OVERFLOW_P:
7955 ovf_only = true;
7956 /* FALLTHRU */
0c93c8a9 7957 case BUILT_IN_SUB_OVERFLOW:
732905bb 7958 opcode = MINUS_EXPR;
7959 /* FALLTHRU */
0c93c8a9 7960 case BUILT_IN_SSUB_OVERFLOW:
7961 case BUILT_IN_SSUBL_OVERFLOW:
7962 case BUILT_IN_SSUBLL_OVERFLOW:
7963 case BUILT_IN_USUB_OVERFLOW:
7964 case BUILT_IN_USUBL_OVERFLOW:
7965 case BUILT_IN_USUBLL_OVERFLOW:
7966 ifn = IFN_SUB_OVERFLOW;
7967 break;
732905bb 7968 case BUILT_IN_MUL_OVERFLOW_P:
7969 ovf_only = true;
7970 /* FALLTHRU */
0c93c8a9 7971 case BUILT_IN_MUL_OVERFLOW:
732905bb 7972 opcode = MULT_EXPR;
7973 /* FALLTHRU */
0c93c8a9 7974 case BUILT_IN_SMUL_OVERFLOW:
7975 case BUILT_IN_SMULL_OVERFLOW:
7976 case BUILT_IN_SMULLL_OVERFLOW:
7977 case BUILT_IN_UMUL_OVERFLOW:
7978 case BUILT_IN_UMULL_OVERFLOW:
7979 case BUILT_IN_UMULLL_OVERFLOW:
7980 ifn = IFN_MUL_OVERFLOW;
7981 break;
7982 default:
7983 gcc_unreachable ();
7984 }
732905bb 7985
7986 /* For the "generic" overloads, the first two arguments can have different
7987 types and the last argument determines the target type to use to check
7988 for overflow. The arguments of the other overloads all have the same
7989 type. */
7990 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
7991
7992 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
7993 arguments are constant, attempt to fold the built-in call into a constant
7994 expression indicating whether or not it detected an overflow. */
7995 if (ovf_only
7996 && TREE_CODE (arg0) == INTEGER_CST
7997 && TREE_CODE (arg1) == INTEGER_CST)
7998 /* Perform the computation in the target type and check for overflow. */
7999 return omit_one_operand_loc (loc, boolean_type_node,
8000 arith_overflowed_p (opcode, type, arg0, arg1)
8001 ? boolean_true_node : boolean_false_node,
8002 arg2);
8003
0c93c8a9 8004 tree ctype = build_complex_type (type);
8005 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8006 2, arg0, arg1);
8007 tree tgt = save_expr (call);
8008 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8009 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8010 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
732905bb 8011
8012 if (ovf_only)
8013 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8014
8015 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
0c93c8a9 8016 tree store
8017 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8018 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8019}
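/* A sketch of both paths; the function name and values below are
   illustrative only.  */

#include <limits.h>

int
overflow_demo_sketch (int a, int b, int *res)
{
  /* Both operands constant: the ovf_only path above folds this to a
     compile-time 1 via arith_overflowed_p.  */
  int always = __builtin_add_overflow_p (INT_MAX, 1, (int) 0);
  /* General case: becomes .ADD_OVERFLOW (a, b); the REALPART of the
     complex result is stored through RES, and IMAGPART != 0 is the
     boolean flag the call evaluates to.  */
  return always | __builtin_add_overflow (a, b, res);
}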
8020
c388a0cf 8021/* Fold a call to __builtin_FILE to a constant string. */
8022
8023static inline tree
8024fold_builtin_FILE (location_t loc)
8025{
8026 if (const char *fname = LOCATION_FILE (loc))
8027 return build_string_literal (strlen (fname) + 1, fname);
8028
8029 return build_string_literal (1, "");
8030}
8031
8032/* Fold a call to __builtin_FUNCTION to a constant string. */
8033
8034static inline tree
8035fold_builtin_FUNCTION ()
8036{
8037 if (current_function_decl)
8038 {
8039 const char *name = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
8040 return build_string_literal (strlen (name) + 1, name);
8041 }
8042
8043 return build_string_literal (1, "");
8044}
8045
8046/* Fold a call to __builtin_LINE to an integer constant. */
8047
8048static inline tree
8049fold_builtin_LINE (location_t loc, tree type)
8050{
8051 return build_int_cst (type, LOCATION_LINE (loc));
8052}
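/* Together these three folds let a location-capturing helper reduce
   to constants at each call site; a hypothetical use:  */

extern void logmsg (const char *file, int line, const char *func);
#define LOG() logmsg (__builtin_FILE (), __builtin_LINE (), \
		      __builtin_FUNCTION ())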
8053
c2f47e15 8054/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 8055 This function returns NULL_TREE if no simplification was possible. */
650e4c94 8056
4ee9c684 8057static tree
e80cc485 8058fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 8059{
e9f80ff5 8060 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 8061 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 8062 switch (fcode)
650e4c94 8063 {
c388a0cf 8064 case BUILT_IN_FILE:
8065 return fold_builtin_FILE (loc);
8066
8067 case BUILT_IN_FUNCTION:
8068 return fold_builtin_FUNCTION ();
8069
8070 case BUILT_IN_LINE:
8071 return fold_builtin_LINE (loc, type);
8072
c2f47e15 8073 CASE_FLT_FN (BUILT_IN_INF):
012f068a 8074 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
c2f47e15 8075 case BUILT_IN_INFD32:
8076 case BUILT_IN_INFD64:
8077 case BUILT_IN_INFD128:
389dd41b 8078 return fold_builtin_inf (loc, type, true);
7c2f0500 8079
c2f47e15 8080 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
012f068a 8081 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
389dd41b 8082 return fold_builtin_inf (loc, type, false);
7c2f0500 8083
c2f47e15 8084 case BUILT_IN_CLASSIFY_TYPE:
8085 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 8086
c2f47e15 8087 default:
8088 break;
8089 }
8090 return NULL_TREE;
8091}
7c2f0500 8092
c2f47e15 8093/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 8094 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 8095
c2f47e15 8096static tree
e80cc485 8097fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 8098{
8099 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8100 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 8101
8102 if (TREE_CODE (arg0) == ERROR_MARK)
8103 return NULL_TREE;
8104
744fe358 8105 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
6c21be92 8106 return ret;
8107
c2f47e15 8108 switch (fcode)
8109 {
650e4c94 8110 case BUILT_IN_CONSTANT_P:
7c2f0500 8111 {
c2f47e15 8112 tree val = fold_builtin_constant_p (arg0);
7c2f0500 8113
7c2f0500 8114 /* Gimplification will pull the CALL_EXPR for the builtin out of
8115 an if condition. When not optimizing, we'll not CSE it back.
8116 To avoid link error types of regressions, return false now. */
8117 if (!val && !optimize)
8118 val = integer_zero_node;
8119
8120 return val;
8121 }
650e4c94 8122
539a3a92 8123 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 8124 return fold_builtin_classify_type (arg0);
539a3a92 8125
650e4c94 8126 case BUILT_IN_STRLEN:
c7cbde74 8127 return fold_builtin_strlen (loc, type, arg0);
650e4c94 8128
4f35b1fc 8129 CASE_FLT_FN (BUILT_IN_FABS):
012f068a 8130 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8aa32773 8131 case BUILT_IN_FABSD32:
8132 case BUILT_IN_FABSD64:
8133 case BUILT_IN_FABSD128:
389dd41b 8134 return fold_builtin_fabs (loc, arg0, type);
d1aade50 8135
8136 case BUILT_IN_ABS:
8137 case BUILT_IN_LABS:
8138 case BUILT_IN_LLABS:
8139 case BUILT_IN_IMAXABS:
389dd41b 8140 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 8141
4f35b1fc 8142 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 8143 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 8144 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 8145 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 8146 break;
36d3581d 8147
4f35b1fc 8148 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 8149 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 8150 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 8151 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 8152 break;
36d3581d 8153
4f35b1fc 8154 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 8155 if (validate_arg (arg0, COMPLEX_TYPE)
8156 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 8157 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 8158 break;
36d3581d 8159
6c21be92 8160 CASE_FLT_FN (BUILT_IN_CARG):
8161 return fold_builtin_carg (loc, arg0, type);
c2373fdb 8162
6c21be92 8163 case BUILT_IN_ISASCII:
8164 return fold_builtin_isascii (loc, arg0);
48e1416a 8165
6c21be92 8166 case BUILT_IN_TOASCII:
8167 return fold_builtin_toascii (loc, arg0);
48e1416a 8168
6c21be92 8169 case BUILT_IN_ISDIGIT:
8170 return fold_builtin_isdigit (loc, arg0);
48e1416a 8171
6c21be92 8172 CASE_FLT_FN (BUILT_IN_FINITE):
8173 case BUILT_IN_FINITED32:
8174 case BUILT_IN_FINITED64:
8175 case BUILT_IN_FINITED128:
8176 case BUILT_IN_ISFINITE:
8177 {
8178 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8179 if (ret)
8180 return ret;
8181 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8182 }
48e1416a 8183
6c21be92 8184 CASE_FLT_FN (BUILT_IN_ISINF):
8185 case BUILT_IN_ISINFD32:
8186 case BUILT_IN_ISINFD64:
8187 case BUILT_IN_ISINFD128:
8188 {
8189 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8190 if (ret)
8191 return ret;
8192 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8193 }
48e1416a 8194
6c21be92 8195 case BUILT_IN_ISNORMAL:
8196 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
48e1416a 8197
6c21be92 8198 case BUILT_IN_ISINF_SIGN:
8199 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
48e1416a 8200
6c21be92 8201 CASE_FLT_FN (BUILT_IN_ISNAN):
8202 case BUILT_IN_ISNAND32:
8203 case BUILT_IN_ISNAND64:
8204 case BUILT_IN_ISNAND128:
8205 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
48e1416a 8206
6c21be92 8207 case BUILT_IN_FREE:
8208 if (integer_zerop (arg0))
8209 return build_empty_stmt (loc);
d064d976 8210 break;
c63f4ad3 8211
6c21be92 8212 default:
8b4af95f 8213 break;
6c21be92 8214 }
805e22b2 8215
6c21be92 8216 return NULL_TREE;
3bc5c41b 8217
6c21be92 8218}
728bac60 8219
6c21be92 8220/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8221 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 8222
8223static tree
e80cc485 8224fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 8225{
8226 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8227 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8228
6c21be92 8229 if (TREE_CODE (arg0) == ERROR_MARK
8230 || TREE_CODE (arg1) == ERROR_MARK)
8231 return NULL_TREE;
e5407ca6 8232
744fe358 8233 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
6c21be92 8234 return ret;
e84da7c1 8235
6c21be92 8236 switch (fcode)
8237 {
e84da7c1 8238 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8239 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8240 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8241 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 8242 return do_mpfr_lgamma_r (arg0, arg1, type);
8243 break;
c2f47e15 8244
3838b9ae 8245 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 8246 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 8247
ebf8b4f5 8248 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 8249 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 8250
c2f47e15 8251 case BUILT_IN_STRSTR:
389dd41b 8252 return fold_builtin_strstr (loc, arg0, arg1, type);
c2f47e15 8253
c2f47e15 8254 case BUILT_IN_STRSPN:
389dd41b 8255 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 8256
8257 case BUILT_IN_STRCSPN:
389dd41b 8258 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 8259
c2f47e15 8260 case BUILT_IN_STRPBRK:
389dd41b 8261 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 8262
8263 case BUILT_IN_EXPECT:
c83059be 8264 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 8265
9bc9f15f 8266 case BUILT_IN_ISGREATER:
389dd41b 8267 return fold_builtin_unordered_cmp (loc, fndecl,
8268 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 8269 case BUILT_IN_ISGREATEREQUAL:
389dd41b 8270 return fold_builtin_unordered_cmp (loc, fndecl,
8271 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 8272 case BUILT_IN_ISLESS:
389dd41b 8273 return fold_builtin_unordered_cmp (loc, fndecl,
8274 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 8275 case BUILT_IN_ISLESSEQUAL:
389dd41b 8276 return fold_builtin_unordered_cmp (loc, fndecl,
8277 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 8278 case BUILT_IN_ISLESSGREATER:
389dd41b 8279 return fold_builtin_unordered_cmp (loc, fndecl,
8280 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 8281 case BUILT_IN_ISUNORDERED:
389dd41b 8282 return fold_builtin_unordered_cmp (loc, fndecl,
8283 arg0, arg1, UNORDERED_EXPR,
d5019fe8 8284 NOP_EXPR);
9bc9f15f 8285
7c2f0500 8286 /* We do the folding for va_start in the expander. */
8287 case BUILT_IN_VA_START:
8288 break;
f0613857 8289
0a39fd54 8290 case BUILT_IN_OBJECT_SIZE:
c2f47e15 8291 return fold_builtin_object_size (arg0, arg1);
0a39fd54 8292
1cd6e20d 8293 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8294 return fold_builtin_atomic_always_lock_free (arg0, arg1);
8295
8296 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8297 return fold_builtin_atomic_is_lock_free (arg0, arg1);
8298
c2f47e15 8299 default:
8300 break;
8301 }
8302 return NULL_TREE;
8303}
8304
8305/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 8306 and ARG2.
c2f47e15 8307 This function returns NULL_TREE if no simplification was possible. */
8308
8309static tree
389dd41b 8310fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 8311 tree arg0, tree arg1, tree arg2)
c2f47e15 8312{
8313 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8314 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6c21be92 8315
8316 if (TREE_CODE (arg0) == ERROR_MARK
8317 || TREE_CODE (arg1) == ERROR_MARK
8318 || TREE_CODE (arg2) == ERROR_MARK)
8319 return NULL_TREE;
8320
744fe358 8321 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8322 arg0, arg1, arg2))
6c21be92 8323 return ret;
8324
c2f47e15 8325 switch (fcode)
8326 {
8327
8328 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 8329 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 8330
8331 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 8332 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 8333
e5407ca6 8334 CASE_FLT_FN (BUILT_IN_REMQUO):
8335 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 8336 && validate_arg (arg1, REAL_TYPE)
8337 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 8338 return do_mpfr_remquo (arg0, arg1, arg2);
8339 break;
e5407ca6 8340
7959b13b 8341 case BUILT_IN_MEMCHR:
389dd41b 8342 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
7959b13b 8343
c2f47e15 8344 case BUILT_IN_BCMP:
8345 case BUILT_IN_MEMCMP:
389dd41b 8346 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 8347
c83059be 8348 case BUILT_IN_EXPECT:
8349 return fold_builtin_expect (loc, arg0, arg1, arg2);
8350
0c93c8a9 8351 case BUILT_IN_ADD_OVERFLOW:
8352 case BUILT_IN_SUB_OVERFLOW:
8353 case BUILT_IN_MUL_OVERFLOW:
732905bb 8354 case BUILT_IN_ADD_OVERFLOW_P:
8355 case BUILT_IN_SUB_OVERFLOW_P:
8356 case BUILT_IN_MUL_OVERFLOW_P:
0c93c8a9 8357 case BUILT_IN_SADD_OVERFLOW:
8358 case BUILT_IN_SADDL_OVERFLOW:
8359 case BUILT_IN_SADDLL_OVERFLOW:
8360 case BUILT_IN_SSUB_OVERFLOW:
8361 case BUILT_IN_SSUBL_OVERFLOW:
8362 case BUILT_IN_SSUBLL_OVERFLOW:
8363 case BUILT_IN_SMUL_OVERFLOW:
8364 case BUILT_IN_SMULL_OVERFLOW:
8365 case BUILT_IN_SMULLL_OVERFLOW:
8366 case BUILT_IN_UADD_OVERFLOW:
8367 case BUILT_IN_UADDL_OVERFLOW:
8368 case BUILT_IN_UADDLL_OVERFLOW:
8369 case BUILT_IN_USUB_OVERFLOW:
8370 case BUILT_IN_USUBL_OVERFLOW:
8371 case BUILT_IN_USUBLL_OVERFLOW:
8372 case BUILT_IN_UMUL_OVERFLOW:
8373 case BUILT_IN_UMULL_OVERFLOW:
8374 case BUILT_IN_UMULLL_OVERFLOW:
8375 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8376
650e4c94 8377 default:
8378 break;
8379 }
c2f47e15 8380 return NULL_TREE;
8381}
650e4c94 8382
c2f47e15 8383/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 8384 arguments. IGNORE is true if the result of the
8385 function call is ignored. This function returns NULL_TREE if no
8386 simplification was possible. */
48e1416a 8387
2165588a 8388tree
e80cc485 8389fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 8390{
8391 tree ret = NULL_TREE;
a7f5bb2d 8392
c2f47e15 8393 switch (nargs)
8394 {
8395 case 0:
e80cc485 8396 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 8397 break;
8398 case 1:
e80cc485 8399 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 8400 break;
8401 case 2:
e80cc485 8402 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 8403 break;
8404 case 3:
e80cc485 8405 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 8406 break;
c2f47e15 8407 default:
e80cc485 8408 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 8409 break;
8410 }
8411 if (ret)
8412 {
75a70cf9 8413 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 8414 SET_EXPR_LOCATION (ret, loc);
c2f47e15 8415 TREE_NO_WARNING (ret) = 1;
8416 return ret;
8417 }
8418 return NULL_TREE;
8419}
8420
0e80b01d 8421/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8422 list ARGS along with N new arguments in NEWARGS. SKIP is the number
8423 of arguments in ARGS to be omitted. OLDNARGS is the number of
8424 elements in ARGS. */
c2f47e15 8425
8426static tree
0e80b01d 8427rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8428 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 8429{
0e80b01d 8430 int nargs = oldnargs - skip + n;
8431 tree *buffer;
c2f47e15 8432
0e80b01d 8433 if (n > 0)
c2f47e15 8434 {
0e80b01d 8435 int i, j;
c2f47e15 8436
0e80b01d 8437 buffer = XALLOCAVEC (tree, nargs);
8438 for (i = 0; i < n; i++)
8439 buffer[i] = va_arg (newargs, tree);
8440 for (j = skip; j < oldnargs; j++, i++)
8441 buffer[i] = args[j];
8442 }
8443 else
8444 buffer = args + skip;
19fbe3a4 8445
0e80b01d 8446 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8447}
c2f47e15 8448
198622c0 8449/* Return true if FNDECL shouldn't be folded right now.
8450 If a built-in function has an inline attribute always_inline
8451 wrapper, defer folding it after always_inline functions have
8452 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8453 might not be performed. */
8454
51d2c51e 8455bool
198622c0 8456avoid_folding_inline_builtin (tree fndecl)
8457{
8458 return (DECL_DECLARED_INLINE_P (fndecl)
8459 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8460 && cfun
8461 && !cfun->always_inline_functions_inlined
8462 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8463}
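/* The wrappers this check waits for follow the pattern of the
   _FORTIFY_SOURCE headers; a sketch modeled on (not copied from)
   glibc:  */

extern inline __attribute__ ((always_inline, gnu_inline)) char *
strcpy (char *dest, const char *src)
{
  return __builtin___strcpy_chk (dest, src,
				 __builtin_object_size (dest, 1));
}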
8464
4ee9c684 8465/* A wrapper function for builtin folding that prevents warnings for
491e04ef 8466 "statement without effect" and the like, caused by removing the
4ee9c684 8467 call node earlier than the warning is generated. */
8468
8469tree
389dd41b 8470fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 8471{
c2f47e15 8472 tree ret = NULL_TREE;
8473 tree fndecl = get_callee_fndecl (exp);
8474 if (fndecl
8475 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 8476 && DECL_BUILT_IN (fndecl)
8477 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8478 yet. Defer folding until we see all the arguments
8479 (after inlining). */
8480 && !CALL_EXPR_VA_ARG_PACK (exp))
8481 {
8482 int nargs = call_expr_nargs (exp);
8483
8484 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
 8485 instead the last argument is __builtin_va_arg_pack ().  Defer folding
8486 even in that case, until arguments are finalized. */
8487 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8488 {
8489 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8490 if (fndecl2
8491 && TREE_CODE (fndecl2) == FUNCTION_DECL
8492 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8493 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8494 return NULL_TREE;
8495 }
8496
198622c0 8497 if (avoid_folding_inline_builtin (fndecl))
8498 return NULL_TREE;
8499
c2f47e15 8500 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 8501 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8502 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 8503 else
8504 {
9d884767 8505 tree *args = CALL_EXPR_ARGP (exp);
8506 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 8507 if (ret)
389dd41b 8508 return ret;
c2f47e15 8509 }
4ee9c684 8510 }
c2f47e15 8511 return NULL_TREE;
8512}
48e1416a 8513
9d884767 8514/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8515 N arguments are passed in the array ARGARRAY. Return a folded
8516 expression or NULL_TREE if no simplification was possible. */
805e22b2 8517
8518tree
9d884767 8519fold_builtin_call_array (location_t loc, tree,
d01f58f9 8520 tree fn,
8521 int n,
8522 tree *argarray)
7e15618b 8523{
9d884767 8524 if (TREE_CODE (fn) != ADDR_EXPR)
8525 return NULL_TREE;
c2f47e15 8526
9d884767 8527 tree fndecl = TREE_OPERAND (fn, 0);
8528 if (TREE_CODE (fndecl) == FUNCTION_DECL
8529 && DECL_BUILT_IN (fndecl))
8530 {
8531 /* If last argument is __builtin_va_arg_pack (), arguments to this
8532 function are not finalized yet. Defer folding until they are. */
8533 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8534 {
8535 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8536 if (fndecl2
8537 && TREE_CODE (fndecl2) == FUNCTION_DECL
8538 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8539 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8540 return NULL_TREE;
8541 }
8542 if (avoid_folding_inline_builtin (fndecl))
8543 return NULL_TREE;
8544 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8545 return targetm.fold_builtin (fndecl, n, argarray, false);
8546 else
8547 return fold_builtin_n (loc, fndecl, argarray, n, false);
8548 }
c2f47e15 8549
9d884767 8550 return NULL_TREE;
c2f47e15 8551}
8552
af1409ad 8553/* Construct a new CALL_EXPR using the tail of the argument list of EXP
8554 along with N new arguments specified as the "..." parameters. SKIP
8555 is the number of arguments in EXP to be omitted. This function is used
8556 to do varargs-to-varargs transformations. */
8557
8558static tree
8559rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8560{
8561 va_list ap;
8562 tree t;
8563
8564 va_start (ap, n);
8565 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8566 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8567 va_end (ap);
c2f47e15 8568
af1409ad 8569 return t;
c2f47e15 8570}
8571
8572/* Validate a single argument ARG against a tree code CODE representing
8573 a type. */
48e1416a 8574
c2f47e15 8575static bool
b7bf20db 8576validate_arg (const_tree arg, enum tree_code code)
c2f47e15 8577{
8578 if (!arg)
8579 return false;
8580 else if (code == POINTER_TYPE)
8581 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 8582 else if (code == INTEGER_TYPE)
8583 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 8584 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 8585}
0eb671f7 8586
75a70cf9 8587/* This function validates the types of a function call argument list
8588 against a specified list of tree_codes. If the last specifier is a 0,
 8589 that represents an ellipsis, otherwise the last specifier must be a
8590 VOID_TYPE.
8591
8592 This is the GIMPLE version of validate_arglist. Eventually we want to
8593 completely convert builtins.c to work from GIMPLEs and the tree based
8594 validate_arglist will then be removed. */
8595
8596bool
1a91d914 8597validate_gimple_arglist (const gcall *call, ...)
75a70cf9 8598{
8599 enum tree_code code;
8600 bool res = 0;
8601 va_list ap;
8602 const_tree arg;
8603 size_t i;
8604
8605 va_start (ap, call);
8606 i = 0;
8607
8608 do
8609 {
d62e827b 8610 code = (enum tree_code) va_arg (ap, int);
75a70cf9 8611 switch (code)
8612 {
8613 case 0:
 8614 /* This signifies an ellipsis; any further arguments are all ok. */
8615 res = true;
8616 goto end;
8617 case VOID_TYPE:
8618 /* This signifies an endlink, if no arguments remain, return
8619 true, otherwise return false. */
8620 res = (i == gimple_call_num_args (call));
8621 goto end;
8622 default:
8623 /* If no parameters remain or the parameter's code does not
8624 match the specified code, return false. Otherwise continue
8625 checking any remaining arguments. */
8626 arg = gimple_call_arg (call, i++);
8627 if (!validate_arg (arg, code))
8628 goto end;
8629 break;
8630 }
8631 }
8632 while (1);
8633
8634 /* We need gotos here since we can only have one VA_CLOSE in a
8635 function. */
8636 end: ;
8637 va_end (ap);
8638
8639 return res;
8640}
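/* A typical use, mirroring the tree-level validate_arglist calls
   elsewhere in this file; the sketch accepts a
   (void *, const void *, size_t)-style builtin:

     if (!validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				   INTEGER_TYPE, VOID_TYPE))
       return false;  */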
8641
fc2a2dcb 8642/* Default target-specific builtin expander that does nothing. */
8643
8644rtx
aecda0d6 8645default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8646 rtx target ATTRIBUTE_UNUSED,
8647 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 8648 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 8649 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 8650{
8651 return NULL_RTX;
8652}
c7926a82 8653
01537105 8654/* Returns true if EXP represents data that would potentially reside
8655 in a readonly section. */
8656
b9ea678c 8657bool
01537105 8658readonly_data_expr (tree exp)
8659{
8660 STRIP_NOPS (exp);
8661
9ff0637e 8662 if (TREE_CODE (exp) != ADDR_EXPR)
8663 return false;
8664
8665 exp = get_base_address (TREE_OPERAND (exp, 0));
8666 if (!exp)
8667 return false;
8668
8669 /* Make sure we call decl_readonly_section only for trees it
8670 can handle (since it returns true for everything it doesn't
8671 understand). */
491e04ef 8672 if (TREE_CODE (exp) == STRING_CST
9ff0637e 8673 || TREE_CODE (exp) == CONSTRUCTOR
53e9c5c4 8674 || (VAR_P (exp) && TREE_STATIC (exp)))
9ff0637e 8675 return decl_readonly_section (exp, 0);
01537105 8676 else
8677 return false;
8678}
4ee9c684 8679
c2f47e15 8680/* Simplify a call to the strstr builtin. S1 and S2 are the arguments
8681 to the call, and TYPE is its return type.
4ee9c684 8682
c2f47e15 8683 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 8684 simplified form of the call as a tree.
8685
8686 The simplified form may be a constant or other expression which
8687 computes the same value, but in a more efficient manner (including
8688 calls to other builtin functions).
8689
8690 The call may contain arguments which need to be evaluated, but
8691 which are not useful to determine the result of the call. In
8692 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8693 COMPOUND_EXPR will be an argument which must be evaluated.
8694 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8695 COMPOUND_EXPR in the chain will contain the tree for the simplified
8696 form of the builtin function call. */
8697
8698static tree
389dd41b 8699fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 8700{
c2f47e15 8701 if (!validate_arg (s1, POINTER_TYPE)
8702 || !validate_arg (s2, POINTER_TYPE))
8703 return NULL_TREE;
4ee9c684 8704 else
8705 {
4ee9c684 8706 tree fn;
8707 const char *p1, *p2;
8708
8709 p2 = c_getstr (s2);
8710 if (p2 == NULL)
c2f47e15 8711 return NULL_TREE;
4ee9c684 8712
8713 p1 = c_getstr (s1);
8714 if (p1 != NULL)
8715 {
8716 const char *r = strstr (p1, p2);
daa1d5f5 8717 tree tem;
4ee9c684 8718
4ee9c684 8719 if (r == NULL)
779b4c41 8720 return build_int_cst (TREE_TYPE (s1), 0);
c0c67e38 8721
8722 /* Return an offset into the constant string argument. */
2cc66f2a 8723 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 8724 return fold_convert_loc (loc, type, tem);
4ee9c684 8725 }
8726
7efa231c 8727 /* The argument is const char *, and the result is char *, so we need
8728 a type conversion here to avoid a warning. */
4ee9c684 8729 if (p2[0] == '\0')
389dd41b 8730 return fold_convert_loc (loc, type, s1);
4ee9c684 8731
8732 if (p2[1] != '\0')
c2f47e15 8733 return NULL_TREE;
4ee9c684 8734
b9a16870 8735 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 8736 if (!fn)
c2f47e15 8737 return NULL_TREE;
4ee9c684 8738
8739 /* New argument list transforming strstr(s1, s2) to
8740 strchr(s1, s2[0]). */
7002a1c8 8741 return build_call_expr_loc (loc, fn, 2, s1,
8742 build_int_cst (integer_type_node, p2[0]));
4ee9c684 8743 }
8744}
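/* The three simplifications above, summarized (sketch):

     strstr ("hello", "ll") -> "hello" + 2	(offset into constant)
     strstr (s, "")	    -> (char *) s
     strstr (s, "l")	    -> strchr (s, 'l')  */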
8745
c2f47e15 8746/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
8747 to the call, and TYPE is its return type.
4ee9c684 8748
c2f47e15 8749 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 8750 simplified form of the call as a tree.
8751
8752 The simplified form may be a constant or other expression which
8753 computes the same value, but in a more efficient manner (including
8754 calls to other builtin functions).
8755
8756 The call may contain arguments which need to be evaluated, but
8757 which are not useful to determine the result of the call. In
8758 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8759 COMPOUND_EXPR will be an argument which must be evaluated.
8760 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8761 COMPOUND_EXPR in the chain will contain the tree for the simplified
8762 form of the builtin function call. */
8763
8764static tree
389dd41b 8765fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 8766{
c2f47e15 8767 if (!validate_arg (s1, POINTER_TYPE)
8768 || !validate_arg (s2, POINTER_TYPE))
8769 return NULL_TREE;
4ee9c684 8770 else
8771 {
4ee9c684 8772 tree fn;
8773 const char *p1, *p2;
8774
8775 p2 = c_getstr (s2);
8776 if (p2 == NULL)
c2f47e15 8777 return NULL_TREE;
4ee9c684 8778
8779 p1 = c_getstr (s1);
8780 if (p1 != NULL)
8781 {
8782 const char *r = strpbrk (p1, p2);
daa1d5f5 8783 tree tem;
4ee9c684 8784
8785 if (r == NULL)
779b4c41 8786 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 8787
8788 /* Return an offset into the constant string argument. */
2cc66f2a 8789 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 8790 return fold_convert_loc (loc, type, tem);
4ee9c684 8791 }
8792
8793 if (p2[0] == '\0')
05abc81b 8794 /* strpbrk(x, "") == NULL.
8795 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 8796 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 8797
8798 if (p2[1] != '\0')
c2f47e15 8799 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 8800
b9a16870 8801 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 8802 if (!fn)
c2f47e15 8803 return NULL_TREE;
4ee9c684 8804
8805 /* New argument list transforming strpbrk(s1, s2) to
8806 strchr(s1, s2[0]). */
7002a1c8 8807 return build_call_expr_loc (loc, fn, 2, s1,
8808 build_int_cst (integer_type_node, p2[0]));
4ee9c684 8809 }
8810}
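/* Likewise for strpbrk (sketch):

     strpbrk ("hello", "lo") -> "hello" + 2
     strpbrk (s, "")	     -> NULL  (s is still evaluated)
     strpbrk (s, "l")	     -> strchr (s, 'l')  */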
8811
c2f47e15 8812/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
8813 to the call.
4ee9c684 8814
c2f47e15 8815 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 8816 simplified form of the call as a tree.
8817
8818 The simplified form may be a constant or other expression which
8819 computes the same value, but in a more efficient manner (including
8820 calls to other builtin functions).
8821
8822 The call may contain arguments which need to be evaluated, but
8823 which are not useful to determine the result of the call. In
8824 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8825 COMPOUND_EXPR will be an argument which must be evaluated.
8826 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8827 COMPOUND_EXPR in the chain will contain the tree for the simplified
8828 form of the builtin function call. */
8829
8830static tree
389dd41b 8831fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 8832{
c2f47e15 8833 if (!validate_arg (s1, POINTER_TYPE)
8834 || !validate_arg (s2, POINTER_TYPE))
8835 return NULL_TREE;
4ee9c684 8836 else
8837 {
4ee9c684 8838 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8839
c2f47e15 8840 /* If either argument is "", return NULL_TREE. */
4ee9c684 8841 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 8842 /* Evaluate and ignore both arguments in case either one has
8843 side-effects. */
389dd41b 8844 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 8845 s1, s2);
c2f47e15 8846 return NULL_TREE;
4ee9c684 8847 }
8848}
8849
c2f47e15 8850/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
8851 to the call.
4ee9c684 8852
c2f47e15 8853 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 8854 simplified form of the call as a tree.
8855
8856 The simplified form may be a constant or other expression which
8857 computes the same value, but in a more efficient manner (including
8858 calls to other builtin functions).
8859
8860 The call may contain arguments which need to be evaluated, but
8861 which are not useful to determine the result of the call. In
8862 this case we return a chain of COMPOUND_EXPRs. The LHS of each
8863 COMPOUND_EXPR will be an argument which must be evaluated.
8864 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
8865 COMPOUND_EXPR in the chain will contain the tree for the simplified
8866 form of the builtin function call. */
8867
8868static tree
389dd41b 8869fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 8870{
c2f47e15 8871 if (!validate_arg (s1, POINTER_TYPE)
8872 || !validate_arg (s2, POINTER_TYPE))
8873 return NULL_TREE;
4ee9c684 8874 else
8875 {
c2f47e15 8876 /* If the first argument is "", return NULL_TREE. */
b5e46e2c 8877 const char *p1 = c_getstr (s1);
4ee9c684 8878 if (p1 && *p1 == '\0')
8879 {
8880 /* Evaluate and ignore argument s2 in case it has
8881 side-effects. */
389dd41b 8882 return omit_one_operand_loc (loc, size_type_node,
39761420 8883 size_zero_node, s2);
4ee9c684 8884 }
8885
8886 /* If the second argument is "", return __builtin_strlen(s1). */
b5e46e2c 8887 const char *p2 = c_getstr (s2);
4ee9c684 8888 if (p2 && *p2 == '\0')
8889 {
b9a16870 8890 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 8891
8892 /* If the replacement _DECL isn't initialized, don't do the
8893 transformation. */
8894 if (!fn)
c2f47e15 8895 return NULL_TREE;
4ee9c684 8896
389dd41b 8897 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 8898 }
c2f47e15 8899 return NULL_TREE;
4ee9c684 8900 }
8901}
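/* The strspn and strcspn folds above, summarized (sketch):

     strspn (s, "") or strspn ("", s) -> 0  (both arguments are still
					     evaluated)
     strcspn ("", s)		      -> 0  (s is still evaluated)
     strcspn (s, "")		      -> strlen (s)  */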
8902
c2f47e15 8903/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 8904 produced. False otherwise. This is done so that we don't output the error
8905 or warning twice or three times. */
75a70cf9 8906
743b0c6a 8907bool
c2f47e15 8908fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 8909{
8910 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 8911 int nargs = call_expr_nargs (exp);
8912 tree arg;
d98fd4a4 8913 /* There is a good chance the current input_location points inside the
8914 definition of the va_start macro (perhaps on the token for
8915 builtin) in a system header, so warnings will not be emitted.
8916 Use the location in real source code. */
8917 source_location current_location =
8918 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8919 NULL);
4ee9c684 8920
257d99c3 8921 if (!stdarg_p (fntype))
743b0c6a 8922 {
8923 error ("%<va_start%> used in function with fixed args");
8924 return true;
8925 }
c2f47e15 8926
8927 if (va_start_p)
79012a9d 8928 {
c2f47e15 8929 if (va_start_p && (nargs != 2))
8930 {
8931 error ("wrong number of arguments to function %<va_start%>");
8932 return true;
8933 }
8934 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 8935 }
8936 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
8937 when we checked the arguments and if needed issued a warning. */
c2f47e15 8938 else
4ee9c684 8939 {
c2f47e15 8940 if (nargs == 0)
8941 {
8942 /* Evidently an out of date version of <stdarg.h>; can't validate
8943 va_start's second argument, but can still work as intended. */
d98fd4a4 8944 warning_at (current_location,
7edb1062 8945 OPT_Wvarargs,
8946 "%<__builtin_next_arg%> called without an argument");
c2f47e15 8947 return true;
8948 }
8949 else if (nargs > 1)
a0c938f0 8950 {
c2f47e15 8951 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 8952 return true;
8953 }
c2f47e15 8954 arg = CALL_EXPR_ARG (exp, 0);
8955 }
8956
a8dd994c 8957 if (TREE_CODE (arg) == SSA_NAME)
8958 arg = SSA_NAME_VAR (arg);
8959
c2f47e15 8960 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 8961 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 8962 the arguments and if needed issuing a warning. */
8963 if (!integer_zerop (arg))
8964 {
8965 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 8966
4ee9c684 8967 /* Strip off all nops for the sake of the comparison. This
8968 is not quite the same as STRIP_NOPS. It does more.
8969 We must also strip off INDIRECT_EXPR for C++ reference
8970 parameters. */
72dd6141 8971 while (CONVERT_EXPR_P (arg)
4ee9c684 8972 || TREE_CODE (arg) == INDIRECT_REF)
8973 arg = TREE_OPERAND (arg, 0);
8974 if (arg != last_parm)
a0c938f0 8975 {
b08cf617 8976 /* FIXME: Sometimes with the tree optimizers we can end up
 8977 with something other than the last argument even though the
 8978 user used the last argument.  We just warn and keep the
 8979 argument as is, which means we may generate wrong code
 8980 because of it.  */
d98fd4a4 8981 warning_at (current_location,
7edb1062 8982 OPT_Wvarargs,
d98fd4a4 8983 "second parameter of %<va_start%> not last named argument");
743b0c6a 8984 }
24158ad7 8985
8986 /* Undefined by C99 7.15.1.4p4 (va_start):
8987 "If the parameter parmN is declared with the register storage
8988 class, with a function or array type, or with a type that is
8989 not compatible with the type that results after application of
8990 the default argument promotions, the behavior is undefined."
8991 */
8992 else if (DECL_REGISTER (arg))
d98fd4a4 8993 {
8994 warning_at (current_location,
7edb1062 8995 OPT_Wvarargs,
67cf9b55 8996 "undefined behavior when second parameter of "
d98fd4a4 8997 "%<va_start%> is declared with %<register%> storage");
8998 }
24158ad7 8999
79012a9d 9000 /* We want to verify the second parameter just once before the tree
a0c938f0 9001 optimizers are run and then avoid keeping it in the tree,
9002 as otherwise we could warn even for correct code like:
9003 void foo (int i, ...)
9004 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 9005 if (va_start_p)
9006 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9007 else
9008 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 9009 }
9010 return false;
4ee9c684 9011}
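/* A sketch of a call the check above diagnoses:  */

#include <stdarg.h>

void
bad_va_start_sketch (int a, int b, ...)
{
  va_list ap;
  va_start (ap, a);	/* warning: second parameter of va_start not
			   last named argument */
  va_end (ap);
}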
9012
9013
c2f47e15 9014/* Expand a call EXP to __builtin_object_size. */
0a39fd54 9015
f7715905 9016static rtx
0a39fd54 9017expand_builtin_object_size (tree exp)
9018{
9019 tree ost;
9020 int object_size_type;
9021 tree fndecl = get_callee_fndecl (exp);
0a39fd54 9022
c2f47e15 9023 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 9024 {
b8c23db3 9025 error ("%Kfirst argument of %D must be a pointer, second integer constant",
9026 exp, fndecl);
0a39fd54 9027 expand_builtin_trap ();
9028 return const0_rtx;
9029 }
9030
c2f47e15 9031 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 9032 STRIP_NOPS (ost);
9033
9034 if (TREE_CODE (ost) != INTEGER_CST
9035 || tree_int_cst_sgn (ost) < 0
9036 || compare_tree_int (ost, 3) > 0)
9037 {
b8c23db3 9038 error ("%Klast argument of %D is not integer constant between 0 and 3",
9039 exp, fndecl);
0a39fd54 9040 expand_builtin_trap ();
9041 return const0_rtx;
9042 }
9043
e913b5cd 9044 object_size_type = tree_to_shwi (ost);
0a39fd54 9045
9046 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9047}
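/* The fallback constants above give the documented behavior when the
   pointed-to object is unknown; the function name and pointer P are
   illustrative.  */

#include <stddef.h>

size_t
object_size_demo_sketch (char *p)
{
  char buf[64];
  size_t sz_known = __builtin_object_size (buf, 0);  /* 64, via the
							folder below */
  size_t sz_max = __builtin_object_size (p, 0);	     /* (size_t) -1 */
  size_t sz_min = __builtin_object_size (p, 2);	     /* 0 */
  return sz_known + sz_max + sz_min;
}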
9048
9049/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9050 FCODE is the BUILT_IN_* to use.
c2f47e15 9051 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 9052 otherwise try to get the result in TARGET, if convenient (and in
9053 mode MODE if that's convenient). */
9054
9055static rtx
3754d046 9056expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 9057 enum built_in_function fcode)
9058{
0a39fd54 9059 tree dest, src, len, size;
9060
c2f47e15 9061 if (!validate_arglist (exp,
0a39fd54 9062 POINTER_TYPE,
9063 fcode == BUILT_IN_MEMSET_CHK
9064 ? INTEGER_TYPE : POINTER_TYPE,
9065 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 9066 return NULL_RTX;
0a39fd54 9067
c2f47e15 9068 dest = CALL_EXPR_ARG (exp, 0);
9069 src = CALL_EXPR_ARG (exp, 1);
9070 len = CALL_EXPR_ARG (exp, 2);
9071 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 9072
e913b5cd 9073 if (! tree_fits_uhwi_p (size))
c2f47e15 9074 return NULL_RTX;
0a39fd54 9075
e913b5cd 9076 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 9077 {
9078 tree fn;
9079
9080 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9081 {
b430e8d9 9082 warning_at (tree_nonartificial_location (exp),
9083 0, "%Kcall to %D will always overflow destination buffer",
9084 exp, get_callee_fndecl (exp));
c2f47e15 9085 return NULL_RTX;
0a39fd54 9086 }
9087
0a39fd54 9088 fn = NULL_TREE;
9089 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9090 mem{cpy,pcpy,move,set} is available. */
9091 switch (fcode)
9092 {
9093 case BUILT_IN_MEMCPY_CHK:
b9a16870 9094 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 9095 break;
9096 case BUILT_IN_MEMPCPY_CHK:
b9a16870 9097 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 9098 break;
9099 case BUILT_IN_MEMMOVE_CHK:
b9a16870 9100 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 9101 break;
9102 case BUILT_IN_MEMSET_CHK:
b9a16870 9103 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 9104 break;
9105 default:
9106 break;
9107 }
9108
9109 if (! fn)
c2f47e15 9110 return NULL_RTX;
0a39fd54 9111
0568e9c1 9112 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 9113 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9114 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9115 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9116 }
9117 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 9118 return NULL_RTX;
0a39fd54 9119 else
9120 {
957d0361 9121 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 9122
9123 /* If DEST is not a pointer type, call the normal function. */
9124 if (dest_align == 0)
c2f47e15 9125 return NULL_RTX;
0a39fd54 9126
9127 /* If SRC and DEST are the same (and not volatile), do nothing. */
9128 if (operand_equal_p (src, dest, 0))
9129 {
9130 tree expr;
9131
9132 if (fcode != BUILT_IN_MEMPCPY_CHK)
9133 {
9134 /* Evaluate and ignore LEN in case it has side-effects. */
9135 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9136 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9137 }
9138
2cc66f2a 9139 expr = fold_build_pointer_plus (dest, len);
0a39fd54 9140 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9141 }
9142
9143 /* __memmove_chk special case. */
9144 if (fcode == BUILT_IN_MEMMOVE_CHK)
9145 {
957d0361 9146 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 9147
9148 if (src_align == 0)
c2f47e15 9149 return NULL_RTX;
0a39fd54 9150
9151 /* If src is categorized for a readonly section we can use
9152 normal __memcpy_chk. */
9153 if (readonly_data_expr (src))
9154 {
b9a16870 9155 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 9156 if (!fn)
c2f47e15 9157 return NULL_RTX;
0568e9c1 9158 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9159 dest, src, len, size);
a65c4d64 9160 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9161 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 9162 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9163 }
9164 }
c2f47e15 9165 return NULL_RTX;
0a39fd54 9166 }
9167}
9168
9169/* Emit warning if a buffer overflow is detected at compile time. */
9170
9171static void
9172maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9173{
c2f47e15 9174 int is_strlen = 0;
0a39fd54 9175 tree len, size;
b430e8d9 9176 location_t loc = tree_nonartificial_location (exp);
0a39fd54 9177
9178 switch (fcode)
9179 {
9180 case BUILT_IN_STRCPY_CHK:
9181 case BUILT_IN_STPCPY_CHK:
9182 /* For __strcat_chk the warning will be emitted only if overflowing
9183 by at least strlen (dest) + 1 bytes. */
9184 case BUILT_IN_STRCAT_CHK:
c2f47e15 9185 len = CALL_EXPR_ARG (exp, 1);
9186 size = CALL_EXPR_ARG (exp, 2);
0a39fd54 9187 is_strlen = 1;
9188 break;
b356dfef 9189 case BUILT_IN_STRNCAT_CHK:
0a39fd54 9190 case BUILT_IN_STRNCPY_CHK:
1063acde 9191 case BUILT_IN_STPNCPY_CHK:
c2f47e15 9192 len = CALL_EXPR_ARG (exp, 2);
9193 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 9194 break;
9195 case BUILT_IN_SNPRINTF_CHK:
9196 case BUILT_IN_VSNPRINTF_CHK:
c2f47e15 9197 len = CALL_EXPR_ARG (exp, 1);
9198 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 9199 break;
9200 default:
9201 gcc_unreachable ();
9202 }
9203
0a39fd54 9204 if (!len || !size)
9205 return;
9206
e913b5cd 9207 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 9208 return;
9209
9210 if (is_strlen)
9211 {
9212 len = c_strlen (len, 1);
e913b5cd 9213 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
0a39fd54 9214 return;
9215 }
b356dfef 9216 else if (fcode == BUILT_IN_STRNCAT_CHK)
9217 {
c2f47e15 9218 tree src = CALL_EXPR_ARG (exp, 1);
e913b5cd 9219 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
b356dfef 9220 return;
9221 src = c_strlen (src, 1);
e913b5cd 9222 if (! src || ! tree_fits_uhwi_p (src))
b356dfef 9223 {
b430e8d9 9224 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9225 exp, get_callee_fndecl (exp));
b356dfef 9226 return;
9227 }
9228 else if (tree_int_cst_lt (src, size))
9229 return;
9230 }
e913b5cd 9231 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
0a39fd54 9232 return;
9233
b430e8d9 9234 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9235 exp, get_callee_fndecl (exp));
0a39fd54 9236}
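/* A sketch of a call this function diagnoses, shaped like the result
   of fortify wrapping (sizes are illustrative):  */

void
chk_warning_demo_sketch (void)
{
  char d[4];
  /* strlen ("toolong") >= 4, so:
     warning: call to __builtin___strcpy_chk will always overflow
     destination buffer  */
  __builtin___strcpy_chk (d, "toolong", __builtin_object_size (d, 1));
}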
9237
9238/* Emit warning if a buffer overflow is detected at compile time
9239 in __sprintf_chk/__vsprintf_chk calls. */
9240
9241static void
9242maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
9243{
1e4adcfc 9244 tree size, len, fmt;
0a39fd54 9245 const char *fmt_str;
c2f47e15 9246 int nargs = call_expr_nargs (exp);
0a39fd54 9247
9248 /* Verify the required arguments in the original call. */
48e1416a 9249
c2f47e15 9250 if (nargs < 4)
0a39fd54 9251 return;
c2f47e15 9252 size = CALL_EXPR_ARG (exp, 2);
9253 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 9254
e913b5cd 9255 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 9256 return;
9257
9258 /* Check whether the format is a literal string constant. */
9259 fmt_str = c_getstr (fmt);
9260 if (fmt_str == NULL)
9261 return;
9262
d4473c84 9263 if (!init_target_chars ())
99eabcc1 9264 return;
9265
0a39fd54 9266 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 9267 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 9268 len = build_int_cstu (size_type_node, strlen (fmt_str));
9269 /* If the format is "%s" and first ... argument is a string literal,
9270 we know it too. */
c2f47e15 9271 else if (fcode == BUILT_IN_SPRINTF_CHK
9272 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 9273 {
9274 tree arg;
9275
c2f47e15 9276 if (nargs < 5)
0a39fd54 9277 return;
c2f47e15 9278 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 9279 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
9280 return;
9281
9282 len = c_strlen (arg, 1);
e913b5cd 9283 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 9284 return;
9285 }
9286 else
9287 return;
9288
9289 if (! tree_int_cst_lt (len, size))
b430e8d9 9290 warning_at (tree_nonartificial_location (exp),
9291 0, "%Kcall to %D will always overflow destination buffer",
9292 exp, get_callee_fndecl (exp));
0a39fd54 9293}
9294
2c281b15 9295/* Emit warning if a free is called with address of a variable. */
9296
9297static void
9298maybe_emit_free_warning (tree exp)
9299{
9300 tree arg = CALL_EXPR_ARG (exp, 0);
9301
9302 STRIP_NOPS (arg);
9303 if (TREE_CODE (arg) != ADDR_EXPR)
9304 return;
9305
9306 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 9307 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 9308 return;
9309
9310 if (SSA_VAR_P (arg))
f74ea1c2 9311 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9312 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 9313 else
f74ea1c2 9314 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
9315 "%Kattempt to free a non-heap object", exp);
2c281b15 9316}
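/* For example (sketch):  */

void
free_warning_demo_sketch (void)
{
  int x;
  __builtin_free (&x);	/* warning: attempt to free a non-heap
			   object 'x' */
}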
9317
c2f47e15 9318/* Fold a call to __builtin_object_size with arguments PTR and OST,
9319 if possible. */
0a39fd54 9320
f7715905 9321static tree
c2f47e15 9322fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 9323{
a6caa15f 9324 unsigned HOST_WIDE_INT bytes;
0a39fd54 9325 int object_size_type;
9326
c2f47e15 9327 if (!validate_arg (ptr, POINTER_TYPE)
9328 || !validate_arg (ost, INTEGER_TYPE))
9329 return NULL_TREE;
0a39fd54 9330
0a39fd54 9331 STRIP_NOPS (ost);
9332
9333 if (TREE_CODE (ost) != INTEGER_CST
9334 || tree_int_cst_sgn (ost) < 0
9335 || compare_tree_int (ost, 3) > 0)
c2f47e15 9336 return NULL_TREE;
0a39fd54 9337
e913b5cd 9338 object_size_type = tree_to_shwi (ost);
0a39fd54 9339
9340 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
9341 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
9342 and (size_t) 0 for types 2 and 3. */
9343 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 9344 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 9345
9346 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 9347 {
4e91a07b 9348 compute_builtin_object_size (ptr, object_size_type, &bytes);
6da74b21 9349 if (wi::fits_to_tree_p (bytes, size_type_node))
9350 return build_int_cstu (size_type_node, bytes);
a6caa15f 9351 }
0a39fd54 9352 else if (TREE_CODE (ptr) == SSA_NAME)
9353 {
0a39fd54 9354 /* If object size is not known yet, delay folding until
9355 later. Maybe subsequent passes will help determine
9356 it. */
4e91a07b 9357 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
9358 && wi::fits_to_tree_p (bytes, size_type_node))
6da74b21 9359 return build_int_cstu (size_type_node, bytes);
0a39fd54 9360 }
9361
a6caa15f 9362 return NULL_TREE;
0a39fd54 9363}
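
/* Editorial example (not part of the original source): for

     char buf[64];
     size_t n = __builtin_object_size (buf, 0);

   PTR is an ADDR_EXPR, compute_builtin_object_size () yields 64, and
   the call folds to (size_t) 64.  When the pointed-to object cannot
   be determined at compile time, the call is eventually folded (by
   the object-size pass) to (size_t) -1 for types 0 and 1 and to
   (size_t) 0 for types 2 and 3.  */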
9364
0e80b01d 9365/* Builtins with folding operations that operate on "..." arguments
9366 need special handling; we need to store the arguments in a convenient
9367 data structure before attempting any folding. Fortunately there are
9368 only a few builtins that fall into this category. FNDECL is the
e80cc485 9369 function; ARGS points to its NARGS arguments. */
0e80b01d 9370
9371static tree
e80cc485 9372fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
0e80b01d 9373{
9374 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9375 tree ret = NULL_TREE;
9376
9377 switch (fcode)
9378 {
0e80b01d 9379 case BUILT_IN_FPCLASSIFY:
9d884767 9380 ret = fold_builtin_fpclassify (loc, args, nargs);
0e80b01d 9381 break;
9382
9383 default:
9384 break;
9385 }
9386 if (ret)
9387 {
9388 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9389 SET_EXPR_LOCATION (ret, loc);
9390 TREE_NO_WARNING (ret) = 1;
9391 return ret;
9392 }
9393 return NULL_TREE;
9394}
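
/* Editorial example (not part of the original source):
   __builtin_fpclassify is one such "..." builtin; a call like

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   has its six arguments collected into ARGS so that
   fold_builtin_fpclassify () can reduce it to one of the first five
   arguments when X is a constant.  */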
9395
99eabcc1 9396/* Initialize format string characters in the target charset. */
9397
b9ea678c 9398bool
99eabcc1 9399init_target_chars (void)
9400{
9401 static bool init;
9402 if (!init)
9403 {
9404 target_newline = lang_hooks.to_target_charset ('\n');
9405 target_percent = lang_hooks.to_target_charset ('%');
9406 target_c = lang_hooks.to_target_charset ('c');
9407 target_s = lang_hooks.to_target_charset ('s');
9408 if (target_newline == 0 || target_percent == 0 || target_c == 0
9409 || target_s == 0)
9410 return false;
9411
9412 target_percent_c[0] = target_percent;
9413 target_percent_c[1] = target_c;
9414 target_percent_c[2] = '\0';
9415
9416 target_percent_s[0] = target_percent;
9417 target_percent_s[1] = target_s;
9418 target_percent_s[2] = '\0';
9419
9420 target_percent_s_newline[0] = target_percent;
9421 target_percent_s_newline[1] = target_s;
9422 target_percent_s_newline[2] = target_newline;
9423 target_percent_s_newline[3] = '\0';
a0c938f0 9424
99eabcc1 9425 init = true;
9426 }
9427 return true;
9428}
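
/* Editorial note (not part of the original source): the cached
   strings let format matching work even when the host and target
   character sets differ (e.g. an EBCDIC target).  A comparison such
   as

     strcmp (fmt_str, target_percent_s) == 0

   asks whether the format is "%s" in the target charset, which a
   host-side comparison against a plain "%s" literal could get
   wrong.  */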
bffb7645 9429
f0c477f2 9430/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
 9431 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 9432 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 9433 function assumes that the MPFR flags were cleared just before M
 9434 was calculated, so that any flag now set was raised by that
 9435 calculation. Return NULL_TREE if any checks fail. */
9436
9437static tree
d4473c84 9438do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 9439{
9440 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9441 overflow/underflow occurred. If -frounding-math, proceed iff the
9442 result of calling FUNC was exact. */
d4473c84 9443 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 9444 && (!flag_rounding_math || !inexact))
9445 {
9446 REAL_VALUE_TYPE rr;
9447
66fa16e6 9448 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 9449 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
9450 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
9451 but the mpft_t is not, then we underflowed in the
9452 conversion. */
776a7bab 9453 if (real_isfinite (&rr)
f0c477f2 9454 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
9455 {
9456 REAL_VALUE_TYPE rmode;
9457
9458 real_convert (&rmode, TYPE_MODE (type), &rr);
9459 /* Proceed iff the specified mode can hold the value. */
9460 if (real_identical (&rmode, &rr))
9461 return build_real (type, rmode);
9462 }
9463 }
9464 return NULL_TREE;
9465}
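
/* Editorial sketch (not part of the original source) of the calling
   convention do_mpfr_ckconv () expects; a single-argument folder
   would do roughly:

     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();
     inexact = mpfr_sin (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);

   so the overflow/underflow flags tested above describe exactly the
   computation that produced M.  */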
9466
239d491a 9467/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
 9468 number and no overflow/underflow occurred. INEXACT is true if M
 9469 was not exactly calculated. TYPE is the tree type for the result.
 9470 This function assumes that the MPFR flags were cleared just before
 9471 M was calculated, so that any flag now set was raised by that
652d9409 9472 calculation. Return NULL_TREE if any checks fail; if
 9473 FORCE_CONVERT is true, bypass the checks. */
239d491a 9474
9475static tree
652d9409 9476do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 9477{
9478 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
9479 overflow/underflow occurred. If -frounding-math, proceed iff the
9480 result of calling FUNC was exact. */
652d9409 9481 if (force_convert
9482 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
9483 && !mpfr_overflow_p () && !mpfr_underflow_p ()
9484 && (!flag_rounding_math || !inexact)))
239d491a 9485 {
9486 REAL_VALUE_TYPE re, im;
9487
b0e7c4d4 9488 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
9489 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 9490 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
 9491 i.e. check for overflow/underflow. If the REAL_VALUE_TYPE is
 9492 zero but the mpfr_t is not, then we underflowed in the
 9493 conversion. */
652d9409 9494 if (force_convert
9495 || (real_isfinite (&re) && real_isfinite (&im)
9496 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
9497 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 9498 {
9499 REAL_VALUE_TYPE re_mode, im_mode;
9500
9501 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
9502 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
9503 /* Proceed iff the specified mode can hold the value. */
652d9409 9504 if (force_convert
9505 || (real_identical (&re_mode, &re)
9506 && real_identical (&im_mode, &im)))
239d491a 9507 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
9508 build_real (TREE_TYPE (type), im_mode));
9509 }
9510 }
9511 return NULL_TREE;
9512}
239d491a 9513
e5407ca6 9514/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to
 9515 compute the remainder, store the quotient bits through ARG_QUO, and
 9516 return the result. The type is taken from the type of ARG0 and is
 9517 used for setting the precision of the calculation and results. */
9518
9519static tree
9520do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
9521{
9522 tree const type = TREE_TYPE (arg0);
9523 tree result = NULL_TREE;
48e1416a 9524
e5407ca6 9525 STRIP_NOPS (arg0);
9526 STRIP_NOPS (arg1);
48e1416a 9527
e5407ca6 9528 /* To proceed, MPFR must exactly represent the target floating point
9529 format, which only happens when the target base equals two. */
9530 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9531 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9532 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
9533 {
9534 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
9535 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
9536
776a7bab 9537 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 9538 {
e2eb2b7f 9539 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9540 const int prec = fmt->p;
9541 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
e5407ca6 9542 tree result_rem;
9543 long integer_quo;
9544 mpfr_t m0, m1;
9545
9546 mpfr_inits2 (prec, m0, m1, NULL);
9547 mpfr_from_real (m0, ra0, GMP_RNDN);
9548 mpfr_from_real (m1, ra1, GMP_RNDN);
9549 mpfr_clear_flags ();
e2eb2b7f 9550 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 9551 /* Remquo is independent of the rounding mode, so pass
9552 inexact=0 to do_mpfr_ckconv(). */
9553 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
9554 mpfr_clears (m0, m1, NULL);
9555 if (result_rem)
9556 {
9557 /* MPFR calculates quo in the host's long so it may
9558 return more bits in quo than the target int can hold
9559 if sizeof(host long) > sizeof(target int). This can
9560 happen even for native compilers in LP64 mode. In
9561 these cases, reduce the quo value modulo the largest
9562 number that the target int can hold while leaving one
9563 bit for the sign. */
9564 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
9565 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
9566
9567 /* Dereference the quo pointer argument. */
9568 arg_quo = build_fold_indirect_ref (arg_quo);
9569 /* Proceed iff a valid pointer type was passed in. */
9570 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
9571 {
9572 /* Set the value. */
7002a1c8 9573 tree result_quo
9574 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
9575 build_int_cst (TREE_TYPE (arg_quo),
9576 integer_quo));
e5407ca6 9577 TREE_SIDE_EFFECTS (result_quo) = 1;
9578 /* Combine the quo assignment with the rem. */
9579 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9580 result_quo, result_rem));
9581 }
9582 }
9583 }
9584 }
9585 return result;
9586}
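
/* Editorial example (not part of the original source): given

     int q;
     double r = __builtin_remquo (7.0, 2.0, &q);

   both arguments are REAL_CSTs, so the call folds to the
   COMPOUND_EXPR (q = 4, -1.0): the quotient 7/2 = 3.5 rounds to the
   even value 4, leaving the remainder 7 - 2*4 = -1.  */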
e84da7c1 9587
9588/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
9589 resulting value as a tree with type TYPE. The mpfr precision is
9590 set to the precision of TYPE. We assume that this mpfr function
9591 returns zero if the result could be calculated exactly within the
9592 requested precision. In addition, the integer pointer represented
9593 by ARG_SG will be dereferenced and set to the appropriate signgam
9594 (-1 or 1) value. */
9595
9596static tree
9597do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
9598{
9599 tree result = NULL_TREE;
9600
9601 STRIP_NOPS (arg);
48e1416a 9602
e84da7c1 9603 /* To proceed, MPFR must exactly represent the target floating point
9604 format, which only happens when the target base equals two. Also
9605 verify ARG is a constant and that ARG_SG is an int pointer. */
9606 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9607 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
9608 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
9609 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
9610 {
9611 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
9612
9613 /* In addition to NaN and Inf, the argument cannot be zero or a
9614 negative integer. */
776a7bab 9615 if (real_isfinite (ra)
e84da7c1 9616 && ra->cl != rvc_zero
9af5ce0c 9617 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 9618 {
e2eb2b7f 9619 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
9620 const int prec = fmt->p;
9621 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
e84da7c1 9622 int inexact, sg;
9623 mpfr_t m;
9624 tree result_lg;
9625
9626 mpfr_init2 (m, prec);
9627 mpfr_from_real (m, ra, GMP_RNDN);
9628 mpfr_clear_flags ();
e2eb2b7f 9629 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 9630 result_lg = do_mpfr_ckconv (m, type, inexact);
9631 mpfr_clear (m);
9632 if (result_lg)
9633 {
9634 tree result_sg;
9635
9636 /* Dereference the arg_sg pointer argument. */
9637 arg_sg = build_fold_indirect_ref (arg_sg);
9638 /* Assign the signgam value into *arg_sg. */
9639 result_sg = fold_build2 (MODIFY_EXPR,
9640 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 9641 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 9642 TREE_SIDE_EFFECTS (result_sg) = 1;
9643 /* Combine the signgam assignment with the lgamma result. */
9644 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
9645 result_sg, result_lg));
9646 }
9647 }
9648 }
9649
9650 return result;
9651}
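
/* Editorial example (not part of the original source): given

     int sg;
     double l = __builtin_lgamma_r (3.0, &sg);

   Gamma(3) = 2! = 2, so mpfr_lgamma () computes log(2) and sets SG
   to 1; unless -frounding-math is in effect, the call folds to the
   COMPOUND_EXPR (sg = 1, 0.693...).  */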
75a70cf9 9652
c699fab8 9653/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
9654 mpc function FUNC on it and return the resulting value as a tree
9655 with type TYPE. The mpfr precision is set to the precision of
9656 TYPE. We assume that function FUNC returns zero if the result
652d9409 9657 could be calculated exactly within the requested precision. If
9658 DO_NONFINITE is true, then fold expressions containing Inf or NaN
9659 in the arguments and/or results. */
c699fab8 9660
63e89698 9661tree
652d9409 9662do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 9663 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
9664{
9665 tree result = NULL_TREE;
48e1416a 9666
c699fab8 9667 STRIP_NOPS (arg0);
9668 STRIP_NOPS (arg1);
9669
9670 /* To proceed, MPFR must exactly represent the target floating point
9671 format, which only happens when the target base equals two. */
9672 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
9673 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9674 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
9675 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
9676 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
9677 {
9678 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9679 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9680 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
9681 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
9682
652d9409 9683 if (do_nonfinite
9684 || (real_isfinite (re0) && real_isfinite (im0)
9685 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 9686 {
9687 const struct real_format *const fmt =
9688 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
9689 const int prec = fmt->p;
9690 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
9691 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
9692 int inexact;
9693 mpc_t m0, m1;
48e1416a 9694
c699fab8 9695 mpc_init2 (m0, prec);
9696 mpc_init2 (m1, prec);
9af5ce0c 9697 mpfr_from_real (mpc_realref (m0), re0, rnd);
9698 mpfr_from_real (mpc_imagref (m0), im0, rnd);
9699 mpfr_from_real (mpc_realref (m1), re1, rnd);
9700 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 9701 mpfr_clear_flags ();
9702 inexact = func (m0, m0, m1, crnd);
652d9409 9703 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 9704 mpc_clear (m0);
9705 mpc_clear (m1);
9706 }
9707 }
9708
9709 return result;
9710}
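
/* Editorial note (not part of the original source): any MPC entry
   point matching the int (*)(mpc_ptr, mpc_srcptr, mpc_srcptr,
   mpc_rnd_t) signature can be passed as FUNC; for example, a folder
   for cpow can invoke

     do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   (the 0 being DO_NONFINITE) to evaluate the complex power of two
   COMPLEX_CST arguments.  */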
239d491a 9711
75a70cf9 9712/* A wrapper function for builtin folding that prevents warnings for
9713 "statement without effect" and the like, caused by removing the
9714 call node before the warning is generated. */
9715
9716tree
1a91d914 9717fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 9718{
9719 tree ret = NULL_TREE;
9720 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 9721 location_t loc = gimple_location (stmt);
75a70cf9 9722 if (fndecl
9723 && TREE_CODE (fndecl) == FUNCTION_DECL
9724 && DECL_BUILT_IN (fndecl)
9725 && !gimple_call_va_arg_pack_p (stmt))
9726 {
9727 int nargs = gimple_call_num_args (stmt);
9845fb99 9728 tree *args = (nargs > 0
9729 ? gimple_call_arg_ptr (stmt, 0)
9730 : &error_mark_node);
75a70cf9 9731
198622c0 9732 if (avoid_folding_inline_builtin (fndecl))
9733 return NULL_TREE;
75a70cf9 9734 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9735 {
9845fb99 9736 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 9737 }
9738 else
9739 {
9d884767 9740 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 9741 if (ret)
9742 {
9743 /* Propagate location information from original call to
9744 expansion of builtin. Otherwise things like
9745 maybe_emit_chk_warning, that operate on the expansion
9746 of a builtin, will use the wrong location information. */
9747 if (gimple_has_location (stmt))
9748 {
9749 tree realret = ret;
9750 if (TREE_CODE (ret) == NOP_EXPR)
9751 realret = TREE_OPERAND (ret, 0);
9752 if (CAN_HAVE_LOCATION_P (realret)
9753 && !EXPR_HAS_LOCATION (realret))
389dd41b 9754 SET_EXPR_LOCATION (realret, loc);
75a70cf9 9755 return realret;
9756 }
9757 return ret;
9758 }
9759 }
9760 }
9761 return NULL_TREE;
9762}
7bfefa9d 9763
b9a16870 9764/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 9765 and set ASMSPEC as its user assembler name. DECL must be a
9766 function decl that declares a builtin. */
9767
9768void
9769set_builtin_user_assembler_name (tree decl, const char *asmspec)
9770{
7bfefa9d 9771 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
9772 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
9773 && asmspec != 0);
9774
61ffc71a 9775 tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 9776 set_user_assembler_name (builtin, asmspec);
61ffc71a 9777
9778 if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
9779 && INT_TYPE_SIZE < BITS_PER_WORD)
7bfefa9d 9780 {
61ffc71a 9781 set_user_assembler_libfunc ("ffs", asmspec);
9782 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE, MODE_INT, 0),
9783 "ffs");
7bfefa9d 9784 }
9785}
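
/* Editorial example (not part of the original source): a declaration
   with an asm rename, e.g.

     extern int ffs (int) __asm__ ("my_ffs");

   ends up here, so that both explicit calls to the builtin and the
   libcalls emitted by the ffs expanders use the name "my_ffs".  */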
a6b74a67 9786
9787/* Return true if DECL is a builtin that expands to a constant or similarly
9788 simple code. */
9789bool
9790is_simple_builtin (tree decl)
9791{
9792 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9793 switch (DECL_FUNCTION_CODE (decl))
9794 {
9795 /* Builtins that expand to constants. */
9796 case BUILT_IN_CONSTANT_P:
9797 case BUILT_IN_EXPECT:
9798 case BUILT_IN_OBJECT_SIZE:
9799 case BUILT_IN_UNREACHABLE:
9800 /* Simple register moves or loads from stack. */
fca0886c 9801 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 9802 case BUILT_IN_RETURN_ADDRESS:
9803 case BUILT_IN_EXTRACT_RETURN_ADDR:
9804 case BUILT_IN_FROB_RETURN_ADDR:
9805 case BUILT_IN_RETURN:
9806 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9807 case BUILT_IN_FRAME_ADDRESS:
9808 case BUILT_IN_VA_END:
9809 case BUILT_IN_STACK_SAVE:
9810 case BUILT_IN_STACK_RESTORE:
9811 /* Exception state returns or moves registers around. */
9812 case BUILT_IN_EH_FILTER:
9813 case BUILT_IN_EH_POINTER:
9814 case BUILT_IN_EH_COPY_VALUES:
9815 return true;
9816
9817 default:
9818 return false;
9819 }
9820
9821 return false;
9822}
9823
9824/* Return true if DECL is a builtin that is not expensive, i.e. it
9825 will most probably be expanded inline into reasonably simple code.
9826 This is a superset of is_simple_builtin. */
9827bool
9828is_inexpensive_builtin (tree decl)
9829{
9830 if (!decl)
9831 return false;
9832 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
9833 return true;
9834 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
9835 switch (DECL_FUNCTION_CODE (decl))
9836 {
9837 case BUILT_IN_ABS:
9838 case BUILT_IN_ALLOCA:
581bf1c2 9839 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 9840 case BUILT_IN_BSWAP16:
a6b74a67 9841 case BUILT_IN_BSWAP32:
9842 case BUILT_IN_BSWAP64:
9843 case BUILT_IN_CLZ:
9844 case BUILT_IN_CLZIMAX:
9845 case BUILT_IN_CLZL:
9846 case BUILT_IN_CLZLL:
9847 case BUILT_IN_CTZ:
9848 case BUILT_IN_CTZIMAX:
9849 case BUILT_IN_CTZL:
9850 case BUILT_IN_CTZLL:
9851 case BUILT_IN_FFS:
9852 case BUILT_IN_FFSIMAX:
9853 case BUILT_IN_FFSL:
9854 case BUILT_IN_FFSLL:
9855 case BUILT_IN_IMAXABS:
9856 case BUILT_IN_FINITE:
9857 case BUILT_IN_FINITEF:
9858 case BUILT_IN_FINITEL:
9859 case BUILT_IN_FINITED32:
9860 case BUILT_IN_FINITED64:
9861 case BUILT_IN_FINITED128:
9862 case BUILT_IN_FPCLASSIFY:
9863 case BUILT_IN_ISFINITE:
9864 case BUILT_IN_ISINF_SIGN:
9865 case BUILT_IN_ISINF:
9866 case BUILT_IN_ISINFF:
9867 case BUILT_IN_ISINFL:
9868 case BUILT_IN_ISINFD32:
9869 case BUILT_IN_ISINFD64:
9870 case BUILT_IN_ISINFD128:
9871 case BUILT_IN_ISNAN:
9872 case BUILT_IN_ISNANF:
9873 case BUILT_IN_ISNANL:
9874 case BUILT_IN_ISNAND32:
9875 case BUILT_IN_ISNAND64:
9876 case BUILT_IN_ISNAND128:
9877 case BUILT_IN_ISNORMAL:
9878 case BUILT_IN_ISGREATER:
9879 case BUILT_IN_ISGREATEREQUAL:
9880 case BUILT_IN_ISLESS:
9881 case BUILT_IN_ISLESSEQUAL:
9882 case BUILT_IN_ISLESSGREATER:
9883 case BUILT_IN_ISUNORDERED:
9884 case BUILT_IN_VA_ARG_PACK:
9885 case BUILT_IN_VA_ARG_PACK_LEN:
9886 case BUILT_IN_VA_COPY:
9887 case BUILT_IN_TRAP:
9888 case BUILT_IN_SAVEREGS:
9889 case BUILT_IN_POPCOUNTL:
9890 case BUILT_IN_POPCOUNTLL:
9891 case BUILT_IN_POPCOUNTIMAX:
9892 case BUILT_IN_POPCOUNT:
9893 case BUILT_IN_PARITYL:
9894 case BUILT_IN_PARITYLL:
9895 case BUILT_IN_PARITYIMAX:
9896 case BUILT_IN_PARITY:
9897 case BUILT_IN_LABS:
9898 case BUILT_IN_LLABS:
9899 case BUILT_IN_PREFETCH:
ca4c3545 9900 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 9901 return true;
9902
9903 default:
9904 return is_simple_builtin (decl);
9905 }
9906
9907 return false;
9908}