gcc/builtins.c
53800dbe 1/* Expand builtin functions.
d353bf18 2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
20#include "config.h"
21#include "system.h"
805e22b2 22#include "coretypes.h"
9ef16211 23#include "backend.h"
d040a5b0 24#include "predict.h"
9ef16211 25#include "tree.h"
26#include "gimple.h"
53800dbe 27#include "rtl.h"
b20a8bb4 28#include "alias.h"
b20a8bb4 29#include "fold-const.h"
9ed99284 30#include "stringpool.h"
31#include "stor-layout.h"
32#include "calls.h"
33#include "varasm.h"
34#include "tree-object-size.h"
dae0b5cb 35#include "realmpfr.h"
94ea8568 36#include "cfgrtl.h"
bc61cadb 37#include "internal-fn.h"
53800dbe 38#include "flags.h"
39#include "regs.h"
53800dbe 40#include "except.h"
53800dbe 41#include "insn-config.h"
d53441c8 42#include "expmed.h"
43#include "dojump.h"
44#include "explow.h"
45#include "emit-rtl.h"
46#include "stmt.h"
53800dbe 47#include "expr.h"
34517c64 48#include "insn-codes.h"
d8fc4d0b 49#include "optabs.h"
50#include "libfuncs.h"
53800dbe 51#include "recog.h"
52#include "output.h"
53#include "typeclass.h"
1dd6c958 54#include "tm_p.h"
fc2a2dcb 55#include "target.h"
63c62881 56#include "langhooks.h"
073c1fd5 57#include "tree-ssanames.h"
58#include "tree-dfa.h"
162719b3 59#include "value-prof.h"
852f689e 60#include "diagnostic-core.h"
3b9c3a16 61#include "builtins.h"
f9acf11a 62#include "asan.h"
d037099f 63#include "cilk.h"
058a1b7a 64#include "cgraph.h"
65#include "tree-chkp.h"
66#include "rtl-chkp.h"
ca4c3545 67#include "gomp-constants.h"
53800dbe 68
5383fb56 69
239d491a 70static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
726e2588 71
3b9c3a16 72struct target_builtins default_target_builtins;
73#if SWITCHABLE_TARGET
74struct target_builtins *this_target_builtins = &default_target_builtins;
75#endif
76
ab7943b9 77/* Define the names of the builtin function types and codes. */
96423453 78const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
9cfddb70 81#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 82const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 83{
84#include "builtins.def"
85};
86#undef DEF_BUILTIN
ab7943b9 87
cffdfb3d 88/* Set up an array of builtin_info_type, make sure each element decl is
df94cd3b 89 initialized to NULL_TREE. */
cffdfb3d 90builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 91
0b049e15 92/* Non-zero if __builtin_constant_p should be folded right away. */
93bool force_folding_builtin_constant_p;
94
3754d046 95static rtx c_readstr (const char *, machine_mode);
aecda0d6 96static int target_char_cast (tree, char *);
d8ae1baa 97static rtx get_memory_rtx (tree, tree);
aecda0d6 98static int apply_args_size (void);
99static int apply_result_size (void);
aecda0d6 100static rtx result_vector (int, rtx);
aecda0d6 101static void expand_builtin_prefetch (tree);
102static rtx expand_builtin_apply_args (void);
103static rtx expand_builtin_apply_args_1 (void);
104static rtx expand_builtin_apply (rtx, rtx, rtx);
105static void expand_builtin_return (rtx);
106static enum type_class type_to_class (tree);
107static rtx expand_builtin_classify_type (tree);
108static void expand_errno_check (tree, rtx);
109static rtx expand_builtin_mathfn (tree, rtx, rtx);
110static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
6b43bae4 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 114static rtx expand_builtin_sincos (tree);
f97eea22 115static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 118static rtx expand_builtin_next_arg (void);
aecda0d6 119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
a65c4d64 122static rtx expand_builtin_strcmp (tree, rtx);
3754d046 123static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
124static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
a65c4d64 125static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 126static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
127static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
3754d046 128static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
f21337ef 129static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
48e1416a 130static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
f21337ef 131 machine_mode, int, tree);
a65c4d64 132static rtx expand_builtin_strcpy (tree, rtx);
133static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 134static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
a65c4d64 135static rtx expand_builtin_strncpy (tree, rtx);
3754d046 136static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
137static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 138static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 139static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 140static rtx expand_builtin_bzero (tree);
3754d046 141static rtx expand_builtin_strlen (tree, rtx, machine_mode);
5be42b39 142static rtx expand_builtin_alloca (tree, bool);
3754d046 143static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 144static rtx expand_builtin_frame_address (tree, tree);
389dd41b 145static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 146static rtx expand_builtin_expect (tree, rtx);
147static tree fold_builtin_constant_p (tree);
148static tree fold_builtin_classify_type (tree);
c7cbde74 149static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 150static tree fold_builtin_inf (location_t, tree, int);
aecda0d6 151static tree fold_builtin_nan (tree, tree, int);
389dd41b 152static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
b7bf20db 153static bool validate_arg (const_tree, enum tree_code code);
277f8dd2 154static bool integer_valued_real_p (tree);
389dd41b 155static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
aecda0d6 156static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 157static rtx expand_builtin_signbit (tree, rtx);
389dd41b 158static tree fold_builtin_sqrt (location_t, tree, tree);
159static tree fold_builtin_cbrt (location_t, tree, tree);
160static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
161static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
162static tree fold_builtin_cos (location_t, tree, tree, tree);
163static tree fold_builtin_cosh (location_t, tree, tree, tree);
bffb7645 164static tree fold_builtin_tan (tree, tree);
389dd41b 165static tree fold_builtin_trunc (location_t, tree, tree);
166static tree fold_builtin_floor (location_t, tree, tree);
167static tree fold_builtin_ceil (location_t, tree, tree);
168static tree fold_builtin_round (location_t, tree, tree);
169static tree fold_builtin_int_roundingfn (location_t, tree, tree);
10b9666f 170static tree fold_builtin_bitop (tree, tree);
389dd41b 171static tree fold_builtin_strchr (location_t, tree, tree, tree);
172static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
173static tree fold_builtin_memcmp (location_t, tree, tree, tree);
174static tree fold_builtin_strcmp (location_t, tree, tree);
175static tree fold_builtin_strncmp (location_t, tree, tree, tree);
176static tree fold_builtin_signbit (location_t, tree, tree);
177static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
178static tree fold_builtin_isascii (location_t, tree);
179static tree fold_builtin_toascii (location_t, tree);
180static tree fold_builtin_isdigit (location_t, tree);
181static tree fold_builtin_fabs (location_t, tree, tree);
182static tree fold_builtin_abs (location_t, tree, tree);
183static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 184 enum tree_code);
e80cc485 185static tree fold_builtin_0 (location_t, tree);
186static tree fold_builtin_1 (location_t, tree, tree);
187static tree fold_builtin_2 (location_t, tree, tree, tree);
188static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
189static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 190
191static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
192static tree fold_builtin_strstr (location_t, tree, tree, tree);
193static tree fold_builtin_strrchr (location_t, tree, tree, tree);
389dd41b 194static tree fold_builtin_strspn (location_t, tree, tree);
195static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 196
0a39fd54 197static rtx expand_builtin_object_size (tree);
3754d046 198static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 199 enum built_in_function);
200static void maybe_emit_chk_warning (tree, enum built_in_function);
201static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 202static void maybe_emit_free_warning (tree);
c2f47e15 203static tree fold_builtin_object_size (tree, tree);
99eabcc1 204
e788f202 205unsigned HOST_WIDE_INT target_newline;
b9ea678c 206unsigned HOST_WIDE_INT target_percent;
99eabcc1 207static unsigned HOST_WIDE_INT target_c;
208static unsigned HOST_WIDE_INT target_s;
aea88c77 209char target_percent_c[3];
b9ea678c 210char target_percent_s[3];
e788f202 211char target_percent_s_newline[4];
728bac60 212static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
213 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
f0c477f2 214static tree do_mpfr_arg2 (tree, tree, tree,
215 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
9917422b 216static tree do_mpfr_arg3 (tree, tree, tree, tree,
217 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
d92f994c 218static tree do_mpfr_sincos (tree, tree, tree);
65dd1378 219static tree do_mpfr_bessel_n (tree, tree, tree,
220 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
221 const REAL_VALUE_TYPE *, bool);
e5407ca6 222static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 223static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 224static void expand_builtin_sync_synchronize (void);
0a39fd54 225
7bfefa9d 226/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or names a Cilk Plus runtime helper when Cilk Plus is enabled. */
227
b29139ad 228static bool
1c47b3e8 229is_builtin_name (const char *name)
b6a5fc45 230{
b6a5fc45 231 if (strncmp (name, "__builtin_", 10) == 0)
232 return true;
233 if (strncmp (name, "__sync_", 7) == 0)
234 return true;
1cd6e20d 235 if (strncmp (name, "__atomic_", 9) == 0)
236 return true;
a89e6c15 237 if (flag_cilkplus
d037099f 238 && (!strcmp (name, "__cilkrts_detach")
239 || !strcmp (name, "__cilkrts_pop_frame")))
240 return true;
b6a5fc45 241 return false;
242}
4ee9c684 243
7bfefa9d 244
245/* Return true if DECL is a function symbol representing a built-in. */
246
247bool
248is_builtin_fn (tree decl)
249{
250 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
251}
252
1c47b3e8 253/* Return true if NODE should be considered for inline expansion regardless
254 of the optimization level. This means whenever a function is invoked with
255 its "internal" name, which normally contains the prefix "__builtin". */
256
257static bool
258called_as_built_in (tree node)
259{
260 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
261 we want the name used to call the function, not the name it
262 will have. */
263 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
264 return is_builtin_name (name);
265}
266
ceea063b 267/* Compute values M and N such that M divides (address of EXP - N) and such
 268 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
269 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
 270 *ALIGNP and any bit-offset to *BITPOSP.
0d8f7716 271
272 Note that the address (and thus the alignment) computed here is based
273 on the address to which a symbol resolves, whereas DECL_ALIGN is based
274 on the address at which an object is actually located. These two
275 addresses are not always the same. For example, on ARM targets,
276 the address &foo of a Thumb function foo() has the lowest bit set,
3482bf13 277 whereas foo() itself starts on an even address.
698537d1 278
3482bf13 279 If ADDR_P is true we are taking the address of the memory reference EXP
280 and thus cannot rely on the access taking place. */
281
282static bool
283get_object_alignment_2 (tree exp, unsigned int *alignp,
284 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
698537d1 285{
98ab9e8f 286 HOST_WIDE_INT bitsize, bitpos;
287 tree offset;
3754d046 288 machine_mode mode;
98ab9e8f 289 int unsignedp, volatilep;
c8a2b4ff 290 unsigned int align = BITS_PER_UNIT;
ceea063b 291 bool known_alignment = false;
698537d1 292
98ab9e8f 293 /* Get the innermost object and the constant (bitpos) and possibly
294 variable (offset) offset of the access. */
295 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
296 &mode, &unsignedp, &volatilep, true);
297
298 /* Extract alignment information from the innermost object and
299 possibly adjust bitpos and offset. */
3482bf13 300 if (TREE_CODE (exp) == FUNCTION_DECL)
0d8f7716 301 {
3482bf13 302 /* Function addresses can encode extra information besides their
303 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
304 allows the low bit to be used as a virtual bit, we know
305 that the address itself must be at least 2-byte aligned. */
306 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
307 align = 2 * BITS_PER_UNIT;
0d8f7716 308 }
3482bf13 309 else if (TREE_CODE (exp) == LABEL_DECL)
310 ;
311 else if (TREE_CODE (exp) == CONST_DECL)
98ab9e8f 312 {
3482bf13 313 /* The alignment of a CONST_DECL is determined by its initializer. */
314 exp = DECL_INITIAL (exp);
98ab9e8f 315 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 316 if (CONSTANT_CLASS_P (exp))
317 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 318
3482bf13 319 known_alignment = true;
98ab9e8f 320 }
3482bf13 321 else if (DECL_P (exp))
ceea063b 322 {
3482bf13 323 align = DECL_ALIGN (exp);
ceea063b 324 known_alignment = true;
ceea063b 325 }
3482bf13 326 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
ceea063b 327 {
ceea063b 328 align = TYPE_ALIGN (TREE_TYPE (exp));
329 }
3482bf13 330 else if (TREE_CODE (exp) == INDIRECT_REF
331 || TREE_CODE (exp) == MEM_REF
332 || TREE_CODE (exp) == TARGET_MEM_REF)
98ab9e8f 333 {
334 tree addr = TREE_OPERAND (exp, 0);
ceea063b 335 unsigned ptr_align;
336 unsigned HOST_WIDE_INT ptr_bitpos;
ab1e78e5 337 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
ceea063b 338
ab1e78e5 339 /* If the address is explicitly aligned, handle that. */
98ab9e8f 340 if (TREE_CODE (addr) == BIT_AND_EXPR
341 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
342 {
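	  /* For example, an address computed as (p & -16) has a mask of -16;
	     scaled by BITS_PER_UNIT its lowest set bit is 128 bits, i.e. the
	     access is known to be 16-byte aligned.  */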
ab1e78e5 343 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
344 ptr_bitmask *= BITS_PER_UNIT;
345 align = ptr_bitmask & -ptr_bitmask;
98ab9e8f 346 addr = TREE_OPERAND (addr, 0);
347 }
ceea063b 348
3482bf13 349 known_alignment
350 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
3482bf13 351 align = MAX (ptr_align, align);
352
ab1e78e5 353 /* Re-apply explicit alignment to the bitpos. */
354 ptr_bitpos &= ptr_bitmask;
355
4083990a 356 /* The alignment of the pointer operand in a TARGET_MEM_REF
357 has to take the variable offset parts into account. */
3482bf13 358 if (TREE_CODE (exp) == TARGET_MEM_REF)
153c3b50 359 {
3482bf13 360 if (TMR_INDEX (exp))
361 {
362 unsigned HOST_WIDE_INT step = 1;
363 if (TMR_STEP (exp))
f9ae6f95 364 step = TREE_INT_CST_LOW (TMR_STEP (exp));
3482bf13 365 align = MIN (align, (step & -step) * BITS_PER_UNIT);
366 }
367 if (TMR_INDEX2 (exp))
368 align = BITS_PER_UNIT;
369 known_alignment = false;
153c3b50 370 }
ceea063b 371
3482bf13 372 /* When EXP is an actual memory reference then we can use
373 TYPE_ALIGN of a pointer indirection to derive alignment.
374 Do so only if get_pointer_alignment_1 did not reveal absolute
4083990a 375 alignment knowledge and if using that alignment would
376 improve the situation. */
377 if (!addr_p && !known_alignment
378 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
379 align = TYPE_ALIGN (TREE_TYPE (exp));
380 else
381 {
382 /* Else adjust bitpos accordingly. */
383 bitpos += ptr_bitpos;
384 if (TREE_CODE (exp) == MEM_REF
385 || TREE_CODE (exp) == TARGET_MEM_REF)
e913b5cd 386 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
4083990a 387 }
98ab9e8f 388 }
3482bf13 389 else if (TREE_CODE (exp) == STRING_CST)
153c3b50 390 {
3482bf13 391 /* STRING_CST are the only constant objects we allow to be not
392 wrapped inside a CONST_DECL. */
393 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 394 if (CONSTANT_CLASS_P (exp))
395 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 396
3482bf13 397 known_alignment = true;
98ab9e8f 398 }
98ab9e8f 399
400 /* If there is a non-constant offset part extract the maximum
401 alignment that can prevail. */
c8a2b4ff 402 if (offset)
98ab9e8f 403 {
ad464c56 404 unsigned int trailing_zeros = tree_ctz (offset);
c8a2b4ff 405 if (trailing_zeros < HOST_BITS_PER_INT)
98ab9e8f 406 {
c8a2b4ff 407 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
408 if (inner)
409 align = MIN (align, inner);
98ab9e8f 410 }
98ab9e8f 411 }
412
3482bf13 413 *alignp = align;
414 *bitposp = bitpos & (*alignp - 1);
ceea063b 415 return known_alignment;
0c883ef3 416}
417
3482bf13 418/* For a memory reference expression EXP compute values M and N such that M
419 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 420 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 421 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
422
423bool
424get_object_alignment_1 (tree exp, unsigned int *alignp,
425 unsigned HOST_WIDE_INT *bitposp)
426{
427 return get_object_alignment_2 (exp, alignp, bitposp, false);
428}
429
957d0361 430/* Return the alignment in bits of EXP, an object. */
0c883ef3 431
432unsigned int
957d0361 433get_object_alignment (tree exp)
0c883ef3 434{
435 unsigned HOST_WIDE_INT bitpos = 0;
436 unsigned int align;
437
ceea063b 438 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 439
98ab9e8f 440 /* align and bitpos now specify known low bits of the pointer.
441 ptr & (align - 1) == bitpos. */
442
443 if (bitpos != 0)
444 align = (bitpos & -bitpos);
957d0361 445 return align;
698537d1 446}
447
ceea063b 448/* For a pointer valued expression EXP compute values M and N such that M
449 divides (EXP - N) and such that N < M. If these numbers can be determined,
3482bf13 450 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
451 the results are just a conservative approximation.
53800dbe 452
ceea063b 453 If EXP is not a pointer, false is returned too. */
53800dbe 454
ceea063b 455bool
456get_pointer_alignment_1 (tree exp, unsigned int *alignp,
457 unsigned HOST_WIDE_INT *bitposp)
53800dbe 458{
153c3b50 459 STRIP_NOPS (exp);
535e2026 460
153c3b50 461 if (TREE_CODE (exp) == ADDR_EXPR)
3482bf13 462 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
463 alignp, bitposp, true);
906a9403 464 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
465 {
466 unsigned int align;
467 unsigned HOST_WIDE_INT bitpos;
468 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
469 &align, &bitpos);
470 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
471 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
472 else
473 {
474 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
475 if (trailing_zeros < HOST_BITS_PER_INT)
476 {
477 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
478 if (inner)
479 align = MIN (align, inner);
480 }
481 }
482 *alignp = align;
483 *bitposp = bitpos & (align - 1);
484 return res;
485 }
153c3b50 486 else if (TREE_CODE (exp) == SSA_NAME
487 && POINTER_TYPE_P (TREE_TYPE (exp)))
53800dbe 488 {
ceea063b 489 unsigned int ptr_align, ptr_misalign;
153c3b50 490 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
ceea063b 491
492 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
493 {
494 *bitposp = ptr_misalign * BITS_PER_UNIT;
495 *alignp = ptr_align * BITS_PER_UNIT;
3482bf13 496 /* We cannot really tell whether this result is an approximation. */
ceea063b 497 return true;
498 }
499 else
69fbc3aa 500 {
501 *bitposp = 0;
ceea063b 502 *alignp = BITS_PER_UNIT;
503 return false;
69fbc3aa 504 }
53800dbe 505 }
0bb8b39a 506 else if (TREE_CODE (exp) == INTEGER_CST)
507 {
508 *alignp = BIGGEST_ALIGNMENT;
f9ae6f95 509 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
0bb8b39a 510 & (BIGGEST_ALIGNMENT - 1));
511 return true;
512 }
153c3b50 513
69fbc3aa 514 *bitposp = 0;
ceea063b 515 *alignp = BITS_PER_UNIT;
516 return false;
53800dbe 517}
518
69fbc3aa 519/* Return the alignment in bits of EXP, a pointer valued expression.
520 The alignment returned is, by default, the alignment of the thing that
 521 EXP points to. If the alignment cannot be determined, BITS_PER_UNIT is returned.
522
523 Otherwise, look at the expression to see if we can do better, i.e., if the
524 expression is actually pointing at an object whose alignment is tighter. */
525
526unsigned int
527get_pointer_alignment (tree exp)
528{
529 unsigned HOST_WIDE_INT bitpos = 0;
530 unsigned int align;
ceea063b 531
532 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 533
534 /* align and bitpos now specify known low bits of the pointer.
535 ptr & (align - 1) == bitpos. */
536
537 if (bitpos != 0)
538 align = (bitpos & -bitpos);
539
540 return align;
541}
542
53800dbe 543/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
544 way, because it could contain a zero byte in the middle.
545 TREE_STRING_LENGTH is the size of the character array, not the string.
546
4172d65e 547 ONLY_VALUE should be nonzero if the result is not going to be emitted
c09841f6 548 into the instruction stream and zero if it is going to be expanded.
4172d65e 549 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
681fab1e 550 is returned, otherwise NULL, since
551 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
552 evaluate the side-effects.
553
6bda159e 554 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
555 accesses. Note that this implies the result is not going to be emitted
556 into the instruction stream.
557
902de8ed 558 The value returned is of type `ssizetype'.
559
53800dbe 560 Unfortunately, string_constant can't access the values of const char
561 arrays with initializers, so neither can we do so here. */
562
4ee9c684 563tree
681fab1e 564c_strlen (tree src, int only_value)
53800dbe 565{
566 tree offset_node;
27d0c333 567 HOST_WIDE_INT offset;
568 int max;
44acf429 569 const char *ptr;
da136652 570 location_t loc;
53800dbe 571
681fab1e 572 STRIP_NOPS (src);
573 if (TREE_CODE (src) == COND_EXPR
574 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
575 {
576 tree len1, len2;
577
578 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
579 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
0862b7e9 580 if (tree_int_cst_equal (len1, len2))
681fab1e 581 return len1;
582 }
583
584 if (TREE_CODE (src) == COMPOUND_EXPR
585 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
586 return c_strlen (TREE_OPERAND (src, 1), only_value);
587
3df42822 588 loc = EXPR_LOC_OR_LOC (src, input_location);
da136652 589
53800dbe 590 src = string_constant (src, &offset_node);
591 if (src == 0)
c2f47e15 592 return NULL_TREE;
902de8ed 593
83d79705 594 max = TREE_STRING_LENGTH (src) - 1;
53800dbe 595 ptr = TREE_STRING_POINTER (src);
902de8ed 596
53800dbe 597 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
598 {
599 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
600 compute the offset to the following null if we don't know where to
601 start searching for it. */
602 int i;
902de8ed 603
53800dbe 604 for (i = 0; i < max; i++)
605 if (ptr[i] == 0)
c2f47e15 606 return NULL_TREE;
902de8ed 607
53800dbe 608 /* We don't know the starting offset, but we do know that the string
609 has no internal zero bytes. We can assume that the offset falls
610 within the bounds of the string; otherwise, the programmer deserves
611 what he gets. Subtract the offset from the length of the string,
902de8ed 612 and return that. This would perhaps not be valid if we were dealing
613 with named arrays in addition to literal string constants. */
614
da136652 615 return size_diffop_loc (loc, size_int (max), offset_node);
53800dbe 616 }
617
618 /* We have a known offset into the string. Start searching there for
27d0c333 619 a null character if we can represent it as a single HOST_WIDE_INT. */
dabc4084 620 if (offset_node == 0)
53800dbe 621 offset = 0;
35ec552a 622 else if (! tree_fits_shwi_p (offset_node))
dabc4084 623 offset = -1;
53800dbe 624 else
e913b5cd 625 offset = tree_to_shwi (offset_node);
902de8ed 626
1f63a7d6 627 /* If the offset is known to be out of bounds, warn, and call strlen at
628 runtime. */
2f1c4f17 629 if (offset < 0 || offset > max)
53800dbe 630 {
1f63a7d6 631 /* Suppress multiple warnings for propagated constant strings. */
2f1c4f17 632 if (only_value != 2
633 && !TREE_NO_WARNING (src))
1f63a7d6 634 {
da136652 635 warning_at (loc, 0, "offset outside bounds of constant string");
1f63a7d6 636 TREE_NO_WARNING (src) = 1;
637 }
c2f47e15 638 return NULL_TREE;
53800dbe 639 }
902de8ed 640
53800dbe 641 /* Use strlen to search for the first zero byte. Since any strings
642 constructed with build_string will have nulls appended, we win even
643 if we get handed something like (char[4])"abcd".
644
645 Since OFFSET is our starting index into the string, no further
646 calculation is needed. */
902de8ed 647 return ssize_int (strlen (ptr + offset));
53800dbe 648}
649
83d79705 650/* Return a char pointer for a C string if it is a string constant
651 or sum of string constant and integer constant. */
652
b9ea678c 653const char *
aecda0d6 654c_getstr (tree src)
83d79705 655{
656 tree offset_node;
83d79705 657
658 src = string_constant (src, &offset_node);
659 if (src == 0)
660 return 0;
661
8c85fcb7 662 if (offset_node == 0)
663 return TREE_STRING_POINTER (src);
e913b5cd 664 else if (!tree_fits_uhwi_p (offset_node)
8c85fcb7 665 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
83d79705 666 return 0;
83d79705 667
e913b5cd 668 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
83d79705 669}
670
e913b5cd 671/* Return a constant integer corresponding to target reading
8c85fcb7 672 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 673
6840589f 674static rtx
3754d046 675c_readstr (const char *str, machine_mode mode)
6840589f 676{
6840589f 677 HOST_WIDE_INT ch;
678 unsigned int i, j;
e913b5cd 679 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 680
681 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 682 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
683 / HOST_BITS_PER_WIDE_INT;
684
a12aa4cc 685 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 686 for (i = 0; i < len; i++)
687 tmp[i] = 0;
6840589f 688
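  /* Pack the string one target byte at a time, mapping each byte index to
     its bit position within the constant according to the target's byte
     and word endianness.  Once the terminating NUL is seen, CH stays zero,
     so any remaining bytes of the constant are zero-filled.  */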
6840589f 689 ch = 1;
690 for (i = 0; i < GET_MODE_SIZE (mode); i++)
691 {
692 j = i;
693 if (WORDS_BIG_ENDIAN)
694 j = GET_MODE_SIZE (mode) - i - 1;
695 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 696 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 697 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
698 j *= BITS_PER_UNIT;
7d3f6cc7 699
6840589f 700 if (ch)
701 ch = (unsigned char) str[i];
e913b5cd 702 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 703 }
ddb1be65 704
ab2c1de8 705 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 706 return immed_wide_int_const (c, mode);
6840589f 707}
708
ecc318ff 709/* Cast a target constant CST to target CHAR and if that value fits into
5206b159 710 the host char type, return zero and put that value into the variable pointed
ecc318ff 711 to by P. */
712
713static int
aecda0d6 714target_char_cast (tree cst, char *p)
ecc318ff 715{
716 unsigned HOST_WIDE_INT val, hostval;
717
c19686c5 718 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 719 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
720 return 1;
721
e913b5cd 722 /* We do not care at this point whether the value fits. */
f9ae6f95 723 val = TREE_INT_CST_LOW (cst);
e913b5cd 724
ecc318ff 725 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
726 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
727
728 hostval = val;
729 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
730 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
731
732 if (val != hostval)
733 return 1;
734
735 *p = hostval;
736 return 0;
737}
738
4ee9c684 739/* Similar to save_expr, but assumes that arbitrary code is not executed
740 in between the multiple evaluations. In particular, we assume that a
741 non-addressable local variable will not be modified. */
742
743static tree
744builtin_save_expr (tree exp)
745{
f6c35aa4 746 if (TREE_CODE (exp) == SSA_NAME
747 || (TREE_ADDRESSABLE (exp) == 0
748 && (TREE_CODE (exp) == PARM_DECL
749 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
4ee9c684 750 return exp;
751
752 return save_expr (exp);
753}
754
53800dbe 755/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
756 times to get the address of either a higher stack frame, or a return
757 address located within it (depending on FNDECL_CODE). */
902de8ed 758
c626df3d 759static rtx
869d0ef0 760expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 761{
762 int i;
763
869d0ef0 764#ifdef INITIAL_FRAME_ADDRESS_RTX
765 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
766#else
e3e15c50 767 rtx tem;
768
1b74fde7 769 /* For a zero count with __builtin_return_address, we don't care what
770 frame address we return, because target-specific definitions will
771 override us. Therefore frame pointer elimination is OK, and using
772 the soft frame pointer is OK.
773
fa7637bd 774 For a nonzero count, or a zero count with __builtin_frame_address,
1b74fde7 775 we require a stable offset from the current frame pointer to the
776 previous one, so we must use the hard frame pointer, and
e3e15c50 777 we must disable frame pointer elimination. */
1b74fde7 778 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
e3e15c50 779 tem = frame_pointer_rtx;
a0c938f0 780 else
e3e15c50 781 {
782 tem = hard_frame_pointer_rtx;
783
784 /* Tell reload not to eliminate the frame pointer. */
18d50ae6 785 crtl->accesses_prior_frames = 1;
e3e15c50 786 }
869d0ef0 787#endif
788
53800dbe 789 /* Some machines need special handling before we can access
3a69c60c 790 arbitrary frames. For example, on the SPARC, we must first flush
53800dbe 791 all register windows to the stack. */
792#ifdef SETUP_FRAME_ADDRESSES
793 if (count > 0)
794 SETUP_FRAME_ADDRESSES ();
795#endif
796
3a69c60c 797 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 798 register. There is no way to access it off of the current frame
799 pointer, but it can be accessed off the previous frame pointer by
800 reading the value from the register window save area. */
a26d6c60 801 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 802 count--;
53800dbe 803
804 /* Scan back COUNT frames to the specified frame. */
805 for (i = 0; i < count; i++)
806 {
807 /* Assume the dynamic chain pointer is in the word that the
808 frame address points to, unless otherwise specified. */
809#ifdef DYNAMIC_CHAIN_ADDRESS
810 tem = DYNAMIC_CHAIN_ADDRESS (tem);
811#endif
812 tem = memory_address (Pmode, tem);
00060fc2 813 tem = gen_frame_mem (Pmode, tem);
83fc1478 814 tem = copy_to_reg (tem);
53800dbe 815 }
816
3a69c60c 817 /* For __builtin_frame_address, return what we've got. But, on
818 the SPARC for example, we may have to add a bias. */
53800dbe 819 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 820#ifdef FRAME_ADDR_RTX
821 return FRAME_ADDR_RTX (tem);
822#else
53800dbe 823 return tem;
3a69c60c 824#endif
53800dbe 825
3a69c60c 826 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 827#ifdef RETURN_ADDR_RTX
828 tem = RETURN_ADDR_RTX (count, tem);
829#else
830 tem = memory_address (Pmode,
29c05e22 831 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 832 tem = gen_frame_mem (Pmode, tem);
53800dbe 833#endif
834 return tem;
835}
836
f7c44134 837/* Alias set used for setjmp buffer. */
32c2fdea 838static alias_set_type setjmp_alias_set = -1;
f7c44134 839
6b7f6858 840/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 841 return to RECEIVER_LABEL. This is also called directly by the SJLJ
842 exception handling code. */
53800dbe 843
6b7f6858 844void
aecda0d6 845expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 846{
3754d046 847 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 848 rtx stack_save;
f7c44134 849 rtx mem;
53800dbe 850
f7c44134 851 if (setjmp_alias_set == -1)
852 setjmp_alias_set = new_alias_set ();
853
85d654dd 854 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 855
37ae8504 856 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 857
6b7f6858 858 /* We store the frame pointer and the address of receiver_label in
859 the buffer and use the rest of it for the stack save area, which
860 is machine-dependent. */
53800dbe 861
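  /* Concretely: pointer-sized word 0 of the buffer receives the frame value,
     word 1 the address of RECEIVER_LABEL, and the area starting at word 2 is
     handed to emit_stack_save for the machine-dependent stack state.  */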
f7c44134 862 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 863 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 864 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 865
29c05e22 866 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
867 GET_MODE_SIZE (Pmode))),
ab6ab77e 868 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 869
870 emit_move_insn (validize_mem (mem),
6b7f6858 871 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 872
873 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 874 plus_constant (Pmode, buf_addr,
53800dbe 875 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 876 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 877 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 878
879 /* If there is further processing to do, do it. */
a3c81e61 880 if (targetm.have_builtin_setjmp_setup ())
881 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 882
29f09705 883 /* We have a nonlocal label. */
18d50ae6 884 cfun->has_nonlocal_label = 1;
6b7f6858 885}
53800dbe 886
2c8a1497 887/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 888 also called directly by the SJLJ exception handling code.
 889 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
6b7f6858 890
891void
a3c81e61 892expand_builtin_setjmp_receiver (rtx receiver_label)
6b7f6858 893{
82c7907c 894 rtx chain;
895
4598ade9 896 /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 897 marked as used by this function. */
18b42941 898 emit_use (hard_frame_pointer_rtx);
53800dbe 899
900 /* Mark the static chain as clobbered here so life information
901 doesn't get messed up for it. */
82c7907c 902 chain = targetm.calls.static_chain (current_function_decl, true);
903 if (chain && REG_P (chain))
904 emit_clobber (chain);
53800dbe 905
906 /* Now put in the code to restore the frame pointer, and argument
491e04ef 907 pointer, if needed. */
a3c81e61 908 if (! targetm.have_nonlocal_goto ())
62dcb5c8 909 {
910 /* First adjust our frame pointer to its actual value. It was
911 previously set to the start of the virtual area corresponding to
912 the stacked variables when we branched here and now needs to be
913 adjusted to the actual hardware fp value.
914
915 Assignments to virtual registers are converted by
916 instantiate_virtual_regs into the corresponding assignment
917 to the underlying register (fp in this case) that makes
918 the original assignment true.
919 So the following insn will actually be decrementing fp by
920 STARTING_FRAME_OFFSET. */
921 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
922
923 /* Restoring the frame pointer also modifies the hard frame pointer.
924 Mark it used (so that the previous assignment remains live once
925 the frame pointer is eliminated) and clobbered (to represent the
926 implicit update from the assignment). */
927 emit_use (hard_frame_pointer_rtx);
928 emit_clobber (hard_frame_pointer_rtx);
929 }
53800dbe 930
5ae82d58 931#if !HARD_FRAME_POINTER_IS_ARG_POINTER
53800dbe 932 if (fixed_regs[ARG_POINTER_REGNUM])
933 {
934#ifdef ELIMINABLE_REGS
4598ade9 935 /* If the argument pointer can be eliminated in favor of the
936 frame pointer, we don't need to restore it. We assume here
937 that if such an elimination is present, it can always be used.
938 This is the case on all known machines; if we don't make this
939 assumption, we do unnecessary saving on many machines. */
53800dbe 940 size_t i;
e99c3a1d 941 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 942
3098b2d3 943 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 944 if (elim_regs[i].from == ARG_POINTER_REGNUM
945 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
946 break;
947
3098b2d3 948 if (i == ARRAY_SIZE (elim_regs))
53800dbe 949#endif
950 {
951 /* Now restore our arg pointer from the address at which it
05927e40 952 was saved in our stack frame. */
27a7a23a 953 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 954 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 955 }
956 }
957#endif
958
a3c81e61 959 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
960 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
961 else if (targetm.have_nonlocal_goto_receiver ())
962 emit_insn (targetm.gen_nonlocal_goto_receiver ());
53800dbe 963 else
a3c81e61 964 { /* Nothing */ }
57f6bb94 965
3072d30e 966 /* We must not allow the code we just generated to be reordered by
967 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 968 happen immediately, not later. */
3072d30e 969 emit_insn (gen_blockage ());
6b7f6858 970}
53800dbe 971
53800dbe 972/* __builtin_longjmp is passed a pointer to an array of five words (not
973 all will be used on all machines). It operates similarly to the C
974 library function of the same name, but is more efficient. Much of
2c8a1497 975 the code below is copied from the handling of non-local gotos. */
53800dbe 976
c626df3d 977static void
aecda0d6 978expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 979{
1e0c0b35 980 rtx fp, lab, stack;
981 rtx_insn *insn, *last;
3754d046 982 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 983
48e1416a 984 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 985 function */
986 if (SUPPORTS_STACK_ALIGNMENT)
987 crtl->need_drap = true;
988
f7c44134 989 if (setjmp_alias_set == -1)
990 setjmp_alias_set = new_alias_set ();
991
85d654dd 992 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 993
53800dbe 994 buf_addr = force_reg (Pmode, buf_addr);
995
82c7907c 996 /* We require that the user must pass a second argument of 1, because
997 that is what builtin_setjmp will return. */
64db345d 998 gcc_assert (value == const1_rtx);
53800dbe 999
4712c7d6 1000 last = get_last_insn ();
a3c81e61 1001 if (targetm.have_builtin_longjmp ())
1002 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
53800dbe 1003 else
53800dbe 1004 {
1005 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1006 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1007 GET_MODE_SIZE (Pmode)));
1008
29c05e22 1009 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1010 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1011 set_mem_alias_set (fp, setjmp_alias_set);
1012 set_mem_alias_set (lab, setjmp_alias_set);
1013 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1014
1015 /* Pick up FP, label, and SP from the block and jump. This code is
1016 from expand_goto in stmt.c; see there for detailed comments. */
a3c81e61 1017 if (targetm.have_nonlocal_goto ())
53800dbe 1018 /* We have to pass a value to the nonlocal_goto pattern that will
1019 get copied into the static_chain pointer, but it does not matter
1020 what that value is, because builtin_setjmp does not use it. */
a3c81e61 1021 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1022 else
53800dbe 1023 {
1024 lab = copy_to_reg (lab);
1025
18b42941 1026 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1027 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1028
53800dbe 1029 emit_move_insn (hard_frame_pointer_rtx, fp);
e9c97615 1030 emit_stack_restore (SAVE_NONLOCAL, stack);
53800dbe 1031
18b42941 1032 emit_use (hard_frame_pointer_rtx);
1033 emit_use (stack_pointer_rtx);
53800dbe 1034 emit_indirect_jump (lab);
1035 }
1036 }
615166bb 1037
1038 /* Search backwards and mark the jump insn as a non-local goto.
1039 Note that this precludes the use of __builtin_longjmp to a
1040 __builtin_setjmp target in the same function. However, we've
1041 already cautioned the user that these functions are for
1042 internal exception handling use only. */
449c0509 1043 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1044 {
64db345d 1045 gcc_assert (insn != last);
7d3f6cc7 1046
6d7dc5b9 1047 if (JUMP_P (insn))
449c0509 1048 {
a1ddb869 1049 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1050 break;
1051 }
6d7dc5b9 1052 else if (CALL_P (insn))
9342ee68 1053 break;
449c0509 1054 }
53800dbe 1055}
1056
0e80b01d 1057static inline bool
1058more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1059{
1060 return (iter->i < iter->n);
1061}
1062
1063/* This function validates the types of a function call argument list
1064 against a specified list of tree_codes. If the last specifier is a 0,
 1065 that represents an ellipsis; otherwise the last specifier must be a
1066 VOID_TYPE. */
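/* For instance, a call such as
     validate_arglist (callexpr, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   accepts exactly one argument of pointer type followed by one of integer
   type, while ending the list with 0 instead of VOID_TYPE would also
   accept any number of additional trailing arguments.  */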
1067
1068static bool
1069validate_arglist (const_tree callexpr, ...)
1070{
1071 enum tree_code code;
1072 bool res = 0;
1073 va_list ap;
1074 const_call_expr_arg_iterator iter;
1075 const_tree arg;
1076
1077 va_start (ap, callexpr);
1078 init_const_call_expr_arg_iterator (callexpr, &iter);
1079
1080 do
1081 {
1082 code = (enum tree_code) va_arg (ap, int);
1083 switch (code)
1084 {
1085 case 0:
 1086 /* This signifies an ellipsis; any further arguments are all OK. */
1087 res = true;
1088 goto end;
1089 case VOID_TYPE:
 1090 /* This signifies an endlink; if no arguments remain, return
1091 true, otherwise return false. */
1092 res = !more_const_call_expr_args_p (&iter);
1093 goto end;
1094 default:
1095 /* If no parameters remain or the parameter's code does not
1096 match the specified code, return false. Otherwise continue
1097 checking any remaining arguments. */
1098 arg = next_const_call_expr_arg (&iter);
1099 if (!validate_arg (arg, code))
1100 goto end;
1101 break;
1102 }
1103 }
1104 while (1);
1105
 1106 /* We need gotos here so that there is a single exit point at which
 1107 va_end is called. */
1108 end: ;
1109 va_end (ap);
1110
1111 return res;
1112}
1113
4ee9c684 1114/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1115 and the address of the save area. */
1116
1117static rtx
c2f47e15 1118expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1119{
1120 tree t_label, t_save_area;
1e0c0b35 1121 rtx r_label, r_save_area, r_fp, r_sp;
1122 rtx_insn *insn;
4ee9c684 1123
c2f47e15 1124 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1125 return NULL_RTX;
1126
c2f47e15 1127 t_label = CALL_EXPR_ARG (exp, 0);
1128 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1129
8ec3c5c2 1130 r_label = expand_normal (t_label);
3dce56cc 1131 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1132 r_save_area = expand_normal (t_save_area);
3dce56cc 1133 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1134 /* Copy the address of the save location to a register just in case it was
1135 based on the frame pointer. */
51adbc8a 1136 r_save_area = copy_to_reg (r_save_area);
4ee9c684 1137 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1138 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1139 plus_constant (Pmode, r_save_area,
1140 GET_MODE_SIZE (Pmode)));
4ee9c684 1141
18d50ae6 1142 crtl->has_nonlocal_goto = 1;
4ee9c684 1143
4ee9c684 1144 /* ??? We no longer need to pass the static chain value, afaik. */
a3c81e61 1145 if (targetm.have_nonlocal_goto ())
1146 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
4ee9c684 1147 else
4ee9c684 1148 {
1149 r_label = copy_to_reg (r_label);
1150
18b42941 1151 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1152 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1153
d1ff492e 1154 /* Restore frame pointer for containing function. */
4ee9c684 1155 emit_move_insn (hard_frame_pointer_rtx, r_fp);
e9c97615 1156 emit_stack_restore (SAVE_NONLOCAL, r_sp);
491e04ef 1157
4ee9c684 1158 /* USE of hard_frame_pointer_rtx added for consistency;
1159 not clear if really needed. */
18b42941 1160 emit_use (hard_frame_pointer_rtx);
1161 emit_use (stack_pointer_rtx);
ad0d0af8 1162
1163 /* If the architecture is using a GP register, we must
1164 conservatively assume that the target function makes use of it.
1165 The prologue of functions with nonlocal gotos must therefore
1166 initialize the GP register to the appropriate value, and we
1167 must then make sure that this value is live at the point
1168 of the jump. (Note that this doesn't necessarily apply
1169 to targets with a nonlocal_goto pattern; they are free
1170 to implement it in their own way. Note also that this is
1171 a no-op if the GP register is a global invariant.) */
1172 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1173 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
18b42941 1174 emit_use (pic_offset_table_rtx);
ad0d0af8 1175
4ee9c684 1176 emit_indirect_jump (r_label);
1177 }
491e04ef 1178
4ee9c684 1179 /* Search backwards to the jump insn and mark it as a
1180 non-local goto. */
1181 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1182 {
6d7dc5b9 1183 if (JUMP_P (insn))
4ee9c684 1184 {
a1ddb869 1185 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1186 break;
1187 }
6d7dc5b9 1188 else if (CALL_P (insn))
4ee9c684 1189 break;
1190 }
1191
1192 return const0_rtx;
1193}
1194
843d08a9 1195/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1196 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1197 It updates the stack pointer in that block to the current value. This is
1198 also called directly by the SJLJ exception handling code. */
843d08a9 1199
97354ae4 1200void
843d08a9 1201expand_builtin_update_setjmp_buf (rtx buf_addr)
1202{
3754d046 1203 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
d1ff492e 1204 rtx stack_save
843d08a9 1205 = gen_rtx_MEM (sa_mode,
1206 memory_address
1207 (sa_mode,
29c05e22 1208 plus_constant (Pmode, buf_addr,
1209 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1210
e9c97615 1211 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1212}
1213
5e3608d8 1214/* Expand a call to __builtin_prefetch. For a target that does not support
1215 data prefetch, evaluate the memory address argument in case it has side
1216 effects. */
1217
1218static void
c2f47e15 1219expand_builtin_prefetch (tree exp)
5e3608d8 1220{
1221 tree arg0, arg1, arg2;
c2f47e15 1222 int nargs;
5e3608d8 1223 rtx op0, op1, op2;
1224
c2f47e15 1225 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1226 return;
1227
c2f47e15 1228 arg0 = CALL_EXPR_ARG (exp, 0);
1229
26a5cadb 1230 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1231 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1232 locality). */
c2f47e15 1233 nargs = call_expr_nargs (exp);
1234 if (nargs > 1)
1235 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1236 else
c2f47e15 1237 arg1 = integer_zero_node;
1238 if (nargs > 2)
1239 arg2 = CALL_EXPR_ARG (exp, 2);
1240 else
2512209b 1241 arg2 = integer_three_node;
5e3608d8 1242
1243 /* Argument 0 is an address. */
1244 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1245
1246 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1247 if (TREE_CODE (arg1) != INTEGER_CST)
1248 {
07e3a3d2 1249 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1250 arg1 = integer_zero_node;
5e3608d8 1251 }
8ec3c5c2 1252 op1 = expand_normal (arg1);
5e3608d8 1253 /* Argument 1 must be either zero or one. */
1254 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1255 {
c3ceba8e 1256 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1257 " using zero");
5e3608d8 1258 op1 = const0_rtx;
1259 }
1260
1261 /* Argument 2 (locality) must be a compile-time constant int. */
1262 if (TREE_CODE (arg2) != INTEGER_CST)
1263 {
07e3a3d2 1264 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1265 arg2 = integer_zero_node;
1266 }
8ec3c5c2 1267 op2 = expand_normal (arg2);
5e3608d8 1268 /* Argument 2 must be 0, 1, 2, or 3. */
1269 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1270 {
c3ceba8e 1271 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1272 op2 = const0_rtx;
1273 }
1274
1d375a79 1275 if (targetm.have_prefetch ())
5e3608d8 1276 {
8786db1e 1277 struct expand_operand ops[3];
1278
1279 create_address_operand (&ops[0], op0);
1280 create_integer_operand (&ops[1], INTVAL (op1));
1281 create_integer_operand (&ops[2], INTVAL (op2));
1d375a79 1282 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
8786db1e 1283 return;
5e3608d8 1284 }
0a534ba7 1285
f0ce3b1f 1286 /* Don't do anything with direct references to volatile memory, but
1287 generate code to handle other side effects. */
e16ceb8e 1288 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1289 emit_insn (op0);
5e3608d8 1290}
1291
f7c44134 1292/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1293 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1294 the maximum length of the block of memory that might be accessed or
1295 NULL if unknown. */
f7c44134 1296
53800dbe 1297static rtx
d8ae1baa 1298get_memory_rtx (tree exp, tree len)
53800dbe 1299{
ad0a178f 1300 tree orig_exp = exp;
1301 rtx addr, mem;
ad0a178f 1302
1303 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1304 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1305 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1306 exp = TREE_OPERAND (exp, 0);
1307
1308 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1309 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1310
f7c44134 1311 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1312 First remove any nops. */
72dd6141 1313 while (CONVERT_EXPR_P (exp)
f7c44134 1314 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1315 exp = TREE_OPERAND (exp, 0);
1316
5dd3f78f 1317 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1318 (as builtin stringops may alias with anything). */
1319 exp = fold_build2 (MEM_REF,
1320 build_array_type (char_type_node,
1321 build_range_type (sizetype,
1322 size_one_node, len)),
1323 exp, build_int_cst (ptr_type_node, 0));
1324
1325 /* If the MEM_REF has no acceptable address, try to get the base object
1326 from the original address we got, and build an all-aliasing
1327 unknown-sized access to that one. */
1328 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1329 set_mem_attributes (mem, exp, 0);
1330 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1331 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1332 0))))
eec8e941 1333 {
5dd3f78f 1334 exp = build_fold_addr_expr (exp);
1335 exp = fold_build2 (MEM_REF,
1336 build_array_type (char_type_node,
1337 build_range_type (sizetype,
1338 size_zero_node,
1339 NULL)),
1340 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1341 set_mem_attributes (mem, exp, 0);
eec8e941 1342 }
5dd3f78f 1343 set_mem_alias_set (mem, 0);
53800dbe 1344 return mem;
1345}
1346\f
1347/* Built-in functions to perform an untyped call and return. */
1348
3b9c3a16 1349#define apply_args_mode \
1350 (this_target_builtins->x_apply_args_mode)
1351#define apply_result_mode \
1352 (this_target_builtins->x_apply_result_mode)
53800dbe 1353
53800dbe 1354/* Return the size required for the block returned by __builtin_apply_args,
1355 and initialize apply_args_mode. */
1356
1357static int
aecda0d6 1358apply_args_size (void)
53800dbe 1359{
1360 static int size = -1;
58e9ce8f 1361 int align;
1362 unsigned int regno;
3754d046 1363 machine_mode mode;
53800dbe 1364
1365 /* The values computed by this function never change. */
1366 if (size < 0)
1367 {
1368 /* The first value is the incoming arg-pointer. */
1369 size = GET_MODE_SIZE (Pmode);
1370
1371 /* The second value is the structure value address unless this is
1372 passed as an "invisible" first argument. */
6812c89e 1373 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1374 size += GET_MODE_SIZE (Pmode);
1375
1376 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1377 if (FUNCTION_ARG_REGNO_P (regno))
1378 {
4bac51c9 1379 mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1380
64db345d 1381 gcc_assert (mode != VOIDmode);
53800dbe 1382
1383 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1384 if (size % align != 0)
1385 size = CEIL (size, align) * align;
53800dbe 1386 size += GET_MODE_SIZE (mode);
1387 apply_args_mode[regno] = mode;
1388 }
1389 else
1390 {
1391 apply_args_mode[regno] = VOIDmode;
53800dbe 1392 }
1393 }
1394 return size;
1395}
1396
1397/* Return the size required for the block returned by __builtin_apply,
1398 and initialize apply_result_mode. */
1399
1400static int
aecda0d6 1401apply_result_size (void)
53800dbe 1402{
1403 static int size = -1;
1404 int align, regno;
3754d046 1405 machine_mode mode;
53800dbe 1406
1407 /* The values computed by this function never change. */
1408 if (size < 0)
1409 {
1410 size = 0;
1411
1412 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1413 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1414 {
4bac51c9 1415 mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1416
64db345d 1417 gcc_assert (mode != VOIDmode);
53800dbe 1418
1419 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1420 if (size % align != 0)
1421 size = CEIL (size, align) * align;
1422 size += GET_MODE_SIZE (mode);
1423 apply_result_mode[regno] = mode;
1424 }
1425 else
1426 apply_result_mode[regno] = VOIDmode;
1427
1428 /* Allow targets that use untyped_call and untyped_return to override
1429 the size so that machine-specific information can be stored here. */
1430#ifdef APPLY_RESULT_SIZE
1431 size = APPLY_RESULT_SIZE;
1432#endif
1433 }
1434 return size;
1435}
1436
53800dbe 1437/* Create a vector describing the result block RESULT. If SAVEP is true,
1438 the result block is used to save the values; otherwise it is used to
1439 restore the values. */
1440
1441static rtx
aecda0d6 1442result_vector (int savep, rtx result)
53800dbe 1443{
1444 int regno, size, align, nelts;
3754d046 1445 machine_mode mode;
53800dbe 1446 rtx reg, mem;
364c0c59 1447 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1448
53800dbe 1449 size = nelts = 0;
1450 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1451 if ((mode = apply_result_mode[regno]) != VOIDmode)
1452 {
1453 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1454 if (size % align != 0)
1455 size = CEIL (size, align) * align;
1456 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1457 mem = adjust_address (result, mode, size);
53800dbe 1458 savevec[nelts++] = (savep
d1f9b275 1459 ? gen_rtx_SET (mem, reg)
1460 : gen_rtx_SET (reg, mem));
53800dbe 1461 size += GET_MODE_SIZE (mode);
1462 }
1463 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1464}
53800dbe 1465
1466/* Save the state required to perform an untyped call with the same
1467 arguments as were passed to the current function. */
1468
1469static rtx
aecda0d6 1470expand_builtin_apply_args_1 (void)
53800dbe 1471{
1c7e61a7 1472 rtx registers, tem;
53800dbe 1473 int size, align, regno;
3754d046 1474 machine_mode mode;
6812c89e 1475 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1476
1477 /* Create a block where the arg-pointer, structure value address,
1478 and argument registers can be saved. */
1479 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1480
1481 /* Walk past the arg-pointer and structure value address. */
1482 size = GET_MODE_SIZE (Pmode);
6812c89e 1483 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1484 size += GET_MODE_SIZE (Pmode);
1485
1486 /* Save each register used in calling a function to the block. */
1487 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1488 if ((mode = apply_args_mode[regno]) != VOIDmode)
1489 {
53800dbe 1490 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1491 if (size % align != 0)
1492 size = CEIL (size, align) * align;
1493
1494 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1495
e513d163 1496 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1497 size += GET_MODE_SIZE (mode);
1498 }
1499
1500 /* Save the arg pointer to the block. */
27a7a23a 1501 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1502	  /* We need the pointer as the caller actually passed the arguments to us,
9d4b544c 1503	     not as we might have pretended they were passed.  Make sure it's a valid
 1504	     operand, as emit_move_insn isn't expected to handle a PLUS.  */
3764c94e 1505 if (STACK_GROWS_DOWNWARD)
1506 tem
1507 = force_operand (plus_constant (Pmode, tem,
1508 crtl->args.pretend_args_size),
1509 NULL_RTX);
1c7e61a7 1510 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1511
53800dbe 1512 size = GET_MODE_SIZE (Pmode);
1513
1514 /* Save the structure value address unless this is passed as an
1515 "invisible" first argument. */
45550790 1516 if (struct_incoming_value)
53800dbe 1517 {
e513d163 1518 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1519 copy_to_reg (struct_incoming_value));
53800dbe 1520 size += GET_MODE_SIZE (Pmode);
1521 }
1522
1523 /* Return the address of the block. */
1524 return copy_addr_to_reg (XEXP (registers, 0));
1525}
1526
1527/* __builtin_apply_args returns block of memory allocated on
1528 the stack into which is stored the arg pointer, structure
1529 value address, static chain, and all the registers that might
1530 possibly be used in performing a function call. The code is
1531 moved to the start of the function so the incoming values are
1532 saved. */
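/* As an illustration (a sketch, not code from this file), the three
   builtins are normally used together to forward a call unchanged:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*)()) fn, args, 64);
     __builtin_return (ret);

   where FN is the hypothetical callee being forwarded to and 64 is a
   caller-chosen upper bound on the size of the argument block.  */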
27d0c333 1533
53800dbe 1534static rtx
aecda0d6 1535expand_builtin_apply_args (void)
53800dbe 1536{
1537 /* Don't do __builtin_apply_args more than once in a function.
1538 Save the result of the first call and reuse it. */
1539 if (apply_args_value != 0)
1540 return apply_args_value;
1541 {
1542 /* When this function is called, it means that registers must be
1543 saved on entry to this function. So we migrate the
1544 call to the first insn of this function. */
1545 rtx temp;
53800dbe 1546
1547 start_sequence ();
1548 temp = expand_builtin_apply_args_1 ();
9ed997be 1549 rtx_insn *seq = get_insns ();
53800dbe 1550 end_sequence ();
1551
1552 apply_args_value = temp;
1553
31d3e01c 1554 /* Put the insns after the NOTE that starts the function.
1555 If this is inside a start_sequence, make the outer-level insn
53800dbe 1556 chain current, so the code is placed at the start of the
0ef1a651 1557 function. If internal_arg_pointer is a non-virtual pseudo,
1558 it needs to be placed after the function that initializes
1559 that pseudo. */
53800dbe 1560 push_topmost_sequence ();
0ef1a651 1561 if (REG_P (crtl->args.internal_arg_pointer)
1562 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1563 emit_insn_before (seq, parm_birth_insn);
1564 else
1565 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1566 pop_topmost_sequence ();
1567 return temp;
1568 }
1569}
1570
1571/* Perform an untyped call and save the state required to perform an
1572 untyped return of whatever value was returned by the given function. */
1573
1574static rtx
aecda0d6 1575expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1576{
1577 int size, align, regno;
3754d046 1578 machine_mode mode;
1e0c0b35 1579 rtx incoming_args, result, reg, dest, src;
1580 rtx_call_insn *call_insn;
53800dbe 1581 rtx old_stack_level = 0;
1582 rtx call_fusage = 0;
6812c89e 1583 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1584
85d654dd 1585 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1586
53800dbe 1587 /* Create a block where the return registers can be saved. */
1588 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1589
53800dbe 1590 /* Fetch the arg pointer from the ARGUMENTS block. */
1591 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1592 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1593 if (!STACK_GROWS_DOWNWARD)
1594 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1595 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1596
04a46d40 1597 /* Push a new argument block and copy the arguments. Do not allow
1598 the (potential) memcpy call below to interfere with our stack
1599 manipulations. */
53800dbe 1600 do_pending_stack_adjust ();
04a46d40 1601 NO_DEFER_POP;
53800dbe 1602
2358393e 1603 /* Save the stack with nonlocal if available. */
71512c05 1604 if (targetm.have_save_stack_nonlocal ())
e9c97615 1605 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1606 else
e9c97615 1607 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1608
59647703 1609 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1610 arguments to the outgoing arguments address. We can pass TRUE
1611 as the 4th argument because we just saved the stack pointer
1612 and will restore it right after the call. */
5be42b39 1613 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1614
1615 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1616 may have already set current_function_calls_alloca to true.
1617 current_function_calls_alloca won't be set if argsize is zero,
1618 so we have to guarantee need_drap is true here. */
1619 if (SUPPORTS_STACK_ALIGNMENT)
1620 crtl->need_drap = true;
1621
59647703 1622 dest = virtual_outgoing_args_rtx;
3764c94e 1623 if (!STACK_GROWS_DOWNWARD)
1624 {
1625 if (CONST_INT_P (argsize))
1626 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1627 else
1628 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1629 }
2a631e19 1630 dest = gen_rtx_MEM (BLKmode, dest);
1631 set_mem_align (dest, PARM_BOUNDARY);
1632 src = gen_rtx_MEM (BLKmode, incoming_args);
1633 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1634 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1635
1636 /* Refer to the argument block. */
1637 apply_args_size ();
1638 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1639 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1640
1641 /* Walk past the arg-pointer and structure value address. */
1642 size = GET_MODE_SIZE (Pmode);
45550790 1643 if (struct_value)
53800dbe 1644 size += GET_MODE_SIZE (Pmode);
1645
1646 /* Restore each of the registers previously saved. Make USE insns
1647 for each of these registers for use in making the call. */
1648 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1649 if ((mode = apply_args_mode[regno]) != VOIDmode)
1650 {
1651 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1652 if (size % align != 0)
1653 size = CEIL (size, align) * align;
1654 reg = gen_rtx_REG (mode, regno);
e513d163 1655 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1656 use_reg (&call_fusage, reg);
1657 size += GET_MODE_SIZE (mode);
1658 }
1659
1660 /* Restore the structure value address unless this is passed as an
1661 "invisible" first argument. */
1662 size = GET_MODE_SIZE (Pmode);
45550790 1663 if (struct_value)
53800dbe 1664 {
1665 rtx value = gen_reg_rtx (Pmode);
e513d163 1666 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1667 emit_move_insn (struct_value, value);
8ad4c111 1668 if (REG_P (struct_value))
45550790 1669 use_reg (&call_fusage, struct_value);
53800dbe 1670 size += GET_MODE_SIZE (Pmode);
1671 }
1672
1673 /* All arguments and registers used for the call are set up by now! */
82c7907c 1674 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1675
1676 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1677 and we don't want to load it into a register as an optimization,
1678 because prepare_call_address already did it if it should be done. */
1679 if (GET_CODE (function) != SYMBOL_REF)
1680 function = memory_address (FUNCTION_MODE, function);
1681
1682 /* Generate the actual call instruction and save the return value. */
1d99ab0a 1683 if (targetm.have_untyped_call ())
1684 {
1685 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1686 emit_call_insn (targetm.gen_untyped_call (mem, result,
1687 result_vector (1, result)));
1688 }
53800dbe 1689 else
53800dbe 1690#ifdef HAVE_call_value
1691 if (HAVE_call_value)
1692 {
1693 rtx valreg = 0;
1694
1695 /* Locate the unique return register. It is not possible to
1696 express a call that sets more than one return register using
1697 call_value; use untyped_call for that. In fact, untyped_call
1698 only needs to save the return registers in the given block. */
1699 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1700 if ((mode = apply_result_mode[regno]) != VOIDmode)
1701 {
64db345d 1702 gcc_assert (!valreg); /* HAVE_untyped_call required. */
7d3f6cc7 1703
53800dbe 1704 valreg = gen_rtx_REG (mode, regno);
1705 }
1706
2ed6c343 1707 emit_call_insn (GEN_CALL_VALUE (valreg,
53800dbe 1708 gen_rtx_MEM (FUNCTION_MODE, function),
1709 const0_rtx, NULL_RTX, const0_rtx));
1710
e513d163 1711 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1712 }
1713 else
1714#endif
64db345d 1715 gcc_unreachable ();
53800dbe 1716
d5f9786f 1717 /* Find the CALL insn we just emitted, and attach the register usage
1718 information. */
1719 call_insn = last_call_insn ();
1720 add_function_usage_to (call_insn, call_fusage);
53800dbe 1721
1722 /* Restore the stack. */
71512c05 1723 if (targetm.have_save_stack_nonlocal ())
e9c97615 1724 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1725 else
e9c97615 1726 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1727 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1728
04a46d40 1729 OK_DEFER_POP;
1730
53800dbe 1731 /* Return the address of the result block. */
85d654dd 1732 result = copy_addr_to_reg (XEXP (result, 0));
1733 return convert_memory_address (ptr_mode, result);
53800dbe 1734}
1735
1736/* Perform an untyped return. */
1737
1738static void
aecda0d6 1739expand_builtin_return (rtx result)
53800dbe 1740{
1741 int size, align, regno;
3754d046 1742 machine_mode mode;
53800dbe 1743 rtx reg;
57c26b3a 1744 rtx_insn *call_fusage = 0;
53800dbe 1745
85d654dd 1746 result = convert_memory_address (Pmode, result);
726ec87c 1747
53800dbe 1748 apply_result_size ();
1749 result = gen_rtx_MEM (BLKmode, result);
1750
1d99ab0a 1751 if (targetm.have_untyped_return ())
53800dbe 1752 {
1d99ab0a 1753 rtx vector = result_vector (0, result);
1754 emit_jump_insn (targetm.gen_untyped_return (result, vector));
53800dbe 1755 emit_barrier ();
1756 return;
1757 }
53800dbe 1758
1759 /* Restore the return value and note that each value is used. */
1760 size = 0;
1761 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1762 if ((mode = apply_result_mode[regno]) != VOIDmode)
1763 {
1764 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1765 if (size % align != 0)
1766 size = CEIL (size, align) * align;
1767 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1768 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1769
1770 push_to_sequence (call_fusage);
18b42941 1771 emit_use (reg);
53800dbe 1772 call_fusage = get_insns ();
1773 end_sequence ();
1774 size += GET_MODE_SIZE (mode);
1775 }
1776
1777 /* Put the USE insns before the return. */
31d3e01c 1778 emit_insn (call_fusage);
53800dbe 1779
 1780	  /* Return whatever value was restored by jumping directly to the end
1781 of the function. */
62380d2d 1782 expand_naked_return ();
53800dbe 1783}
1784
539a3a92 1785/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1786
539a3a92 1787static enum type_class
aecda0d6 1788type_to_class (tree type)
539a3a92 1789{
1790 switch (TREE_CODE (type))
1791 {
1792 case VOID_TYPE: return void_type_class;
1793 case INTEGER_TYPE: return integer_type_class;
539a3a92 1794 case ENUMERAL_TYPE: return enumeral_type_class;
1795 case BOOLEAN_TYPE: return boolean_type_class;
1796 case POINTER_TYPE: return pointer_type_class;
1797 case REFERENCE_TYPE: return reference_type_class;
1798 case OFFSET_TYPE: return offset_type_class;
1799 case REAL_TYPE: return real_type_class;
1800 case COMPLEX_TYPE: return complex_type_class;
1801 case FUNCTION_TYPE: return function_type_class;
1802 case METHOD_TYPE: return method_type_class;
1803 case RECORD_TYPE: return record_type_class;
1804 case UNION_TYPE:
1805 case QUAL_UNION_TYPE: return union_type_class;
1806 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1807 ? string_type_class : array_type_class);
539a3a92 1808 case LANG_TYPE: return lang_type_class;
1809 default: return no_type_class;
1810 }
1811}
bf8e3599 1812
c2f47e15 1813/* Expand a call EXP to __builtin_classify_type. */
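/* (E.g. a call such as __builtin_classify_type (1.5) yields
   real_type_class, and a call with no arguments yields no_type_class.)  */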
27d0c333 1814
53800dbe 1815static rtx
c2f47e15 1816expand_builtin_classify_type (tree exp)
53800dbe 1817{
c2f47e15 1818 if (call_expr_nargs (exp))
1819 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1820 return GEN_INT (no_type_class);
1821}
1822
07976da7 1823/* This helper macro, meant to be used in mathfn_built_in below,
1824 determines which among a set of three builtin math functions is
1825 appropriate for a given type mode. The `F' and `L' cases are
1826 automatically generated from the `double' case. */
1827#define CASE_MATHFN(BUILT_IN_MATHFN) \
1828 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1829 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1830 fcodel = BUILT_IN_MATHFN##L ; break;
cd2656b0 1831/* Similar to above, but appends _R after any F/L suffix. */
1832#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1833 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1834 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1835 fcodel = BUILT_IN_MATHFN##L_R ; break;
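/* As an illustration, CASE_MATHFN (BUILT_IN_SIN) expands to roughly:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */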
07976da7 1836
b9a16870 1837	/* Return the mathematical function equivalent to FN but operating directly on TYPE,
1838 if available. If IMPLICIT is true use the implicit builtin declaration,
1839 otherwise use the explicit declaration. If we can't do the conversion,
1840 return zero. */
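/* (For example, mathfn_built_in_1 (float_type_node, BUILT_IN_SIN, true)
   returns the decl for sinf, provided the implicit declaration exists.)  */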
c319d56a 1841
1842static tree
b9a16870 1843mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
0a68165a 1844{
b9a16870 1845 enum built_in_function fcode, fcodef, fcodel, fcode2;
07976da7 1846
1847 switch (fn)
1848 {
746114e8 1849 CASE_MATHFN (BUILT_IN_ACOS)
1850 CASE_MATHFN (BUILT_IN_ACOSH)
1851 CASE_MATHFN (BUILT_IN_ASIN)
1852 CASE_MATHFN (BUILT_IN_ASINH)
07976da7 1853 CASE_MATHFN (BUILT_IN_ATAN)
746114e8 1854 CASE_MATHFN (BUILT_IN_ATAN2)
1855 CASE_MATHFN (BUILT_IN_ATANH)
1856 CASE_MATHFN (BUILT_IN_CBRT)
07976da7 1857 CASE_MATHFN (BUILT_IN_CEIL)
d735c391 1858 CASE_MATHFN (BUILT_IN_CEXPI)
746114e8 1859 CASE_MATHFN (BUILT_IN_COPYSIGN)
07976da7 1860 CASE_MATHFN (BUILT_IN_COS)
746114e8 1861 CASE_MATHFN (BUILT_IN_COSH)
1862 CASE_MATHFN (BUILT_IN_DREM)
1863 CASE_MATHFN (BUILT_IN_ERF)
1864 CASE_MATHFN (BUILT_IN_ERFC)
07976da7 1865 CASE_MATHFN (BUILT_IN_EXP)
746114e8 1866 CASE_MATHFN (BUILT_IN_EXP10)
1867 CASE_MATHFN (BUILT_IN_EXP2)
1868 CASE_MATHFN (BUILT_IN_EXPM1)
1869 CASE_MATHFN (BUILT_IN_FABS)
1870 CASE_MATHFN (BUILT_IN_FDIM)
07976da7 1871 CASE_MATHFN (BUILT_IN_FLOOR)
746114e8 1872 CASE_MATHFN (BUILT_IN_FMA)
1873 CASE_MATHFN (BUILT_IN_FMAX)
1874 CASE_MATHFN (BUILT_IN_FMIN)
1875 CASE_MATHFN (BUILT_IN_FMOD)
1876 CASE_MATHFN (BUILT_IN_FREXP)
1877 CASE_MATHFN (BUILT_IN_GAMMA)
cd2656b0 1878 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
746114e8 1879 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1880 CASE_MATHFN (BUILT_IN_HYPOT)
1881 CASE_MATHFN (BUILT_IN_ILOGB)
80ff6494 1882 CASE_MATHFN (BUILT_IN_ICEIL)
1883 CASE_MATHFN (BUILT_IN_IFLOOR)
746114e8 1884 CASE_MATHFN (BUILT_IN_INF)
80ff6494 1885 CASE_MATHFN (BUILT_IN_IRINT)
1886 CASE_MATHFN (BUILT_IN_IROUND)
69b779ea 1887 CASE_MATHFN (BUILT_IN_ISINF)
746114e8 1888 CASE_MATHFN (BUILT_IN_J0)
1889 CASE_MATHFN (BUILT_IN_J1)
1890 CASE_MATHFN (BUILT_IN_JN)
ac148751 1891 CASE_MATHFN (BUILT_IN_LCEIL)
746114e8 1892 CASE_MATHFN (BUILT_IN_LDEXP)
ad52b9b7 1893 CASE_MATHFN (BUILT_IN_LFLOOR)
746114e8 1894 CASE_MATHFN (BUILT_IN_LGAMMA)
cd2656b0 1895 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
ac148751 1896 CASE_MATHFN (BUILT_IN_LLCEIL)
ad52b9b7 1897 CASE_MATHFN (BUILT_IN_LLFLOOR)
746114e8 1898 CASE_MATHFN (BUILT_IN_LLRINT)
1899 CASE_MATHFN (BUILT_IN_LLROUND)
07976da7 1900 CASE_MATHFN (BUILT_IN_LOG)
746114e8 1901 CASE_MATHFN (BUILT_IN_LOG10)
1902 CASE_MATHFN (BUILT_IN_LOG1P)
1903 CASE_MATHFN (BUILT_IN_LOG2)
1904 CASE_MATHFN (BUILT_IN_LOGB)
1905 CASE_MATHFN (BUILT_IN_LRINT)
1906 CASE_MATHFN (BUILT_IN_LROUND)
1907 CASE_MATHFN (BUILT_IN_MODF)
1908 CASE_MATHFN (BUILT_IN_NAN)
1909 CASE_MATHFN (BUILT_IN_NANS)
07976da7 1910 CASE_MATHFN (BUILT_IN_NEARBYINT)
746114e8 1911 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1912 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1913 CASE_MATHFN (BUILT_IN_POW)
757c219d 1914 CASE_MATHFN (BUILT_IN_POWI)
746114e8 1915 CASE_MATHFN (BUILT_IN_POW10)
1916 CASE_MATHFN (BUILT_IN_REMAINDER)
1917 CASE_MATHFN (BUILT_IN_REMQUO)
1918 CASE_MATHFN (BUILT_IN_RINT)
07976da7 1919 CASE_MATHFN (BUILT_IN_ROUND)
746114e8 1920 CASE_MATHFN (BUILT_IN_SCALB)
1921 CASE_MATHFN (BUILT_IN_SCALBLN)
1922 CASE_MATHFN (BUILT_IN_SCALBN)
c319d56a 1923 CASE_MATHFN (BUILT_IN_SIGNBIT)
746114e8 1924 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
07976da7 1925 CASE_MATHFN (BUILT_IN_SIN)
746114e8 1926 CASE_MATHFN (BUILT_IN_SINCOS)
1927 CASE_MATHFN (BUILT_IN_SINH)
07976da7 1928 CASE_MATHFN (BUILT_IN_SQRT)
1929 CASE_MATHFN (BUILT_IN_TAN)
746114e8 1930 CASE_MATHFN (BUILT_IN_TANH)
1931 CASE_MATHFN (BUILT_IN_TGAMMA)
07976da7 1932 CASE_MATHFN (BUILT_IN_TRUNC)
746114e8 1933 CASE_MATHFN (BUILT_IN_Y0)
1934 CASE_MATHFN (BUILT_IN_Y1)
1935 CASE_MATHFN (BUILT_IN_YN)
07976da7 1936
0a68165a 1937 default:
c2f47e15 1938 return NULL_TREE;
0a68165a 1939 }
07976da7 1940
96b9f485 1941 if (TYPE_MAIN_VARIANT (type) == double_type_node)
b9a16870 1942 fcode2 = fcode;
96b9f485 1943 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
b9a16870 1944 fcode2 = fcodef;
96b9f485 1945 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
b9a16870 1946 fcode2 = fcodel;
07976da7 1947 else
c2f47e15 1948 return NULL_TREE;
b9a16870 1949
1950 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1951 return NULL_TREE;
1952
1953 return builtin_decl_explicit (fcode2);
0a68165a 1954}
1955
c319d56a 1956	/* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1957
1958tree
1959mathfn_built_in (tree type, enum built_in_function fn)
1960{
1961 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1962}
1963
0fd605a5 1964/* If errno must be maintained, expand the RTL to check if the result,
1965 TARGET, of a built-in function call, EXP, is NaN, and if so set
1966 errno to EDOM. */
1967
1968static void
aecda0d6 1969expand_errno_check (tree exp, rtx target)
0fd605a5 1970{
1e0c0b35 1971 rtx_code_label *lab = gen_label_rtx ();
0fd605a5 1972
7f05340e 1973 /* Test the result; if it is NaN, set errno=EDOM because
1974 the argument was not in the domain. */
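  /* (A self-comparison TARGET == TARGET is false only when TARGET is a NaN,
     so the branch to LAB is taken for every ordinary, non-NaN result and
     the errno update below is skipped.)  */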
3fcf767f 1975 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
f9a00e9e 1976 NULL_RTX, NULL, lab,
79ab74cc 1977 /* The jump is very likely. */
1978 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
0fd605a5 1979
1980#ifdef TARGET_EDOM
7f05340e 1981 /* If this built-in doesn't throw an exception, set errno directly. */
c2f47e15 1982 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7f05340e 1983 {
0fd605a5 1984#ifdef GEN_ERRNO_RTX
7f05340e 1985 rtx errno_rtx = GEN_ERRNO_RTX;
0fd605a5 1986#else
7f05340e 1987 rtx errno_rtx
0fd605a5 1988 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1989#endif
d11aedc7 1990 emit_move_insn (errno_rtx,
1991 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
0fd605a5 1992 emit_label (lab);
7f05340e 1993 return;
0fd605a5 1994 }
7f05340e 1995#endif
1996
08491912 1997 /* Make sure the library call isn't expanded as a tail call. */
1998 CALL_EXPR_TAILCALL (exp) = 0;
1999
7f05340e 2000 /* We can't set errno=EDOM directly; let the library call do it.
2001 Pop the arguments right away in case the call gets deleted. */
2002 NO_DEFER_POP;
2003 expand_call (exp, target, 0);
2004 OK_DEFER_POP;
2005 emit_label (lab);
0fd605a5 2006}
2007
6b43bae4 2008/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
c2f47e15 2009 Return NULL_RTX if a normal call should be emitted rather than expanding
2010 the function in-line. EXP is the expression that is a call to the builtin
53800dbe 2011 function; if convenient, the result should be placed in TARGET.
2012 SUBTARGET may be used as the target for computing one of EXP's operands. */
27d0c333 2013
53800dbe 2014static rtx
aecda0d6 2015expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
53800dbe 2016{
bf8e3599 2017 optab builtin_optab;
1e0c0b35 2018 rtx op0;
2019 rtx_insn *insns;
c6e6ecb1 2020 tree fndecl = get_callee_fndecl (exp);
3754d046 2021 machine_mode mode;
528ee710 2022 bool errno_set = false;
d6a0a4b0 2023 bool try_widening = false;
abfea505 2024 tree arg;
53800dbe 2025
c2f47e15 2026 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2027 return NULL_RTX;
53800dbe 2028
c2f47e15 2029 arg = CALL_EXPR_ARG (exp, 0);
53800dbe 2030
2031 switch (DECL_FUNCTION_CODE (fndecl))
2032 {
4f35b1fc 2033 CASE_FLT_FN (BUILT_IN_SQRT):
7f05340e 2034 errno_set = ! tree_expr_nonnegative_p (arg);
d6a0a4b0 2035 try_widening = true;
7f05340e 2036 builtin_optab = sqrt_optab;
2037 break;
4f35b1fc 2038 CASE_FLT_FN (BUILT_IN_EXP):
528ee710 2039 errno_set = true; builtin_optab = exp_optab; break;
4f35b1fc 2040 CASE_FLT_FN (BUILT_IN_EXP10):
2041 CASE_FLT_FN (BUILT_IN_POW10):
750ef9f5 2042 errno_set = true; builtin_optab = exp10_optab; break;
4f35b1fc 2043 CASE_FLT_FN (BUILT_IN_EXP2):
750ef9f5 2044 errno_set = true; builtin_optab = exp2_optab; break;
4f35b1fc 2045 CASE_FLT_FN (BUILT_IN_EXPM1):
a6b4eed2 2046 errno_set = true; builtin_optab = expm1_optab; break;
4f35b1fc 2047 CASE_FLT_FN (BUILT_IN_LOGB):
4efbc641 2048 errno_set = true; builtin_optab = logb_optab; break;
4f35b1fc 2049 CASE_FLT_FN (BUILT_IN_LOG):
528ee710 2050 errno_set = true; builtin_optab = log_optab; break;
4f35b1fc 2051 CASE_FLT_FN (BUILT_IN_LOG10):
d3cd9bde 2052 errno_set = true; builtin_optab = log10_optab; break;
4f35b1fc 2053 CASE_FLT_FN (BUILT_IN_LOG2):
d3cd9bde 2054 errno_set = true; builtin_optab = log2_optab; break;
4f35b1fc 2055 CASE_FLT_FN (BUILT_IN_LOG1P):
f474cd93 2056 errno_set = true; builtin_optab = log1p_optab; break;
4f35b1fc 2057 CASE_FLT_FN (BUILT_IN_ASIN):
8de2f465 2058 builtin_optab = asin_optab; break;
4f35b1fc 2059 CASE_FLT_FN (BUILT_IN_ACOS):
8de2f465 2060 builtin_optab = acos_optab; break;
4f35b1fc 2061 CASE_FLT_FN (BUILT_IN_TAN):
528ee710 2062 builtin_optab = tan_optab; break;
4f35b1fc 2063 CASE_FLT_FN (BUILT_IN_ATAN):
528ee710 2064 builtin_optab = atan_optab; break;
4f35b1fc 2065 CASE_FLT_FN (BUILT_IN_FLOOR):
528ee710 2066 builtin_optab = floor_optab; break;
4f35b1fc 2067 CASE_FLT_FN (BUILT_IN_CEIL):
528ee710 2068 builtin_optab = ceil_optab; break;
4f35b1fc 2069 CASE_FLT_FN (BUILT_IN_TRUNC):
a7cc195f 2070 builtin_optab = btrunc_optab; break;
4f35b1fc 2071 CASE_FLT_FN (BUILT_IN_ROUND):
528ee710 2072 builtin_optab = round_optab; break;
4f35b1fc 2073 CASE_FLT_FN (BUILT_IN_NEARBYINT):
0ddf4ad9 2074 builtin_optab = nearbyint_optab;
2075 if (flag_trapping_math)
2076 break;
 2077	      /* Else fall through and expand as rint.  */
4f35b1fc 2078 CASE_FLT_FN (BUILT_IN_RINT):
aef94a0f 2079 builtin_optab = rint_optab; break;
b3154a1f 2080 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2081 builtin_optab = significand_optab; break;
42721db0 2082 default:
64db345d 2083 gcc_unreachable ();
53800dbe 2084 }
2085
7f05340e 2086 /* Make a suitable register to place result in. */
2087 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2088
7f05340e 2089 if (! flag_errno_math || ! HONOR_NANS (mode))
2090 errno_set = false;
2091
d6a0a4b0 2092 /* Before working hard, check whether the instruction is available, but try
2093 to widen the mode for specific operations. */
2094 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2095 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
f2aca212 2096 && (!errno_set || !optimize_insn_for_size_p ()))
68e6cb9d 2097 {
de2e453e 2098 rtx result = gen_reg_rtx (mode);
7f05340e 2099
bd421108 2100 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2101 need to expand the argument again. This way, we will not perform
 2102	 side-effects more than once.  */
abfea505 2103 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7f05340e 2104
1db6d067 2105 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7f05340e 2106
bd421108 2107 start_sequence ();
53800dbe 2108
de2e453e 2109 /* Compute into RESULT.
2110 Set RESULT to wherever the result comes back. */
2111 result = expand_unop (mode, builtin_optab, op0, result, 0);
bd421108 2112
de2e453e 2113 if (result != 0)
bd421108 2114 {
2115 if (errno_set)
de2e453e 2116 expand_errno_check (exp, result);
bd421108 2117
2118 /* Output the entire sequence. */
2119 insns = get_insns ();
2120 end_sequence ();
2121 emit_insn (insns);
de2e453e 2122 return result;
bd421108 2123 }
2124
2125 /* If we were unable to expand via the builtin, stop the sequence
 2126	 (without outputting the insns) and call the library function
2127 with the stabilized argument list. */
53800dbe 2128 end_sequence ();
53800dbe 2129 }
2130
1e5b92fa 2131 return expand_call (exp, target, target == const0_rtx);
0fd605a5 2132}
2133
2134/* Expand a call to the builtin binary math functions (pow and atan2).
c2f47e15 2135 Return NULL_RTX if a normal call should be emitted rather than expanding the
0fd605a5 2136 function in-line. EXP is the expression that is a call to the builtin
2137 function; if convenient, the result should be placed in TARGET.
2138 SUBTARGET may be used as the target for computing one of EXP's
2139 operands. */
2140
2141static rtx
aecda0d6 2142expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
0fd605a5 2143{
2144 optab builtin_optab;
1e0c0b35 2145 rtx op0, op1, result;
2146 rtx_insn *insns;
4737caf2 2147 int op1_type = REAL_TYPE;
c6e6ecb1 2148 tree fndecl = get_callee_fndecl (exp);
abfea505 2149 tree arg0, arg1;
3754d046 2150 machine_mode mode;
0fd605a5 2151 bool errno_set = true;
0fd605a5 2152
73a954a1 2153 switch (DECL_FUNCTION_CODE (fndecl))
2154 {
2155 CASE_FLT_FN (BUILT_IN_SCALBN):
2156 CASE_FLT_FN (BUILT_IN_SCALBLN):
2157 CASE_FLT_FN (BUILT_IN_LDEXP):
2158 op1_type = INTEGER_TYPE;
2159 default:
2160 break;
2161 }
4737caf2 2162
c2f47e15 2163 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2164 return NULL_RTX;
0fd605a5 2165
c2f47e15 2166 arg0 = CALL_EXPR_ARG (exp, 0);
2167 arg1 = CALL_EXPR_ARG (exp, 1);
0fd605a5 2168
0fd605a5 2169 switch (DECL_FUNCTION_CODE (fndecl))
2170 {
4f35b1fc 2171 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 2172 builtin_optab = pow_optab; break;
4f35b1fc 2173 CASE_FLT_FN (BUILT_IN_ATAN2):
0fd605a5 2174 builtin_optab = atan2_optab; break;
73a954a1 2175 CASE_FLT_FN (BUILT_IN_SCALB):
2176 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2177 return 0;
2178 builtin_optab = scalb_optab; break;
2179 CASE_FLT_FN (BUILT_IN_SCALBN):
2180 CASE_FLT_FN (BUILT_IN_SCALBLN):
2181 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2182 return 0;
2183 /* Fall through... */
4f35b1fc 2184 CASE_FLT_FN (BUILT_IN_LDEXP):
4737caf2 2185 builtin_optab = ldexp_optab; break;
4f35b1fc 2186 CASE_FLT_FN (BUILT_IN_FMOD):
80ed5c06 2187 builtin_optab = fmod_optab; break;
ef722005 2188 CASE_FLT_FN (BUILT_IN_REMAINDER):
4f35b1fc 2189 CASE_FLT_FN (BUILT_IN_DREM):
ef722005 2190 builtin_optab = remainder_optab; break;
0fd605a5 2191 default:
64db345d 2192 gcc_unreachable ();
0fd605a5 2193 }
2194
7f05340e 2195 /* Make a suitable register to place result in. */
2196 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2197
2198 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2199 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
c2f47e15 2200 return NULL_RTX;
fc4eef90 2201
de2e453e 2202 result = gen_reg_rtx (mode);
7f05340e 2203
2204 if (! flag_errno_math || ! HONOR_NANS (mode))
2205 errno_set = false;
2206
f2aca212 2207 if (errno_set && optimize_insn_for_size_p ())
2208 return 0;
2209
4ee9c684 2210 /* Always stabilize the argument list. */
abfea505 2211 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2212 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
7f05340e 2213
8ec3c5c2 2214 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2215 op1 = expand_normal (arg1);
7f05340e 2216
7f05340e 2217 start_sequence ();
2218
de2e453e 2219 /* Compute into RESULT.
2220 Set RESULT to wherever the result comes back. */
2221 result = expand_binop (mode, builtin_optab, op0, op1,
2222 result, 0, OPTAB_DIRECT);
53800dbe 2223
68e6cb9d 2224 /* If we were unable to expand via the builtin, stop the sequence
 2225	 (without outputting the insns) and call the library function
2226 with the stabilized argument list. */
de2e453e 2227 if (result == 0)
0fd605a5 2228 {
2229 end_sequence ();
68e6cb9d 2230 return expand_call (exp, target, target == const0_rtx);
53800dbe 2231 }
2232
a4356fb9 2233 if (errno_set)
de2e453e 2234 expand_errno_check (exp, result);
0fd605a5 2235
53800dbe 2236 /* Output the entire sequence. */
2237 insns = get_insns ();
2238 end_sequence ();
31d3e01c 2239 emit_insn (insns);
bf8e3599 2240
de2e453e 2241 return result;
53800dbe 2242}
2243
7e0713b1 2244/* Expand a call to the builtin trinary math functions (fma).
2245 Return NULL_RTX if a normal call should be emitted rather than expanding the
2246 function in-line. EXP is the expression that is a call to the builtin
2247 function; if convenient, the result should be placed in TARGET.
2248 SUBTARGET may be used as the target for computing one of EXP's
2249 operands. */
2250
2251static rtx
2252expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2253{
2254 optab builtin_optab;
1e0c0b35 2255 rtx op0, op1, op2, result;
2256 rtx_insn *insns;
7e0713b1 2257 tree fndecl = get_callee_fndecl (exp);
2258 tree arg0, arg1, arg2;
3754d046 2259 machine_mode mode;
7e0713b1 2260
2261 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2262 return NULL_RTX;
2263
2264 arg0 = CALL_EXPR_ARG (exp, 0);
2265 arg1 = CALL_EXPR_ARG (exp, 1);
2266 arg2 = CALL_EXPR_ARG (exp, 2);
2267
2268 switch (DECL_FUNCTION_CODE (fndecl))
2269 {
2270 CASE_FLT_FN (BUILT_IN_FMA):
2271 builtin_optab = fma_optab; break;
2272 default:
2273 gcc_unreachable ();
2274 }
2275
2276 /* Make a suitable register to place result in. */
2277 mode = TYPE_MODE (TREE_TYPE (exp));
2278
2279 /* Before working hard, check whether the instruction is available. */
2280 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2281 return NULL_RTX;
2282
de2e453e 2283 result = gen_reg_rtx (mode);
7e0713b1 2284
2285 /* Always stabilize the argument list. */
2286 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2287 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2288 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2289
2290 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2291 op1 = expand_normal (arg1);
2292 op2 = expand_normal (arg2);
2293
2294 start_sequence ();
2295
de2e453e 2296 /* Compute into RESULT.
2297 Set RESULT to wherever the result comes back. */
2298 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2299 result, 0);
7e0713b1 2300
2301 /* If we were unable to expand via the builtin, stop the sequence
 2302	 (without outputting the insns) and call the library function
2303 with the stabilized argument list. */
de2e453e 2304 if (result == 0)
7e0713b1 2305 {
2306 end_sequence ();
2307 return expand_call (exp, target, target == const0_rtx);
2308 }
2309
2310 /* Output the entire sequence. */
2311 insns = get_insns ();
2312 end_sequence ();
2313 emit_insn (insns);
2314
de2e453e 2315 return result;
7e0713b1 2316}
2317
6b43bae4 2318/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2319 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2320 function in-line. EXP is the expression that is a call to the builtin
2321 function; if convenient, the result should be placed in TARGET.
2322 SUBTARGET may be used as the target for computing one of EXP's
2323 operands. */
2324
2325static rtx
2326expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2327{
2328 optab builtin_optab;
1e0c0b35 2329 rtx op0;
2330 rtx_insn *insns;
6b43bae4 2331 tree fndecl = get_callee_fndecl (exp);
3754d046 2332 machine_mode mode;
abfea505 2333 tree arg;
6b43bae4 2334
c2f47e15 2335 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2336 return NULL_RTX;
6b43bae4 2337
c2f47e15 2338 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2339
2340 switch (DECL_FUNCTION_CODE (fndecl))
2341 {
4f35b1fc 2342 CASE_FLT_FN (BUILT_IN_SIN):
2343 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2344 builtin_optab = sincos_optab; break;
2345 default:
64db345d 2346 gcc_unreachable ();
6b43bae4 2347 }
2348
2349 /* Make a suitable register to place result in. */
2350 mode = TYPE_MODE (TREE_TYPE (exp));
2351
6b43bae4 2352	  /* Check if sincos insn is available, otherwise fall back
0bed3869 2353 to sin or cos insn. */
d6bf3b14 2354 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2355 switch (DECL_FUNCTION_CODE (fndecl))
2356 {
4f35b1fc 2357 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2358 builtin_optab = sin_optab; break;
4f35b1fc 2359 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2360 builtin_optab = cos_optab; break;
2361 default:
64db345d 2362 gcc_unreachable ();
6b43bae4 2363 }
6b43bae4 2364
2365 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2366 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2367 {
de2e453e 2368 rtx result = gen_reg_rtx (mode);
6b43bae4 2369
2370 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2371 need to expand the argument again. This way, we will not perform
 2372	 side-effects more than once.  */
abfea505 2373 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2374
1db6d067 2375 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2376
6b43bae4 2377 start_sequence ();
2378
de2e453e 2379 /* Compute into RESULT.
2380 Set RESULT to wherever the result comes back. */
6b43bae4 2381 if (builtin_optab == sincos_optab)
2382 {
de2e453e 2383 int ok;
7d3f6cc7 2384
6b43bae4 2385 switch (DECL_FUNCTION_CODE (fndecl))
2386 {
4f35b1fc 2387 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2388 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2389 break;
4f35b1fc 2390 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2391 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2392 break;
2393 default:
64db345d 2394 gcc_unreachable ();
6b43bae4 2395 }
de2e453e 2396 gcc_assert (ok);
6b43bae4 2397 }
2398 else
de2e453e 2399 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2400
de2e453e 2401 if (result != 0)
6b43bae4 2402 {
6b43bae4 2403 /* Output the entire sequence. */
2404 insns = get_insns ();
2405 end_sequence ();
2406 emit_insn (insns);
de2e453e 2407 return result;
6b43bae4 2408 }
2409
2410 /* If we were unable to expand via the builtin, stop the sequence
 2411	 (without outputting the insns) and call the library function
2412 with the stabilized argument list. */
2413 end_sequence ();
2414 }
2415
de2e453e 2416 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2417}
2418
a65c4d64 2419 /* Given an interclass math builtin decl FNDECL and its argument ARG
2420 return an RTL instruction code that implements the functionality.
2421 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2422
a65c4d64 2423static enum insn_code
2424interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2425{
a65c4d64 2426 bool errno_set = false;
6cdd383a 2427 optab builtin_optab = unknown_optab;
3754d046 2428 machine_mode mode;
a67a90e5 2429
2430 switch (DECL_FUNCTION_CODE (fndecl))
2431 {
2432 CASE_FLT_FN (BUILT_IN_ILOGB):
2433 errno_set = true; builtin_optab = ilogb_optab; break;
69b779ea 2434 CASE_FLT_FN (BUILT_IN_ISINF):
2435 builtin_optab = isinf_optab; break;
8a1a9cb7 2436 case BUILT_IN_ISNORMAL:
cde061c1 2437 case BUILT_IN_ISFINITE:
2438 CASE_FLT_FN (BUILT_IN_FINITE):
a65c4d64 2439 case BUILT_IN_FINITED32:
2440 case BUILT_IN_FINITED64:
2441 case BUILT_IN_FINITED128:
2442 case BUILT_IN_ISINFD32:
2443 case BUILT_IN_ISINFD64:
2444 case BUILT_IN_ISINFD128:
cde061c1 2445 /* These builtins have no optabs (yet). */
2446 break;
a67a90e5 2447 default:
2448 gcc_unreachable ();
2449 }
2450
2451 /* There's no easy way to detect the case we need to set EDOM. */
2452 if (flag_errno_math && errno_set)
a65c4d64 2453 return CODE_FOR_nothing;
a67a90e5 2454
2455 /* Optab mode depends on the mode of the input argument. */
2456 mode = TYPE_MODE (TREE_TYPE (arg));
2457
cde061c1 2458 if (builtin_optab)
d6bf3b14 2459 return optab_handler (builtin_optab, mode);
a65c4d64 2460 return CODE_FOR_nothing;
2461}
2462
2463/* Expand a call to one of the builtin math functions that operate on
2464 floating point argument and output an integer result (ilogb, isinf,
2465 isnan, etc).
2466 Return 0 if a normal call should be emitted rather than expanding the
2467 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2468 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2469
2470static rtx
f97eea22 2471expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2472{
2473 enum insn_code icode = CODE_FOR_nothing;
2474 rtx op0;
2475 tree fndecl = get_callee_fndecl (exp);
3754d046 2476 machine_mode mode;
a65c4d64 2477 tree arg;
2478
2479 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2480 return NULL_RTX;
2481
2482 arg = CALL_EXPR_ARG (exp, 0);
2483 icode = interclass_mathfn_icode (arg, fndecl);
2484 mode = TYPE_MODE (TREE_TYPE (arg));
2485
a67a90e5 2486 if (icode != CODE_FOR_nothing)
2487 {
8786db1e 2488 struct expand_operand ops[1];
1e0c0b35 2489 rtx_insn *last = get_last_insn ();
4e2a2fb4 2490 tree orig_arg = arg;
a67a90e5 2491
2492 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2493 need to expand the argument again. This way, we will not perform
 2494	 side-effects more than once.  */
abfea505 2495 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2496
f97eea22 2497 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2498
2499 if (mode != GET_MODE (op0))
2500 op0 = convert_to_mode (mode, op0, 0);
2501
8786db1e 2502 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2503 if (maybe_legitimize_operands (icode, 0, 1, ops)
2504 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2505 return ops[0].value;
2506
4e2a2fb4 2507 delete_insns_since (last);
2508 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2509 }
2510
a65c4d64 2511 return NULL_RTX;
a67a90e5 2512}
2513
c3147c1a 2514/* Expand a call to the builtin sincos math function.
c2f47e15 2515 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2516 function in-line. EXP is the expression that is a call to the builtin
2517 function. */
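/* (That is, sincos (x, &s, &c) stores sin (x) in *s and cos (x) in *c;
   when the target provides a sincos insn both values come from a single
   instruction.)  */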
2518
2519static rtx
2520expand_builtin_sincos (tree exp)
2521{
2522 rtx op0, op1, op2, target1, target2;
3754d046 2523 machine_mode mode;
c3147c1a 2524 tree arg, sinp, cosp;
2525 int result;
389dd41b 2526 location_t loc = EXPR_LOCATION (exp);
be5575b2 2527 tree alias_type, alias_off;
c3147c1a 2528
c2f47e15 2529 if (!validate_arglist (exp, REAL_TYPE,
2530 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2531 return NULL_RTX;
c3147c1a 2532
c2f47e15 2533 arg = CALL_EXPR_ARG (exp, 0);
2534 sinp = CALL_EXPR_ARG (exp, 1);
2535 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2536
2537 /* Make a suitable register to place result in. */
2538 mode = TYPE_MODE (TREE_TYPE (arg));
2539
2540 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2541 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2542 return NULL_RTX;
2543
2544 target1 = gen_reg_rtx (mode);
2545 target2 = gen_reg_rtx (mode);
2546
8ec3c5c2 2547 op0 = expand_normal (arg);
be5575b2 2548 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2549 alias_off = build_int_cst (alias_type, 0);
2550 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2551 sinp, alias_off));
2552 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2553 cosp, alias_off));
c3147c1a 2554
2555 /* Compute into target1 and target2.
2556 Set TARGET to wherever the result comes back. */
2557 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2558 gcc_assert (result);
2559
2560 /* Move target1 and target2 to the memory locations indicated
2561 by op1 and op2. */
2562 emit_move_insn (op1, target1);
2563 emit_move_insn (op2, target2);
2564
2565 return const0_rtx;
2566}
2567
d735c391 2568/* Expand a call to the internal cexpi builtin to the sincos math function.
2569 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2570 the result should be placed in TARGET. */
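/* (Recall cexpi (x) == exp (i*x) == cos (x) + i*sin (x), so one sincos
   computation yields both the real and the imaginary part.)  */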
d735c391 2571
2572static rtx
f97eea22 2573expand_builtin_cexpi (tree exp, rtx target)
d735c391 2574{
2575 tree fndecl = get_callee_fndecl (exp);
d735c391 2576 tree arg, type;
3754d046 2577 machine_mode mode;
d735c391 2578 rtx op0, op1, op2;
389dd41b 2579 location_t loc = EXPR_LOCATION (exp);
d735c391 2580
c2f47e15 2581 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2582 return NULL_RTX;
d735c391 2583
c2f47e15 2584 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2585 type = TREE_TYPE (arg);
2586 mode = TYPE_MODE (TREE_TYPE (arg));
2587
2588 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2589	     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
 2590	     is only generated from sincos or cexp, or when either of them is available.  */
d6bf3b14 2591 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2592 {
2593 op1 = gen_reg_rtx (mode);
2594 op2 = gen_reg_rtx (mode);
2595
f97eea22 2596 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2597
2598 /* Compute into op1 and op2. */
2599 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2600 }
30f690e0 2601 else if (targetm.libc_has_function (function_sincos))
d735c391 2602 {
c2f47e15 2603 tree call, fn = NULL_TREE;
d735c391 2604 tree top1, top2;
2605 rtx op1a, op2a;
2606
2607 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2608 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2609 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2610 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2611 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2612 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2613 else
2614 gcc_unreachable ();
48e1416a 2615
0ab48139 2616 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2617 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2618 op1a = copy_addr_to_reg (XEXP (op1, 0));
2619 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2620 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2621 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2622
d735c391 2623 /* Make sure not to fold the sincos call again. */
2624 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2625 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2626 call, 3, arg, top1, top2));
d735c391 2627 }
18b8d8ae 2628 else
2629 {
0ecbc158 2630 tree call, fn = NULL_TREE, narg;
18b8d8ae 2631 tree ctype = build_complex_type (type);
2632
0ecbc158 2633 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2634 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2635 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2636 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2637 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2638 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2639 else
2640 gcc_unreachable ();
fc0dfa6e 2641
2642 /* If we don't have a decl for cexp create one. This is the
2643 friendliest fallback if the user calls __builtin_cexpi
2644 without full target C99 function support. */
2645 if (fn == NULL_TREE)
2646 {
2647 tree fntype;
2648 const char *name = NULL;
2649
2650 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2651 name = "cexpf";
2652 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2653 name = "cexp";
2654 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2655 name = "cexpl";
2656
2657 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2658 fn = build_fn_decl (name, fntype);
2659 }
2660
389dd41b 2661 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2662 build_real (type, dconst0), arg);
2663
2664 /* Make sure not to fold the cexp call again. */
2665 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2666 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2667 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2668 }
d735c391 2669
2670 /* Now build the proper return type. */
2671 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2672 make_tree (TREE_TYPE (arg), op2),
2673 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2674 target, VOIDmode, EXPAND_NORMAL);
d735c391 2675}
2676
a65c4d64 2677/* Conveniently construct a function call expression. FNDECL names the
2678 function to be called, N is the number of arguments, and the "..."
 2679	   parameters are the argument expressions.  Unlike build_call_expr
2680 this doesn't fold the call, hence it will always return a CALL_EXPR. */
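/* A typical use, as in the lfloor/lceil fallback further below, is
   build_call_nofold_loc (loc, fallback_fndecl, 1, arg).  */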
2681
2682static tree
2683build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2684{
2685 va_list ap;
2686 tree fntype = TREE_TYPE (fndecl);
2687 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2688
2689 va_start (ap, n);
2690 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2691 va_end (ap);
2692 SET_EXPR_LOCATION (fn, loc);
2693 return fn;
2694}
a65c4d64 2695
7d3afc77 2696/* Expand a call to one of the builtin rounding functions gcc defines
2697 as an extension (lfloor and lceil). As these are gcc extensions we
2698 do not need to worry about setting errno to EDOM.
ad52b9b7 2699 If expanding via optab fails, lower expression to (int)(floor(x)).
2700 EXP is the expression that is a call to the builtin function;
ff1b14e4 2701 if convenient, the result should be placed in TARGET. */
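/* For instance, when the target has no lceil/lfloor pattern,
   __builtin_lfloor (x) is expanded here as (long) floor (x).  */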
ad52b9b7 2702
2703static rtx
ff1b14e4 2704expand_builtin_int_roundingfn (tree exp, rtx target)
ad52b9b7 2705{
9c42dd28 2706 convert_optab builtin_optab;
1e0c0b35 2707 rtx op0, tmp;
2708 rtx_insn *insns;
ad52b9b7 2709 tree fndecl = get_callee_fndecl (exp);
ad52b9b7 2710 enum built_in_function fallback_fn;
2711 tree fallback_fndecl;
3754d046 2712 machine_mode mode;
4de0924f 2713 tree arg;
ad52b9b7 2714
c2f47e15 2715 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
ad52b9b7 2716 gcc_unreachable ();
2717
c2f47e15 2718 arg = CALL_EXPR_ARG (exp, 0);
ad52b9b7 2719
2720 switch (DECL_FUNCTION_CODE (fndecl))
2721 {
80ff6494 2722 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 2723 CASE_FLT_FN (BUILT_IN_LCEIL):
2724 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 2725 builtin_optab = lceil_optab;
2726 fallback_fn = BUILT_IN_CEIL;
2727 break;
2728
80ff6494 2729 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 2730 CASE_FLT_FN (BUILT_IN_LFLOOR):
2731 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 2732 builtin_optab = lfloor_optab;
2733 fallback_fn = BUILT_IN_FLOOR;
2734 break;
2735
2736 default:
2737 gcc_unreachable ();
2738 }
2739
2740 /* Make a suitable register to place result in. */
2741 mode = TYPE_MODE (TREE_TYPE (exp));
2742
9c42dd28 2743 target = gen_reg_rtx (mode);
ad52b9b7 2744
9c42dd28 2745 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2746 need to expand the argument again. This way, we will not perform
 2747	 side-effects more than once.  */
abfea505 2748 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
ad52b9b7 2749
ff1b14e4 2750 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
ad52b9b7 2751
9c42dd28 2752 start_sequence ();
ad52b9b7 2753
9c42dd28 2754 /* Compute into TARGET. */
2755 if (expand_sfix_optab (target, op0, builtin_optab))
2756 {
2757 /* Output the entire sequence. */
2758 insns = get_insns ();
ad52b9b7 2759 end_sequence ();
9c42dd28 2760 emit_insn (insns);
2761 return target;
ad52b9b7 2762 }
2763
9c42dd28 2764 /* If we were unable to expand via the builtin, stop the sequence
2765 (without outputting the insns). */
2766 end_sequence ();
2767
ad52b9b7 2768 /* Fall back to floating point rounding optab. */
2769 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
fc0dfa6e 2770
2771 /* For non-C99 targets we may end up without a fallback fndecl here
2772 if the user called __builtin_lfloor directly. In this case emit
 2773	     a call to the floor/ceil variants nevertheless.  This should result
 2774	     in the best user experience for targets without full C99 support.  */
2775 if (fallback_fndecl == NULL_TREE)
2776 {
2777 tree fntype;
2778 const char *name = NULL;
2779
2780 switch (DECL_FUNCTION_CODE (fndecl))
2781 {
80ff6494 2782 case BUILT_IN_ICEIL:
fc0dfa6e 2783 case BUILT_IN_LCEIL:
2784 case BUILT_IN_LLCEIL:
2785 name = "ceil";
2786 break;
80ff6494 2787 case BUILT_IN_ICEILF:
fc0dfa6e 2788 case BUILT_IN_LCEILF:
2789 case BUILT_IN_LLCEILF:
2790 name = "ceilf";
2791 break;
80ff6494 2792 case BUILT_IN_ICEILL:
fc0dfa6e 2793 case BUILT_IN_LCEILL:
2794 case BUILT_IN_LLCEILL:
2795 name = "ceill";
2796 break;
80ff6494 2797 case BUILT_IN_IFLOOR:
fc0dfa6e 2798 case BUILT_IN_LFLOOR:
2799 case BUILT_IN_LLFLOOR:
2800 name = "floor";
2801 break;
80ff6494 2802 case BUILT_IN_IFLOORF:
fc0dfa6e 2803 case BUILT_IN_LFLOORF:
2804 case BUILT_IN_LLFLOORF:
2805 name = "floorf";
2806 break;
80ff6494 2807 case BUILT_IN_IFLOORL:
fc0dfa6e 2808 case BUILT_IN_LFLOORL:
2809 case BUILT_IN_LLFLOORL:
2810 name = "floorl";
2811 break;
2812 default:
2813 gcc_unreachable ();
2814 }
2815
2816 fntype = build_function_type_list (TREE_TYPE (arg),
2817 TREE_TYPE (arg), NULL_TREE);
2818 fallback_fndecl = build_fn_decl (name, fntype);
2819 }
2820
0568e9c1 2821 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
ad52b9b7 2822
d4c690af 2823 tmp = expand_normal (exp);
933eb13a 2824 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
ad52b9b7 2825
2826 /* Truncate the result of floating point optab to integer
2827 via expand_fix (). */
2828 target = gen_reg_rtx (mode);
2829 expand_fix (target, tmp, 0);
2830
2831 return target;
2832}
2833
7d3afc77 2834/* Expand a call to one of the builtin math functions doing integer
2835 conversion (lrint).
2836 Return 0 if a normal call should be emitted rather than expanding the
2837 function in-line. EXP is the expression that is a call to the builtin
ff1b14e4 2838 function; if convenient, the result should be placed in TARGET. */
7d3afc77 2839
2840static rtx
ff1b14e4 2841expand_builtin_int_roundingfn_2 (tree exp, rtx target)
7d3afc77 2842{
5f51ee59 2843 convert_optab builtin_optab;
1e0c0b35 2844 rtx op0;
2845 rtx_insn *insns;
7d3afc77 2846 tree fndecl = get_callee_fndecl (exp);
4de0924f 2847 tree arg;
3754d046 2848 machine_mode mode;
e951f9a4 2849 enum built_in_function fallback_fn = BUILT_IN_NONE;
7d3afc77 2850
c2f47e15 2851 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2852 gcc_unreachable ();
48e1416a 2853
c2f47e15 2854 arg = CALL_EXPR_ARG (exp, 0);
7d3afc77 2855
2856 switch (DECL_FUNCTION_CODE (fndecl))
2857 {
80ff6494 2858 CASE_FLT_FN (BUILT_IN_IRINT):
e951f9a4 2859 fallback_fn = BUILT_IN_LRINT;
2860 /* FALLTHRU */
7d3afc77 2861 CASE_FLT_FN (BUILT_IN_LRINT):
2862 CASE_FLT_FN (BUILT_IN_LLRINT):
e951f9a4 2863 builtin_optab = lrint_optab;
2864 break;
80ff6494 2865
2866 CASE_FLT_FN (BUILT_IN_IROUND):
e951f9a4 2867 fallback_fn = BUILT_IN_LROUND;
2868 /* FALLTHRU */
ef2f1a10 2869 CASE_FLT_FN (BUILT_IN_LROUND):
2870 CASE_FLT_FN (BUILT_IN_LLROUND):
e951f9a4 2871 builtin_optab = lround_optab;
2872 break;
80ff6494 2873
7d3afc77 2874 default:
2875 gcc_unreachable ();
2876 }
2877
e951f9a4 2878 /* There's no easy way to detect the case we need to set EDOM. */
2879 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2880 return NULL_RTX;
2881
7d3afc77 2882 /* Make a suitable register to place result in. */
2883 mode = TYPE_MODE (TREE_TYPE (exp));
2884
e951f9a4 2885	  /* Expand inline only when we do not have to maintain errno.  */
2886 if (!flag_errno_math)
2887 {
de2e453e 2888 rtx result = gen_reg_rtx (mode);
7d3afc77 2889
e951f9a4 2890 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2891 need to expand the argument again. This way, we will not perform
 2892	 side-effects more than once.  */
2893 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7d3afc77 2894
e951f9a4 2895 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
7d3afc77 2896
e951f9a4 2897 start_sequence ();
7d3afc77 2898
de2e453e 2899 if (expand_sfix_optab (result, op0, builtin_optab))
e951f9a4 2900 {
2901 /* Output the entire sequence. */
2902 insns = get_insns ();
2903 end_sequence ();
2904 emit_insn (insns);
de2e453e 2905 return result;
e951f9a4 2906 }
2907
2908 /* If we were unable to expand via the builtin, stop the sequence
 2909	 (without outputting the insns) and call the library function
2910 with the stabilized argument list. */
7d3afc77 2911 end_sequence ();
2912 }
2913
e951f9a4 2914 if (fallback_fn != BUILT_IN_NONE)
2915 {
2916 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2917 targets, (int) round (x) should never be transformed into
2918 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2919 a call to lround in the hope that the target provides at least some
 2920	 C99 functions.  This should result in the best user experience for
 2921	 targets without full C99 support.  */
2922 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2923 fallback_fn, 0);
2924
2925 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2926 fallback_fndecl, 1, arg);
2927
2928 target = expand_call (exp, NULL_RTX, target == const0_rtx);
933eb13a 2929 target = maybe_emit_group_store (target, TREE_TYPE (exp));
e951f9a4 2930 return convert_to_mode (mode, target, 0);
2931 }
5f51ee59 2932
de2e453e 2933 return expand_call (exp, target, target == const0_rtx);
7d3afc77 2934}
2935
c2f47e15 2936/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2937 a normal call should be emitted rather than expanding the function
2938 in-line. EXP is the expression that is a call to the builtin
2939 function; if convenient, the result should be placed in TARGET. */
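/* (The expansion is a libcall computing x**n for an integer exponent N;
   the routine comes from optab_libfunc (powi_optab, mode), e.g. libgcc's
   __powidf2 for double -- the specific name is illustrative.)  */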
2940
2941static rtx
f97eea22 2942expand_builtin_powi (tree exp, rtx target)
757c219d 2943{
757c219d 2944 tree arg0, arg1;
2945 rtx op0, op1;
3754d046 2946 machine_mode mode;
2947 machine_mode mode2;
757c219d 2948
c2f47e15 2949 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2950 return NULL_RTX;
757c219d 2951
c2f47e15 2952 arg0 = CALL_EXPR_ARG (exp, 0);
2953 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2954 mode = TYPE_MODE (TREE_TYPE (exp));
2955
757c219d 2956 /* Emit a libcall to libgcc. */
2957
c2f47e15 2958 /* Mode of the 2nd argument must match that of an int. */
d0405f40 2959 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2960
757c219d 2961 if (target == NULL_RTX)
2962 target = gen_reg_rtx (mode);
2963
f97eea22 2964 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2965 if (GET_MODE (op0) != mode)
2966 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2967 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2968 if (GET_MODE (op1) != mode2)
2969 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2970
f36b9f69 2971 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2dd6f9ed 2972 target, LCT_CONST, mode, 2,
d0405f40 2973 op0, mode, op1, mode2);
757c219d 2974
2975 return target;
2976}
2977
48e1416a 2978/* Expand expression EXP which is a call to the strlen builtin. Return
c2f47e15 2979 NULL_RTX if we failed the caller should emit a normal call, otherwise
aed0bd19 2980 try to get the result in TARGET, if convenient. */
f7c44134 2981
53800dbe 2982static rtx
c2f47e15 2983expand_builtin_strlen (tree exp, rtx target,
3754d046 2984 machine_mode target_mode)
53800dbe 2985{
c2f47e15 2986 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2987 return NULL_RTX;
53800dbe 2988 else
2989 {
8786db1e 2990 struct expand_operand ops[4];
911c0150 2991 rtx pat;
c2f47e15 2992 tree len;
2993 tree src = CALL_EXPR_ARG (exp, 0);
1e0c0b35 2994 rtx src_reg;
2995 rtx_insn *before_strlen;
3754d046 2996 machine_mode insn_mode = target_mode;
ef2c4a29 2997 enum insn_code icode = CODE_FOR_nothing;
153c3b50 2998 unsigned int align;
6248e345 2999
3000 /* If the length can be computed at compile-time, return it. */
681fab1e 3001 len = c_strlen (src, 0);
6248e345 3002 if (len)
80cd7a5e 3003 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
6248e345 3004
681fab1e 3005 /* If the length can be computed at compile-time and is constant
3006 integer, but there are side-effects in src, evaluate
3007 src for side-effects, then return len.
3008 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3009 can be optimized into: i++; x = 3; */
3010 len = c_strlen (src, 1);
3011 if (len && TREE_CODE (len) == INTEGER_CST)
3012 {
3013 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3014 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3015 }
3016
957d0361 3017 align = get_pointer_alignment (src) / BITS_PER_UNIT;
53800dbe 3018
53800dbe 3019 /* If SRC is not a pointer type, don't do this operation inline. */
3020 if (align == 0)
c2f47e15 3021 return NULL_RTX;
53800dbe 3022
911c0150 3023 /* Bail out if we can't compute strlen in the right mode. */
53800dbe 3024 while (insn_mode != VOIDmode)
3025 {
d6bf3b14 3026 icode = optab_handler (strlen_optab, insn_mode);
53800dbe 3027 if (icode != CODE_FOR_nothing)
c28ae87f 3028 break;
53800dbe 3029
3030 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3031 }
3032 if (insn_mode == VOIDmode)
c2f47e15 3033 return NULL_RTX;
53800dbe 3034
911c0150 3035 /* Make a place to hold the source address. We will not expand
3036 the actual source until we are sure that the expansion will
3037 not fail -- there are trees that cannot be expanded twice. */
3038 src_reg = gen_reg_rtx (Pmode);
53800dbe 3039
911c0150 3040 /* Mark the beginning of the strlen sequence so we can emit the
3041 source operand later. */
f0ce3b1f 3042 before_strlen = get_last_insn ();
53800dbe 3043
8786db1e 3044 create_output_operand (&ops[0], target, insn_mode);
3045 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3046 create_integer_operand (&ops[2], 0);
3047 create_integer_operand (&ops[3], align);
3048 if (!maybe_expand_insn (icode, 4, ops))
c2f47e15 3049 return NULL_RTX;
911c0150 3050
3051 /* Now that we are assured of success, expand the source. */
3052 start_sequence ();
499eee58 3053 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
911c0150 3054 if (pat != src_reg)
499eee58 3055 {
3056#ifdef POINTERS_EXTEND_UNSIGNED
3057 if (GET_MODE (pat) != Pmode)
3058 pat = convert_to_mode (Pmode, pat,
3059 POINTERS_EXTEND_UNSIGNED);
3060#endif
3061 emit_move_insn (src_reg, pat);
3062 }
31d3e01c 3063 pat = get_insns ();
911c0150 3064 end_sequence ();
bceb0d1f 3065
3066 if (before_strlen)
3067 emit_insn_after (pat, before_strlen);
3068 else
3069 emit_insn_before (pat, get_insns ());
53800dbe 3070
3071 /* Return the value in the proper mode for this function. */
8786db1e 3072 if (GET_MODE (ops[0].value) == target_mode)
3073 target = ops[0].value;
53800dbe 3074 else if (target != 0)
8786db1e 3075 convert_move (target, ops[0].value, 0);
53800dbe 3076 else
8786db1e 3077 target = convert_to_mode (target_mode, ops[0].value, 0);
911c0150 3078
3079 return target;
53800dbe 3080 }
3081}
3082
6840589f 3083/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3084 bytes from constant string DATA + OFFSET and return it as target
3085 constant. */
3086
3087static rtx
aecda0d6 3088builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3089 machine_mode mode)
6840589f 3090{
3091 const char *str = (const char *) data;
3092
64db345d 3093 gcc_assert (offset >= 0
3094 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3095 <= strlen (str) + 1));
6840589f 3096
3097 return c_readstr (str + offset, mode);
3098}
3099
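/* A sketch of how the callback above is driven (the chunk size is only an
   example; the by-pieces machinery chooses modes per target).  For

     memcpy (dst, "abcdefgh", 8)

   expanded with 4-byte chunks, the callback is invoked as
   builtin_memcpy_read_str (data, 0, SImode) for the bytes "abcd" and
   builtin_memcpy_read_str (data, 4, SImode) for the bytes "efgh", each
   call returning the constant RTX built by c_readstr.  */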
36d63243 3100/* LEN specify length of the block of memcpy/memset operation.
9db0f34d 3101 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3102 In some cases we can make very likely guess on max size, then we
3103 set it into PROBABLE_MAX_SIZE. */
36d63243 3104
3105static void
3106determine_block_size (tree len, rtx len_rtx,
3107 unsigned HOST_WIDE_INT *min_size,
9db0f34d 3108 unsigned HOST_WIDE_INT *max_size,
3109 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 3110{
3111 if (CONST_INT_P (len_rtx))
3112 {
4e140a5c 3113 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 3114 return;
3115 }
3116 else
3117 {
9c1be15e 3118 wide_int min, max;
9db0f34d 3119 enum value_range_type range_type = VR_UNDEFINED;
3120
3121 /* Determine bounds from the type. */
3122 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3123 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3124 else
3125 *min_size = 0;
3126 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 3127 *probable_max_size = *max_size
3128 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 3129 else
3130 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3131
3132 if (TREE_CODE (len) == SSA_NAME)
3133 range_type = get_range_info (len, &min, &max);
3134 if (range_type == VR_RANGE)
36d63243 3135 {
fe5ad926 3136 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 3137 *min_size = min.to_uhwi ();
fe5ad926 3138 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 3139 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 3140 }
9db0f34d 3141 else if (range_type == VR_ANTI_RANGE)
36d63243 3142 {
4a474a5a 3143	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
fe5ad926 3144 if (min == 0)
9db0f34d 3145 {
9c1be15e 3146 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3147 *min_size = max.to_uhwi () + 1;
9db0f34d 3148 }
3149 /* Code like
3150
3151 int n;
3152 if (n < 100)
4a474a5a 3153 memcpy (a, b, n)
9db0f34d 3154
3155 Produce anti range allowing negative values of N. We still
3156 can use the information and make a guess that N is not negative.
3157 */
fe5ad926 3158 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3159 *probable_max_size = min.to_uhwi () - 1;
36d63243 3160 }
3161 }
3162 gcc_checking_assert (*max_size <=
3163 (unsigned HOST_WIDE_INT)
3164 GET_MODE_MASK (GET_MODE (len_rtx)));
3165}
3166
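/* A sketch of the kind of range information the function above extracts,
   assuming VRP has run on the length argument:

     void f (char *a, const char *b, unsigned n)
     {
       if (n >= 4 && n <= 32)
         memcpy (a, b, n);
     }

   Here N carries the range [4, 32], so MIN_SIZE becomes 4 and both
   MAX_SIZE and PROBABLE_MAX_SIZE become 32, letting the block-move
   expansion pick a strategy tuned for small copies.  */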
f21337ef 3167/* Helper function to do the actual work for expand_builtin_memcpy. */
3168
3169static rtx
3170expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3171{
3172 const char *src_str;
3173 unsigned int src_align = get_pointer_alignment (src);
3174 unsigned int dest_align = get_pointer_alignment (dest);
3175 rtx dest_mem, src_mem, dest_addr, len_rtx;
3176 HOST_WIDE_INT expected_size = -1;
3177 unsigned int expected_align = 0;
3178 unsigned HOST_WIDE_INT min_size;
3179 unsigned HOST_WIDE_INT max_size;
3180 unsigned HOST_WIDE_INT probable_max_size;
3181
3182 /* If DEST is not a pointer type, call the normal function. */
3183 if (dest_align == 0)
3184 return NULL_RTX;
3185
3186 /* If either SRC is not a pointer type, don't do this
3187 operation in-line. */
3188 if (src_align == 0)
3189 return NULL_RTX;
3190
3191 if (currently_expanding_gimple_stmt)
3192 stringop_block_profile (currently_expanding_gimple_stmt,
3193 &expected_align, &expected_size);
3194
3195 if (expected_align < dest_align)
3196 expected_align = dest_align;
3197 dest_mem = get_memory_rtx (dest, len);
3198 set_mem_align (dest_mem, dest_align);
3199 len_rtx = expand_normal (len);
3200 determine_block_size (len, len_rtx, &min_size, &max_size,
3201 &probable_max_size);
3202 src_str = c_getstr (src);
3203
3204 /* If SRC is a string constant and block move would be done
3205 by pieces, we can avoid loading the string from memory
 3206	     and only store the computed constants.  */
3207 if (src_str
3208 && CONST_INT_P (len_rtx)
3209 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3210 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3211 CONST_CAST (char *, src_str),
3212 dest_align, false))
3213 {
3214 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3215 builtin_memcpy_read_str,
3216 CONST_CAST (char *, src_str),
3217 dest_align, false, 0);
3218 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3219 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3220 return dest_mem;
3221 }
3222
3223 src_mem = get_memory_rtx (src, len);
3224 set_mem_align (src_mem, src_align);
3225
3226 /* Copy word part most expediently. */
3227 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3228 CALL_EXPR_TAILCALL (exp)
3229 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3230 expected_align, expected_size,
3231 min_size, max_size, probable_max_size);
3232
3233 if (dest_addr == 0)
3234 {
3235 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3236 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3237 }
3238
3239 return dest_addr;
3240}
3241
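/* A sketch of the string-constant shortcut above, assuming the target's
   by-pieces cost test accepts a four-byte store at BUF's alignment:

     char buf[8];
     memcpy (buf, "abc", 4);

   Because the source is a literal and the length is constant, the bytes
   (including the NUL) are emitted as immediate stores via store_by_pieces
   and no library call or block move is generated.  */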
c2f47e15 3242/* Expand a call EXP to the memcpy builtin.
 3243	   Return NULL_RTX if we failed; the caller should emit a normal call.
3b824fa6 3244	   Otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3245 mode MODE if that's convenient). */
c2f47e15 3246
53800dbe 3247static rtx
a65c4d64 3248expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3249{
c2f47e15 3250 if (!validate_arglist (exp,
3251 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3252 return NULL_RTX;
53800dbe 3253 else
3254 {
c2f47e15 3255 tree dest = CALL_EXPR_ARG (exp, 0);
3256 tree src = CALL_EXPR_ARG (exp, 1);
3257 tree len = CALL_EXPR_ARG (exp, 2);
f21337ef 3258 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3259 }
3260}
6840589f 3261
f21337ef 3262/* Expand an instrumented call EXP to the memcpy builtin.
 3263	   Return NULL_RTX if we failed; the caller should emit a normal call.
 3264	   Otherwise try to get the result in TARGET, if convenient (and in
3265 mode MODE if that's convenient). */
53800dbe 3266
f21337ef 3267static rtx
3268expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3269{
3270 if (!validate_arglist (exp,
3271 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 INTEGER_TYPE, VOID_TYPE))
3274 return NULL_RTX;
3275 else
3276 {
3277 tree dest = CALL_EXPR_ARG (exp, 0);
3278 tree src = CALL_EXPR_ARG (exp, 2);
3279 tree len = CALL_EXPR_ARG (exp, 4);
3280 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
53800dbe 3281
f21337ef 3282 /* Return src bounds with the result. */
3283 if (res)
e5716f7e 3284 {
17d388d8 3285 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3286 expand_normal (CALL_EXPR_ARG (exp, 1)));
3287 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3288 }
f21337ef 3289 return res;
53800dbe 3290 }
3291}
3292
c2f47e15 3293/* Expand a call EXP to the mempcpy builtin.
3294 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3295 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3296 mode MODE if that's convenient). If ENDP is 0 return the
3297 destination pointer, if ENDP is 1 return the end pointer ala
3298 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3299 stpcpy. */
647661c6 3300
3301static rtx
3754d046 3302expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3303{
c2f47e15 3304 if (!validate_arglist (exp,
3305 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3306 return NULL_RTX;
3307 else
3308 {
3309 tree dest = CALL_EXPR_ARG (exp, 0);
3310 tree src = CALL_EXPR_ARG (exp, 1);
3311 tree len = CALL_EXPR_ARG (exp, 2);
3312 return expand_builtin_mempcpy_args (dest, src, len,
f21337ef 3313 target, mode, /*endp=*/ 1,
3314 exp);
3315 }
3316}
3317
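/* A sketch of the ENDP convention used by the mempcpy/stpcpy expanders
   (the buffer below is only an example):

     char buf[8];
     char *p0 = memcpy  (buf, "abc", 4);
     char *p1 = mempcpy (buf, "abc", 4);
     char *p2 = stpcpy  (buf, "abc");

   ENDP == 0 corresponds to memcpy, whose result P0 is BUF itself;
   ENDP == 1 to mempcpy, whose result P1 is buf + 4; and ENDP == 2 to
   stpcpy, whose result P2 is buf + 3, the address of the copied NUL.  */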
 3318	/* Expand an instrumented call EXP to the mempcpy builtin.
 3319	   Return NULL_RTX if we failed; the caller should emit a normal call.
 3320	   Otherwise try to get the result in TARGET, if convenient (and in
3321 mode MODE if that's convenient). */
3322
3323static rtx
3324expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3325{
3326 if (!validate_arglist (exp,
3327 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 INTEGER_TYPE, VOID_TYPE))
3330 return NULL_RTX;
3331 else
3332 {
3333 tree dest = CALL_EXPR_ARG (exp, 0);
3334 tree src = CALL_EXPR_ARG (exp, 2);
3335 tree len = CALL_EXPR_ARG (exp, 4);
3336 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3337 mode, 1, exp);
3338
3339 /* Return src bounds with the result. */
3340 if (res)
3341 {
17d388d8 3342 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3343 expand_normal (CALL_EXPR_ARG (exp, 1)));
3344 res = chkp_join_splitted_slot (res, bnd);
3345 }
3346 return res;
c2f47e15 3347 }
3348}
3349
3350/* Helper function to do the actual work for expand_builtin_mempcpy. The
3351 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3352 so that this can also be called without constructing an actual CALL_EXPR.
a65c4d64 3353 The other arguments and return value are the same as for
3354 expand_builtin_mempcpy. */
c2f47e15 3355
3356static rtx
a65c4d64 3357expand_builtin_mempcpy_args (tree dest, tree src, tree len,
f21337ef 3358 rtx target, machine_mode mode, int endp,
3359 tree orig_exp)
c2f47e15 3360{
f21337ef 3361 tree fndecl = get_callee_fndecl (orig_exp);
3362
c2f47e15 3363 /* If return value is ignored, transform mempcpy into memcpy. */
f21337ef 3364 if (target == const0_rtx
3365 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3366 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3367 {
3368 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3369 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3370 dest, src, len);
3371 return expand_expr (result, target, mode, EXPAND_NORMAL);
3372 }
3373 else if (target == const0_rtx
3374 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
9fe0e1b8 3375 {
b9a16870 3376 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
0568e9c1 3377 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3378 dest, src, len);
c8b17b2e 3379 return expand_expr (result, target, mode, EXPAND_NORMAL);
9fe0e1b8 3380 }
647661c6 3381 else
3382 {
9fe0e1b8 3383 const char *src_str;
957d0361 3384 unsigned int src_align = get_pointer_alignment (src);
3385 unsigned int dest_align = get_pointer_alignment (dest);
9fe0e1b8 3386 rtx dest_mem, src_mem, len_rtx;
a0c938f0 3387
7da1412b 3388 /* If either SRC or DEST is not a pointer type, don't do this
a0c938f0 3389 operation in-line. */
7da1412b 3390 if (dest_align == 0 || src_align == 0)
c2f47e15 3391 return NULL_RTX;
9fe0e1b8 3392
6217c238 3393 /* If LEN is not constant, call the normal function. */
e913b5cd 3394 if (! tree_fits_uhwi_p (len))
c2f47e15 3395 return NULL_RTX;
0862b7e9 3396
8ec3c5c2 3397 len_rtx = expand_normal (len);
9fe0e1b8 3398 src_str = c_getstr (src);
647661c6 3399
9fe0e1b8 3400 /* If SRC is a string constant and block move would be done
3401 by pieces, we can avoid loading the string from memory
 3402	 and only store the computed constants.  */
3403 if (src_str
971ba038 3404 && CONST_INT_P (len_rtx)
9fe0e1b8 3405 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3406 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
364c0c59 3407 CONST_CAST (char *, src_str),
3408 dest_align, false))
9fe0e1b8 3409 {
d8ae1baa 3410 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3411 set_mem_align (dest_mem, dest_align);
3412 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3413 builtin_memcpy_read_str,
364c0c59 3414 CONST_CAST (char *, src_str),
3415 dest_align, false, endp);
9fe0e1b8 3416 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3417 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3418 return dest_mem;
647661c6 3419 }
3420
971ba038 3421 if (CONST_INT_P (len_rtx)
9fe0e1b8 3422 && can_move_by_pieces (INTVAL (len_rtx),
3423 MIN (dest_align, src_align)))
3424 {
d8ae1baa 3425 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3426 set_mem_align (dest_mem, dest_align);
d8ae1baa 3427 src_mem = get_memory_rtx (src, len);
9fe0e1b8 3428 set_mem_align (src_mem, src_align);
3429 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3430 MIN (dest_align, src_align), endp);
3431 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3432 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3433 return dest_mem;
3434 }
3435
c2f47e15 3436 return NULL_RTX;
647661c6 3437 }
3438}
3439
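/* A sketch of the "return value is ignored" transformation above:

     mempcpy (dst, src, n);

   with the result unused arrives here with TARGET == const0_rtx and, if an
   implicit memcpy declaration is available, is expanded exactly as

     memcpy (dst, src, n);

   so the cheaper memcpy path (and library fallback) is used.  */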
c2f47e15 3440/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3441	   we failed; the caller should emit a normal call.  Otherwise try to
3442 get the result in TARGET, if convenient. If ENDP is 0 return the
3443 destination pointer, if ENDP is 1 return the end pointer ala
3444 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3445 stpcpy. */
3446
3447static rtx
3448expand_movstr (tree dest, tree src, rtx target, int endp)
3449{
8786db1e 3450 struct expand_operand ops[3];
727c62dd 3451 rtx dest_mem;
3452 rtx src_mem;
727c62dd 3453
8d74dc42 3454 if (!targetm.have_movstr ())
c2f47e15 3455 return NULL_RTX;
727c62dd 3456
d8ae1baa 3457 dest_mem = get_memory_rtx (dest, NULL);
3458 src_mem = get_memory_rtx (src, NULL);
727c62dd 3459 if (!endp)
3460 {
3461 target = force_reg (Pmode, XEXP (dest_mem, 0));
3462 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3463 }
3464
8786db1e 3465 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3466 create_fixed_operand (&ops[1], dest_mem);
3467 create_fixed_operand (&ops[2], src_mem);
8d74dc42 3468 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
1e1d5623 3469 return NULL_RTX;
727c62dd 3470
8786db1e 3471 if (endp && target != const0_rtx)
c5aba89c 3472 {
8786db1e 3473 target = ops[0].value;
3474 /* movstr is supposed to set end to the address of the NUL
3475 terminator. If the caller requested a mempcpy-like return value,
3476 adjust it. */
3477 if (endp == 1)
3478 {
29c05e22 3479 rtx tem = plus_constant (GET_MODE (target),
3480 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3481 emit_move_insn (target, force_operand (tem, NULL_RTX));
3482 }
c5aba89c 3483 }
727c62dd 3484 return target;
3485}
3486
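/* A sketch of the ENDP adjustment above, assuming a target movstr pattern
   that returns the address of the copied NUL terminator.  For

     stpcpy (d, s)

   (ENDP == 2) the wanted result is d + strlen (s), which is exactly what
   the pattern returns.  For

     mempcpy (d, s, strlen (s) + 1)

   (ENDP == 1) the wanted result is d + strlen (s) + 1, so the code above
   adds 1 via plus_constant.  */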
48e1416a 3487/* Expand expression EXP, which is a call to the strcpy builtin. Return
 3488	   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
c2f47e15 3489 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3490 convenient). */
902de8ed 3491
53800dbe 3492static rtx
a65c4d64 3493expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3494{
c2f47e15 3495 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3496 {
3497 tree dest = CALL_EXPR_ARG (exp, 0);
3498 tree src = CALL_EXPR_ARG (exp, 1);
a65c4d64 3499 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3500 }
3501 return NULL_RTX;
3502}
3503
3504/* Helper function to do the actual work for expand_builtin_strcpy. The
3505 arguments to the builtin_strcpy call DEST and SRC are broken out
3506 so that this can also be called without constructing an actual CALL_EXPR.
3507 The other arguments and return value are the same as for
3508 expand_builtin_strcpy. */
3509
3510static rtx
a65c4d64 3511expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3512{
c2f47e15 3513 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3514}
3515
c2f47e15 3516/* Expand a call EXP to the stpcpy builtin.
 3517	   Return NULL_RTX if we failed; the caller should emit a normal call.
3b824fa6 3518	   Otherwise try to get the result in TARGET, if convenient (and in
3519 mode MODE if that's convenient). */
3520
3521static rtx
3754d046 3522expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3523{
c2f47e15 3524 tree dst, src;
389dd41b 3525 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3526
3527 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3528 return NULL_RTX;
3529
3530 dst = CALL_EXPR_ARG (exp, 0);
3531 src = CALL_EXPR_ARG (exp, 1);
3532
727c62dd 3533 /* If return value is ignored, transform stpcpy into strcpy. */
b9a16870 3534 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3535 {
b9a16870 3536 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3537 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3538 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3539 }
3b824fa6 3540 else
3541 {
c2f47e15 3542 tree len, lenp1;
727c62dd 3543 rtx ret;
647661c6 3544
9fe0e1b8 3545 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3546 compile-time, not an expression containing a string. This is
3547 because the latter will potentially produce pessimized code
3548 when used to produce the return value. */
681fab1e 3549 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3550 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3551
389dd41b 3552 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3553 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
f21337ef 3554 target, mode, /*endp=*/2,
3555 exp);
727c62dd 3556
3557 if (ret)
3558 return ret;
3559
3560 if (TREE_CODE (len) == INTEGER_CST)
3561 {
8ec3c5c2 3562 rtx len_rtx = expand_normal (len);
727c62dd 3563
971ba038 3564 if (CONST_INT_P (len_rtx))
727c62dd 3565 {
a65c4d64 3566 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3567
3568 if (ret)
3569 {
3570 if (! target)
7ac87324 3571 {
3572 if (mode != VOIDmode)
3573 target = gen_reg_rtx (mode);
3574 else
3575 target = gen_reg_rtx (GET_MODE (ret));
3576 }
727c62dd 3577 if (GET_MODE (target) != GET_MODE (ret))
3578 ret = gen_lowpart (GET_MODE (target), ret);
3579
29c05e22 3580 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3581 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3582 gcc_assert (ret);
727c62dd 3583
3584 return target;
3585 }
3586 }
3587 }
3588
c2f47e15 3589 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3590 }
3591}
3592
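/* A sketch of the constant-length case handled above:

     char *q = stpcpy (dst, "abc");

   c_strlen gives 3 for the literal, so the call can be expanded as the
   strcpy expansion followed by the constant adjustment q = dst + 3; when
   the result is unused the call is simply rewritten as strcpy.  */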
6840589f 3593/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3594 bytes from constant string DATA + OFFSET and return it as target
3595 constant. */
3596
09879952 3597rtx
aecda0d6 3598builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3599 machine_mode mode)
6840589f 3600{
3601 const char *str = (const char *) data;
3602
3603 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3604 return const0_rtx;
3605
3606 return c_readstr (str + offset, mode);
3607}
3608
48e1416a 3609/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 3610	   NULL_RTX if we failed; the caller should emit a normal call.  */
ed09096d 3611
3612static rtx
a65c4d64 3613expand_builtin_strncpy (tree exp, rtx target)
ed09096d 3614{
389dd41b 3615 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3616
3617 if (validate_arglist (exp,
3618 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 3619 {
c2f47e15 3620 tree dest = CALL_EXPR_ARG (exp, 0);
3621 tree src = CALL_EXPR_ARG (exp, 1);
3622 tree len = CALL_EXPR_ARG (exp, 2);
3623 tree slen = c_strlen (src, 1);
6840589f 3624
8ff6a5cd 3625 /* We must be passed a constant len and src parameter. */
e913b5cd 3626 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 3627 return NULL_RTX;
ed09096d 3628
389dd41b 3629 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 3630
3631 /* We're required to pad with trailing zeros if the requested
a0c938f0 3632 len is greater than strlen(s2)+1. In that case try to
6840589f 3633	 use store_by_pieces; if that fails, punt.  */
ed09096d 3634 if (tree_int_cst_lt (slen, len))
6840589f 3635 {
957d0361 3636 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 3637 const char *p = c_getstr (src);
6840589f 3638 rtx dest_mem;
3639
e913b5cd 3640 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3641 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 3642 builtin_strncpy_read_str,
364c0c59 3643 CONST_CAST (char *, p),
3644 dest_align, false))
c2f47e15 3645 return NULL_RTX;
6840589f 3646
d8ae1baa 3647 dest_mem = get_memory_rtx (dest, len);
e913b5cd 3648 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 3649 builtin_strncpy_read_str,
364c0c59 3650 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 3651 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 3652 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 3653 return dest_mem;
6840589f 3654 }
ed09096d 3655 }
c2f47e15 3656 return NULL_RTX;
ed09096d 3657}
3658
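/* A sketch of the padding requirement handled above:

     char buf[8];
     strncpy (buf, "ab", sizeof buf);

   must leave buf as "ab" followed by six NUL bytes.  The store_by_pieces
   path copes with this because builtin_strncpy_read_str returns zero
   chunks once OFFSET moves past the end of the source literal.  */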
ecc318ff 3659/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3660 bytes from constant string DATA + OFFSET and return it as target
3661 constant. */
3662
f656b751 3663rtx
aecda0d6 3664builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3665 machine_mode mode)
ecc318ff 3666{
3667 const char *c = (const char *) data;
364c0c59 3668 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3669
3670 memset (p, *c, GET_MODE_SIZE (mode));
3671
3672 return c_readstr (p, mode);
3673}
3674
a7ec6974 3675/* Callback routine for store_by_pieces. Return the RTL of a register
3676 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3677 char value given in the RTL register data. For example, if mode is
3678 4 bytes wide, return the RTL for 0x01010101*data. */
3679
3680static rtx
aecda0d6 3681builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3682 machine_mode mode)
a7ec6974 3683{
3684 rtx target, coeff;
3685 size_t size;
3686 char *p;
3687
3688 size = GET_MODE_SIZE (mode);
f0ce3b1f 3689 if (size == 1)
3690 return (rtx) data;
a7ec6974 3691
364c0c59 3692 p = XALLOCAVEC (char, size);
a7ec6974 3693 memset (p, 1, size);
3694 coeff = c_readstr (p, mode);
3695
f0ce3b1f 3696 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3697 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3698 return force_reg (mode, target);
3699}
3700
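/* A sketch of the replication trick above for a 4-byte mode: the callback
   builds the coefficient 0x01010101 with c_readstr on a buffer of 1s and
   multiplies the mode-extended fill value by it, so that for example

     unsigned word = (fill & 0xff) * 0x01010101u;

   turns a fill byte of 0x5a into the full word 0x5a5a5a5a that
   store_by_pieces can then store directly.  */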
48e1416a 3701/* Expand expression EXP, which is a call to the memset builtin. Return
 3702	   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
c2f47e15 3703 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3704 convenient). */
902de8ed 3705
53800dbe 3706static rtx
3754d046 3707expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3708{
c2f47e15 3709 if (!validate_arglist (exp,
3710 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3711 return NULL_RTX;
53800dbe 3712 else
3713 {
c2f47e15 3714 tree dest = CALL_EXPR_ARG (exp, 0);
3715 tree val = CALL_EXPR_ARG (exp, 1);
3716 tree len = CALL_EXPR_ARG (exp, 2);
3717 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3718 }
3719}
53800dbe 3720
f21337ef 3721/* Expand expression EXP, which is an instrumented call to the memset builtin.
 3722	   Return NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3723 try to get the result in TARGET, if convenient (and in mode MODE if that's
3724 convenient). */
3725
3726static rtx
3727expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3728{
3729 if (!validate_arglist (exp,
3730 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3731 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3732 return NULL_RTX;
3733 else
3734 {
3735 tree dest = CALL_EXPR_ARG (exp, 0);
3736 tree val = CALL_EXPR_ARG (exp, 2);
3737 tree len = CALL_EXPR_ARG (exp, 3);
3738 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3739
3740 /* Return src bounds with the result. */
3741 if (res)
3742 {
17d388d8 3743 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3744 expand_normal (CALL_EXPR_ARG (exp, 1)));
3745 res = chkp_join_splitted_slot (res, bnd);
3746 }
3747 return res;
3748 }
3749}
3750
c2f47e15 3751/* Helper function to do the actual work for expand_builtin_memset. The
3752 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3753 so that this can also be called without constructing an actual CALL_EXPR.
3754 The other arguments and return value are the same as for
3755 expand_builtin_memset. */
6b961939 3756
c2f47e15 3757static rtx
3758expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 3759 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 3760{
3761 tree fndecl, fn;
3762 enum built_in_function fcode;
3754d046 3763 machine_mode val_mode;
c2f47e15 3764 char c;
3765 unsigned int dest_align;
3766 rtx dest_mem, dest_addr, len_rtx;
3767 HOST_WIDE_INT expected_size = -1;
3768 unsigned int expected_align = 0;
36d63243 3769 unsigned HOST_WIDE_INT min_size;
3770 unsigned HOST_WIDE_INT max_size;
9db0f34d 3771 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 3772
957d0361 3773 dest_align = get_pointer_alignment (dest);
162719b3 3774
c2f47e15 3775 /* If DEST is not a pointer type, don't do this operation in-line. */
3776 if (dest_align == 0)
3777 return NULL_RTX;
6f428e8b 3778
8cee8dc0 3779 if (currently_expanding_gimple_stmt)
3780 stringop_block_profile (currently_expanding_gimple_stmt,
3781 &expected_align, &expected_size);
75a70cf9 3782
c2f47e15 3783 if (expected_align < dest_align)
3784 expected_align = dest_align;
6b961939 3785
c2f47e15 3786 /* If the LEN parameter is zero, return DEST. */
3787 if (integer_zerop (len))
3788 {
3789 /* Evaluate and ignore VAL in case it has side-effects. */
3790 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3791 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3792 }
7a3e5564 3793
c2f47e15 3794 /* Stabilize the arguments in case we fail. */
3795 dest = builtin_save_expr (dest);
3796 val = builtin_save_expr (val);
3797 len = builtin_save_expr (len);
a7ec6974 3798
c2f47e15 3799 len_rtx = expand_normal (len);
9db0f34d 3800 determine_block_size (len, len_rtx, &min_size, &max_size,
3801 &probable_max_size);
c2f47e15 3802 dest_mem = get_memory_rtx (dest, len);
03a5dda9 3803 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 3804
c2f47e15 3805 if (TREE_CODE (val) != INTEGER_CST)
3806 {
3807 rtx val_rtx;
a7ec6974 3808
c2f47e15 3809 val_rtx = expand_normal (val);
03a5dda9 3810 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 3811
c2f47e15 3812 /* Assume that we can memset by pieces if we can store
 3813	 the coefficients by pieces (in the required modes).
 3814	 We can't pass builtin_memset_gen_str as that emits RTL.  */
3815 c = 1;
e913b5cd 3816 if (tree_fits_uhwi_p (len)
3817 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3818 builtin_memset_read_str, &c, dest_align,
3819 true))
c2f47e15 3820 {
03a5dda9 3821 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 3822 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3823 builtin_memset_gen_str, val_rtx, dest_align,
3824 true, 0);
c2f47e15 3825 }
3826 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3827 dest_align, expected_align,
9db0f34d 3828 expected_size, min_size, max_size,
3829 probable_max_size))
6b961939 3830 goto do_libcall;
48e1416a 3831
c2f47e15 3832 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3833 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3834 return dest_mem;
3835 }
53800dbe 3836
c2f47e15 3837 if (target_char_cast (val, &c))
3838 goto do_libcall;
ecc318ff 3839
c2f47e15 3840 if (c)
3841 {
e913b5cd 3842 if (tree_fits_uhwi_p (len)
3843 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3844 builtin_memset_read_str, &c, dest_align,
3845 true))
e913b5cd 3846 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3847 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 3848 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3849 gen_int_mode (c, val_mode),
c2f47e15 3850 dest_align, expected_align,
9db0f34d 3851 expected_size, min_size, max_size,
3852 probable_max_size))
c2f47e15 3853 goto do_libcall;
48e1416a 3854
c2f47e15 3855 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3856 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3857 return dest_mem;
3858 }
ecc318ff 3859
c2f47e15 3860 set_mem_align (dest_mem, dest_align);
3861 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3862 CALL_EXPR_TAILCALL (orig_exp)
3863 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 3864 expected_align, expected_size,
9db0f34d 3865 min_size, max_size,
3866 probable_max_size);
53800dbe 3867
c2f47e15 3868 if (dest_addr == 0)
3869 {
3870 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3871 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3872 }
53800dbe 3873
c2f47e15 3874 return dest_addr;
6b961939 3875
c2f47e15 3876 do_libcall:
3877 fndecl = get_callee_fndecl (orig_exp);
3878 fcode = DECL_FUNCTION_CODE (fndecl);
f21337ef 3879 if (fcode == BUILT_IN_MEMSET
3880 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
0568e9c1 3881 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3882 dest, val, len);
c2f47e15 3883 else if (fcode == BUILT_IN_BZERO)
0568e9c1 3884 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3885 dest, len);
c2f47e15 3886 else
3887 gcc_unreachable ();
a65c4d64 3888 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3889 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 3890 return expand_call (fn, target, target == const0_rtx);
53800dbe 3891}
3892
48e1416a 3893/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 3894	   NULL_RTX if we failed; the caller should emit a normal call.  */
27d0c333 3895
ffc83088 3896static rtx
0b25db21 3897expand_builtin_bzero (tree exp)
ffc83088 3898{
c2f47e15 3899 tree dest, size;
389dd41b 3900 location_t loc = EXPR_LOCATION (exp);
ffc83088 3901
c2f47e15 3902 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 3903 return NULL_RTX;
ffc83088 3904
c2f47e15 3905 dest = CALL_EXPR_ARG (exp, 0);
3906 size = CALL_EXPR_ARG (exp, 1);
bf8e3599 3907
7369e7ba 3908 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 3909 memset(ptr x, int 0, size_t y). This is done this way
3910 so that if it isn't expanded inline, we fallback to
3911 calling bzero instead of memset. */
bf8e3599 3912
c2f47e15 3913 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 3914 fold_convert_loc (loc,
3915 size_type_node, size),
c2f47e15 3916 const0_rtx, VOIDmode, exp);
ffc83088 3917}
3918
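/* A sketch of the argument rewrite above:

     bzero (p, n);

   is expanded as if it were

     memset (p, 0, (size_t) n);

   yet ORIG_EXP still records the bzero call, so if inline expansion fails
   the library fallback emitted by expand_builtin_memset_args is a call to
   bzero rather than to memset.  */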
d6f01a40 3919/* Try to expand cmpstr operation ICODE with the given operands.
3920 Return the result rtx on success, otherwise return null. */
3921
3922static rtx
3923expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3924 HOST_WIDE_INT align)
3925{
3926 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3927
3928 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3929 target = NULL_RTX;
3930
3931 struct expand_operand ops[4];
3932 create_output_operand (&ops[0], target, insn_mode);
3933 create_fixed_operand (&ops[1], arg1_rtx);
3934 create_fixed_operand (&ops[2], arg2_rtx);
3935 create_integer_operand (&ops[3], align);
3936 if (maybe_expand_insn (icode, 4, ops))
3937 return ops[0].value;
3938 return NULL_RTX;
3939}
3940
ea368aac 3941/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
d6f01a40 3942 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3943 otherwise return null. */
3944
3945static rtx
ea368aac 3946expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3947 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3948 HOST_WIDE_INT align)
d6f01a40 3949{
3950 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3951
3952 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3953 target = NULL_RTX;
3954
3955 struct expand_operand ops[5];
3956 create_output_operand (&ops[0], target, insn_mode);
3957 create_fixed_operand (&ops[1], arg1_rtx);
3958 create_fixed_operand (&ops[2], arg2_rtx);
3959 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3960 TYPE_UNSIGNED (arg3_type));
3961 create_integer_operand (&ops[4], align);
3962 if (maybe_expand_insn (icode, 5, ops))
3963 return ops[0].value;
3964 return NULL_RTX;
3965}
3966
7a3f89b5 3967/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 3968 Return NULL_RTX if we failed and the caller should emit a normal call,
ea368aac 3969 otherwise try to get the result in TARGET, if convenient. */
27d0c333 3970
53800dbe 3971static rtx
ea368aac 3972expand_builtin_memcmp (tree exp, rtx target)
53800dbe 3973{
c2f47e15 3974 if (!validate_arglist (exp,
3975 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3976 return NULL_RTX;
6f428e8b 3977
bd021c1c 3978 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3979 implementing memcmp because it will stop if it encounters two
3980 zero bytes. */
ea368aac 3981 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3982 if (icode == CODE_FOR_nothing)
3983 return NULL_RTX;
53800dbe 3984
ea368aac 3985 tree arg1 = CALL_EXPR_ARG (exp, 0);
3986 tree arg2 = CALL_EXPR_ARG (exp, 1);
3987 tree len = CALL_EXPR_ARG (exp, 2);
b428c0a5 3988
ea368aac 3989 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3990 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
53800dbe 3991
ea368aac 3992 /* If we don't have POINTER_TYPE, call the function. */
3993 if (arg1_align == 0 || arg2_align == 0)
3994 return NULL_RTX;
53800dbe 3995
ea368aac 3996 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3997 location_t loc = EXPR_LOCATION (exp);
3998 rtx arg1_rtx = get_memory_rtx (arg1, len);
3999 rtx arg2_rtx = get_memory_rtx (arg2, len);
4000 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
53800dbe 4001
ea368aac 4002 /* Set MEM_SIZE as appropriate. */
4003 if (CONST_INT_P (arg3_rtx))
4004 {
4005 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
4006 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
4007 }
83f88f8e 4008
ea368aac 4009 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
4010 TREE_TYPE (len), arg3_rtx,
4011 MIN (arg1_align, arg2_align));
4012 if (result)
4013 {
4014 /* Return the value in the proper mode for this function. */
4015 if (GET_MODE (result) == mode)
4016 return result;
83f88f8e 4017
ea368aac 4018 if (target != 0)
4019 {
4020 convert_move (target, result, 0);
4021 return target;
4022 }
0cd832f0 4023
53800dbe 4024 return convert_to_mode (mode, result, 0);
ea368aac 4025 }
53800dbe 4026
ea368aac 4027 result = target;
4028 if (! (result != 0
4029 && REG_P (result) && GET_MODE (result) == mode
4030 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4031 result = gen_reg_rtx (mode);
4032
4033 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4034 TYPE_MODE (integer_type_node), 3,
4035 XEXP (arg1_rtx, 0), Pmode,
4036 XEXP (arg2_rtx, 0), Pmode,
4037 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4038 TYPE_UNSIGNED (sizetype)),
4039 TYPE_MODE (sizetype));
4040 return result;
6f428e8b 4041}
4042
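/* A sketch of why a cmpstrn pattern cannot stand in for memcmp, as noted
   above:

     memcmp ("a\0x", "a\0y", 3)

   must examine all three bytes and return a negative value, whereas

     strncmp ("a\0x", "a\0y", 3)

   may stop at the matching NUL bytes and return 0.  Only a true cmpmem
   pattern, which does not stop at NULs, is therefore usable here.  */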
c2f47e15 4043/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
53800dbe 4044	   if we failed; the caller should emit a normal call.  Otherwise try to get
4045 the result in TARGET, if convenient. */
902de8ed 4046
53800dbe 4047static rtx
a65c4d64 4048expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 4049{
c2f47e15 4050 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4051 return NULL_RTX;
bf8e3599 4052
d6f01a40 4053 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4054 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4055 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 4056 {
4057 rtx arg1_rtx, arg2_rtx;
6ac5504b 4058 tree fndecl, fn;
c2f47e15 4059 tree arg1 = CALL_EXPR_ARG (exp, 0);
4060 tree arg2 = CALL_EXPR_ARG (exp, 1);
d6f01a40 4061 rtx result = NULL_RTX;
a0c938f0 4062
957d0361 4063 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4064 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6ac5504b 4065
4066 /* If we don't have POINTER_TYPE, call the function. */
4067 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4068 return NULL_RTX;
7a3f89b5 4069
6ac5504b 4070 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4071 arg1 = builtin_save_expr (arg1);
4072 arg2 = builtin_save_expr (arg2);
7a3f89b5 4073
d8ae1baa 4074 arg1_rtx = get_memory_rtx (arg1, NULL);
4075 arg2_rtx = get_memory_rtx (arg2, NULL);
53800dbe 4076
6ac5504b 4077 /* Try to call cmpstrsi. */
d6f01a40 4078 if (cmpstr_icode != CODE_FOR_nothing)
4079 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4080 MIN (arg1_align, arg2_align));
4081
6ac5504b 4082 /* Try to determine at least one length and call cmpstrnsi. */
d6f01a40 4083 if (!result && cmpstrn_icode != CODE_FOR_nothing)
6ac5504b 4084 {
4085 tree len;
4086 rtx arg3_rtx;
4087
6ac5504b 4088 tree len1 = c_strlen (arg1, 1);
4089 tree len2 = c_strlen (arg2, 1);
4090
4091 if (len1)
4092 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4093 if (len2)
4094 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4095
4096 /* If we don't have a constant length for the first, use the length
4097 of the second, if we know it. We don't require a constant for
4098 this case; some cost analysis could be done if both are available
4099 but neither is constant. For now, assume they're equally cheap,
4100 unless one has side effects. If both strings have constant lengths,
4101 use the smaller. */
4102
4103 if (!len1)
4104 len = len2;
4105 else if (!len2)
4106 len = len1;
4107 else if (TREE_SIDE_EFFECTS (len1))
4108 len = len2;
4109 else if (TREE_SIDE_EFFECTS (len2))
4110 len = len1;
4111 else if (TREE_CODE (len1) != INTEGER_CST)
4112 len = len2;
4113 else if (TREE_CODE (len2) != INTEGER_CST)
4114 len = len1;
4115 else if (tree_int_cst_lt (len1, len2))
4116 len = len1;
4117 else
4118 len = len2;
4119
4120 /* If both arguments have side effects, we cannot optimize. */
d6f01a40 4121 if (len && !TREE_SIDE_EFFECTS (len))
4122 {
4123 arg3_rtx = expand_normal (len);
ea368aac 4124 result = expand_cmpstrn_or_cmpmem
4125 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4126 arg3_rtx, MIN (arg1_align, arg2_align));
d6f01a40 4127 }
6ac5504b 4128 }
3f8aefe2 4129
d6f01a40 4130 if (result)
6ac5504b 4131 {
6ac5504b 4132 /* Return the value in the proper mode for this function. */
d6f01a40 4133 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6ac5504b 4134 if (GET_MODE (result) == mode)
4135 return result;
4136 if (target == 0)
4137 return convert_to_mode (mode, result, 0);
4138 convert_move (target, result, 0);
4139 return target;
4140 }
902de8ed 4141
6ac5504b 4142 /* Expand the library call ourselves using a stabilized argument
4143 list to avoid re-evaluating the function's arguments twice. */
6ac5504b 4144 fndecl = get_callee_fndecl (exp);
0568e9c1 4145 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
a65c4d64 4146 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4147 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6ac5504b 4148 return expand_call (fn, target, target == const0_rtx);
4149 }
c2f47e15 4150 return NULL_RTX;
83d79705 4151}
53800dbe 4152
48e1416a 4153/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 4154	   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
ed09096d 4155 the result in TARGET, if convenient. */
27d0c333 4156
ed09096d 4157static rtx
a65c4d64 4158expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4159 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4160{
a65c4d64 4161 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 4162
c2f47e15 4163 if (!validate_arglist (exp,
4164 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4165 return NULL_RTX;
ed09096d 4166
6e34e617 4167 /* If c_strlen can determine an expression for one of the string
6ac5504b 4168 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4169 using length MIN(strlen(string)+1, arg3). */
d6f01a40 4170 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4171 if (cmpstrn_icode != CODE_FOR_nothing)
7a3f89b5 4172 {
4173 tree len, len1, len2;
4174 rtx arg1_rtx, arg2_rtx, arg3_rtx;
d6f01a40 4175 rtx result;
0b25db21 4176 tree fndecl, fn;
c2f47e15 4177 tree arg1 = CALL_EXPR_ARG (exp, 0);
4178 tree arg2 = CALL_EXPR_ARG (exp, 1);
4179 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 4180
957d0361 4181 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4182 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
bf8e3599 4183
681fab1e 4184 len1 = c_strlen (arg1, 1);
4185 len2 = c_strlen (arg2, 1);
7a3f89b5 4186
4187 if (len1)
389dd41b 4188 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 4189 if (len2)
389dd41b 4190 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 4191
4192 /* If we don't have a constant length for the first, use the length
4193 of the second, if we know it. We don't require a constant for
4194 this case; some cost analysis could be done if both are available
4195 but neither is constant. For now, assume they're equally cheap,
4196 unless one has side effects. If both strings have constant lengths,
4197 use the smaller. */
4198
4199 if (!len1)
4200 len = len2;
4201 else if (!len2)
4202 len = len1;
4203 else if (TREE_SIDE_EFFECTS (len1))
4204 len = len2;
4205 else if (TREE_SIDE_EFFECTS (len2))
4206 len = len1;
4207 else if (TREE_CODE (len1) != INTEGER_CST)
4208 len = len2;
4209 else if (TREE_CODE (len2) != INTEGER_CST)
4210 len = len1;
4211 else if (tree_int_cst_lt (len1, len2))
4212 len = len1;
4213 else
4214 len = len2;
6e34e617 4215
7a3f89b5 4216 /* If both arguments have side effects, we cannot optimize. */
4217 if (!len || TREE_SIDE_EFFECTS (len))
c2f47e15 4218 return NULL_RTX;
bf8e3599 4219
7a3f89b5 4220 /* The actual new length parameter is MIN(len,arg3). */
389dd41b 4221 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4222 fold_convert_loc (loc, TREE_TYPE (len), arg3));
7a3f89b5 4223
4224 /* If we don't have POINTER_TYPE, call the function. */
4225 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4226 return NULL_RTX;
7a3f89b5 4227
a65c4d64 4228 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4229 arg1 = builtin_save_expr (arg1);
4230 arg2 = builtin_save_expr (arg2);
4231 len = builtin_save_expr (len);
27d0c333 4232
a65c4d64 4233 arg1_rtx = get_memory_rtx (arg1, len);
4234 arg2_rtx = get_memory_rtx (arg2, len);
4235 arg3_rtx = expand_normal (len);
ea368aac 4236 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4237 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4238 MIN (arg1_align, arg2_align));
d6f01a40 4239 if (result)
a65c4d64 4240 {
a65c4d64 4241 /* Return the value in the proper mode for this function. */
4242 mode = TYPE_MODE (TREE_TYPE (exp));
4243 if (GET_MODE (result) == mode)
4244 return result;
4245 if (target == 0)
4246 return convert_to_mode (mode, result, 0);
4247 convert_move (target, result, 0);
4248 return target;
4249 }
27d0c333 4250
a65c4d64 4251 /* Expand the library call ourselves using a stabilized argument
4252 list to avoid re-evaluating the function's arguments twice. */
4253 fndecl = get_callee_fndecl (exp);
0568e9c1 4254 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4255 arg1, arg2, len);
a65c4d64 4256 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4257 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4258 return expand_call (fn, target, target == const0_rtx);
4259 }
c2f47e15 4260 return NULL_RTX;
49f0327b 4261}
4262
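/* A sketch of the length computation above:

     strncmp (s, "abc", 16)

   c_strlen gives 3 for the literal, so the comparison length becomes
   MIN (3 + 1, 16) == 4.  Comparing four bytes is enough, because if S
   matches "abc" through the terminating NUL the remaining bytes cannot
   change the result.  */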
a66c9326 4263/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4264 if that's convenient. */
902de8ed 4265
a66c9326 4266rtx
aecda0d6 4267expand_builtin_saveregs (void)
53800dbe 4268{
1e0c0b35 4269 rtx val;
4270 rtx_insn *seq;
53800dbe 4271
4272 /* Don't do __builtin_saveregs more than once in a function.
4273 Save the result of the first call and reuse it. */
4274 if (saveregs_value != 0)
4275 return saveregs_value;
53800dbe 4276
a66c9326 4277 /* When this function is called, it means that registers must be
4278 saved on entry to this function. So we migrate the call to the
4279 first insn of this function. */
4280
4281 start_sequence ();
53800dbe 4282
a66c9326 4283 /* Do whatever the machine needs done in this case. */
45550790 4284 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4285
a66c9326 4286 seq = get_insns ();
4287 end_sequence ();
53800dbe 4288
a66c9326 4289 saveregs_value = val;
53800dbe 4290
31d3e01c 4291 /* Put the insns after the NOTE that starts the function. If this
4292 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4293 the code is placed at the start of the function. */
4294 push_topmost_sequence ();
0ec80471 4295 emit_insn_after (seq, entry_of_function ());
a66c9326 4296 pop_topmost_sequence ();
4297
4298 return val;
53800dbe 4299}
4300
79012a9d 4301/* Expand a call to __builtin_next_arg. */
27d0c333 4302
53800dbe 4303static rtx
79012a9d 4304expand_builtin_next_arg (void)
53800dbe 4305{
79012a9d 4306 /* Checking arguments is already done in fold_builtin_next_arg
4307 that must be called before this function. */
940ddc5c 4308 return expand_binop (ptr_mode, add_optab,
abe32cce 4309 crtl->args.internal_arg_pointer,
4310 crtl->args.arg_offset_rtx,
53800dbe 4311 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4312}
4313
a66c9326 4314/* Make it easier for the backends by protecting the valist argument
4315 from multiple evaluations. */
4316
4317static tree
389dd41b 4318stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4319{
5f57a8b1 4320 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4321
182cf5a9 4322 /* The current way of determining the type of valist is completely
4323 bogus. We should have the information on the va builtin instead. */
4324 if (!vatype)
4325 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4326
4327 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4328 {
2d47cc32 4329 if (TREE_SIDE_EFFECTS (valist))
4330 valist = save_expr (valist);
11a61dea 4331
2d47cc32 4332 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4333 vatype, but it's possible we've actually been given an array
4334 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4335 So fix it. */
4336 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4337 {
5f57a8b1 4338 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4339 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4340 }
a66c9326 4341 }
11a61dea 4342 else
a66c9326 4343 {
182cf5a9 4344 tree pt = build_pointer_type (vatype);
11a61dea 4345
2d47cc32 4346 if (! needs_lvalue)
4347 {
11a61dea 4348 if (! TREE_SIDE_EFFECTS (valist))
4349 return valist;
bf8e3599 4350
389dd41b 4351 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4352 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4353 }
2d47cc32 4354
11a61dea 4355 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4356 valist = save_expr (valist);
182cf5a9 4357 valist = fold_build2_loc (loc, MEM_REF,
4358 vatype, valist, build_int_cst (pt, 0));
a66c9326 4359 }
4360
4361 return valist;
4362}
4363
2e15d750 4364/* The "standard" definition of va_list is void*. */
4365
4366tree
4367std_build_builtin_va_list (void)
4368{
4369 return ptr_type_node;
4370}
4371
5f57a8b1 4372/* The "standard" abi va_list is va_list_type_node. */
4373
4374tree
4375std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4376{
4377 return va_list_type_node;
4378}
4379
4380/* The "standard" type of va_list is va_list_type_node. */
4381
4382tree
4383std_canonical_va_list_type (tree type)
4384{
4385 tree wtype, htype;
4386
4387 if (INDIRECT_REF_P (type))
4388 type = TREE_TYPE (type);
9af5ce0c 4389 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
5f57a8b1 4390 type = TREE_TYPE (type);
5f57a8b1 4391 wtype = va_list_type_node;
4392 htype = type;
7b36f9ab 4393 /* Treat structure va_list types. */
4394 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4395 htype = TREE_TYPE (htype);
4396 else if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4397 {
4398 /* If va_list is an array type, the argument may have decayed
4399 to a pointer type, e.g. by being passed to another function.
4400 In that case, unwrap both types so that we can compare the
4401 underlying records. */
4402 if (TREE_CODE (htype) == ARRAY_TYPE
4403 || POINTER_TYPE_P (htype))
4404 {
4405 wtype = TREE_TYPE (wtype);
4406 htype = TREE_TYPE (htype);
4407 }
4408 }
4409 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4410 return va_list_type_node;
4411
4412 return NULL_TREE;
4413}
4414
a66c9326 4415/* The "standard" implementation of va_start: just assign `nextarg' to
4416 the variable. */
27d0c333 4417
a66c9326 4418void
aecda0d6 4419std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4420{
f03c17bc 4421 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4422 convert_move (va_r, nextarg, 0);
058a1b7a 4423
4424 /* We do not have any valid bounds for the pointer, so
4425 just store zero bounds for it. */
4426 if (chkp_function_instrumented_p (current_function_decl))
4427 chkp_expand_bounds_reset_for_mem (valist,
4428 make_tree (TREE_TYPE (valist),
4429 nextarg));
a66c9326 4430}
4431
c2f47e15 4432/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4433
a66c9326 4434static rtx
c2f47e15 4435expand_builtin_va_start (tree exp)
a66c9326 4436{
4437 rtx nextarg;
c2f47e15 4438 tree valist;
389dd41b 4439 location_t loc = EXPR_LOCATION (exp);
a66c9326 4440
c2f47e15 4441 if (call_expr_nargs (exp) < 2)
cb166087 4442 {
389dd41b 4443 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4444 return const0_rtx;
4445 }
a66c9326 4446
c2f47e15 4447 if (fold_builtin_next_arg (exp, true))
79012a9d 4448 return const0_rtx;
7c2f0500 4449
79012a9d 4450 nextarg = expand_builtin_next_arg ();
389dd41b 4451 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4452
8a58ed0a 4453 if (targetm.expand_builtin_va_start)
4454 targetm.expand_builtin_va_start (valist, nextarg);
4455 else
4456 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4457
4458 return const0_rtx;
4459}
4460
c2f47e15 4461/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4462
a66c9326 4463static rtx
c2f47e15 4464expand_builtin_va_end (tree exp)
a66c9326 4465{
c2f47e15 4466 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4467
8a15c04a 4468 /* Evaluate for side effects, if needed. I hate macros that don't
4469 do that. */
4470 if (TREE_SIDE_EFFECTS (valist))
4471 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4472
4473 return const0_rtx;
4474}
4475
c2f47e15 4476/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4477 builtin rather than just as an assignment in stdarg.h because of the
4478 nastiness of array-type va_list types. */
f7c44134 4479
a66c9326 4480static rtx
c2f47e15 4481expand_builtin_va_copy (tree exp)
a66c9326 4482{
4483 tree dst, src, t;
389dd41b 4484 location_t loc = EXPR_LOCATION (exp);
a66c9326 4485
c2f47e15 4486 dst = CALL_EXPR_ARG (exp, 0);
4487 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4488
389dd41b 4489 dst = stabilize_va_list_loc (loc, dst, 1);
4490 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4491
5f57a8b1 4492 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4493
4494 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4495 {
5f57a8b1 4496 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4497 TREE_SIDE_EFFECTS (t) = 1;
4498 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4499 }
4500 else
4501 {
11a61dea 4502 rtx dstb, srcb, size;
4503
4504 /* Evaluate to pointers. */
4505 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4506 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4507 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4508 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4509
85d654dd 4510 dstb = convert_memory_address (Pmode, dstb);
4511 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4512
11a61dea 4513 /* "Dereference" to BLKmode memories. */
4514 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4515 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4516 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4517 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4518 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4519 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4520
4521 /* Copy. */
0378dbdc 4522 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4523 }
4524
4525 return const0_rtx;
4526}
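
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): the block-copy path above exists because on
   some ABIs va_list is an array type (e.g. x86-64, where it is a
   one-element array of struct __va_list_tag), so a plain assignment
   would not copy the underlying object:

     va_list ap, aq;
     va_start (ap, last);
     va_copy (aq, ap);          // expands to the block move emitted above
     ...
     va_end (aq);
     va_end (ap);
*/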
4527
53800dbe 4528/* Expand a call to one of the builtin functions __builtin_frame_address or
4529 __builtin_return_address. */
27d0c333 4530
53800dbe 4531static rtx
c2f47e15 4532expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4533{
53800dbe 4534 /* The argument must be a nonnegative integer constant.
4535 It counts the number of frames to scan up the stack.
5b252e95 4536 The value is either the frame pointer value or the return
4537 address saved in that frame. */
c2f47e15 4538 if (call_expr_nargs (exp) == 0)
53800dbe 4539 /* Warning about missing arg was already issued. */
4540 return const0_rtx;
e913b5cd 4541 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4542 {
5b252e95 4543 error ("invalid argument to %qD", fndecl);
53800dbe 4544 return const0_rtx;
4545 }
4546 else
4547 {
5b252e95 4548 /* Number of frames to scan up the stack. */
4549 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4550
4551 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 4552
4553 /* Some ports cannot access arbitrary stack frames. */
4554 if (tem == NULL)
4555 {
5b252e95 4556 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 4557 return const0_rtx;
4558 }
4559
5b252e95 4560 if (count)
4561 {
4562 /* Warn since no effort is made to ensure that any frame
4563 beyond the current one exists or can be safely reached. */
4564 warning (OPT_Wframe_address, "calling %qD with "
4565 "a nonzero argument is unsafe", fndecl);
4566 }
4567
53800dbe 4568 /* For __builtin_frame_address, return what we've got. */
4569 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4570 return tem;
4571
8ad4c111 4572 if (!REG_P (tem)
53800dbe 4573 && ! CONSTANT_P (tem))
99182918 4574 tem = copy_addr_to_reg (tem);
53800dbe 4575 return tem;
4576 }
4577}
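
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): only an argument of 0 is guaranteed to be
   safe, which is why a nonzero count draws -Wframe-address above.

     void *fp = __builtin_frame_address (0);    // this function's frame
     void *ra = __builtin_return_address (0);   // address we will return to
*/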
4578
990495a7 4579/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5be42b39 4580 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4581 is the same as for allocate_dynamic_stack_space. */
15c6cf6b 4582
53800dbe 4583static rtx
5be42b39 4584expand_builtin_alloca (tree exp, bool cannot_accumulate)
53800dbe 4585{
4586 rtx op0;
15c6cf6b 4587 rtx result;
581bf1c2 4588 bool valid_arglist;
4589 unsigned int align;
4590 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4591 == BUILT_IN_ALLOCA_WITH_ALIGN);
53800dbe 4592
581bf1c2 4593 valid_arglist
4594 = (alloca_with_align
4595 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4596 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4597
4598 if (!valid_arglist)
c2f47e15 4599 return NULL_RTX;
53800dbe 4600
4601 /* Compute the argument. */
c2f47e15 4602 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4603
581bf1c2 4604 /* Compute the alignment. */
4605 align = (alloca_with_align
f9ae6f95 4606 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4607 : BIGGEST_ALIGNMENT);
4608
53800dbe 4609 /* Allocate the desired space. */
581bf1c2 4610 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
85d654dd 4611 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4612
4613 return result;
53800dbe 4614}
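
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): the two call forms accepted above.  For the
   _with_align variant the second argument is a constant alignment in bits.

     char *buf  = __builtin_alloca (len);
     char *abuf = __builtin_alloca_with_align (len, 256);  // 32-byte aligned
*/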
4615
74bdbe96 4616/* Expand a call to a bswap builtin in EXP.
4617 Return NULL_RTX if a normal call should be emitted rather than expanding the
4618 function in-line. If convenient, the result should be placed in TARGET.
4619 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4620
4621static rtx
3754d046 4622expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4623 rtx subtarget)
42791117 4624{
42791117 4625 tree arg;
4626 rtx op0;
4627
c2f47e15 4628 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4629 return NULL_RTX;
42791117 4630
c2f47e15 4631 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4632 op0 = expand_expr (arg,
4633 subtarget && GET_MODE (subtarget) == target_mode
4634 ? subtarget : NULL_RTX,
4635 target_mode, EXPAND_NORMAL);
4636 if (GET_MODE (op0) != target_mode)
4637 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4638
74bdbe96 4639 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4640
4641 gcc_assert (target);
4642
74bdbe96 4643 return convert_to_mode (target_mode, target, 1);
42791117 4644}
4645
c2f47e15 4646/* Expand a call to a unary builtin in EXP.
4647 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4648 function in-line. If convenient, the result should be placed in TARGET.
4649 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4650
53800dbe 4651static rtx
3754d046 4652expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4653 rtx subtarget, optab op_optab)
53800dbe 4654{
4655 rtx op0;
c2f47e15 4656
4657 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4658 return NULL_RTX;
53800dbe 4659
4660 /* Compute the argument. */
f97eea22 4661 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4662 (subtarget
4663 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4664 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4665 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4666 /* Compute op, into TARGET if possible.
53800dbe 4667 Set TARGET to wherever the result comes back. */
c2f47e15 4668 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4669 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4670 gcc_assert (target);
7d3f6cc7 4671
efb070c8 4672 return convert_to_mode (target_mode, target, 0);
53800dbe 4673}
89cfe6e5 4674
48e1416a 4675/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4676 as the builtin_expect semantic should've been already executed by
4677 tree branch prediction pass. */
89cfe6e5 4678
4679static rtx
c2f47e15 4680expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4681{
1e4adcfc 4682 tree arg;
89cfe6e5 4683
c2f47e15 4684 if (call_expr_nargs (exp) < 2)
89cfe6e5 4685 return const0_rtx;
c2f47e15 4686 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4687
c2f47e15 4688 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4689 /* When guessing was done, the hints should be already stripped away. */
07311427 4690 gcc_assert (!flag_guess_branch_prob
852f689e 4691 || optimize == 0 || seen_error ());
89cfe6e5 4692 return target;
4693}
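
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): by the time we expand the call the hint has
   already been consumed by tree branch prediction, so expansion just
   returns the first argument.

     if (__builtin_expect (ptr == NULL, 0))   // mark the branch unlikely
       return handle_error ();                // hypothetical user function
*/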
689df48e 4694
fca0886c 4695/* Expand a call to __builtin_assume_aligned. We just return our first
4696 argument as the builtin_assume_aligned semantic should've been already
4697 executed by CCP. */
4698
4699static rtx
4700expand_builtin_assume_aligned (tree exp, rtx target)
4701{
4702 if (call_expr_nargs (exp) < 2)
4703 return const0_rtx;
4704 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4705 EXPAND_NORMAL);
4706 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4707 && (call_expr_nargs (exp) < 3
4708 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4709 return target;
4710}
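
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): the alignment (and optional misalignment)
   arguments have already been used by CCP, so only the pointer itself is
   expanded.

     double *v = (double *) __builtin_assume_aligned (p, 32);
     double *w = (double *) __builtin_assume_aligned (q, 32, 8);
                            // q is 8 bytes past a 32-byte boundary
*/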
4711
c22de3f0 4712void
aecda0d6 4713expand_builtin_trap (void)
a0ef1725 4714{
4db8dd0c 4715 if (targetm.have_trap ())
f73960eb 4716 {
4db8dd0c 4717 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 4718 /* For trap insns when not accumulating outgoing args force
4719 REG_ARGS_SIZE note to prevent crossjumping of calls with
4720 different args sizes. */
4721 if (!ACCUMULATE_OUTGOING_ARGS)
4722 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4723 }
a0ef1725 4724 else
a0ef1725 4725 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4726 emit_barrier ();
4727}
78a74442 4728
d2b48f0c 4729/* Expand a call to __builtin_unreachable. We do nothing except emit
4730 a barrier saying that control flow will not pass here.
4731
4732 It is the responsibility of the program being compiled to ensure
 4733 that control flow never reaches __builtin_unreachable. */
4734static void
4735expand_builtin_unreachable (void)
4736{
4737 emit_barrier ();
4738}
4739
c2f47e15 4740/* Expand EXP, a call to fabs, fabsf or fabsl.
4741 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 4742 the function inline. If convenient, the result should be placed
4743 in TARGET. SUBTARGET may be used as the target for computing
4744 the operand. */
4745
4746static rtx
c2f47e15 4747expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 4748{
3754d046 4749 machine_mode mode;
78a74442 4750 tree arg;
4751 rtx op0;
4752
c2f47e15 4753 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4754 return NULL_RTX;
78a74442 4755
c2f47e15 4756 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 4757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 4758 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 4759 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 4760 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4761}
4762
c2f47e15 4763/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 4764 Return NULL if a normal call should be emitted rather than expanding the
4765 function inline. If convenient, the result should be placed in TARGET.
4766 SUBTARGET may be used as the target for computing the operand. */
4767
4768static rtx
c2f47e15 4769expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 4770{
4771 rtx op0, op1;
4772 tree arg;
4773
c2f47e15 4774 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4775 return NULL_RTX;
270436f3 4776
c2f47e15 4777 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 4778 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 4779
c2f47e15 4780 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 4781 op1 = expand_normal (arg);
270436f3 4782
4783 return expand_copysign (op0, op1, target);
4784}
4785
ac8fb6db 4786/* Expand a call to __builtin___clear_cache. */
4787
4788static rtx
32e17df0 4789expand_builtin___clear_cache (tree exp)
ac8fb6db 4790{
32e17df0 4791 if (!targetm.code_for_clear_cache)
4792 {
ac8fb6db 4793#ifdef CLEAR_INSN_CACHE
32e17df0 4794 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4795 does something. Just do the default expansion to a call to
4796 __clear_cache(). */
4797 return NULL_RTX;
ac8fb6db 4798#else
32e17df0 4799 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4800 does nothing. There is no need to call it. Do nothing. */
4801 return const0_rtx;
ac8fb6db 4802#endif /* CLEAR_INSN_CACHE */
32e17df0 4803 }
4804
ac8fb6db 4805 /* We have a "clear_cache" insn, and it will handle everything. */
4806 tree begin, end;
4807 rtx begin_rtx, end_rtx;
ac8fb6db 4808
4809 /* We must not expand to a library call. If we did, any
4810 fallback library function in libgcc that might contain a call to
4811 __builtin___clear_cache() would recurse infinitely. */
4812 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4813 {
4814 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4815 return const0_rtx;
4816 }
4817
32e17df0 4818 if (targetm.have_clear_cache ())
ac8fb6db 4819 {
8786db1e 4820 struct expand_operand ops[2];
ac8fb6db 4821
4822 begin = CALL_EXPR_ARG (exp, 0);
4823 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4824
4825 end = CALL_EXPR_ARG (exp, 1);
4826 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4827
8786db1e 4828 create_address_operand (&ops[0], begin_rtx);
4829 create_address_operand (&ops[1], end_rtx);
32e17df0 4830 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 4831 return const0_rtx;
ac8fb6db 4832 }
4833 return const0_rtx;
ac8fb6db 4834}
4835
4ee9c684 4836/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4837
4838static rtx
4839round_trampoline_addr (rtx tramp)
4840{
4841 rtx temp, addend, mask;
4842
4843 /* If we don't need too much alignment, we'll have been guaranteed
4844 proper alignment by get_trampoline_type. */
4845 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4846 return tramp;
4847
4848 /* Round address up to desired boundary. */
4849 temp = gen_reg_rtx (Pmode);
0359f9f5 4850 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4851 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 4852
4853 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4854 temp, 0, OPTAB_LIB_WIDEN);
4855 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4856 temp, 0, OPTAB_LIB_WIDEN);
4857
4858 return tramp;
4859}
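
/* Worked example (not from the GCC sources): with a TRAMPOLINE_ALIGNMENT
   of 64 bits the code above computes, in byte terms,

     addend = 64 / 8 - 1 =  7
     mask   = -(64 / 8)  = -8
     tramp  = (tramp + 7) & -8

   i.e. the usual round-up-to-a-multiple idiom.  */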
4860
4861static rtx
c307f106 4862expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 4863{
4864 tree t_tramp, t_func, t_chain;
82c7907c 4865 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 4866
c2f47e15 4867 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 4868 POINTER_TYPE, VOID_TYPE))
4869 return NULL_RTX;
4870
c2f47e15 4871 t_tramp = CALL_EXPR_ARG (exp, 0);
4872 t_func = CALL_EXPR_ARG (exp, 1);
4873 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 4874
8ec3c5c2 4875 r_tramp = expand_normal (t_tramp);
82c7907c 4876 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4877 MEM_NOTRAP_P (m_tramp) = 1;
4878
c307f106 4879 /* If ONSTACK, the TRAMP argument should be the address of a field
4880 within the local function's FRAME decl. Either way, let's see if
4881 we can fill in the MEM_ATTRs for this memory. */
82c7907c 4882 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 4883 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 4884
c307f106 4885 /* Creator of a heap trampoline is responsible for making sure the
4886 address is aligned to at least STACK_BOUNDARY. Normally malloc
4887 will ensure this anyhow. */
82c7907c 4888 tmp = round_trampoline_addr (r_tramp);
4889 if (tmp != r_tramp)
4890 {
4891 m_tramp = change_address (m_tramp, BLKmode, tmp);
4892 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 4893 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 4894 }
4895
4896 /* The FUNC argument should be the address of the nested function.
4897 Extract the actual function decl to pass to the hook. */
4898 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4899 t_func = TREE_OPERAND (t_func, 0);
4900 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4901
8ec3c5c2 4902 r_chain = expand_normal (t_chain);
4ee9c684 4903
4904 /* Generate insns to initialize the trampoline. */
82c7907c 4905 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 4906
c307f106 4907 if (onstack)
4908 {
4909 trampolines_created = 1;
8bc8a8f4 4910
c307f106 4911 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4912 "trampoline generated for nested function %qD", t_func);
4913 }
8bc8a8f4 4914
4ee9c684 4915 return const0_rtx;
4916}
4917
4918static rtx
c2f47e15 4919expand_builtin_adjust_trampoline (tree exp)
4ee9c684 4920{
4921 rtx tramp;
4922
c2f47e15 4923 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 4924 return NULL_RTX;
4925
c2f47e15 4926 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 4927 tramp = round_trampoline_addr (tramp);
82c7907c 4928 if (targetm.calls.trampoline_adjust_address)
4929 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 4930
4931 return tramp;
4932}
4933
93f564d6 4934/* Expand the call EXP to the built-in signbit, signbitf or signbitl
4935 function. The function first checks whether the back end provides
4936 an insn to implement signbit for the respective mode. If not, it
4937 checks whether the floating point format of the value is such that
10902624 4938 the sign bit can be extracted. If that is not the case, error out.
4939 EXP is the expression that is a call to the builtin function; if
4940 convenient, the result should be placed in TARGET. */
27f261ef 4941static rtx
4942expand_builtin_signbit (tree exp, rtx target)
4943{
4944 const struct real_format *fmt;
3754d046 4945 machine_mode fmode, imode, rmode;
c2f47e15 4946 tree arg;
ca4f1f5b 4947 int word, bitpos;
27eda240 4948 enum insn_code icode;
27f261ef 4949 rtx temp;
389dd41b 4950 location_t loc = EXPR_LOCATION (exp);
27f261ef 4951
c2f47e15 4952 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4953 return NULL_RTX;
27f261ef 4954
c2f47e15 4955 arg = CALL_EXPR_ARG (exp, 0);
27f261ef 4956 fmode = TYPE_MODE (TREE_TYPE (arg));
4957 rmode = TYPE_MODE (TREE_TYPE (exp));
4958 fmt = REAL_MODE_FORMAT (fmode);
4959
93f564d6 4960 arg = builtin_save_expr (arg);
4961
4962 /* Expand the argument yielding a RTX expression. */
4963 temp = expand_normal (arg);
4964
4965 /* Check if the back end provides an insn that handles signbit for the
4966 argument's mode. */
d6bf3b14 4967 icode = optab_handler (signbit_optab, fmode);
27eda240 4968 if (icode != CODE_FOR_nothing)
93f564d6 4969 {
1e0c0b35 4970 rtx_insn *last = get_last_insn ();
93f564d6 4971 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 4972 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4973 return target;
4974 delete_insns_since (last);
93f564d6 4975 }
4976
27f261ef 4977 /* For floating point formats without a sign bit, implement signbit
4978 as "ARG < 0.0". */
8d564692 4979 bitpos = fmt->signbit_ro;
ca4f1f5b 4980 if (bitpos < 0)
27f261ef 4981 {
4982 /* But we can't do this if the format supports signed zero. */
10902624 4983 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
27f261ef 4984
389dd41b 4985 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 4986 build_real (TREE_TYPE (arg), dconst0));
27f261ef 4987 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4988 }
4989
ca4f1f5b 4990 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 4991 {
ca4f1f5b 4992 imode = int_mode_for_mode (fmode);
10902624 4993 gcc_assert (imode != BLKmode);
ca4f1f5b 4994 temp = gen_lowpart (imode, temp);
24fd4260 4995 }
4996 else
4997 {
ca4f1f5b 4998 imode = word_mode;
4999 /* Handle targets with different FP word orders. */
5000 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5001 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5002 else
a0c938f0 5003 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5004 temp = operand_subword_force (temp, word, fmode);
5005 bitpos = bitpos % BITS_PER_WORD;
5006 }
5007
44b0f1d0 5008 /* Force the intermediate word_mode (or narrower) result into a
5009 register. This avoids attempting to create paradoxical SUBREGs
5010 of floating point modes below. */
5011 temp = force_reg (imode, temp);
5012
ca4f1f5b 5013 /* If the bitpos is within the "result mode" lowpart, the operation
 5014 can be implemented with a single bitwise AND. Otherwise, we need
5015 a right shift and an AND. */
5016
5017 if (bitpos < GET_MODE_BITSIZE (rmode))
5018 {
796b6678 5019 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5020
4a46f016 5021 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5022 temp = gen_lowpart (rmode, temp);
24fd4260 5023 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5024 immed_wide_int_const (mask, rmode),
ca4f1f5b 5025 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5026 }
ca4f1f5b 5027 else
5028 {
5029 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5030 significant bit, then truncate the result to the desired mode
ca4f1f5b 5031 and mask just this bit. */
f5ff0b21 5032 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5033 temp = gen_lowpart (rmode, temp);
5034 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5035 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5036 }
5037
27f261ef 5038 return temp;
5039}
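
/* Illustrative usage sketch (not from the GCC sources): signbit inspects
   the raw sign bit, so unlike a comparison against zero it is nonzero for
   -0.0 and for NaNs with the sign bit set:

     __builtin_signbit (-0.0)   // nonzero
     (-0.0 < 0.0)               // false

   On the fallback path above the bit is read out of the value's integer
   image, either by masking it in place or by shifting it down to bit 0
   first.  */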
73673831 5040
5041/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5042 call. EXP is the call. FN is the
73673831 5043 identifier of the actual function. IGNORE is nonzero if the
5044 value is to be ignored. */
5045
5046static rtx
c2f47e15 5047expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5048{
5049 tree id, decl;
5050 tree call;
5051
5052 /* If we are not profiling, just call the function. */
5053 if (!profile_arc_flag)
5054 return NULL_RTX;
5055
5056 /* Otherwise call the wrapper. This should be equivalent for the rest of
 5057 the compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5058 code necessary for keeping the profiling sane. */
73673831 5059
5060 switch (DECL_FUNCTION_CODE (fn))
5061 {
5062 case BUILT_IN_FORK:
5063 id = get_identifier ("__gcov_fork");
5064 break;
5065
5066 case BUILT_IN_EXECL:
5067 id = get_identifier ("__gcov_execl");
5068 break;
5069
5070 case BUILT_IN_EXECV:
5071 id = get_identifier ("__gcov_execv");
5072 break;
5073
5074 case BUILT_IN_EXECLP:
5075 id = get_identifier ("__gcov_execlp");
5076 break;
5077
5078 case BUILT_IN_EXECLE:
5079 id = get_identifier ("__gcov_execle");
5080 break;
5081
5082 case BUILT_IN_EXECVP:
5083 id = get_identifier ("__gcov_execvp");
5084 break;
5085
5086 case BUILT_IN_EXECVE:
5087 id = get_identifier ("__gcov_execve");
5088 break;
5089
5090 default:
64db345d 5091 gcc_unreachable ();
73673831 5092 }
5093
e60a6f7b 5094 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5095 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5096 DECL_EXTERNAL (decl) = 1;
5097 TREE_PUBLIC (decl) = 1;
5098 DECL_ARTIFICIAL (decl) = 1;
5099 TREE_NOTHROW (decl) = 1;
e82d310b 5100 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5101 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5102 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5103 return expand_call (call, target, ignore);
c2f47e15 5104 }
48e1416a 5105
b6a5fc45 5106
5107\f
3e272de8 5108/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5109 the pointer in these functions is void*, the tree optimizers may remove
5110 casts. The mode computed in expand_builtin isn't reliable either, due
5111 to __sync_bool_compare_and_swap.
5112
5113 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5114 group of builtins. This gives us log2 of the mode size. */
5115
3754d046 5116static inline machine_mode
3e272de8 5117get_builtin_sync_mode (int fcode_diff)
5118{
ad3a13b5 5119 /* The size is not negotiable, so ask not to get BLKmode in return
5120 if the target indicates that a smaller size would be better. */
5121 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 5122}
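
/* Worked example (not from the GCC sources; assumes the usual _1, _2, _4,
   _8, _16 ordering of the sync builtins and BITS_PER_UNIT == 8): for
   __sync_fetch_and_add_4,

     fcode_diff = 2,  size = BITS_PER_UNIT << 2 = 32 bits,

   which on typical targets yields SImode.  */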
5123
041e0215 5124/* Expand the memory expression LOC and return the appropriate memory operand
5125 for the builtin_sync operations. */
5126
5127static rtx
3754d046 5128get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5129{
5130 rtx addr, mem;
5131
7f4d56ad 5132 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5133 addr = convert_memory_address (Pmode, addr);
041e0215 5134
5135 /* Note that we explicitly do not want any alias information for this
5136 memory, so that we kill all other live memories. Otherwise we don't
5137 satisfy the full barrier semantics of the intrinsic. */
5138 mem = validize_mem (gen_rtx_MEM (mode, addr));
5139
153c3b50 5140 /* The alignment needs to be at least that of the mode. */
5141 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5142 get_pointer_alignment (loc)));
c94cfd1c 5143 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5144 MEM_VOLATILE_P (mem) = 1;
5145
5146 return mem;
5147}
5148
1cd6e20d 5149/* Make sure an argument is in the right mode.
5150 EXP is the tree argument.
5151 MODE is the mode it should be in. */
5152
5153static rtx
3754d046 5154expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5155{
5156 rtx val;
3754d046 5157 machine_mode old_mode;
1cd6e20d 5158
5159 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5160 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5161 of CONST_INTs, where we know the old_mode only from the call argument. */
5162
5163 old_mode = GET_MODE (val);
5164 if (old_mode == VOIDmode)
5165 old_mode = TYPE_MODE (TREE_TYPE (exp));
5166 val = convert_modes (mode, old_mode, val, 1);
5167 return val;
5168}
5169
5170
b6a5fc45 5171/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5172 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5173 that corresponds to the arithmetic or logical operation from the name;
5174 an exception here is that NOT actually means NAND. TARGET is an optional
5175 place for us to store the results; AFTER is true if this is the
1cd6e20d 5176 fetch_and_xxx form. */
b6a5fc45 5177
5178static rtx
3754d046 5179expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5180 enum rtx_code code, bool after,
1cd6e20d 5181 rtx target)
b6a5fc45 5182{
041e0215 5183 rtx val, mem;
e60a6f7b 5184 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5185
cf73e559 5186 if (code == NOT && warn_sync_nand)
5187 {
5188 tree fndecl = get_callee_fndecl (exp);
5189 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5190
5191 static bool warned_f_a_n, warned_n_a_f;
5192
5193 switch (fcode)
5194 {
2797f13a 5195 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5196 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5197 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5198 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5199 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5200 if (warned_f_a_n)
5201 break;
5202
b9a16870 5203 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5204 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5205 warned_f_a_n = true;
5206 break;
5207
2797f13a 5208 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5209 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5210 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5211 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5212 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5213 if (warned_n_a_f)
5214 break;
5215
b9a16870 5216 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5217 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5218 warned_n_a_f = true;
5219 break;
5220
5221 default:
5222 gcc_unreachable ();
5223 }
5224 }
5225
b6a5fc45 5226 /* Expand the operands. */
c2f47e15 5227 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5228 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5229
a372f7ca 5230 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 5231 after);
b6a5fc45 5232}
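
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): the two families expanded above differ only
   in which value they return.  With int c = 1:

     __sync_fetch_and_add (&c, 1)   // returns 1 (old value), c becomes 2
     __sync_add_and_fetch (&c, 1)   // returns 2 (new value), c becomes 2

   and, per the warning above, the NAND forms compute ~(old & val) since
   GCC 4.4.  */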
5233
5234/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5235 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5236 true if this is the boolean form. TARGET is a place for us to store the
5237 results; this is NOT optional if IS_BOOL is true. */
5238
5239static rtx
3754d046 5240expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5241 bool is_bool, rtx target)
b6a5fc45 5242{
041e0215 5243 rtx old_val, new_val, mem;
ba885f6a 5244 rtx *pbool, *poval;
b6a5fc45 5245
5246 /* Expand the operands. */
c2f47e15 5247 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5248 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5249 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5250
ba885f6a 5251 pbool = poval = NULL;
5252 if (target != const0_rtx)
5253 {
5254 if (is_bool)
5255 pbool = &target;
5256 else
5257 poval = &target;
5258 }
5259 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 5260 false, MEMMODEL_SYNC_SEQ_CST,
5261 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 5262 return NULL_RTX;
c2f47e15 5263
1cd6e20d 5264 return target;
b6a5fc45 5265}
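
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): the two source-level forms handled above.

     // bool form: did the swap happen?
     if (__sync_bool_compare_and_swap (&x, 0, 1))
       ...
     // val form: returns the value x held before the call
     int old = __sync_val_compare_and_swap (&x, expected, desired);
*/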
5266
5267/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5268 general form is actually an atomic exchange, and some targets only
5269 support a reduced form with the second argument being a constant 1.
48e1416a 5270 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5271 the results. */
b6a5fc45 5272
5273static rtx
3754d046 5274expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5275 rtx target)
b6a5fc45 5276{
041e0215 5277 rtx val, mem;
b6a5fc45 5278
5279 /* Expand the operands. */
c2f47e15 5280 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5281 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5282
7821cde1 5283 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5284}
5285
5286/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5287
5288static void
3754d046 5289expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5290{
5291 rtx mem;
5292
5293 /* Expand the operands. */
5294 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5295
a372f7ca 5296 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5297}
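
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): these two builtins are typically paired as a
   simple spinlock; test_and_set is an acquire barrier returning the
   previous value, lock_release stores 0 with release semantics.

     while (__sync_lock_test_and_set (&busy, 1))
       ;                            // spin while the old value was 1
     ... critical section ...
     __sync_lock_release (&busy);
*/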
5298
5299/* Given an integer representing an ``enum memmodel'', verify its
5300 correctness and return the memory model enum. */
5301
5302static enum memmodel
5303get_memmodel (tree exp)
5304{
5305 rtx op;
7f738025 5306 unsigned HOST_WIDE_INT val;
1cd6e20d 5307
5308 /* If the parameter is not a constant, it's a run time value so we'll just
5309 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5310 if (TREE_CODE (exp) != INTEGER_CST)
5311 return MEMMODEL_SEQ_CST;
5312
5313 op = expand_normal (exp);
7f738025 5314
5315 val = INTVAL (op);
5316 if (targetm.memmodel_check)
5317 val = targetm.memmodel_check (val);
5318 else if (val & ~MEMMODEL_MASK)
5319 {
5320 warning (OPT_Winvalid_memory_model,
5321 "Unknown architecture specifier in memory model to builtin.");
5322 return MEMMODEL_SEQ_CST;
5323 }
5324
a372f7ca 5325 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5326 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5327 {
5328 warning (OPT_Winvalid_memory_model,
5329 "invalid memory model argument to builtin");
5330 return MEMMODEL_SEQ_CST;
5331 }
7f738025 5332
3070f133 5333 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5334 be conservative and promote consume to acquire. */
5335 if (val == MEMMODEL_CONSUME)
5336 val = MEMMODEL_ACQUIRE;
5337
7f738025 5338 return (enum memmodel) val;
1cd6e20d 5339}
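
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): the values validated above are the __ATOMIC_*
   constants as written by the user, e.g.

     __atomic_store_n (&flag, 1, __ATOMIC_RELEASE);
     while (!__atomic_load_n (&flag, __ATOMIC_ACQUIRE))
       ;

   A non-constant model argument is treated as SEQ_CST, and CONSUME is
   promoted to ACQUIRE (see the PR 59448 workaround above).  */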
5340
5341/* Expand the __atomic_exchange intrinsic:
5342 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5343 EXP is the CALL_EXPR.
5344 TARGET is an optional place for us to store the results. */
5345
5346static rtx
3754d046 5347expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5348{
5349 rtx val, mem;
5350 enum memmodel model;
5351
5352 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5353
5354 if (!flag_inline_atomics)
5355 return NULL_RTX;
5356
5357 /* Expand the operands. */
5358 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5359 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5360
7821cde1 5361 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5362}
5363
5364/* Expand the __atomic_compare_exchange intrinsic:
5365 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5366 TYPE desired, BOOL weak,
5367 enum memmodel success,
5368 enum memmodel failure)
5369 EXP is the CALL_EXPR.
5370 TARGET is an optional place for us to store the results. */
5371
5372static rtx
3754d046 5373expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5374 rtx target)
5375{
1e0c0b35 5376 rtx expect, desired, mem, oldval;
5377 rtx_code_label *label;
1cd6e20d 5378 enum memmodel success, failure;
5379 tree weak;
5380 bool is_weak;
5381
5382 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5383 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5384
086f4e33 5385 if (failure > success)
5386 {
5387 warning (OPT_Winvalid_memory_model,
5388 "failure memory model cannot be stronger than success memory "
5389 "model for %<__atomic_compare_exchange%>");
5390 success = MEMMODEL_SEQ_CST;
5391 }
5392
a372f7ca 5393 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5394 {
086f4e33 5395 warning (OPT_Winvalid_memory_model,
5396 "invalid failure memory model for "
5397 "%<__atomic_compare_exchange%>");
5398 failure = MEMMODEL_SEQ_CST;
5399 success = MEMMODEL_SEQ_CST;
1cd6e20d 5400 }
5401
086f4e33 5402
1cd6e20d 5403 if (!flag_inline_atomics)
5404 return NULL_RTX;
5405
5406 /* Expand the operands. */
5407 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5408
5409 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5410 expect = convert_memory_address (Pmode, expect);
c401b131 5411 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5412 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5413
5414 weak = CALL_EXPR_ARG (exp, 3);
5415 is_weak = false;
e913b5cd 5416 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5417 is_weak = true;
5418
d86e3752 5419 if (target == const0_rtx)
5420 target = NULL;
d86e3752 5421
3c29a9ea 5422 /* Lest the rtl backend create a race condition with an improper store
5423 to memory, always create a new pseudo for OLDVAL. */
5424 oldval = NULL;
5425
5426 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5427 is_weak, success, failure))
1cd6e20d 5428 return NULL_RTX;
5429
d86e3752 5430 /* Conditionally store back to EXPECT, lest we create a race condition
5431 with an improper store to memory. */
5432 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5433 the normal case where EXPECT is totally private, i.e. a register. At
5434 which point the store can be unconditional. */
5435 label = gen_label_rtx ();
62589f76 5436 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5437 GET_MODE (target), 1, label);
d86e3752 5438 emit_move_insn (expect, oldval);
5439 emit_label (label);
c401b131 5440
1cd6e20d 5441 return target;
5442}
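
/* Illustrative usage sketch (not from the GCC sources; identifiers are
   invented for the example): the source-level form behind the expansion
   above.  On failure the value actually seen is written back through the
   second argument, which is what the conditional store to EXPECT does.

     int expected = 0;
     if (!__atomic_compare_exchange_n (&x, &expected, 1, 0,  // 0 = strong
                                       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED))
       ...                      // expected now holds the value found in x
*/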
5443
5444/* Expand the __atomic_load intrinsic:
5445 TYPE __atomic_load (TYPE *object, enum memmodel)
5446 EXP is the CALL_EXPR.
5447 TARGET is an optional place for us to store the results. */
5448
5449static rtx
3754d046 5450expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5451{
5452 rtx mem;
5453 enum memmodel model;
5454
5455 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5456 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5457 {
086f4e33 5458 warning (OPT_Winvalid_memory_model,
5459 "invalid memory model for %<__atomic_load%>");
5460 model = MEMMODEL_SEQ_CST;
1cd6e20d 5461 }
5462
5463 if (!flag_inline_atomics)
5464 return NULL_RTX;
5465
5466 /* Expand the operand. */
5467 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5468
5469 return expand_atomic_load (target, mem, model);
5470}
5471
5472
5473/* Expand the __atomic_store intrinsic:
5474 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5475 EXP is the CALL_EXPR.
5476 TARGET is an optional place for us to store the results. */
5477
5478static rtx
3754d046 5479expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5480{
5481 rtx mem, val;
5482 enum memmodel model;
5483
5484 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 5485 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5486 || is_mm_release (model)))
1cd6e20d 5487 {
086f4e33 5488 warning (OPT_Winvalid_memory_model,
5489 "invalid memory model for %<__atomic_store%>");
5490 model = MEMMODEL_SEQ_CST;
1cd6e20d 5491 }
5492
5493 if (!flag_inline_atomics)
5494 return NULL_RTX;
5495
5496 /* Expand the operands. */
5497 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5498 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5499
8808bf16 5500 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5501}
5502
5503/* Expand the __atomic_fetch_XXX intrinsic:
5504 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5505 EXP is the CALL_EXPR.
5506 TARGET is an optional place for us to store the results.
5507 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5508 FETCH_AFTER is true if returning the result of the operation.
5509 FETCH_AFTER is false if returning the value before the operation.
5510 IGNORE is true if the result is not used.
5511 EXT_CALL is the correct builtin for an external call if this cannot be
5512 resolved to an instruction sequence. */
5513
5514static rtx
3754d046 5515expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 5516 enum rtx_code code, bool fetch_after,
5517 bool ignore, enum built_in_function ext_call)
5518{
5519 rtx val, mem, ret;
5520 enum memmodel model;
5521 tree fndecl;
5522 tree addr;
5523
5524 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5525
5526 /* Expand the operands. */
5527 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5528 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5529
5530 /* Only try generating instructions if inlining is turned on. */
5531 if (flag_inline_atomics)
5532 {
5533 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5534 if (ret)
5535 return ret;
5536 }
5537
5538 /* Return if a different routine isn't needed for the library call. */
5539 if (ext_call == BUILT_IN_NONE)
5540 return NULL_RTX;
5541
5542 /* Change the call to the specified function. */
5543 fndecl = get_callee_fndecl (exp);
5544 addr = CALL_EXPR_FN (exp);
5545 STRIP_NOPS (addr);
5546
5547 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 5548 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 5549
5550 /* Expand the call here so we can emit trailing code. */
5551 ret = expand_call (exp, target, ignore);
5552
5553 /* Replace the original function just in case it matters. */
5554 TREE_OPERAND (addr, 0) = fndecl;
5555
5556 /* Then issue the arithmetic correction to return the right result. */
5557 if (!ignore)
c449f851 5558 {
5559 if (code == NOT)
5560 {
5561 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5562 OPTAB_LIB_WIDEN);
5563 ret = expand_simple_unop (mode, NOT, ret, target, true);
5564 }
5565 else
5566 ret = expand_simple_binop (mode, code, ret, val, target, true,
5567 OPTAB_LIB_WIDEN);
5568 }
1cd6e20d 5569 return ret;
5570}
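
/* Worked example (not from the GCC sources): when the operation falls back
   to the library, EXT_CALL names the fetch-before routine, so for e.g.
   __atomic_add_fetch the call returns the old value and the trailing code
   above re-applies the operation:

     ret = old + val                // generic case: apply CODE once more
     ret = ~(ret & val)             // NAND case, matching ~(old & val)
*/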
5571
10b744a3 5572/* Expand an atomic clear operation.
5573 void _atomic_clear (BOOL *obj, enum memmodel)
5574 EXP is the call expression. */
5575
5576static rtx
5577expand_builtin_atomic_clear (tree exp)
5578{
3754d046 5579 machine_mode mode;
10b744a3 5580 rtx mem, ret;
5581 enum memmodel model;
5582
5583 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5584 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5585 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5586
a372f7ca 5587 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 5588 {
086f4e33 5589 warning (OPT_Winvalid_memory_model,
5590 "invalid memory model for %<__atomic_store%>");
5591 model = MEMMODEL_SEQ_CST;
10b744a3 5592 }
5593
5594 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5595 Failing that, a store is issued by __atomic_store. The only way this can
5596 fail is if the bool type is larger than a word size. Unlikely, but
5597 handle it anyway for completeness. Assume a single threaded model since
5598 there is no atomic support in this case, and no barriers are required. */
5599 ret = expand_atomic_store (mem, const0_rtx, model, true);
5600 if (!ret)
5601 emit_move_insn (mem, const0_rtx);
5602 return const0_rtx;
5603}
5604
5605/* Expand an atomic test_and_set operation.
5606 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5607 EXP is the call expression. */
5608
5609static rtx
7821cde1 5610expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 5611{
7821cde1 5612 rtx mem;
10b744a3 5613 enum memmodel model;
3754d046 5614 machine_mode mode;
10b744a3 5615
5616 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5617 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5618 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5619
7821cde1 5620 return expand_atomic_test_and_set (target, mem, model);
10b744a3 5621}
5622
5623
1cd6e20d 5624/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5625 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5626
5627static tree
5628fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5629{
5630 int size;
3754d046 5631 machine_mode mode;
1cd6e20d 5632 unsigned int mode_align, type_align;
5633
5634 if (TREE_CODE (arg0) != INTEGER_CST)
5635 return NULL_TREE;
b6a5fc45 5636
1cd6e20d 5637 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5638 mode = mode_for_size (size, MODE_INT, 0);
5639 mode_align = GET_MODE_ALIGNMENT (mode);
5640
5641 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5642 type_align = mode_align;
5643 else
5644 {
5645 tree ttype = TREE_TYPE (arg1);
5646
5647 /* This function is usually invoked and folded immediately by the front
5648 end before anything else has a chance to look at it. The pointer
5649 parameter at this point is usually cast to a void *, so check for that
5650 and look past the cast. */
d09ef31a 5651 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
1cd6e20d 5652 && VOID_TYPE_P (TREE_TYPE (ttype)))
5653 arg1 = TREE_OPERAND (arg1, 0);
5654
5655 ttype = TREE_TYPE (arg1);
5656 gcc_assert (POINTER_TYPE_P (ttype));
5657
5658 /* Get the underlying type of the object. */
5659 ttype = TREE_TYPE (ttype);
5660 type_align = TYPE_ALIGN (ttype);
5661 }
5662
47ae02b7 5663 /* If the object has smaller alignment than the mode requires, the lock
1cd6e20d 5664 free routines cannot be used. */
5665 if (type_align < mode_align)
06308d2a 5666 return boolean_false_node;
1cd6e20d 5667
5668 /* Check if a compare_and_swap pattern exists for the mode which represents
5669 the required size. The pattern is not allowed to fail, so the existence
5670 of the pattern indicates support is present. */
29139cdc 5671 if (can_compare_and_swap_p (mode, true))
06308d2a 5672 return boolean_true_node;
1cd6e20d 5673 else
06308d2a 5674 return boolean_false_node;
1cd6e20d 5675}
5676
5677/* Return true if the parameters to call EXP represent an object which will
5678 always generate lock free instructions. The first argument represents the
5679 size of the object, and the second parameter is a pointer to the object
5680 itself. If NULL is passed for the object, then the result is based on
5681 typical alignment for an object of the specified size. Otherwise return
5682 false. */
5683
5684static rtx
5685expand_builtin_atomic_always_lock_free (tree exp)
5686{
5687 tree size;
5688 tree arg0 = CALL_EXPR_ARG (exp, 0);
5689 tree arg1 = CALL_EXPR_ARG (exp, 1);
5690
5691 if (TREE_CODE (arg0) != INTEGER_CST)
5692 {
5693 error ("non-constant argument 1 to __atomic_always_lock_free");
5694 return const0_rtx;
5695 }
5696
5697 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 5698 if (size == boolean_true_node)
1cd6e20d 5699 return const1_rtx;
5700 return const0_rtx;
5701}
5702
 5703/* Return one or zero if it can be determined that object ARG1 of size ARG0
5704 is lock free on this architecture. */
5705
5706static tree
5707fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5708{
5709 if (!flag_inline_atomics)
5710 return NULL_TREE;
5711
5712 /* If it isn't always lock free, don't generate a result. */
06308d2a 5713 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5714 return boolean_true_node;
1cd6e20d 5715
5716 return NULL_TREE;
5717}
5718
5719/* Return true if the parameters to call EXP represent an object which will
5720 always generate lock free instructions. The first argument represents the
5721 size of the object, and the second parameter is a pointer to the object
5722 itself. If NULL is passed for the object, then the result is based on
5723 typical alignment for an object of the specified size. Otherwise return
 5724 NULL. */
5725
5726static rtx
5727expand_builtin_atomic_is_lock_free (tree exp)
5728{
5729 tree size;
5730 tree arg0 = CALL_EXPR_ARG (exp, 0);
5731 tree arg1 = CALL_EXPR_ARG (exp, 1);
5732
5733 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5734 {
5735 error ("non-integer argument 1 to __atomic_is_lock_free");
5736 return NULL_RTX;
5737 }
5738
5739 if (!flag_inline_atomics)
5740 return NULL_RTX;
5741
5742 /* If the value is known at compile time, return the RTX for it. */
5743 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 5744 if (size == boolean_true_node)
1cd6e20d 5745 return const1_rtx;
5746
5747 return NULL_RTX;
5748}
5749
1cd6e20d 5750/* Expand the __atomic_thread_fence intrinsic:
5751 void __atomic_thread_fence (enum memmodel)
5752 EXP is the CALL_EXPR. */
5753
5754static void
5755expand_builtin_atomic_thread_fence (tree exp)
5756{
fe54c06b 5757 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5758 expand_mem_thread_fence (model);
1cd6e20d 5759}
5760
5761/* Expand the __atomic_signal_fence intrinsic:
5762 void __atomic_signal_fence (enum memmodel)
5763 EXP is the CALL_EXPR. */
5764
5765static void
5766expand_builtin_atomic_signal_fence (tree exp)
5767{
fe54c06b 5768 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5769 expand_mem_signal_fence (model);
b6a5fc45 5770}
5771
5772/* Expand the __sync_synchronize intrinsic. */
5773
5774static void
2797f13a 5775expand_builtin_sync_synchronize (void)
b6a5fc45 5776{
a372f7ca 5777 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 5778}
5779
badaa04c 5780static rtx
5781expand_builtin_thread_pointer (tree exp, rtx target)
5782{
5783 enum insn_code icode;
5784 if (!validate_arglist (exp, VOID_TYPE))
5785 return const0_rtx;
5786 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5787 if (icode != CODE_FOR_nothing)
5788 {
5789 struct expand_operand op;
3ed779c3 5790 /* If the target is not suitable then create a new target. */
5791 if (target == NULL_RTX
5792 || !REG_P (target)
5793 || GET_MODE (target) != Pmode)
badaa04c 5794 target = gen_reg_rtx (Pmode);
5795 create_output_operand (&op, target, Pmode);
5796 expand_insn (icode, 1, &op);
5797 return target;
5798 }
5799 error ("__builtin_thread_pointer is not supported on this target");
5800 return const0_rtx;
5801}
5802
5803static void
5804expand_builtin_set_thread_pointer (tree exp)
5805{
5806 enum insn_code icode;
5807 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5808 return;
5809 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5810 if (icode != CODE_FOR_nothing)
5811 {
5812 struct expand_operand op;
5813 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5814 Pmode, EXPAND_NORMAL);
6f343c10 5815 create_input_operand (&op, val, Pmode);
badaa04c 5816 expand_insn (icode, 1, &op);
5817 return;
5818 }
5819 error ("__builtin_set_thread_pointer is not supported on this target");
5820}
5821
53800dbe 5822\f
0e80b01d 5823/* Emit code to restore the current value of stack. */
5824
5825static void
5826expand_stack_restore (tree var)
5827{
1e0c0b35 5828 rtx_insn *prev;
5829 rtx sa = expand_normal (var);
0e80b01d 5830
5831 sa = convert_memory_address (Pmode, sa);
5832
5833 prev = get_last_insn ();
5834 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 5835
5836 record_new_stack_level ();
5837
0e80b01d 5838 fixup_args_size_notes (prev, get_last_insn (), 0);
5839}
5840
0e80b01d 5841/* Emit code to save the current value of stack. */
5842
5843static rtx
5844expand_stack_save (void)
5845{
5846 rtx ret = NULL_RTX;
5847
0e80b01d 5848 emit_stack_save (SAVE_BLOCK, &ret);
5849 return ret;
5850}
5851
ca4c3545 5852
5853/* Expand OpenACC acc_on_device.
5854
5855 This has to happen late (that is, not in early folding; expand_builtin_*,
5856 rather than fold_builtin_*), as we have to act differently for host and
5857 acceleration device (ACCEL_COMPILER conditional). */
5858
5859static rtx
f212338e 5860expand_builtin_acc_on_device (tree exp, rtx target)
ca4c3545 5861{
5862 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5863 return NULL_RTX;
5864
5865 tree arg = CALL_EXPR_ARG (exp, 0);
5866
5867 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5868 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5869 rtx v = expand_normal (arg), v1, v2;
f212338e 5870#ifdef ACCEL_COMPILER
ca4c3545 5871 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5872 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
f212338e 5873#else
5874 v1 = GEN_INT (GOMP_DEVICE_NONE);
5875 v2 = GEN_INT (GOMP_DEVICE_HOST);
5876#endif
ca4c3545 5877 machine_mode target_mode = TYPE_MODE (integer_type_node);
15b4214c 5878 if (!target || !register_operand (target, target_mode))
ca4c3545 5879 target = gen_reg_rtx (target_mode);
5880 emit_move_insn (target, const1_rtx);
5881 rtx_code_label *done_label = gen_label_rtx ();
5882 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5883 NULL, done_label, PROB_EVEN);
ca4c3545 5884 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5885 NULL, done_label, PROB_EVEN);
ca4c3545 5886 emit_move_insn (target, const0_rtx);
5887 emit_label (done_label);
5888
5889 return target;
5890}
5891
5892
53800dbe 5893/* Expand an expression EXP that calls a built-in function,
5894 with result going to TARGET if that's convenient
5895 (and in mode MODE if that's convenient).
5896 SUBTARGET may be used as the target for computing one of EXP's operands.
5897 IGNORE is nonzero if the value is to be ignored. */
5898
5899rtx
3754d046 5900expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 5901 int ignore)
53800dbe 5902{
c6e6ecb1 5903 tree fndecl = get_callee_fndecl (exp);
53800dbe 5904 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 5905 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 5906 int flags;
53800dbe 5907
4e2f4ed5 5908 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5909 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5910
f9acf11a 5911 /* When ASan is enabled, we don't want to expand some memory/string
5912 builtins and rely on libsanitizer's hooks. This allows us to avoid
 5913 redundant checks and be sure that possible overflow will be detected
5914 by ASan. */
5915
5916 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5917 return expand_call (exp, target, ignore);
5918
53800dbe 5919 /* When not optimizing, generate calls to library functions for a certain
5920 set of builtins. */
cd9ff771 5921 if (!optimize
b6a5fc45 5922 && !called_as_built_in (fndecl)
73037a1e 5923 && fcode != BUILT_IN_FORK
5924 && fcode != BUILT_IN_EXECL
5925 && fcode != BUILT_IN_EXECV
5926 && fcode != BUILT_IN_EXECLP
5927 && fcode != BUILT_IN_EXECLE
5928 && fcode != BUILT_IN_EXECVP
5929 && fcode != BUILT_IN_EXECVE
2c281b15 5930 && fcode != BUILT_IN_ALLOCA
581bf1c2 5931 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 5932 && fcode != BUILT_IN_FREE
5933 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5934 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5935 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5936 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5937 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5938 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5939 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5940 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5941 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5942 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5943 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5944 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 5945 return expand_call (exp, target, ignore);
53800dbe 5946
8d6d7930 5947 /* The built-in function expanders test for target == const0_rtx
5948 to determine whether the function's result will be ignored. */
5949 if (ignore)
5950 target = const0_rtx;
5951
5952 /* If the result of a pure or const built-in function is ignored, and
5953 none of its arguments are volatile, we can avoid expanding the
5954 built-in call and just evaluate the arguments for side-effects. */
5955 if (target == const0_rtx
67fa4078 5956 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5957 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 5958 {
5959 bool volatilep = false;
5960 tree arg;
c2f47e15 5961 call_expr_arg_iterator iter;
8d6d7930 5962
c2f47e15 5963 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5964 if (TREE_THIS_VOLATILE (arg))
8d6d7930 5965 {
5966 volatilep = true;
5967 break;
5968 }
5969
5970 if (! volatilep)
5971 {
c2f47e15 5972 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5973 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 5974 return const0_rtx;
5975 }
5976 }
5977
f21337ef 5978 /* expand_builtin_with_bounds is supposed to be used for
5979 instrumented builtin calls. */
058a1b7a 5980 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5981
53800dbe 5982 switch (fcode)
5983 {
4f35b1fc 5984 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 5985 case BUILT_IN_FABSD32:
5986 case BUILT_IN_FABSD64:
5987 case BUILT_IN_FABSD128:
c2f47e15 5988 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 5989 if (target)
a0c938f0 5990 return target;
78a74442 5991 break;
5992
4f35b1fc 5993 CASE_FLT_FN (BUILT_IN_COPYSIGN):
c2f47e15 5994 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 5995 if (target)
5996 return target;
5997 break;
5998
7d3f6cc7 5999 /* Just do a normal library call if we were unable to fold
6000 the values. */
4f35b1fc 6001 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6002 break;
53800dbe 6003
4f35b1fc 6004 CASE_FLT_FN (BUILT_IN_EXP):
6005 CASE_FLT_FN (BUILT_IN_EXP10):
6006 CASE_FLT_FN (BUILT_IN_POW10):
6007 CASE_FLT_FN (BUILT_IN_EXP2):
6008 CASE_FLT_FN (BUILT_IN_EXPM1):
6009 CASE_FLT_FN (BUILT_IN_LOGB):
4f35b1fc 6010 CASE_FLT_FN (BUILT_IN_LOG):
6011 CASE_FLT_FN (BUILT_IN_LOG10):
6012 CASE_FLT_FN (BUILT_IN_LOG2):
6013 CASE_FLT_FN (BUILT_IN_LOG1P):
6014 CASE_FLT_FN (BUILT_IN_TAN):
6015 CASE_FLT_FN (BUILT_IN_ASIN):
6016 CASE_FLT_FN (BUILT_IN_ACOS):
6017 CASE_FLT_FN (BUILT_IN_ATAN):
b3154a1f 6018 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
7f3be425 6019 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6020 because of possible accuracy problems. */
6021 if (! flag_unsafe_math_optimizations)
53800dbe 6022 break;
4f35b1fc 6023 CASE_FLT_FN (BUILT_IN_SQRT):
6024 CASE_FLT_FN (BUILT_IN_FLOOR):
6025 CASE_FLT_FN (BUILT_IN_CEIL):
6026 CASE_FLT_FN (BUILT_IN_TRUNC):
6027 CASE_FLT_FN (BUILT_IN_ROUND):
6028 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6029 CASE_FLT_FN (BUILT_IN_RINT):
53800dbe 6030 target = expand_builtin_mathfn (exp, target, subtarget);
6031 if (target)
6032 return target;
6033 break;
6034
7e0713b1 6035 CASE_FLT_FN (BUILT_IN_FMA):
6036 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6037 if (target)
6038 return target;
6039 break;
6040
a67a90e5 6041 CASE_FLT_FN (BUILT_IN_ILOGB):
6042 if (! flag_unsafe_math_optimizations)
6043 break;
69b779ea 6044 CASE_FLT_FN (BUILT_IN_ISINF):
cde061c1 6045 CASE_FLT_FN (BUILT_IN_FINITE):
6046 case BUILT_IN_ISFINITE:
8a1a9cb7 6047 case BUILT_IN_ISNORMAL:
f97eea22 6048 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6049 if (target)
6050 return target;
6051 break;
6052
80ff6494 6053 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6054 CASE_FLT_FN (BUILT_IN_LCEIL):
6055 CASE_FLT_FN (BUILT_IN_LLCEIL):
6056 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6057 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6058 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6059 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6060 if (target)
6061 return target;
6062 break;
6063
80ff6494 6064 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6065 CASE_FLT_FN (BUILT_IN_LRINT):
6066 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6067 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6068 CASE_FLT_FN (BUILT_IN_LROUND):
6069 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6070 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6071 if (target)
6072 return target;
6073 break;
6074
4f35b1fc 6075 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6076 target = expand_builtin_powi (exp, target);
757c219d 6077 if (target)
6078 return target;
6079 break;
6080
4f35b1fc 6081 CASE_FLT_FN (BUILT_IN_ATAN2):
6082 CASE_FLT_FN (BUILT_IN_LDEXP):
73a954a1 6083 CASE_FLT_FN (BUILT_IN_SCALB):
6084 CASE_FLT_FN (BUILT_IN_SCALBN):
6085 CASE_FLT_FN (BUILT_IN_SCALBLN):
0fd605a5 6086 if (! flag_unsafe_math_optimizations)
6087 break;
ef722005 6088
6089 CASE_FLT_FN (BUILT_IN_FMOD):
6090 CASE_FLT_FN (BUILT_IN_REMAINDER):
6091 CASE_FLT_FN (BUILT_IN_DREM):
0810ff17 6092 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 6093 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6094 if (target)
6095 return target;
6096 break;
6097
d735c391 6098 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6099 target = expand_builtin_cexpi (exp, target);
d735c391 6100 gcc_assert (target);
6101 return target;
6102
4f35b1fc 6103 CASE_FLT_FN (BUILT_IN_SIN):
6104 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6105 if (! flag_unsafe_math_optimizations)
6106 break;
6107 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6108 if (target)
6109 return target;
6110 break;
6111
c3147c1a 6112 CASE_FLT_FN (BUILT_IN_SINCOS):
6113 if (! flag_unsafe_math_optimizations)
6114 break;
6115 target = expand_builtin_sincos (exp);
6116 if (target)
6117 return target;
6118 break;
6119
53800dbe 6120 case BUILT_IN_APPLY_ARGS:
6121 return expand_builtin_apply_args ();
6122
6123 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6124 FUNCTION with a copy of the parameters described by
6125 ARGUMENTS, and ARGSIZE. It returns a block of memory
6126 allocated on the stack into which is stored all the registers
6127 that might possibly be used for returning the result of a
6128 function. ARGUMENTS is the value returned by
6129 __builtin_apply_args. ARGSIZE is the number of bytes of
6130 arguments that must be copied. ??? How should this value be
6131 computed? We'll also need a safe worst case value for varargs
6132 functions. */
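    /* An illustrative use (hypothetical, not from this file): a forwarding
       stub might be written as
	 __builtin_return (__builtin_apply ((void (*) ()) callee,
					    __builtin_apply_args (), 64));
       where CALLEE and the worst-case argument size of 64 bytes are
       placeholders chosen by the caller.  */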
6133 case BUILT_IN_APPLY:
c2f47e15 6134 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6135 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6136 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6137 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6138 return const0_rtx;
6139 else
6140 {
53800dbe 6141 rtx ops[3];
6142
c2f47e15 6143 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6144 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6145 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6146
6147 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6148 }
6149
6150 /* __builtin_return (RESULT) causes the function to return the
6151 value described by RESULT. RESULT is address of the block of
6152 memory returned by __builtin_apply. */
6153 case BUILT_IN_RETURN:
c2f47e15 6154 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6155 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6156 return const0_rtx;
6157
6158 case BUILT_IN_SAVEREGS:
a66c9326 6159 return expand_builtin_saveregs ();
53800dbe 6160
48dc2227 6161 case BUILT_IN_VA_ARG_PACK:
6162 /* All valid uses of __builtin_va_arg_pack () are removed during
6163 inlining. */
b8c23db3 6164 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6165 return const0_rtx;
6166
4e1d7ea4 6167 case BUILT_IN_VA_ARG_PACK_LEN:
6168 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6169 inlining. */
b8c23db3 6170 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6171 return const0_rtx;
6172
53800dbe 6173 /* Return the address of the first anonymous stack arg. */
6174 case BUILT_IN_NEXT_ARG:
c2f47e15 6175 if (fold_builtin_next_arg (exp, false))
a0c938f0 6176 return const0_rtx;
79012a9d 6177 return expand_builtin_next_arg ();
53800dbe 6178
ac8fb6db 6179 case BUILT_IN_CLEAR_CACHE:
6180 target = expand_builtin___clear_cache (exp);
6181 if (target)
6182 return target;
6183 break;
6184
53800dbe 6185 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6186 return expand_builtin_classify_type (exp);
53800dbe 6187
6188 case BUILT_IN_CONSTANT_P:
4ee9c684 6189 return const0_rtx;
53800dbe 6190
6191 case BUILT_IN_FRAME_ADDRESS:
6192 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6193 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6194
6195 /* Returns the address of the area where the structure is returned.
6196 0 otherwise. */
6197 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6198 if (call_expr_nargs (exp) != 0
9342ee68 6199 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6200 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6201 return const0_rtx;
53800dbe 6202 else
9342ee68 6203 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6204
6205 case BUILT_IN_ALLOCA:
581bf1c2 6206 case BUILT_IN_ALLOCA_WITH_ALIGN:
990495a7 6207 /* If the allocation stems from the declaration of a variable-sized
6208 object, it cannot accumulate. */
a882d754 6209 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
53800dbe 6210 if (target)
6211 return target;
6212 break;
6213
4ee9c684 6214 case BUILT_IN_STACK_SAVE:
6215 return expand_stack_save ();
6216
6217 case BUILT_IN_STACK_RESTORE:
c2f47e15 6218 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6219 return const0_rtx;
6220
74bdbe96 6221 case BUILT_IN_BSWAP16:
42791117 6222 case BUILT_IN_BSWAP32:
6223 case BUILT_IN_BSWAP64:
74bdbe96 6224 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6225 if (target)
6226 return target;
6227 break;
6228
4f35b1fc 6229 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6230 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6231 subtarget, ffs_optab);
6a08d0ab 6232 if (target)
6233 return target;
6234 break;
6235
4f35b1fc 6236 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6237 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6238 subtarget, clz_optab);
6a08d0ab 6239 if (target)
6240 return target;
6241 break;
6242
4f35b1fc 6243 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6244 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6245 subtarget, ctz_optab);
6a08d0ab 6246 if (target)
6247 return target;
6248 break;
6249
d8492bd3 6250 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6251 target = expand_builtin_unop (target_mode, exp, target,
6252 subtarget, clrsb_optab);
6253 if (target)
6254 return target;
6255 break;
6256
4f35b1fc 6257 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6258 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6259 subtarget, popcount_optab);
6a08d0ab 6260 if (target)
6261 return target;
6262 break;
6263
4f35b1fc 6264 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6265 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6266 subtarget, parity_optab);
53800dbe 6267 if (target)
6268 return target;
6269 break;
6270
6271 case BUILT_IN_STRLEN:
c2f47e15 6272 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6273 if (target)
6274 return target;
6275 break;
6276
6277 case BUILT_IN_STRCPY:
a65c4d64 6278 target = expand_builtin_strcpy (exp, target);
53800dbe 6279 if (target)
6280 return target;
6281 break;
bf8e3599 6282
ed09096d 6283 case BUILT_IN_STRNCPY:
a65c4d64 6284 target = expand_builtin_strncpy (exp, target);
ed09096d 6285 if (target)
6286 return target;
6287 break;
bf8e3599 6288
3b824fa6 6289 case BUILT_IN_STPCPY:
dc369150 6290 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6291 if (target)
6292 return target;
6293 break;
6294
53800dbe 6295 case BUILT_IN_MEMCPY:
a65c4d64 6296 target = expand_builtin_memcpy (exp, target);
3b824fa6 6297 if (target)
6298 return target;
6299 break;
6300
6301 case BUILT_IN_MEMPCPY:
c2f47e15 6302 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6303 if (target)
6304 return target;
6305 break;
6306
6307 case BUILT_IN_MEMSET:
c2f47e15 6308 target = expand_builtin_memset (exp, target, mode);
53800dbe 6309 if (target)
6310 return target;
6311 break;
6312
ffc83088 6313 case BUILT_IN_BZERO:
0b25db21 6314 target = expand_builtin_bzero (exp);
ffc83088 6315 if (target)
6316 return target;
6317 break;
6318
53800dbe 6319 case BUILT_IN_STRCMP:
a65c4d64 6320 target = expand_builtin_strcmp (exp, target);
53800dbe 6321 if (target)
6322 return target;
6323 break;
6324
ed09096d 6325 case BUILT_IN_STRNCMP:
6326 target = expand_builtin_strncmp (exp, target, mode);
6327 if (target)
6328 return target;
6329 break;
6330
071f1696 6331 case BUILT_IN_BCMP:
53800dbe 6332 case BUILT_IN_MEMCMP:
ea368aac 6333 target = expand_builtin_memcmp (exp, target);
53800dbe 6334 if (target)
6335 return target;
6336 break;
53800dbe 6337
6338 case BUILT_IN_SETJMP:
2c8a1497 6339 /* This should have been lowered to the builtins below. */
6340 gcc_unreachable ();
6341
6342 case BUILT_IN_SETJMP_SETUP:
6343 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6344 and the receiver label. */
c2f47e15 6345 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6346 {
c2f47e15 6347 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6348 VOIDmode, EXPAND_NORMAL);
c2f47e15 6349 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6350 rtx_insn *label_r = label_rtx (label);
2c8a1497 6351
6352 /* This is copied from the handling of non-local gotos. */
6353 expand_builtin_setjmp_setup (buf_addr, label_r);
6354 nonlocal_goto_handler_labels
a4de1c23 6355 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6356 nonlocal_goto_handler_labels);
6357 /* ??? Do not let expand_label treat us as such since we would
6358 not want to be both on the list of non-local labels and on
6359 the list of forced labels. */
6360 FORCED_LABEL (label) = 0;
6361 return const0_rtx;
6362 }
6363 break;
6364
2c8a1497 6365 case BUILT_IN_SETJMP_RECEIVER:
6366 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6367 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6368 {
c2f47e15 6369 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6370 rtx_insn *label_r = label_rtx (label);
2c8a1497 6371
6372 expand_builtin_setjmp_receiver (label_r);
6373 return const0_rtx;
6374 }
6b7f6858 6375 break;
53800dbe 6376
6377 /* __builtin_longjmp is passed a pointer to an array of five words.
6378 It's similar to the C library longjmp function but works with
6379 __builtin_setjmp above. */
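    /* A rough usage sketch (hypothetical, for illustration only):
	 intptr_t buf[5];
	 if (__builtin_setjmp (buf) == 0)
	   ... later ... __builtin_longjmp (buf, 1);
       The second argument must be the literal 1, which is checked below.  */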
6380 case BUILT_IN_LONGJMP:
c2f47e15 6381 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6382 {
c2f47e15 6383 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6384 VOIDmode, EXPAND_NORMAL);
c2f47e15 6385 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6386
6387 if (value != const1_rtx)
6388 {
1e5fcbe2 6389 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6390 return const0_rtx;
6391 }
6392
6393 expand_builtin_longjmp (buf_addr, value);
6394 return const0_rtx;
6395 }
2c8a1497 6396 break;
53800dbe 6397
4ee9c684 6398 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6399 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6400 if (target)
6401 return target;
6402 break;
6403
843d08a9 6404 /* This updates the setjmp buffer that is its argument with the value
6405 of the current stack pointer. */
6406 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6407 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6408 {
6409 rtx buf_addr
c2f47e15 6410 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6411
6412 expand_builtin_update_setjmp_buf (buf_addr);
6413 return const0_rtx;
6414 }
6415 break;
6416
53800dbe 6417 case BUILT_IN_TRAP:
a0ef1725 6418 expand_builtin_trap ();
53800dbe 6419 return const0_rtx;
6420
d2b48f0c 6421 case BUILT_IN_UNREACHABLE:
6422 expand_builtin_unreachable ();
6423 return const0_rtx;
6424
4f35b1fc 6425 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6426 case BUILT_IN_SIGNBITD32:
6427 case BUILT_IN_SIGNBITD64:
6428 case BUILT_IN_SIGNBITD128:
27f261ef 6429 target = expand_builtin_signbit (exp, target);
6430 if (target)
6431 return target;
6432 break;
6433
53800dbe 6434 /* Various hooks for the DWARF 2 __throw routine. */
6435 case BUILT_IN_UNWIND_INIT:
6436 expand_builtin_unwind_init ();
6437 return const0_rtx;
6438 case BUILT_IN_DWARF_CFA:
6439 return virtual_cfa_rtx;
6440#ifdef DWARF2_UNWIND_INFO
f8f023a5 6441 case BUILT_IN_DWARF_SP_COLUMN:
6442 return expand_builtin_dwarf_sp_column ();
695e919b 6443 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6444 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6445 return const0_rtx;
53800dbe 6446#endif
6447 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6448 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6449 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6450 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6451 case BUILT_IN_EH_RETURN:
c2f47e15 6452 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6453 CALL_EXPR_ARG (exp, 1));
53800dbe 6454 return const0_rtx;
df4b504c 6455 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6456 return expand_builtin_eh_return_data_regno (exp);
26093bf4 6457 case BUILT_IN_EXTEND_POINTER:
c2f47e15 6458 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 6459 case BUILT_IN_EH_POINTER:
6460 return expand_builtin_eh_pointer (exp);
6461 case BUILT_IN_EH_FILTER:
6462 return expand_builtin_eh_filter (exp);
6463 case BUILT_IN_EH_COPY_VALUES:
6464 return expand_builtin_eh_copy_values (exp);
26093bf4 6465
7ccc713a 6466 case BUILT_IN_VA_START:
c2f47e15 6467 return expand_builtin_va_start (exp);
a66c9326 6468 case BUILT_IN_VA_END:
c2f47e15 6469 return expand_builtin_va_end (exp);
a66c9326 6470 case BUILT_IN_VA_COPY:
c2f47e15 6471 return expand_builtin_va_copy (exp);
89cfe6e5 6472 case BUILT_IN_EXPECT:
c2f47e15 6473 return expand_builtin_expect (exp, target);
fca0886c 6474 case BUILT_IN_ASSUME_ALIGNED:
6475 return expand_builtin_assume_aligned (exp, target);
5e3608d8 6476 case BUILT_IN_PREFETCH:
c2f47e15 6477 expand_builtin_prefetch (exp);
5e3608d8 6478 return const0_rtx;
6479
4ee9c684 6480 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 6481 return expand_builtin_init_trampoline (exp, true);
6482 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6483 return expand_builtin_init_trampoline (exp, false);
4ee9c684 6484 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 6485 return expand_builtin_adjust_trampoline (exp);
4ee9c684 6486
73673831 6487 case BUILT_IN_FORK:
6488 case BUILT_IN_EXECL:
6489 case BUILT_IN_EXECV:
6490 case BUILT_IN_EXECLP:
6491 case BUILT_IN_EXECLE:
6492 case BUILT_IN_EXECVP:
6493 case BUILT_IN_EXECVE:
c2f47e15 6494 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 6495 if (target)
6496 return target;
6497 break;
53800dbe 6498
2797f13a 6499 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6500 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6501 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6502 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6503 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6504 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 6505 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 6506 if (target)
6507 return target;
6508 break;
6509
2797f13a 6510 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6511 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6512 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6513 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6514 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6515 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 6516 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 6517 if (target)
6518 return target;
6519 break;
6520
2797f13a 6521 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6522 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6523 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6524 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6525 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 6527 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 6528 if (target)
6529 return target;
6530 break;
6531
2797f13a 6532 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6533 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6534 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6535 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6536 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 6538 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 6539 if (target)
6540 return target;
6541 break;
6542
2797f13a 6543 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6544 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6545 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6546 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6547 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 6549 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 6550 if (target)
6551 return target;
6552 break;
6553
2797f13a 6554 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6555 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6556 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6557 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6558 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 6560 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 6561 if (target)
6562 return target;
6563 break;
6564
2797f13a 6565 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6566 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6567 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6568 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6569 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6570 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 6571 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 6572 if (target)
6573 return target;
6574 break;
6575
2797f13a 6576 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6577 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6578 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6579 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6580 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6581 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 6582 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 6583 if (target)
6584 return target;
6585 break;
6586
2797f13a 6587 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6588 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6589 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6590 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6591 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6592 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 6593 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 6594 if (target)
6595 return target;
6596 break;
6597
2797f13a 6598 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6599 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6600 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6601 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6602 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 6604 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 6605 if (target)
6606 return target;
6607 break;
6608
2797f13a 6609 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6610 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6611 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6612 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6613 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 6615 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 6616 if (target)
6617 return target;
6618 break;
6619
2797f13a 6620 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6621 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6622 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6623 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6624 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 6626 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 6627 if (target)
6628 return target;
6629 break;
6630
2797f13a 6631 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6632 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6633 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6634 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6635 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 6636 if (mode == VOIDmode)
6637 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 6638 if (!target || !register_operand (target, mode))
6639 target = gen_reg_rtx (mode);
3e272de8 6640
2797f13a 6641 mode = get_builtin_sync_mode
6642 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 6643 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 6644 if (target)
6645 return target;
6646 break;
6647
2797f13a 6648 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6649 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6650 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6651 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6652 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6653 mode = get_builtin_sync_mode
6654 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 6655 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 6656 if (target)
6657 return target;
6658 break;
6659
2797f13a 6660 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6661 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6662 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6663 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6664 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6665 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6666 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 6667 if (target)
6668 return target;
6669 break;
6670
2797f13a 6671 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6672 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6673 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6674 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6675 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6676 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6677 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 6678 return const0_rtx;
6679
2797f13a 6680 case BUILT_IN_SYNC_SYNCHRONIZE:
6681 expand_builtin_sync_synchronize ();
b6a5fc45 6682 return const0_rtx;
6683
1cd6e20d 6684 case BUILT_IN_ATOMIC_EXCHANGE_1:
6685 case BUILT_IN_ATOMIC_EXCHANGE_2:
6686 case BUILT_IN_ATOMIC_EXCHANGE_4:
6687 case BUILT_IN_ATOMIC_EXCHANGE_8:
6688 case BUILT_IN_ATOMIC_EXCHANGE_16:
6689 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6690 target = expand_builtin_atomic_exchange (mode, exp, target);
6691 if (target)
6692 return target;
6693 break;
6694
6695 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6696 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6697 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6698 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6699 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 6700 {
6701 unsigned int nargs, z;
f1f41a6c 6702 vec<tree, va_gc> *vec;
2c201ad1 6703
6704 mode =
6705 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6706 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6707 if (target)
6708 return target;
6709
6710 /* If this is turned into an external library call, the weak parameter
6711 must be dropped to match the expected parameter list. */
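	/* The sized builtin's argument list is (ptr, expected, desired, weak,
	   success_order, failure_order); the library routine takes the same
	   list without the weak flag, so argument 3 is skipped below.  */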
6712 nargs = call_expr_nargs (exp);
f1f41a6c 6713 vec_alloc (vec, nargs - 1);
2c201ad1 6714 for (z = 0; z < 3; z++)
f1f41a6c 6715 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6716 /* Skip the boolean weak parameter. */
6717 for (z = 4; z < 6; z++)
f1f41a6c 6718 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6719 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6720 break;
6721 }
1cd6e20d 6722
6723 case BUILT_IN_ATOMIC_LOAD_1:
6724 case BUILT_IN_ATOMIC_LOAD_2:
6725 case BUILT_IN_ATOMIC_LOAD_4:
6726 case BUILT_IN_ATOMIC_LOAD_8:
6727 case BUILT_IN_ATOMIC_LOAD_16:
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6729 target = expand_builtin_atomic_load (mode, exp, target);
6730 if (target)
6731 return target;
6732 break;
6733
6734 case BUILT_IN_ATOMIC_STORE_1:
6735 case BUILT_IN_ATOMIC_STORE_2:
6736 case BUILT_IN_ATOMIC_STORE_4:
6737 case BUILT_IN_ATOMIC_STORE_8:
6738 case BUILT_IN_ATOMIC_STORE_16:
6739 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6740 target = expand_builtin_atomic_store (mode, exp);
6741 if (target)
6742 return const0_rtx;
6743 break;
6744
6745 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6746 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6747 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6748 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6749 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6750 {
6751 enum built_in_function lib;
6752 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
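	/* LIB names the corresponding __atomic_fetch_add_N builtin; if no
	   inline expansion is possible, expand_builtin_atomic_fetch_op can
	   fall back to that library call and reapply the addition to
	   recover the add-then-fetch result.  */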
6753 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6754 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6755 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6756 ignore, lib);
6757 if (target)
6758 return target;
6759 break;
6760 }
6761 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6762 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6763 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6764 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6765 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6766 {
6767 enum built_in_function lib;
6768 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6769 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6770 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6771 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6772 ignore, lib);
6773 if (target)
6774 return target;
6775 break;
6776 }
6777 case BUILT_IN_ATOMIC_AND_FETCH_1:
6778 case BUILT_IN_ATOMIC_AND_FETCH_2:
6779 case BUILT_IN_ATOMIC_AND_FETCH_4:
6780 case BUILT_IN_ATOMIC_AND_FETCH_8:
6781 case BUILT_IN_ATOMIC_AND_FETCH_16:
6782 {
6783 enum built_in_function lib;
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6785 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6786 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6787 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6788 ignore, lib);
6789 if (target)
6790 return target;
6791 break;
6792 }
6793 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6794 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6795 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6796 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6797 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6798 {
6799 enum built_in_function lib;
6800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6801 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6802 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6804 ignore, lib);
6805 if (target)
6806 return target;
6807 break;
6808 }
6809 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6810 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6811 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6812 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6813 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6814 {
6815 enum built_in_function lib;
6816 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6817 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6818 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6820 ignore, lib);
6821 if (target)
6822 return target;
6823 break;
6824 }
6825 case BUILT_IN_ATOMIC_OR_FETCH_1:
6826 case BUILT_IN_ATOMIC_OR_FETCH_2:
6827 case BUILT_IN_ATOMIC_OR_FETCH_4:
6828 case BUILT_IN_ATOMIC_OR_FETCH_8:
6829 case BUILT_IN_ATOMIC_OR_FETCH_16:
6830 {
6831 enum built_in_function lib;
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6833 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6834 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6835 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6836 ignore, lib);
6837 if (target)
6838 return target;
6839 break;
6840 }
6841 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6842 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6843 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6844 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6845 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6846 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6847 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6848 ignore, BUILT_IN_NONE);
6849 if (target)
6850 return target;
6851 break;
6852
6853 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6854 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6855 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6856 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6857 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6858 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6859 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6860 ignore, BUILT_IN_NONE);
6861 if (target)
6862 return target;
6863 break;
6864
6865 case BUILT_IN_ATOMIC_FETCH_AND_1:
6866 case BUILT_IN_ATOMIC_FETCH_AND_2:
6867 case BUILT_IN_ATOMIC_FETCH_AND_4:
6868 case BUILT_IN_ATOMIC_FETCH_AND_8:
6869 case BUILT_IN_ATOMIC_FETCH_AND_16:
6870 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6871 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6872 ignore, BUILT_IN_NONE);
6873 if (target)
6874 return target;
6875 break;
6876
6877 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6878 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6879 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6880 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6881 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6882 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6883 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6884 ignore, BUILT_IN_NONE);
6885 if (target)
6886 return target;
6887 break;
6888
6889 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6890 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6891 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6892 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6893 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6894 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6895 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6896 ignore, BUILT_IN_NONE);
6897 if (target)
6898 return target;
6899 break;
6900
6901 case BUILT_IN_ATOMIC_FETCH_OR_1:
6902 case BUILT_IN_ATOMIC_FETCH_OR_2:
6903 case BUILT_IN_ATOMIC_FETCH_OR_4:
6904 case BUILT_IN_ATOMIC_FETCH_OR_8:
6905 case BUILT_IN_ATOMIC_FETCH_OR_16:
6906 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6907 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6908 ignore, BUILT_IN_NONE);
6909 if (target)
6910 return target;
6911 break;
10b744a3 6912
6913 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 6914 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 6915
6916 case BUILT_IN_ATOMIC_CLEAR:
6917 return expand_builtin_atomic_clear (exp);
1cd6e20d 6918
6919 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6920 return expand_builtin_atomic_always_lock_free (exp);
6921
6922 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6923 target = expand_builtin_atomic_is_lock_free (exp);
6924 if (target)
6925 return target;
6926 break;
6927
6928 case BUILT_IN_ATOMIC_THREAD_FENCE:
6929 expand_builtin_atomic_thread_fence (exp);
6930 return const0_rtx;
6931
6932 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6933 expand_builtin_atomic_signal_fence (exp);
6934 return const0_rtx;
6935
0a39fd54 6936 case BUILT_IN_OBJECT_SIZE:
6937 return expand_builtin_object_size (exp);
6938
6939 case BUILT_IN_MEMCPY_CHK:
6940 case BUILT_IN_MEMPCPY_CHK:
6941 case BUILT_IN_MEMMOVE_CHK:
6942 case BUILT_IN_MEMSET_CHK:
6943 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6944 if (target)
6945 return target;
6946 break;
6947
6948 case BUILT_IN_STRCPY_CHK:
6949 case BUILT_IN_STPCPY_CHK:
6950 case BUILT_IN_STRNCPY_CHK:
1063acde 6951 case BUILT_IN_STPNCPY_CHK:
0a39fd54 6952 case BUILT_IN_STRCAT_CHK:
b356dfef 6953 case BUILT_IN_STRNCAT_CHK:
0a39fd54 6954 case BUILT_IN_SNPRINTF_CHK:
6955 case BUILT_IN_VSNPRINTF_CHK:
6956 maybe_emit_chk_warning (exp, fcode);
6957 break;
6958
6959 case BUILT_IN_SPRINTF_CHK:
6960 case BUILT_IN_VSPRINTF_CHK:
6961 maybe_emit_sprintf_chk_warning (exp, fcode);
6962 break;
6963
2c281b15 6964 case BUILT_IN_FREE:
f74ea1c2 6965 if (warn_free_nonheap_object)
6966 maybe_emit_free_warning (exp);
2c281b15 6967 break;
6968
badaa04c 6969 case BUILT_IN_THREAD_POINTER:
6970 return expand_builtin_thread_pointer (exp, target);
6971
6972 case BUILT_IN_SET_THREAD_POINTER:
6973 expand_builtin_set_thread_pointer (exp);
6974 return const0_rtx;
6975
d037099f 6976 case BUILT_IN_CILK_DETACH:
6977 expand_builtin_cilk_detach (exp);
6978 return const0_rtx;
6979
6980 case BUILT_IN_CILK_POP_FRAME:
6981 expand_builtin_cilk_pop_frame (exp);
6982 return const0_rtx;
6983
058a1b7a 6984 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6985 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6986 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6987 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6988 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6989 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6990 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6991 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6992 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6993 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6994 case BUILT_IN_CHKP_GET_PTR_UBOUND:
6995 /* We allow user CHKP builtins if Pointer Bounds
6996 Checker is off. */
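      /* In that case they degenerate to cheap stubs: the pointer-returning
	 variants simply yield their pointer argument, the queried lower
	 bound is 0, the upper bound is -1 (all ones), and the rest expand
	 to a constant zero, as the branches below show.  */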
6997 if (!chkp_function_instrumented_p (current_function_decl))
6998 {
6999 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7000 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7001 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7002 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7003 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7004 return expand_normal (CALL_EXPR_ARG (exp, 0));
7005 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7006 return expand_normal (size_zero_node);
7007 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7008 return expand_normal (size_int (-1));
7009 else
7010 return const0_rtx;
7011 }
7012 /* FALLTHROUGH */
7013
7014 case BUILT_IN_CHKP_BNDMK:
7015 case BUILT_IN_CHKP_BNDSTX:
7016 case BUILT_IN_CHKP_BNDCL:
7017 case BUILT_IN_CHKP_BNDCU:
7018 case BUILT_IN_CHKP_BNDLDX:
7019 case BUILT_IN_CHKP_BNDRET:
7020 case BUILT_IN_CHKP_INTERSECT:
7021 case BUILT_IN_CHKP_NARROW:
7022 case BUILT_IN_CHKP_EXTRACT_LOWER:
7023 case BUILT_IN_CHKP_EXTRACT_UPPER:
7024 /* Software implementation of Pointer Bounds Checker is NYI.
7025 Target support is required. */
7026 error ("Your target platform does not support -fcheck-pointer-bounds");
7027 break;
7028
ca4c3545 7029 case BUILT_IN_ACC_ON_DEVICE:
7030 target = expand_builtin_acc_on_device (exp, target);
7031 if (target)
7032 return target;
7033 break;
7034
92482ee0 7035 default: /* just do library call, if unknown builtin */
146c1b4f 7036 break;
53800dbe 7037 }
7038
7039 /* The switch statement above can drop through to cause the function
7040 to be called normally. */
7041 return expand_call (exp, target, ignore);
7042}
650e4c94 7043
f21337ef 7044/* Similar to expand_builtin but is used for instrumented calls. */
7045
7046rtx
7047expand_builtin_with_bounds (tree exp, rtx target,
7048 rtx subtarget ATTRIBUTE_UNUSED,
7049 machine_mode mode, int ignore)
7050{
7051 tree fndecl = get_callee_fndecl (exp);
7052 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7053
7054 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7055
7056 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7057 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7058
7059 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7060 && fcode < END_CHKP_BUILTINS);
7061
7062 switch (fcode)
7063 {
7064 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7065 target = expand_builtin_memcpy_with_bounds (exp, target);
7066 if (target)
7067 return target;
7068 break;
7069
7070 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7071 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7072 if (target)
7073 return target;
7074 break;
7075
7076 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7077 target = expand_builtin_memset_with_bounds (exp, target, mode);
7078 if (target)
7079 return target;
7080 break;
7081
7082 default:
7083 break;
7084 }
7085
7086 /* The switch statement above can drop through to cause the function
7087 to be called normally. */
7088 return expand_call (exp, target, ignore);
7089}
7090
805e22b2 7091/* Determine whether a tree node represents a call to a built-in
52203a9d 7092 function. If the tree T is a call to a built-in function with
7093 the right number of arguments of the appropriate types, return
7094 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7095 Otherwise the return value is END_BUILTINS. */
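/* For example, a well-formed call tree for sqrt (x) with a floating-point
   argument maps to BUILT_IN_SQRT, while a call whose arguments do not match
   the builtin's parameter list maps to END_BUILTINS.  */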
aecda0d6 7096
805e22b2 7097enum built_in_function
b7bf20db 7098builtin_mathfn_code (const_tree t)
805e22b2 7099{
b7bf20db 7100 const_tree fndecl, arg, parmlist;
7101 const_tree argtype, parmtype;
7102 const_call_expr_arg_iterator iter;
805e22b2 7103
7104 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 7105 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 7106 return END_BUILTINS;
7107
c6e6ecb1 7108 fndecl = get_callee_fndecl (t);
7109 if (fndecl == NULL_TREE
52203a9d 7110 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7111 || ! DECL_BUILT_IN (fndecl)
7112 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7113 return END_BUILTINS;
7114
52203a9d 7115 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7116 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7117 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7118 {
52203a9d 7119 /* If a function doesn't take a variable number of arguments,
7120 the last element in the list will have type `void'. */
7121 parmtype = TREE_VALUE (parmlist);
7122 if (VOID_TYPE_P (parmtype))
7123 {
b7bf20db 7124 if (more_const_call_expr_args_p (&iter))
52203a9d 7125 return END_BUILTINS;
7126 return DECL_FUNCTION_CODE (fndecl);
7127 }
7128
b7bf20db 7129 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7130 return END_BUILTINS;
48e1416a 7131
b7bf20db 7132 arg = next_const_call_expr_arg (&iter);
c2f47e15 7133 argtype = TREE_TYPE (arg);
52203a9d 7134
7135 if (SCALAR_FLOAT_TYPE_P (parmtype))
7136 {
7137 if (! SCALAR_FLOAT_TYPE_P (argtype))
7138 return END_BUILTINS;
7139 }
7140 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7141 {
7142 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7143 return END_BUILTINS;
7144 }
7145 else if (POINTER_TYPE_P (parmtype))
7146 {
7147 if (! POINTER_TYPE_P (argtype))
7148 return END_BUILTINS;
7149 }
7150 else if (INTEGRAL_TYPE_P (parmtype))
7151 {
7152 if (! INTEGRAL_TYPE_P (argtype))
7153 return END_BUILTINS;
7154 }
7155 else
e9f80ff5 7156 return END_BUILTINS;
e9f80ff5 7157 }
7158
52203a9d 7159 /* Variable-length argument list. */
805e22b2 7160 return DECL_FUNCTION_CODE (fndecl);
7161}
7162
c2f47e15 7163/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7164 evaluate to a constant. */
650e4c94 7165
7166static tree
c2f47e15 7167fold_builtin_constant_p (tree arg)
650e4c94 7168{
650e4c94 7169 /* We return 1 for a numeric type that's known to be a constant
7170 value at compile-time or for an aggregate type that's a
7171 literal constant. */
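  /* For example, __builtin_constant_p (42) and __builtin_constant_p ("abc")
     fold to 1 below, an argument with side effects folds to 0, and the
     remaining cases return NULL_TREE so the decision is deferred.  */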
c2f47e15 7172 STRIP_NOPS (arg);
650e4c94 7173
7174 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7175 if (CONSTANT_CLASS_P (arg)
7176 || (TREE_CODE (arg) == CONSTRUCTOR
7177 && TREE_CONSTANT (arg)))
650e4c94 7178 return integer_one_node;
c2f47e15 7179 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7180 {
c2f47e15 7181 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7182 if (TREE_CODE (op) == STRING_CST
7183 || (TREE_CODE (op) == ARRAY_REF
7184 && integer_zerop (TREE_OPERAND (op, 1))
7185 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7186 return integer_one_node;
7187 }
650e4c94 7188
1fb4300c 7189 /* If this expression has side effects, show we don't know it to be a
7190 constant. Likewise if it's a pointer or aggregate type since in
7191 those cases we only want literals, since those are only optimized
f97c71a1 7192 when generating RTL, not later.
7193 And finally, if we are compiling an initializer, not code, we
7194 need to return a definite result now; there's not going to be any
7195 more optimization done. */
c2f47e15 7196 if (TREE_SIDE_EFFECTS (arg)
7197 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7198 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7199 || cfun == 0
0b049e15 7200 || folding_initializer
7201 || force_folding_builtin_constant_p)
650e4c94 7202 return integer_zero_node;
7203
c2f47e15 7204 return NULL_TREE;
650e4c94 7205}
7206
76f5a783 7207/* Create builtin_expect with PRED and EXPECTED as its arguments and
7208 return it as a truthvalue. */
4ee9c684 7209
7210static tree
c83059be 7211build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7212 tree predictor)
4ee9c684 7213{
76f5a783 7214 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7215
b9a16870 7216 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7217 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7218 ret_type = TREE_TYPE (TREE_TYPE (fn));
7219 pred_type = TREE_VALUE (arg_types);
7220 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7221
389dd41b 7222 pred = fold_convert_loc (loc, pred_type, pred);
7223 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7224 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7225 predictor);
76f5a783 7226
7227 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7228 build_int_cst (ret_type, 0));
7229}
7230
7231 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7232 NULL_TREE if no simplification is possible. */
7233
c83059be 7234tree
7235fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
76f5a783 7236{
083bada9 7237 tree inner, fndecl, inner_arg0;
76f5a783 7238 enum tree_code code;
7239
083bada9 7240 /* Distribute the expected value over short-circuiting operators.
7241 See through the cast from truthvalue_type_node to long. */
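  /* Roughly, __builtin_expect (a && b, 1) is rewritten below into
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
     so each operand carries the expectation (see
     build_builtin_expect_predicate above).  */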
7242 inner_arg0 = arg0;
d09ef31a 7243 while (CONVERT_EXPR_P (inner_arg0)
083bada9 7244 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7245 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7246 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7247
76f5a783 7248 /* If this is a builtin_expect within a builtin_expect, keep the
 7249 inner one. See through a comparison against a constant. It
 7250 might have been added to create a truthvalue. */
083bada9 7251 inner = inner_arg0;
7252
76f5a783 7253 if (COMPARISON_CLASS_P (inner)
7254 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7255 inner = TREE_OPERAND (inner, 0);
7256
7257 if (TREE_CODE (inner) == CALL_EXPR
7258 && (fndecl = get_callee_fndecl (inner))
7259 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7260 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7261 return arg0;
7262
083bada9 7263 inner = inner_arg0;
76f5a783 7264 code = TREE_CODE (inner);
7265 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7266 {
7267 tree op0 = TREE_OPERAND (inner, 0);
7268 tree op1 = TREE_OPERAND (inner, 1);
7269
c83059be 7270 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7271 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
76f5a783 7272 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7273
389dd41b 7274 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 7275 }
7276
7277 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 7278 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 7279 return NULL_TREE;
4ee9c684 7280
76f5a783 7281 /* If we expect that a comparison against the argument will fold to
7282 a constant, return the constant. In practice, this means a true
7283 constant or the address of a non-weak symbol. */
083bada9 7284 inner = inner_arg0;
4ee9c684 7285 STRIP_NOPS (inner);
7286 if (TREE_CODE (inner) == ADDR_EXPR)
7287 {
7288 do
7289 {
7290 inner = TREE_OPERAND (inner, 0);
7291 }
7292 while (TREE_CODE (inner) == COMPONENT_REF
7293 || TREE_CODE (inner) == ARRAY_REF);
062b4460 7294 if ((TREE_CODE (inner) == VAR_DECL
7295 || TREE_CODE (inner) == FUNCTION_DECL)
7296 && DECL_WEAK (inner))
c2f47e15 7297 return NULL_TREE;
4ee9c684 7298 }
7299
76f5a783 7300 /* Otherwise, ARG0 already has the proper type for the return value. */
7301 return arg0;
4ee9c684 7302}
7303
c2f47e15 7304/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7305
539a3a92 7306static tree
c2f47e15 7307fold_builtin_classify_type (tree arg)
539a3a92 7308{
c2f47e15 7309 if (arg == 0)
7002a1c8 7310 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7311
7002a1c8 7312 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7313}
7314
c2f47e15 7315/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7316
7317static tree
c7cbde74 7318fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7319{
c2f47e15 7320 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7321 return NULL_TREE;
7322 else
7323 {
c2f47e15 7324 tree len = c_strlen (arg, 0);
e6e27594 7325
7326 if (len)
c7cbde74 7327 return fold_convert_loc (loc, type, len);
e6e27594 7328
7329 return NULL_TREE;
7330 }
7331}
7332
92c43e3c 7333/* Fold a call to __builtin_inf or __builtin_huge_val. */
7334
7335static tree
389dd41b 7336fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7337{
aa870c1b 7338 REAL_VALUE_TYPE real;
7339
40f4dbd5 7340 /* __builtin_inff is intended to be usable to define INFINITY on all
7341 targets. If an infinity is not available, INFINITY expands "to a
7342 positive constant of type float that overflows at translation
7343 time", footnote "In this case, using INFINITY will violate the
7344 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7345 Thus we pedwarn to ensure this constraint violation is
7346 diagnosed. */
92c43e3c 7347 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7348 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7349
aa870c1b 7350 real_inf (&real);
7351 return build_real (type, real);
92c43e3c 7352}
7353
c2f47e15 7354/* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
b0db7939 7355
7356static tree
c2f47e15 7357fold_builtin_nan (tree arg, tree type, int quiet)
b0db7939 7358{
7359 REAL_VALUE_TYPE real;
7360 const char *str;
7361
c2f47e15 7362 if (!validate_arg (arg, POINTER_TYPE))
7363 return NULL_TREE;
7364 str = c_getstr (arg);
b0db7939 7365 if (!str)
c2f47e15 7366 return NULL_TREE;
b0db7939 7367
7368 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
c2f47e15 7369 return NULL_TREE;
b0db7939 7370
7371 return build_real (type, real);
7372}
7373
277f8dd2 7374/* Return true if the floating point expression T has an integer value.
7375 We also allow +Inf, -Inf and NaN to be considered integer values. */
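/* For instance, (double) i for an integer I, floor (x), and
   fmin (trunc (a), ceil (b)) are all treated as integer valued by the cases
   below.  */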
7376
7377static bool
7378integer_valued_real_p (tree t)
7379{
7380 switch (TREE_CODE (t))
7381 {
7382 case FLOAT_EXPR:
7383 return true;
7384
7385 case ABS_EXPR:
7386 case SAVE_EXPR:
277f8dd2 7387 return integer_valued_real_p (TREE_OPERAND (t, 0));
7388
7389 case COMPOUND_EXPR:
41076ef6 7390 case MODIFY_EXPR:
277f8dd2 7391 case BIND_EXPR:
75a70cf9 7392 return integer_valued_real_p (TREE_OPERAND (t, 1));
277f8dd2 7393
7394 case PLUS_EXPR:
7395 case MINUS_EXPR:
7396 case MULT_EXPR:
7397 case MIN_EXPR:
7398 case MAX_EXPR:
7399 return integer_valued_real_p (TREE_OPERAND (t, 0))
7400 && integer_valued_real_p (TREE_OPERAND (t, 1));
7401
7402 case COND_EXPR:
7403 return integer_valued_real_p (TREE_OPERAND (t, 1))
7404 && integer_valued_real_p (TREE_OPERAND (t, 2));
7405
7406 case REAL_CST:
0570334c 7407 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
277f8dd2 7408
d09ef31a 7409 CASE_CONVERT:
277f8dd2 7410 {
7411 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7412 if (TREE_CODE (type) == INTEGER_TYPE)
7413 return true;
7414 if (TREE_CODE (type) == REAL_TYPE)
7415 return integer_valued_real_p (TREE_OPERAND (t, 0));
7416 break;
7417 }
7418
7419 case CALL_EXPR:
7420 switch (builtin_mathfn_code (t))
7421 {
4f35b1fc 7422 CASE_FLT_FN (BUILT_IN_CEIL):
7423 CASE_FLT_FN (BUILT_IN_FLOOR):
7424 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7425 CASE_FLT_FN (BUILT_IN_RINT):
7426 CASE_FLT_FN (BUILT_IN_ROUND):
7427 CASE_FLT_FN (BUILT_IN_TRUNC):
277f8dd2 7428 return true;
7429
d4a43a03 7430 CASE_FLT_FN (BUILT_IN_FMIN):
7431 CASE_FLT_FN (BUILT_IN_FMAX):
c2f47e15 7432 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7433 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
d4a43a03 7434
277f8dd2 7435 default:
7436 break;
7437 }
7438 break;
7439
7440 default:
7441 break;
7442 }
7443 return false;
7444}
7445
c2f47e15 7446/* FNDECL is assumed to be a builtin where truncation can be propagated
6528f4f4 7447 across (for instance floor((double)f) == (double)floorf (f)).
c2f47e15 7448 Do the transformation for a call with argument ARG. */
277f8dd2 7449
6528f4f4 7450static tree
389dd41b 7451fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6528f4f4 7452{
6528f4f4 7453 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
277f8dd2 7454
c2f47e15 7455 if (!validate_arg (arg, REAL_TYPE))
7456 return NULL_TREE;
6528f4f4 7457
277f8dd2 7458 /* Integer rounding functions are idempotent. */
7459 if (fcode == builtin_mathfn_code (arg))
7460 return arg;
7461
7462 /* If argument is already integer valued, and we don't need to worry
7463 about setting errno, there's no need to perform rounding. */
7464 if (! flag_errno_math && integer_valued_real_p (arg))
7465 return arg;
7466
7467 if (optimize)
6528f4f4 7468 {
277f8dd2 7469 tree arg0 = strip_float_extensions (arg);
2426241c 7470 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6528f4f4 7471 tree newtype = TREE_TYPE (arg0);
7472 tree decl;
7473
7474 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7475 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7476 return fold_convert_loc (loc, ftype,
7477 build_call_expr_loc (loc, decl, 1,
7478 fold_convert_loc (loc,
7479 newtype,
7480 arg0)));
6528f4f4 7481 }
c2f47e15 7482 return NULL_TREE;
6528f4f4 7483}
7484
c2f47e15 7485/* FNDECL is assumed to be builtin which can narrow the FP type of
7486 the argument, for instance lround((double)f) -> lroundf (f).
7487 Do the transformation for a call with argument ARG. */
9ed65c7f 7488
7489static tree
389dd41b 7490fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
9ed65c7f 7491{
9ed65c7f 7492 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9ed65c7f 7493
c2f47e15 7494 if (!validate_arg (arg, REAL_TYPE))
7495 return NULL_TREE;
9ed65c7f 7496
7497 /* If argument is already integer valued, and we don't need to worry
7498 about setting errno, there's no need to perform rounding. */
7499 if (! flag_errno_math && integer_valued_real_p (arg))
389dd41b 7500 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7501 TREE_TYPE (TREE_TYPE (fndecl)), arg);
9ed65c7f 7502
7503 if (optimize)
7504 {
7505 tree ftype = TREE_TYPE (arg);
7506 tree arg0 = strip_float_extensions (arg);
7507 tree newtype = TREE_TYPE (arg0);
7508 tree decl;
7509
7510 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7511 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7512 return build_call_expr_loc (loc, decl, 1,
7513 fold_convert_loc (loc, newtype, arg0));
9ed65c7f 7514 }
73a0da56 7515
80ff6494 7516 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7517 sizeof (int) == sizeof (long). */
7518 if (TYPE_PRECISION (integer_type_node)
7519 == TYPE_PRECISION (long_integer_type_node))
7520 {
7521 tree newfn = NULL_TREE;
7522 switch (fcode)
7523 {
7524 CASE_FLT_FN (BUILT_IN_ICEIL):
7525 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7526 break;
7527
7528 CASE_FLT_FN (BUILT_IN_IFLOOR):
7529 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7530 break;
7531
7532 CASE_FLT_FN (BUILT_IN_IROUND):
7533 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7534 break;
7535
7536 CASE_FLT_FN (BUILT_IN_IRINT):
7537 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7538 break;
7539
7540 default:
7541 break;
7542 }
7543
7544 if (newfn)
7545 {
7546 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7547 return fold_convert_loc (loc,
7548 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7549 }
7550 }
7551
73a0da56 7552 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7553 sizeof (long long) == sizeof (long). */
7554 if (TYPE_PRECISION (long_long_integer_type_node)
7555 == TYPE_PRECISION (long_integer_type_node))
7556 {
7557 tree newfn = NULL_TREE;
7558 switch (fcode)
7559 {
7560 CASE_FLT_FN (BUILT_IN_LLCEIL):
7561 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7562 break;
7563
7564 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7565 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7566 break;
7567
7568 CASE_FLT_FN (BUILT_IN_LLROUND):
7569 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7570 break;
7571
7572 CASE_FLT_FN (BUILT_IN_LLRINT):
7573 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7574 break;
7575
7576 default:
7577 break;
7578 }
7579
7580 if (newfn)
7581 {
389dd41b 7582 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7583 return fold_convert_loc (loc,
7584 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
73a0da56 7585 }
7586 }
7587
c2f47e15 7588 return NULL_TREE;
9ed65c7f 7589}
7590
c2f47e15 7591/* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7592 return type. Return NULL_TREE if no simplification can be made. */
c63f4ad3 7593
7594static tree
389dd41b 7595fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
c63f4ad3 7596{
c2f47e15 7597 tree res;
c63f4ad3 7598
b0ce8887 7599 if (!validate_arg (arg, COMPLEX_TYPE)
c63f4ad3 7600 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7601 return NULL_TREE;
7602
b4725390 7603 /* Calculate the result when the argument is a constant. */
7604 if (TREE_CODE (arg) == COMPLEX_CST
7605 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7606 type, mpfr_hypot)))
7607 return res;
48e1416a 7608
1af0d139 7609 if (TREE_CODE (arg) == COMPLEX_EXPR)
7610 {
7611 tree real = TREE_OPERAND (arg, 0);
7612 tree imag = TREE_OPERAND (arg, 1);
48e1416a 7613
1af0d139 7614 /* If either part is zero, cabs is fabs of the other. */
7615 if (real_zerop (real))
389dd41b 7616 return fold_build1_loc (loc, ABS_EXPR, type, imag);
1af0d139 7617 if (real_zerop (imag))
389dd41b 7618 return fold_build1_loc (loc, ABS_EXPR, type, real);
1af0d139 7619
7620 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7621 if (flag_unsafe_math_optimizations
7622 && operand_equal_p (real, imag, OEP_PURE_SAME))
7623 {
2e7ca27b 7624 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 7625 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
1af0d139 7626 STRIP_NOPS (real);
389dd41b 7627 return fold_build2_loc (loc, MULT_EXPR, type,
7628 fold_build1_loc (loc, ABS_EXPR, type, real),
2e7ca27b 7629 build_real (type, sqrt2_trunc));
1af0d139 7630 }
7631 }
c63f4ad3 7632
749891b2 7633 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7634 if (TREE_CODE (arg) == NEGATE_EXPR
7635 || TREE_CODE (arg) == CONJ_EXPR)
389dd41b 7636 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
749891b2 7637
7d3f6cc7 7638 /* Don't do this when optimizing for size. */
7639 if (flag_unsafe_math_optimizations
0bfd8d5c 7640 && optimize && optimize_function_for_speed_p (cfun))
c63f4ad3 7641 {
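      /* Expand cabs (z) inline as sqrt (r*r + i*i), where R and I are the
	 real and imaginary parts of Z, each saved so it is evaluated only
	 once.  */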
0da0dbfa 7642 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
c63f4ad3 7643
7644 if (sqrtfn != NULL_TREE)
7645 {
c2f47e15 7646 tree rpart, ipart, result;
c63f4ad3 7647
4ee9c684 7648 arg = builtin_save_expr (arg);
29a6518e 7649
389dd41b 7650 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7651 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
c63f4ad3 7652
4ee9c684 7653 rpart = builtin_save_expr (rpart);
7654 ipart = builtin_save_expr (ipart);
c63f4ad3 7655
389dd41b 7656 result = fold_build2_loc (loc, PLUS_EXPR, type,
7657 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7658 rpart, rpart),
389dd41b 7659 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7660 ipart, ipart));
c63f4ad3 7661
389dd41b 7662 return build_call_expr_loc (loc, sqrtfn, 1, result);
c63f4ad3 7663 }
7664 }
7665
7666 return NULL_TREE;
7667}
7668
c2373fdb 7669/* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7670 complex tree type of the result. If NEG is true, the imaginary
7671 zero is negative. */
7672
7673static tree
7674build_complex_cproj (tree type, bool neg)
7675{
7676 REAL_VALUE_TYPE rinf, rzero = dconst0;
7677
7678 real_inf (&rinf);
7679 rzero.sign = neg;
7680 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7681 build_real (TREE_TYPE (type), rzero));
7682}
7683
7684 /* Fold a call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7685 return type. Return NULL_TREE if no simplification can be made. */
7686
7687static tree
7688fold_builtin_cproj (location_t loc, tree arg, tree type)
7689{
7690 if (!validate_arg (arg, COMPLEX_TYPE)
7691 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7692 return NULL_TREE;
7693
7694 /* If there are no infinities, return arg. */
fe994837 7695 if (! HONOR_INFINITIES (type))
c2373fdb 7696 return non_lvalue_loc (loc, arg);
7697
7698 /* Calculate the result when the argument is a constant. */
7699 if (TREE_CODE (arg) == COMPLEX_CST)
7700 {
7701 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7702 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7703
7704 if (real_isinf (real) || real_isinf (imag))
7705 return build_complex_cproj (type, imag->sign);
7706 else
7707 return arg;
7708 }
b4c7e601 7709 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7710 {
7711 tree real = TREE_OPERAND (arg, 0);
7712 tree imag = TREE_OPERAND (arg, 1);
7713
7714 STRIP_NOPS (real);
7715 STRIP_NOPS (imag);
7716
7717 /* If the real part is inf and the imag part is known to be
7718 nonnegative, return (inf + 0i). Remember side-effects are
7719 possible in the imag part. */
7720 if (TREE_CODE (real) == REAL_CST
7721 && real_isinf (TREE_REAL_CST_PTR (real))
7722 && tree_expr_nonnegative_p (imag))
7723 return omit_one_operand_loc (loc, type,
7724 build_complex_cproj (type, false),
7725 arg);
7726
7727 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7728 Remember side-effects are possible in the real part. */
7729 if (TREE_CODE (imag) == REAL_CST
7730 && real_isinf (TREE_REAL_CST_PTR (imag)))
7731 return
7732 omit_one_operand_loc (loc, type,
7733 build_complex_cproj (type, TREE_REAL_CST_PTR
7734 (imag)->sign), arg);
7735 }
c2373fdb 7736
7737 return NULL_TREE;
7738}
7739
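/* Illustrative examples of the cproj folds above (not part of the original
   source):

       cproj (3.0 + 4.0*I)       -> 3.0 + 4.0*I      (finite, unchanged)
       cproj (INFINITY + 2.0*I)  -> INFINITY + 0.0*I
       cproj (1.0 - INFINITY*I)  -> INFINITY - 0.0*I

   and when infinities are not honored (-ffinite-math-only) cproj (z)
   simply folds to its argument.  */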
c2f47e15 7740/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7741 Return NULL_TREE if no simplification can be made. */
e6e27594 7742
7743static tree
389dd41b 7744fold_builtin_sqrt (location_t loc, tree arg, tree type)
e6e27594 7745{
7746
7747 enum built_in_function fcode;
b4e8ab0c 7748 tree res;
c2f47e15 7749
7750 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7751 return NULL_TREE;
7752
b4e8ab0c 7753 /* Calculate the result when the argument is a constant. */
7754 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7755 return res;
48e1416a 7756
e6e27594 7757 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7758 fcode = builtin_mathfn_code (arg);
7759 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7760 {
c2f47e15 7761 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
389dd41b 7762 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7763 CALL_EXPR_ARG (arg, 0),
49d00087 7764 build_real (type, dconsthalf));
389dd41b 7765 return build_call_expr_loc (loc, expfn, 1, arg);
e6e27594 7766 }
7767
7768 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7769 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7770 {
7771 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7772
7773 if (powfn)
7774 {
c2f47e15 7775 tree arg0 = CALL_EXPR_ARG (arg, 0);
e6e27594 7776 tree tree_root;
7777 /* The inner root was either sqrt or cbrt. */
57510da6 7778 /* This was a conditional expression but it triggered a bug
18381619 7779 in Sun C 5.5. */
ce6cd837 7780 REAL_VALUE_TYPE dconstroot;
7781 if (BUILTIN_SQRT_P (fcode))
7782 dconstroot = dconsthalf;
7783 else
7784 dconstroot = dconst_third ();
e6e27594 7785
7786 /* Adjust for the outer root. */
7787 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7788 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7789 tree_root = build_real (type, dconstroot);
389dd41b 7790 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
e6e27594 7791 }
7792 }
7793
bc33117f 7794 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
e6e27594 7795 if (flag_unsafe_math_optimizations
7796 && (fcode == BUILT_IN_POW
7797 || fcode == BUILT_IN_POWF
7798 || fcode == BUILT_IN_POWL))
7799 {
c2f47e15 7800 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7801 tree arg0 = CALL_EXPR_ARG (arg, 0);
7802 tree arg1 = CALL_EXPR_ARG (arg, 1);
bc33117f 7803 tree narg1;
7804 if (!tree_expr_nonnegative_p (arg0))
7805 arg0 = build1 (ABS_EXPR, type, arg0);
389dd41b 7806 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 7807 build_real (type, dconsthalf));
389dd41b 7808 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
e6e27594 7809 }
7810
7811 return NULL_TREE;
7812}
7813
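/* Worked examples of the sqrt folds above (illustrative only; all but the
   first assume -funsafe-math-optimizations):

       sqrt (4.0)        -> 2.0   (evaluated at compile time via MPFR)
       sqrt (exp (x))    -> exp (x * 0.5)
       sqrt (sqrt (x))   -> pow (x, 0.25)
       sqrt (cbrt (x))   -> pow (x, 1.0/6.0)
       sqrt (pow (x, y)) -> pow (fabs (x), y * 0.5)

   The fabs in the last line is dropped when x is known to be
   nonnegative.  */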
c2f47e15 7814/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7815 Return NULL_TREE if no simplification can be made. */
7816
e6e27594 7817static tree
389dd41b 7818fold_builtin_cbrt (location_t loc, tree arg, tree type)
e6e27594 7819{
e6e27594 7820 const enum built_in_function fcode = builtin_mathfn_code (arg);
29f4cd78 7821 tree res;
e6e27594 7822
c2f47e15 7823 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7824 return NULL_TREE;
7825
29f4cd78 7826 /* Calculate the result when the argument is a constant. */
7827 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7828 return res;
e6e27594 7829
cdfeb715 7830 if (flag_unsafe_math_optimizations)
e6e27594 7831 {
cdfeb715 7832 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7833 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 7834 {
c2f47e15 7835 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7836 const REAL_VALUE_TYPE third_trunc =
7910b2fb 7837 real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7838 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7839 CALL_EXPR_ARG (arg, 0),
49d00087 7840 build_real (type, third_trunc));
389dd41b 7841 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 7842 }
e6e27594 7843
cdfeb715 7844 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7845 if (BUILTIN_SQRT_P (fcode))
a0c938f0 7846 {
cdfeb715 7847 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
e6e27594 7848
cdfeb715 7849 if (powfn)
7850 {
c2f47e15 7851 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7852 tree tree_root;
7910b2fb 7853 REAL_VALUE_TYPE dconstroot = dconst_third ();
cdfeb715 7854
7855 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7856 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7857 tree_root = build_real (type, dconstroot);
389dd41b 7858 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7859 }
e6e27594 7860 }
7861
cdfeb715 7862 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7863 if (BUILTIN_CBRT_P (fcode))
a0c938f0 7864 {
c2f47e15 7865 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7866 if (tree_expr_nonnegative_p (arg0))
7867 {
7868 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7869
7870 if (powfn)
a0c938f0 7871 {
cdfeb715 7872 tree tree_root;
7873 REAL_VALUE_TYPE dconstroot;
a0c938f0 7874
3fa759a9 7875 real_arithmetic (&dconstroot, MULT_EXPR,
7910b2fb 7876 dconst_third_ptr (), dconst_third_ptr ());
cdfeb715 7877 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7878 tree_root = build_real (type, dconstroot);
389dd41b 7879 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7880 }
7881 }
7882 }
a0c938f0 7883
cdfeb715 7884 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
48e1416a 7885 if (fcode == BUILT_IN_POW
c2f47e15 7886 || fcode == BUILT_IN_POWF
cdfeb715 7887 || fcode == BUILT_IN_POWL)
a0c938f0 7888 {
c2f47e15 7889 tree arg00 = CALL_EXPR_ARG (arg, 0);
7890 tree arg01 = CALL_EXPR_ARG (arg, 1);
cdfeb715 7891 if (tree_expr_nonnegative_p (arg00))
7892 {
c2f47e15 7893 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7894 const REAL_VALUE_TYPE dconstroot
7910b2fb 7895 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7896 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
49d00087 7897 build_real (type, dconstroot));
389dd41b 7898 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
cdfeb715 7899 }
7900 }
e6e27594 7901 }
7902 return NULL_TREE;
7903}
7904
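/* Worked examples of the cbrt folds above (illustrative only, assuming
   -funsafe-math-optimizations):

       cbrt (exp (x))    -> exp (x / 3.0)
       cbrt (sqrt (x))   -> pow (x, 1.0/6.0)
       cbrt (cbrt (x))   -> pow (x, 1.0/9.0)   iff x is known nonnegative
       cbrt (pow (x, y)) -> pow (x, y / 3.0)   iff x is known nonnegative  */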
c2f47e15 7905/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7906 TYPE is the type of the return value. Return NULL_TREE if no
7907 simplification can be made. */
7908
e6e27594 7909static tree
389dd41b 7910fold_builtin_cos (location_t loc,
7911 tree arg, tree type, tree fndecl)
e6e27594 7912{
e6ab33d8 7913 tree res, narg;
e6e27594 7914
c2f47e15 7915 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7916 return NULL_TREE;
7917
bffb7645 7918 /* Calculate the result when the argument is a constant. */
728bac60 7919 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
bffb7645 7920 return res;
48e1416a 7921
e6e27594 7922 /* Optimize cos(-x) into cos (x). */
e6ab33d8 7923 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7924 return build_call_expr_loc (loc, fndecl, 1, narg);
e6e27594 7925
7926 return NULL_TREE;
7927}
7928
c2f47e15 7929/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7930 Return NULL_TREE if no simplification can be made. */
7931
cacdc1af 7932static tree
389dd41b 7933fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
cacdc1af 7934{
c2f47e15 7935 if (validate_arg (arg, REAL_TYPE))
cacdc1af 7936 {
cacdc1af 7937 tree res, narg;
7938
7939 /* Calculate the result when the argument is a constant. */
7940 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7941 return res;
48e1416a 7942
cacdc1af 7943 /* Optimize cosh(-x) into cosh (x). */
7944 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7945 return build_call_expr_loc (loc, fndecl, 1, narg);
cacdc1af 7946 }
48e1416a 7947
cacdc1af 7948 return NULL_TREE;
7949}
7950
239d491a 7951/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7952 argument ARG. TYPE is the type of the return value. Return
7953 NULL_TREE if no simplification can be made. */
7954
7955static tree
965d0f29 7956fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7957 bool hyper)
239d491a 7958{
7959 if (validate_arg (arg, COMPLEX_TYPE)
7960 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7961 {
7962 tree tmp;
7963
239d491a 7964 /* Calculate the result when the argument is a constant. */
7965 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7966 return tmp;
48e1416a 7967
239d491a 7968 /* Optimize fn(-x) into fn(x). */
7969 if ((tmp = fold_strip_sign_ops (arg)))
389dd41b 7970 return build_call_expr_loc (loc, fndecl, 1, tmp);
239d491a 7971 }
7972
7973 return NULL_TREE;
7974}
7975
c2f47e15 7976/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7977 Return NULL_TREE if no simplification can be made. */
7978
e6e27594 7979static tree
c2f47e15 7980fold_builtin_tan (tree arg, tree type)
e6e27594 7981{
7982 enum built_in_function fcode;
29f4cd78 7983 tree res;
e6e27594 7984
c2f47e15 7985 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7986 return NULL_TREE;
7987
bffb7645 7988 /* Calculate the result when the argument is a constant. */
728bac60 7989 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
bffb7645 7990 return res;
48e1416a 7991
e6e27594 7992 /* Optimize tan(atan(x)) = x. */
7993 fcode = builtin_mathfn_code (arg);
7994 if (flag_unsafe_math_optimizations
7995 && (fcode == BUILT_IN_ATAN
7996 || fcode == BUILT_IN_ATANF
7997 || fcode == BUILT_IN_ATANL))
c2f47e15 7998 return CALL_EXPR_ARG (arg, 0);
e6e27594 7999
8000 return NULL_TREE;
8001}
8002
d735c391 8003/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8004 NULL_TREE if no simplification can be made. */
8005
8006static tree
389dd41b 8007fold_builtin_sincos (location_t loc,
8008 tree arg0, tree arg1, tree arg2)
d735c391 8009{
c2f47e15 8010 tree type;
d735c391 8011 tree res, fn, call;
8012
c2f47e15 8013 if (!validate_arg (arg0, REAL_TYPE)
8014 || !validate_arg (arg1, POINTER_TYPE)
8015 || !validate_arg (arg2, POINTER_TYPE))
d735c391 8016 return NULL_TREE;
8017
d735c391 8018 type = TREE_TYPE (arg0);
d735c391 8019
8020 /* Calculate the result when the argument is a constant. */
8021 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8022 return res;
8023
8024 /* Canonicalize sincos to cexpi. */
30f690e0 8025 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8026 return NULL_TREE;
d735c391 8027 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8028 if (!fn)
8029 return NULL_TREE;
8030
389dd41b 8031 call = build_call_expr_loc (loc, fn, 1, arg0);
d735c391 8032 call = builtin_save_expr (call);
8033
a75b1c71 8034 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 8035 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8036 build_fold_indirect_ref_loc (loc, arg1),
d735c391 8037 build1 (IMAGPART_EXPR, type, call)),
8038 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8039 build_fold_indirect_ref_loc (loc, arg2),
d735c391 8040 build1 (REALPART_EXPR, type, call)));
8041}
8042
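/* Illustrative sketch of the sincos canonicalization above (not part of
   the original source).  On targets whose C library provides the C99
   complex functions, a call

       sincos (x, &s, &c);

   is rewritten roughly as

       __complex__ double t = __builtin_cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   since cexpi (x) computes cos (x) + I*sin (x); later passes can then
   share the single cexpi call between sin and cos uses of the same
   argument.  */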
c5bb2c4b 8043/* Fold function call to builtin cexp, cexpf, or cexpl. Return
8044 NULL_TREE if no simplification can be made. */
8045
8046static tree
389dd41b 8047fold_builtin_cexp (location_t loc, tree arg0, tree type)
c5bb2c4b 8048{
c2f47e15 8049 tree rtype;
c5bb2c4b 8050 tree realp, imagp, ifn;
239d491a 8051 tree res;
c5bb2c4b 8052
239d491a 8053 if (!validate_arg (arg0, COMPLEX_TYPE)
b0ce8887 8054 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
c5bb2c4b 8055 return NULL_TREE;
8056
239d491a 8057 /* Calculate the result when the argument is a constant. */
8058 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8059 return res;
48e1416a 8060
c5bb2c4b 8061 rtype = TREE_TYPE (TREE_TYPE (arg0));
8062
 8063	  /* In case we can figure out the real part of arg0 and it is constant zero,
 8064	     fold to cexpi.  */
30f690e0 8065 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8066 return NULL_TREE;
c5bb2c4b 8067 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8068 if (!ifn)
8069 return NULL_TREE;
8070
389dd41b 8071 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
c5bb2c4b 8072 && real_zerop (realp))
8073 {
389dd41b 8074 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8075 return build_call_expr_loc (loc, ifn, 1, narg);
c5bb2c4b 8076 }
8077
 8078	  /* In case we can easily decompose real and imaginary parts, split cexp
 8079	     to exp (r) * cexpi (i).  */
8080 if (flag_unsafe_math_optimizations
8081 && realp)
8082 {
8083 tree rfn, rcall, icall;
8084
8085 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8086 if (!rfn)
8087 return NULL_TREE;
8088
389dd41b 8089 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
c5bb2c4b 8090 if (!imagp)
8091 return NULL_TREE;
8092
389dd41b 8093 icall = build_call_expr_loc (loc, ifn, 1, imagp);
c5bb2c4b 8094 icall = builtin_save_expr (icall);
389dd41b 8095 rcall = build_call_expr_loc (loc, rfn, 1, realp);
c5bb2c4b 8096 rcall = builtin_save_expr (rcall);
389dd41b 8097 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8098 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8099 rcall,
389dd41b 8100 fold_build1_loc (loc, REALPART_EXPR,
8101 rtype, icall)),
8102 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8103 rcall,
389dd41b 8104 fold_build1_loc (loc, IMAGPART_EXPR,
8105 rtype, icall)));
c5bb2c4b 8106 }
8107
8108 return NULL_TREE;
8109}
8110
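/* Sketch of the cexp folds above (illustrative only).  When the real part
   can be extracted, e.g. for z = 0.0 + y*I,

       cexp (z)  ->  cexpi (y)

   and with -funsafe-math-optimizations a decomposable argument
   z = x + y*I becomes

       cexp (z)  ->  exp (x) * cexpi (y)

   built as the complex value whose parts are exp (x) times the real and
   imaginary parts of cexpi (y).  */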
c2f47e15 8111/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8112 Return NULL_TREE if no simplification can be made. */
277f8dd2 8113
8114static tree
389dd41b 8115fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
277f8dd2 8116{
c2f47e15 8117 if (!validate_arg (arg, REAL_TYPE))
8118 return NULL_TREE;
277f8dd2 8119
8120 /* Optimize trunc of constant value. */
f96bd2bf 8121 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8122 {
8123 REAL_VALUE_TYPE r, x;
2426241c 8124 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8125
8126 x = TREE_REAL_CST (arg);
8127 real_trunc (&r, TYPE_MODE (type), &x);
8128 return build_real (type, r);
8129 }
8130
389dd41b 8131 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8132}
8133
c2f47e15 8134/* Fold function call to builtin floor, floorf or floorl with argument ARG.
8135 Return NULL_TREE if no simplification can be made. */
277f8dd2 8136
8137static tree
389dd41b 8138fold_builtin_floor (location_t loc, tree fndecl, tree arg)
277f8dd2 8139{
c2f47e15 8140 if (!validate_arg (arg, REAL_TYPE))
8141 return NULL_TREE;
277f8dd2 8142
8143 /* Optimize floor of constant value. */
f96bd2bf 8144 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8145 {
8146 REAL_VALUE_TYPE x;
8147
8148 x = TREE_REAL_CST (arg);
8149 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8150 {
2426241c 8151 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8152 REAL_VALUE_TYPE r;
8153
8154 real_floor (&r, TYPE_MODE (type), &x);
8155 return build_real (type, r);
8156 }
8157 }
8158
acc2b92e 8159 /* Fold floor (x) where x is nonnegative to trunc (x). */
8160 if (tree_expr_nonnegative_p (arg))
30fe8286 8161 {
8162 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8163 if (truncfn)
389dd41b 8164 return build_call_expr_loc (loc, truncfn, 1, arg);
30fe8286 8165 }
acc2b92e 8166
389dd41b 8167 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8168}
8169
c2f47e15 8170/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8171 Return NULL_TREE if no simplification can be made. */
277f8dd2 8172
8173static tree
389dd41b 8174fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
277f8dd2 8175{
c2f47e15 8176 if (!validate_arg (arg, REAL_TYPE))
8177 return NULL_TREE;
277f8dd2 8178
8179 /* Optimize ceil of constant value. */
f96bd2bf 8180 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8181 {
8182 REAL_VALUE_TYPE x;
8183
8184 x = TREE_REAL_CST (arg);
8185 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8186 {
2426241c 8187 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8188 REAL_VALUE_TYPE r;
8189
8190 real_ceil (&r, TYPE_MODE (type), &x);
8191 return build_real (type, r);
8192 }
8193 }
8194
389dd41b 8195 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8196}
8197
c2f47e15 8198/* Fold function call to builtin round, roundf or roundl with argument ARG.
8199 Return NULL_TREE if no simplification can be made. */
89ab3887 8200
8201static tree
389dd41b 8202fold_builtin_round (location_t loc, tree fndecl, tree arg)
89ab3887 8203{
c2f47e15 8204 if (!validate_arg (arg, REAL_TYPE))
8205 return NULL_TREE;
89ab3887 8206
34f17811 8207 /* Optimize round of constant value. */
f96bd2bf 8208 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
89ab3887 8209 {
8210 REAL_VALUE_TYPE x;
8211
8212 x = TREE_REAL_CST (arg);
8213 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8214 {
2426241c 8215 tree type = TREE_TYPE (TREE_TYPE (fndecl));
89ab3887 8216 REAL_VALUE_TYPE r;
8217
8218 real_round (&r, TYPE_MODE (type), &x);
8219 return build_real (type, r);
8220 }
8221 }
8222
389dd41b 8223 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
89ab3887 8224}
8225
34f17811 8226/* Fold function call to builtin lround, lroundf or lroundl (or the
c2f47e15 8227 corresponding long long versions) and other rounding functions. ARG
8228 is the argument to the call. Return NULL_TREE if no simplification
8229 can be made. */
34f17811 8230
8231static tree
389dd41b 8232fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
34f17811 8233{
c2f47e15 8234 if (!validate_arg (arg, REAL_TYPE))
8235 return NULL_TREE;
34f17811 8236
8237 /* Optimize lround of constant value. */
f96bd2bf 8238 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
34f17811 8239 {
8240 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8241
776a7bab 8242 if (real_isfinite (&x))
34f17811 8243 {
2426241c 8244 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
ca9b061d 8245 tree ftype = TREE_TYPE (arg);
34f17811 8246 REAL_VALUE_TYPE r;
e913b5cd 8247 bool fail = false;
34f17811 8248
ad52b9b7 8249 switch (DECL_FUNCTION_CODE (fndecl))
8250 {
80ff6494 8251 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 8252 CASE_FLT_FN (BUILT_IN_LFLOOR):
8253 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 8254 real_floor (&r, TYPE_MODE (ftype), &x);
8255 break;
8256
80ff6494 8257 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 8258 CASE_FLT_FN (BUILT_IN_LCEIL):
8259 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 8260 real_ceil (&r, TYPE_MODE (ftype), &x);
8261 break;
8262
80ff6494 8263 CASE_FLT_FN (BUILT_IN_IROUND):
4f35b1fc 8264 CASE_FLT_FN (BUILT_IN_LROUND):
8265 CASE_FLT_FN (BUILT_IN_LLROUND):
ad52b9b7 8266 real_round (&r, TYPE_MODE (ftype), &x);
8267 break;
8268
8269 default:
8270 gcc_unreachable ();
8271 }
8272
ab2c1de8 8273 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
e913b5cd 8274 if (!fail)
8275 return wide_int_to_tree (itype, val);
34f17811 8276 }
8277 }
8278
acc2b92e 8279 switch (DECL_FUNCTION_CODE (fndecl))
8280 {
8281 CASE_FLT_FN (BUILT_IN_LFLOOR):
8282 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8283 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8284 if (tree_expr_nonnegative_p (arg))
389dd41b 8285 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8286 TREE_TYPE (TREE_TYPE (fndecl)), arg);
acc2b92e 8287 break;
8288 default:;
8289 }
8290
389dd41b 8291 return fold_fixed_mathfn (loc, fndecl, arg);
34f17811 8292}
8293
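/* Worked examples of the integer rounding folds above (illustrative only):

       lfloor (2.7)  -> 2
       lceil  (2.1)  -> 3
       lround (2.5)  -> 3   (halfway cases round away from zero)

   The constant is folded only when the rounded value fits in the builtin's
   integer return type; lfloor of a nonnegative argument also becomes a
   plain FIX_TRUNC conversion.  */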
70fb4c07 8294/* Fold function call to builtin ffs, clz, ctz, popcount and parity
c2f47e15 8295 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8296 the argument to the call. Return NULL_TREE if no simplification can
8297 be made. */
70fb4c07 8298
8299static tree
c2f47e15 8300fold_builtin_bitop (tree fndecl, tree arg)
70fb4c07 8301{
c2f47e15 8302 if (!validate_arg (arg, INTEGER_TYPE))
70fb4c07 8303 return NULL_TREE;
8304
8305 /* Optimize for constant argument. */
f96bd2bf 8306 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
70fb4c07 8307 {
1cee90ad 8308 tree type = TREE_TYPE (arg);
796b6678 8309 int result;
70fb4c07 8310
8311 switch (DECL_FUNCTION_CODE (fndecl))
8312 {
4f35b1fc 8313 CASE_INT_FN (BUILT_IN_FFS):
ab2c1de8 8314 result = wi::ffs (arg);
70fb4c07 8315 break;
8316
4f35b1fc 8317 CASE_INT_FN (BUILT_IN_CLZ):
1cee90ad 8318 if (wi::ne_p (arg, 0))
8319 result = wi::clz (arg);
8320 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8321 result = TYPE_PRECISION (type);
70fb4c07 8322 break;
8323
4f35b1fc 8324 CASE_INT_FN (BUILT_IN_CTZ):
1cee90ad 8325 if (wi::ne_p (arg, 0))
8326 result = wi::ctz (arg);
8327 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8328 result = TYPE_PRECISION (type);
70fb4c07 8329 break;
8330
6aaa1f9e 8331 CASE_INT_FN (BUILT_IN_CLRSB):
ab2c1de8 8332 result = wi::clrsb (arg);
6aaa1f9e 8333 break;
8334
4f35b1fc 8335 CASE_INT_FN (BUILT_IN_POPCOUNT):
ab2c1de8 8336 result = wi::popcount (arg);
70fb4c07 8337 break;
8338
4f35b1fc 8339 CASE_INT_FN (BUILT_IN_PARITY):
ab2c1de8 8340 result = wi::parity (arg);
70fb4c07 8341 break;
8342
8343 default:
64db345d 8344 gcc_unreachable ();
70fb4c07 8345 }
8346
796b6678 8347 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
70fb4c07 8348 }
8349
8350 return NULL_TREE;
8351}
8352
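/* Worked examples of the constant folds above (illustrative only; the clz
   value assumes a 32-bit int):

       __builtin_ffs (0)         -> 0
       __builtin_ffs (8)         -> 4
       __builtin_clz (1)         -> 31
       __builtin_ctz (8)         -> 3
       __builtin_popcount (0xff) -> 8
       __builtin_parity (7)      -> 1

   For clz/ctz of zero the result is the target's
   C[LT]Z_DEFINED_VALUE_AT_ZERO value when one exists, and the type
   precision otherwise.  */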
74bdbe96 8353/* Fold function call to builtin_bswap and the short, long and long long
42791117 8354 variants. Return NULL_TREE if no simplification can be made. */
8355static tree
c2f47e15 8356fold_builtin_bswap (tree fndecl, tree arg)
42791117 8357{
c2f47e15 8358 if (! validate_arg (arg, INTEGER_TYPE))
8359 return NULL_TREE;
42791117 8360
8361 /* Optimize constant value. */
f96bd2bf 8362 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
42791117 8363 {
74bdbe96 8364 tree type = TREE_TYPE (TREE_TYPE (fndecl));
42791117 8365
42791117 8366 switch (DECL_FUNCTION_CODE (fndecl))
8367 {
74bdbe96 8368 case BUILT_IN_BSWAP16:
42791117 8369 case BUILT_IN_BSWAP32:
8370 case BUILT_IN_BSWAP64:
8371 {
e913b5cd 8372 signop sgn = TYPE_SIGN (type);
ddb1be65 8373 tree result =
796b6678 8374 wide_int_to_tree (type,
8375 wide_int::from (arg, TYPE_PRECISION (type),
8376 sgn).bswap ());
e913b5cd 8377 return result;
42791117 8378 }
42791117 8379 default:
8380 gcc_unreachable ();
8381 }
42791117 8382 }
8383
8384 return NULL_TREE;
8385}
c2f47e15 8386
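/* Worked examples of the bswap constant folds above (illustrative only):

       __builtin_bswap16 (0x1234)     -> 0x3412
       __builtin_bswap32 (0x12345678) -> 0x78563412

   The value is first converted to the precision of the builtin's return
   type, then the byte swap is applied.  */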
f0c477f2 8387/* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8388 NULL_TREE if no simplification can be made. */
8389
8390static tree
389dd41b 8391fold_builtin_hypot (location_t loc, tree fndecl,
8392 tree arg0, tree arg1, tree type)
f0c477f2 8393{
e6ab33d8 8394 tree res, narg0, narg1;
f0c477f2 8395
c2f47e15 8396 if (!validate_arg (arg0, REAL_TYPE)
8397 || !validate_arg (arg1, REAL_TYPE))
f0c477f2 8398 return NULL_TREE;
8399
8400 /* Calculate the result when the argument is a constant. */
8401 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8402 return res;
48e1416a 8403
6c95f21c 8404 /* If either argument to hypot has a negate or abs, strip that off.
8405 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
e6ab33d8 8406 narg0 = fold_strip_sign_ops (arg0);
8407 narg1 = fold_strip_sign_ops (arg1);
8408 if (narg0 || narg1)
8409 {
48e1416a 8410 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
c2f47e15 8411 narg1 ? narg1 : arg1);
6c95f21c 8412 }
48e1416a 8413
f0c477f2 8414 /* If either argument is zero, hypot is fabs of the other. */
8415 if (real_zerop (arg0))
389dd41b 8416 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
f0c477f2 8417 else if (real_zerop (arg1))
389dd41b 8418 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
48e1416a 8419
6c95f21c 8420 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8421 if (flag_unsafe_math_optimizations
8422 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
f0c477f2 8423 {
2e7ca27b 8424 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 8425 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
389dd41b 8426 return fold_build2_loc (loc, MULT_EXPR, type,
8427 fold_build1_loc (loc, ABS_EXPR, type, arg0),
2e7ca27b 8428 build_real (type, sqrt2_trunc));
f0c477f2 8429 }
8430
f0c477f2 8431 return NULL_TREE;
8432}
8433
8434
e6e27594 8435/* Fold a builtin function call to pow, powf, or powl. Return
8436 NULL_TREE if no simplification can be made. */
8437static tree
389dd41b 8438fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
e6e27594 8439{
f0c477f2 8440 tree res;
e6e27594 8441
c2f47e15 8442 if (!validate_arg (arg0, REAL_TYPE)
8443 || !validate_arg (arg1, REAL_TYPE))
e6e27594 8444 return NULL_TREE;
8445
f0c477f2 8446 /* Calculate the result when the argument is a constant. */
8447 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8448 return res;
8449
e6e27594 8450 /* Optimize pow(1.0,y) = 1.0. */
8451 if (real_onep (arg0))
389dd41b 8452 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
e6e27594 8453
8454 if (TREE_CODE (arg1) == REAL_CST
f96bd2bf 8455 && !TREE_OVERFLOW (arg1))
e6e27594 8456 {
198d9bbe 8457 REAL_VALUE_TYPE cint;
e6e27594 8458 REAL_VALUE_TYPE c;
198d9bbe 8459 HOST_WIDE_INT n;
8460
e6e27594 8461 c = TREE_REAL_CST (arg1);
8462
8463 /* Optimize pow(x,0.0) = 1.0. */
8464 if (REAL_VALUES_EQUAL (c, dconst0))
389dd41b 8465 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
e6e27594 8466 arg0);
8467
8468 /* Optimize pow(x,1.0) = x. */
8469 if (REAL_VALUES_EQUAL (c, dconst1))
8470 return arg0;
8471
8472 /* Optimize pow(x,-1.0) = 1.0/x. */
8473 if (REAL_VALUES_EQUAL (c, dconstm1))
389dd41b 8474 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8475 build_real (type, dconst1), arg0);
e6e27594 8476
8477 /* Optimize pow(x,0.5) = sqrt(x). */
8478 if (flag_unsafe_math_optimizations
8479 && REAL_VALUES_EQUAL (c, dconsthalf))
8480 {
8481 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8482
8483 if (sqrtfn != NULL_TREE)
389dd41b 8484 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
e6e27594 8485 }
8486
feb5b3eb 8487 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8488 if (flag_unsafe_math_optimizations)
8489 {
8490 const REAL_VALUE_TYPE dconstroot
7910b2fb 8491 = real_value_truncate (TYPE_MODE (type), dconst_third ());
feb5b3eb 8492
8493 if (REAL_VALUES_EQUAL (c, dconstroot))
8494 {
8495 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8496 if (cbrtfn != NULL_TREE)
389dd41b 8497 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
feb5b3eb 8498 }
8499 }
8500
198d9bbe 8501 /* Check for an integer exponent. */
8502 n = real_to_integer (&c);
e913b5cd 8503 real_from_integer (&cint, VOIDmode, n, SIGNED);
198d9bbe 8504 if (real_identical (&c, &cint))
e6e27594 8505 {
a2b30b48 8506 /* Attempt to evaluate pow at compile-time, unless this should
8507 raise an exception. */
198d9bbe 8508 if (TREE_CODE (arg0) == REAL_CST
a2b30b48 8509 && !TREE_OVERFLOW (arg0)
8510 && (n > 0
8511 || (!flag_trapping_math && !flag_errno_math)
8512 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
e6e27594 8513 {
8514 REAL_VALUE_TYPE x;
8515 bool inexact;
8516
8517 x = TREE_REAL_CST (arg0);
8518 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8519 if (flag_unsafe_math_optimizations || !inexact)
8520 return build_real (type, x);
8521 }
198d9bbe 8522
8523 /* Strip sign ops from even integer powers. */
8524 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8525 {
8526 tree narg0 = fold_strip_sign_ops (arg0);
8527 if (narg0)
389dd41b 8528 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
198d9bbe 8529 }
e6e27594 8530 }
8531 }
8532
cdfeb715 8533 if (flag_unsafe_math_optimizations)
e6e27594 8534 {
cdfeb715 8535 const enum built_in_function fcode = builtin_mathfn_code (arg0);
e6e27594 8536
cdfeb715 8537 /* Optimize pow(expN(x),y) = expN(x*y). */
8538 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 8539 {
c2f47e15 8540 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8541 tree arg = CALL_EXPR_ARG (arg0, 0);
389dd41b 8542 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8543 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 8544 }
e6e27594 8545
cdfeb715 8546 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8547 if (BUILTIN_SQRT_P (fcode))
a0c938f0 8548 {
c2f47e15 8549 tree narg0 = CALL_EXPR_ARG (arg0, 0);
389dd41b 8550 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8551 build_real (type, dconsthalf));
389dd41b 8552 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
cdfeb715 8553 }
8554
8555 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8556 if (BUILTIN_CBRT_P (fcode))
a0c938f0 8557 {
c2f47e15 8558 tree arg = CALL_EXPR_ARG (arg0, 0);
cdfeb715 8559 if (tree_expr_nonnegative_p (arg))
8560 {
8561 const REAL_VALUE_TYPE dconstroot
7910b2fb 8562 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 8563 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8564 build_real (type, dconstroot));
389dd41b 8565 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
cdfeb715 8566 }
8567 }
a0c938f0 8568
49e436b5 8569 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
c2f47e15 8570 if (fcode == BUILT_IN_POW
8571 || fcode == BUILT_IN_POWF
8572 || fcode == BUILT_IN_POWL)
a0c938f0 8573 {
c2f47e15 8574 tree arg00 = CALL_EXPR_ARG (arg0, 0);
49e436b5 8575 if (tree_expr_nonnegative_p (arg00))
8576 {
8577 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8578 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8579 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8580 }
cdfeb715 8581 }
e6e27594 8582 }
cdfeb715 8583
e6e27594 8584 return NULL_TREE;
8585}
8586
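/* Summary examples of the pow folds above (illustrative only; lines marked
   "unsafe" require -funsafe-math-optimizations):

       pow (1.0, y)        -> 1.0
       pow (x, 0.0)        -> 1.0
       pow (x, 1.0)        -> x
       pow (x, -1.0)       -> 1.0 / x
       pow (2.0, 10.0)     -> 1024.0            (integer exponent, evaluated
                                                 at compile time)
       pow (x, 0.5)        -> sqrt (x)          (unsafe)
       pow (x, 1.0/3.0)    -> cbrt (x)          (unsafe)
       pow (exp (x), y)    -> exp (x * y)       (unsafe)
       pow (sqrt (x), y)   -> pow (x, y * 0.5)  (unsafe)
       pow (pow (x, y), z) -> pow (x, y * z)    (unsafe, x nonnegative)  */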
c2f47e15 8587/* Fold a builtin function call to powi, powif, or powil with argument ARG.
8588 Return NULL_TREE if no simplification can be made. */
b4d0c20c 8589static tree
389dd41b 8590fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
c2f47e15 8591 tree arg0, tree arg1, tree type)
b4d0c20c 8592{
c2f47e15 8593 if (!validate_arg (arg0, REAL_TYPE)
8594 || !validate_arg (arg1, INTEGER_TYPE))
b4d0c20c 8595 return NULL_TREE;
8596
8597 /* Optimize pow(1.0,y) = 1.0. */
8598 if (real_onep (arg0))
389dd41b 8599 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
b4d0c20c 8600
e913b5cd 8601 if (tree_fits_shwi_p (arg1))
b4d0c20c 8602 {
e913b5cd 8603 HOST_WIDE_INT c = tree_to_shwi (arg1);
b4d0c20c 8604
8605 /* Evaluate powi at compile-time. */
8606 if (TREE_CODE (arg0) == REAL_CST
f96bd2bf 8607 && !TREE_OVERFLOW (arg0))
b4d0c20c 8608 {
8609 REAL_VALUE_TYPE x;
8610 x = TREE_REAL_CST (arg0);
8611 real_powi (&x, TYPE_MODE (type), &x, c);
8612 return build_real (type, x);
8613 }
8614
8615 /* Optimize pow(x,0) = 1.0. */
8616 if (c == 0)
389dd41b 8617 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
b4d0c20c 8618 arg0);
8619
8620 /* Optimize pow(x,1) = x. */
8621 if (c == 1)
8622 return arg0;
8623
8624 /* Optimize pow(x,-1) = 1.0/x. */
8625 if (c == -1)
389dd41b 8626 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8627 build_real (type, dconst1), arg0);
b4d0c20c 8628 }
8629
8630 return NULL_TREE;
8631}
8632
8918c507 8633/* A subroutine of fold_builtin to fold the various exponent
c2f47e15 8634 functions. Return NULL_TREE if no simplification can be made.
debf9994 8635 FUNC is the corresponding MPFR exponent function. */
8918c507 8636
8637static tree
389dd41b 8638fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
debf9994 8639 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8918c507 8640{
c2f47e15 8641 if (validate_arg (arg, REAL_TYPE))
8918c507 8642 {
8918c507 8643 tree type = TREE_TYPE (TREE_TYPE (fndecl));
29f4cd78 8644 tree res;
48e1416a 8645
debf9994 8646 /* Calculate the result when the argument is a constant. */
728bac60 8647 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
debf9994 8648 return res;
8918c507 8649
8650 /* Optimize expN(logN(x)) = x. */
8651 if (flag_unsafe_math_optimizations)
a0c938f0 8652 {
8918c507 8653 const enum built_in_function fcode = builtin_mathfn_code (arg);
8654
debf9994 8655 if ((func == mpfr_exp
8918c507 8656 && (fcode == BUILT_IN_LOG
8657 || fcode == BUILT_IN_LOGF
8658 || fcode == BUILT_IN_LOGL))
debf9994 8659 || (func == mpfr_exp2
8918c507 8660 && (fcode == BUILT_IN_LOG2
8661 || fcode == BUILT_IN_LOG2F
8662 || fcode == BUILT_IN_LOG2L))
debf9994 8663 || (func == mpfr_exp10
8918c507 8664 && (fcode == BUILT_IN_LOG10
8665 || fcode == BUILT_IN_LOG10F
8666 || fcode == BUILT_IN_LOG10L)))
389dd41b 8667 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8918c507 8668 }
8669 }
8670
c2f47e15 8671 return NULL_TREE;
8918c507 8672}
8673
7959b13b 8674/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8675 arguments to the call, and TYPE is its return type.
8676 Return NULL_TREE if no simplification can be made. */
8677
8678static tree
389dd41b 8679fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7959b13b 8680{
8681 if (!validate_arg (arg1, POINTER_TYPE)
8682 || !validate_arg (arg2, INTEGER_TYPE)
8683 || !validate_arg (len, INTEGER_TYPE))
8684 return NULL_TREE;
8685 else
8686 {
8687 const char *p1;
8688
8689 if (TREE_CODE (arg2) != INTEGER_CST
e913b5cd 8690 || !tree_fits_uhwi_p (len))
7959b13b 8691 return NULL_TREE;
8692
8693 p1 = c_getstr (arg1);
8694 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8695 {
8696 char c;
8697 const char *r;
8698 tree tem;
8699
8700 if (target_char_cast (arg2, &c))
8701 return NULL_TREE;
8702
e913b5cd 8703 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7959b13b 8704
8705 if (r == NULL)
8706 return build_int_cst (TREE_TYPE (arg1), 0);
8707
2cc66f2a 8708 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
389dd41b 8709 return fold_convert_loc (loc, type, tem);
7959b13b 8710 }
8711 return NULL_TREE;
8712 }
8713}
8714
c2f47e15 8715/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8716 Return NULL_TREE if no simplification can be made. */
9c8a1629 8717
8718static tree
389dd41b 8719fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8720{
c4fef134 8721 const char *p1, *p2;
9c8a1629 8722
c2f47e15 8723 if (!validate_arg (arg1, POINTER_TYPE)
8724 || !validate_arg (arg2, POINTER_TYPE)
8725 || !validate_arg (len, INTEGER_TYPE))
8726 return NULL_TREE;
9c8a1629 8727
8728 /* If the LEN parameter is zero, return zero. */
8729 if (integer_zerop (len))
389dd41b 8730 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8731 arg1, arg2);
9c8a1629 8732
8733 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8734 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8735 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 8736
8737 p1 = c_getstr (arg1);
8738 p2 = c_getstr (arg2);
8739
8740 /* If all arguments are constant, and the value of len is not greater
8741 than the lengths of arg1 and arg2, evaluate at compile-time. */
e913b5cd 8742 if (tree_fits_uhwi_p (len) && p1 && p2
c4fef134 8743 && compare_tree_int (len, strlen (p1) + 1) <= 0
8744 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8745 {
e913b5cd 8746 const int r = memcmp (p1, p2, tree_to_uhwi (len));
c4fef134 8747
8748 if (r > 0)
8749 return integer_one_node;
8750 else if (r < 0)
8751 return integer_minus_one_node;
8752 else
8753 return integer_zero_node;
8754 }
8755
 8756	  /* If the len parameter is one, return an expression corresponding to
 8757	     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
e913b5cd 8758 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8759 {
8760 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8761 tree cst_uchar_ptr_node
8762 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8763
389dd41b 8764 tree ind1
8765 = fold_convert_loc (loc, integer_type_node,
8766 build1 (INDIRECT_REF, cst_uchar_node,
8767 fold_convert_loc (loc,
8768 cst_uchar_ptr_node,
c4fef134 8769 arg1)));
389dd41b 8770 tree ind2
8771 = fold_convert_loc (loc, integer_type_node,
8772 build1 (INDIRECT_REF, cst_uchar_node,
8773 fold_convert_loc (loc,
8774 cst_uchar_ptr_node,
c4fef134 8775 arg2)));
389dd41b 8776 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 8777 }
9c8a1629 8778
c2f47e15 8779 return NULL_TREE;
9c8a1629 8780}
8781
c2f47e15 8782/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8783 Return NULL_TREE if no simplification can be made. */
9c8a1629 8784
8785static tree
389dd41b 8786fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9c8a1629 8787{
9c8a1629 8788 const char *p1, *p2;
8789
c2f47e15 8790 if (!validate_arg (arg1, POINTER_TYPE)
8791 || !validate_arg (arg2, POINTER_TYPE))
8792 return NULL_TREE;
9c8a1629 8793
8794 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8795 if (operand_equal_p (arg1, arg2, 0))
c4fef134 8796 return integer_zero_node;
9c8a1629 8797
8798 p1 = c_getstr (arg1);
8799 p2 = c_getstr (arg2);
8800
8801 if (p1 && p2)
8802 {
9c8a1629 8803 const int i = strcmp (p1, p2);
8804 if (i < 0)
c4fef134 8805 return integer_minus_one_node;
9c8a1629 8806 else if (i > 0)
c4fef134 8807 return integer_one_node;
9c8a1629 8808 else
c4fef134 8809 return integer_zero_node;
8810 }
8811
8812 /* If the second arg is "", return *(const unsigned char*)arg1. */
8813 if (p2 && *p2 == '\0')
8814 {
8815 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8816 tree cst_uchar_ptr_node
8817 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8818
389dd41b 8819 return fold_convert_loc (loc, integer_type_node,
8820 build1 (INDIRECT_REF, cst_uchar_node,
8821 fold_convert_loc (loc,
8822 cst_uchar_ptr_node,
8823 arg1)));
c4fef134 8824 }
8825
8826 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8827 if (p1 && *p1 == '\0')
8828 {
8829 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8830 tree cst_uchar_ptr_node
8831 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8832
389dd41b 8833 tree temp
8834 = fold_convert_loc (loc, integer_type_node,
8835 build1 (INDIRECT_REF, cst_uchar_node,
8836 fold_convert_loc (loc,
8837 cst_uchar_ptr_node,
c4fef134 8838 arg2)));
389dd41b 8839 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9c8a1629 8840 }
8841
c2f47e15 8842 return NULL_TREE;
9c8a1629 8843}
8844
c2f47e15 8845/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8846 Return NULL_TREE if no simplification can be made. */
9c8a1629 8847
8848static tree
389dd41b 8849fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8850{
9c8a1629 8851 const char *p1, *p2;
8852
c2f47e15 8853 if (!validate_arg (arg1, POINTER_TYPE)
8854 || !validate_arg (arg2, POINTER_TYPE)
8855 || !validate_arg (len, INTEGER_TYPE))
8856 return NULL_TREE;
9c8a1629 8857
8858 /* If the LEN parameter is zero, return zero. */
8859 if (integer_zerop (len))
389dd41b 8860 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8861 arg1, arg2);
9c8a1629 8862
8863 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8864 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8865 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9c8a1629 8866
8867 p1 = c_getstr (arg1);
8868 p2 = c_getstr (arg2);
8869
e913b5cd 8870 if (tree_fits_uhwi_p (len) && p1 && p2)
9c8a1629 8871 {
e913b5cd 8872 const int i = strncmp (p1, p2, tree_to_uhwi (len));
c4fef134 8873 if (i > 0)
8874 return integer_one_node;
8875 else if (i < 0)
8876 return integer_minus_one_node;
9c8a1629 8877 else
c4fef134 8878 return integer_zero_node;
8879 }
8880
8881 /* If the second arg is "", and the length is greater than zero,
8882 return *(const unsigned char*)arg1. */
8883 if (p2 && *p2 == '\0'
8884 && TREE_CODE (len) == INTEGER_CST
8885 && tree_int_cst_sgn (len) == 1)
8886 {
8887 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8888 tree cst_uchar_ptr_node
8889 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8890
389dd41b 8891 return fold_convert_loc (loc, integer_type_node,
8892 build1 (INDIRECT_REF, cst_uchar_node,
8893 fold_convert_loc (loc,
8894 cst_uchar_ptr_node,
8895 arg1)));
c4fef134 8896 }
8897
8898 /* If the first arg is "", and the length is greater than zero,
8899 return -*(const unsigned char*)arg2. */
8900 if (p1 && *p1 == '\0'
8901 && TREE_CODE (len) == INTEGER_CST
8902 && tree_int_cst_sgn (len) == 1)
8903 {
8904 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8905 tree cst_uchar_ptr_node
8906 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8907
389dd41b 8908 tree temp = fold_convert_loc (loc, integer_type_node,
8909 build1 (INDIRECT_REF, cst_uchar_node,
8910 fold_convert_loc (loc,
8911 cst_uchar_ptr_node,
8912 arg2)));
8913 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
c4fef134 8914 }
8915
 8916	  /* If the len parameter is one, return an expression corresponding to
 8917	     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
e913b5cd 8918 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8919 {
8920 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8921 tree cst_uchar_ptr_node
8922 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8923
389dd41b 8924 tree ind1 = fold_convert_loc (loc, integer_type_node,
8925 build1 (INDIRECT_REF, cst_uchar_node,
8926 fold_convert_loc (loc,
8927 cst_uchar_ptr_node,
8928 arg1)));
8929 tree ind2 = fold_convert_loc (loc, integer_type_node,
8930 build1 (INDIRECT_REF, cst_uchar_node,
8931 fold_convert_loc (loc,
8932 cst_uchar_ptr_node,
8933 arg2)));
8934 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9c8a1629 8935 }
8936
c2f47e15 8937 return NULL_TREE;
9c8a1629 8938}
8939
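/* Worked examples of the strcmp/strncmp folds above (illustrative only):

       strcmp (s, s)         -> 0
       strcmp ("abc", "abd") -> -1   (constant results are normalized to
                                      -1, 0 or 1)
       strcmp (s, "")        -> *(const unsigned char *) s
       strncmp (s, t, 0)     -> 0, keeping s and t for side effects
       strncmp (s, t, 1)     -> *(const unsigned char *) s
                                - *(const unsigned char *) t  */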
c2f47e15 8940/* Fold function call to builtin signbit, signbitf or signbitl with argument
8941 ARG. Return NULL_TREE if no simplification can be made. */
27f261ef 8942
8943static tree
389dd41b 8944fold_builtin_signbit (location_t loc, tree arg, tree type)
27f261ef 8945{
c2f47e15 8946 if (!validate_arg (arg, REAL_TYPE))
27f261ef 8947 return NULL_TREE;
8948
27f261ef 8949 /* If ARG is a compile-time constant, determine the result. */
8950 if (TREE_CODE (arg) == REAL_CST
f96bd2bf 8951 && !TREE_OVERFLOW (arg))
27f261ef 8952 {
8953 REAL_VALUE_TYPE c;
8954
8955 c = TREE_REAL_CST (arg);
385f3f36 8956 return (REAL_VALUE_NEGATIVE (c)
8957 ? build_one_cst (type)
8958 : build_zero_cst (type));
27f261ef 8959 }
8960
8961 /* If ARG is non-negative, the result is always zero. */
8962 if (tree_expr_nonnegative_p (arg))
389dd41b 8963 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
27f261ef 8964
8965 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
fe994837 8966 if (!HONOR_SIGNED_ZEROS (arg))
de67cbb8 8967 return fold_convert (type,
8968 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8969 build_real (TREE_TYPE (arg), dconst0)));
27f261ef 8970
8971 return NULL_TREE;
8972}
8973
c2f47e15 8974/* Fold function call to builtin copysign, copysignf or copysignl with
8975 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8976 be made. */
467214fd 8977
8978static tree
389dd41b 8979fold_builtin_copysign (location_t loc, tree fndecl,
8980 tree arg1, tree arg2, tree type)
467214fd 8981{
c2f47e15 8982 tree tem;
467214fd 8983
c2f47e15 8984 if (!validate_arg (arg1, REAL_TYPE)
8985 || !validate_arg (arg2, REAL_TYPE))
467214fd 8986 return NULL_TREE;
8987
467214fd 8988 /* copysign(X,X) is X. */
8989 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8990 return fold_convert_loc (loc, type, arg1);
467214fd 8991
8992 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8993 if (TREE_CODE (arg1) == REAL_CST
8994 && TREE_CODE (arg2) == REAL_CST
f96bd2bf 8995 && !TREE_OVERFLOW (arg1)
8996 && !TREE_OVERFLOW (arg2))
467214fd 8997 {
8998 REAL_VALUE_TYPE c1, c2;
8999
9000 c1 = TREE_REAL_CST (arg1);
9001 c2 = TREE_REAL_CST (arg2);
749680e2 9002 /* c1.sign := c2.sign. */
467214fd 9003 real_copysign (&c1, &c2);
9004 return build_real (type, c1);
467214fd 9005 }
9006
9007 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9008 Remember to evaluate Y for side-effects. */
9009 if (tree_expr_nonnegative_p (arg2))
389dd41b 9010 return omit_one_operand_loc (loc, type,
9011 fold_build1_loc (loc, ABS_EXPR, type, arg1),
467214fd 9012 arg2);
9013
198d9bbe 9014 /* Strip sign changing operations for the first argument. */
9015 tem = fold_strip_sign_ops (arg1);
9016 if (tem)
389dd41b 9017 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
198d9bbe 9018
467214fd 9019 return NULL_TREE;
9020}
9021
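/* Worked examples of the signbit/copysign folds above (illustrative only):

       signbit (-2.5)     -> 1
       signbit (2.5)      -> 0
       copysign (x, x)    -> x
       copysign (x, 2.0)  -> fabs (x)
       copysign (-x, y)   -> copysign (x, y)   (the sign of the first
                                                argument is irrelevant)  */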
c2f47e15 9022/* Fold a call to builtin isascii with argument ARG. */
d49367d4 9023
9024static tree
389dd41b 9025fold_builtin_isascii (location_t loc, tree arg)
d49367d4 9026{
c2f47e15 9027 if (!validate_arg (arg, INTEGER_TYPE))
9028 return NULL_TREE;
d49367d4 9029 else
9030 {
9031 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 9032 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9033 build_int_cst (integer_type_node,
c90b5d40 9034 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 9035 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 9036 arg, integer_zero_node);
d49367d4 9037 }
9038}
9039
c2f47e15 9040/* Fold a call to builtin toascii with argument ARG. */
d49367d4 9041
9042static tree
389dd41b 9043fold_builtin_toascii (location_t loc, tree arg)
d49367d4 9044{
c2f47e15 9045 if (!validate_arg (arg, INTEGER_TYPE))
9046 return NULL_TREE;
48e1416a 9047
c2f47e15 9048 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 9049 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9050 build_int_cst (integer_type_node, 0x7f));
d49367d4 9051}
9052
c2f47e15 9053/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 9054
9055static tree
389dd41b 9056fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 9057{
c2f47e15 9058 if (!validate_arg (arg, INTEGER_TYPE))
9059 return NULL_TREE;
df1cf42e 9060 else
9061 {
9062 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 9063 /* According to the C standard, isdigit is unaffected by locale.
9064 However, it definitely is affected by the target character set. */
624d37a6 9065 unsigned HOST_WIDE_INT target_digit0
9066 = lang_hooks.to_target_charset ('0');
9067
9068 if (target_digit0 == 0)
9069 return NULL_TREE;
9070
389dd41b 9071 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 9072 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9073 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 9074 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 9075 build_int_cst (unsigned_type_node, 9));
df1cf42e 9076 }
9077}
27f261ef 9078
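/* The character-classification folds above, written out as C (illustrative
   only; '0' stands for the target character set's digit zero):

       isascii (c) -> ((c & ~0x7f) == 0)
       toascii (c) -> (c & 0x7f)
       isdigit (c) -> ((unsigned) c - '0' <= 9)

   so none of these require a libc call at run time.  */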
c2f47e15 9079/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 9080
9081static tree
389dd41b 9082fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 9083{
c2f47e15 9084 if (!validate_arg (arg, REAL_TYPE))
9085 return NULL_TREE;
d1aade50 9086
389dd41b 9087 arg = fold_convert_loc (loc, type, arg);
d1aade50 9088 if (TREE_CODE (arg) == REAL_CST)
9089 return fold_abs_const (arg, type);
389dd41b 9090 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9091}
9092
c2f47e15 9093/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 9094
9095static tree
389dd41b 9096fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 9097{
c2f47e15 9098 if (!validate_arg (arg, INTEGER_TYPE))
9099 return NULL_TREE;
d1aade50 9100
389dd41b 9101 arg = fold_convert_loc (loc, type, arg);
d1aade50 9102 if (TREE_CODE (arg) == INTEGER_CST)
9103 return fold_abs_const (arg, type);
389dd41b 9104 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9105}
9106
b9be572e 9107/* Fold a fma operation with arguments ARG[012]. */
9108
9109tree
9110fold_fma (location_t loc ATTRIBUTE_UNUSED,
9111 tree type, tree arg0, tree arg1, tree arg2)
9112{
9113 if (TREE_CODE (arg0) == REAL_CST
9114 && TREE_CODE (arg1) == REAL_CST
9115 && TREE_CODE (arg2) == REAL_CST)
9116 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9117
9118 return NULL_TREE;
9119}
9120
9121/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9122
9123static tree
9124fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9125{
9126 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9127 && validate_arg (arg1, REAL_TYPE)
9128 && validate_arg (arg2, REAL_TYPE))
b9be572e 9129 {
9130 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9131 if (tem)
9132 return tem;
9133
9134 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9135 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9136 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9137 }
9138 return NULL_TREE;
9139}
9140
d4a43a03 9141/* Fold a call to builtin fmin or fmax. */
9142
9143static tree
389dd41b 9144fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9145 tree type, bool max)
d4a43a03 9146{
c2f47e15 9147 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
d4a43a03 9148 {
d4a43a03 9149 /* Calculate the result when the argument is a constant. */
9150 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9151
9152 if (res)
9153 return res;
9154
61fe3555 9155 /* If either argument is NaN, return the other one. Avoid the
9156 transformation if we get (and honor) a signalling NaN. Using
9157 omit_one_operand() ensures we create a non-lvalue. */
9158 if (TREE_CODE (arg0) == REAL_CST
9159 && real_isnan (&TREE_REAL_CST (arg0))
fe994837 9160 && (! HONOR_SNANS (arg0)
61fe3555 9161 || ! TREE_REAL_CST (arg0).signalling))
389dd41b 9162 return omit_one_operand_loc (loc, type, arg1, arg0);
61fe3555 9163 if (TREE_CODE (arg1) == REAL_CST
9164 && real_isnan (&TREE_REAL_CST (arg1))
fe994837 9165 && (! HONOR_SNANS (arg1)
61fe3555 9166 || ! TREE_REAL_CST (arg1).signalling))
389dd41b 9167 return omit_one_operand_loc (loc, type, arg0, arg1);
61fe3555 9168
d4a43a03 9169 /* Transform fmin/fmax(x,x) -> x. */
9170 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
389dd41b 9171 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9172
d4a43a03 9173 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9174 functions to return the numeric arg if the other one is NaN.
9175 These tree codes don't honor that, so only transform if
9176 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9177 handled, so we don't have to worry about it either. */
9178 if (flag_finite_math_only)
389dd41b 9179 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9180 fold_convert_loc (loc, type, arg0),
9181 fold_convert_loc (loc, type, arg1));
d4a43a03 9182 }
9183 return NULL_TREE;
9184}
9185
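/* Sketch of the fmin/fmax folds above (illustrative only, assuming the NaN
   argument has already been folded to a constant):

       fmax (x, __builtin_nan (""))  -> x   (quiet NaN dropped)
       fmin (x, x)                   -> x

   and with -ffinite-math-only the calls lower to plain MIN_EXPR/MAX_EXPR,
   since the C99 "return the numeric argument when the other is NaN"
   requirement can then be ignored.  */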
abe4dcf6 9186/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9187
9188static tree
389dd41b 9189fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 9190{
239d491a 9191 if (validate_arg (arg, COMPLEX_TYPE)
9192 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 9193 {
9194 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 9195
abe4dcf6 9196 if (atan2_fn)
9197 {
c2f47e15 9198 tree new_arg = builtin_save_expr (arg);
389dd41b 9199 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9200 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9201 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 9202 }
9203 }
48e1416a 9204
abe4dcf6 9205 return NULL_TREE;
9206}
9207
cb2b9385 9208/* Fold a call to builtin logb/ilogb. */
9209
9210static tree
389dd41b 9211fold_builtin_logb (location_t loc, tree arg, tree rettype)
cb2b9385 9212{
9213 if (! validate_arg (arg, REAL_TYPE))
9214 return NULL_TREE;
48e1416a 9215
cb2b9385 9216 STRIP_NOPS (arg);
48e1416a 9217
cb2b9385 9218 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9219 {
9220 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9221
cb2b9385 9222 switch (value->cl)
9223 {
9224 case rvc_nan:
9225 case rvc_inf:
9226 /* If arg is Inf or NaN and we're logb, return it. */
9227 if (TREE_CODE (rettype) == REAL_TYPE)
7695fea9 9228 {
9229 /* For logb(-Inf) we have to return +Inf. */
9230 if (real_isinf (value) && real_isneg (value))
9231 {
9232 REAL_VALUE_TYPE tem;
9233 real_inf (&tem);
9234 return build_real (rettype, tem);
9235 }
9236 return fold_convert_loc (loc, rettype, arg);
9237 }
cb2b9385 9238 /* Fall through... */
9239 case rvc_zero:
 9240	    /* Zero may set errno and/or raise an exception for logb; also,
 9241	       for ilogb we don't know FP_ILOGB0.  */
9242 return NULL_TREE;
9243 case rvc_normal:
9244 /* For normal numbers, proceed iff radix == 2. In GCC,
9245 normalized significands are in the range [0.5, 1.0). We
9246 want the exponent as if they were [1.0, 2.0) so get the
9247 exponent and subtract 1. */
9248 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
389dd41b 9249 return fold_convert_loc (loc, rettype,
7002a1c8 9250 build_int_cst (integer_type_node,
389dd41b 9251 REAL_EXP (value)-1));
cb2b9385 9252 break;
9253 }
9254 }
48e1416a 9255
cb2b9385 9256 return NULL_TREE;
9257}
9258
9259/* Fold a call to builtin significand, if radix == 2. */
9260
9261static tree
389dd41b 9262fold_builtin_significand (location_t loc, tree arg, tree rettype)
cb2b9385 9263{
9264 if (! validate_arg (arg, REAL_TYPE))
9265 return NULL_TREE;
48e1416a 9266
cb2b9385 9267 STRIP_NOPS (arg);
48e1416a 9268
cb2b9385 9269 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9270 {
9271 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9272
cb2b9385 9273 switch (value->cl)
9274 {
9275 case rvc_zero:
9276 case rvc_nan:
9277 case rvc_inf:
9278 /* If arg is +-0, +-Inf or +-NaN, then return it. */
389dd41b 9279 return fold_convert_loc (loc, rettype, arg);
cb2b9385 9280 case rvc_normal:
9281 /* For normal numbers, proceed iff radix == 2. */
9282 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9283 {
9284 REAL_VALUE_TYPE result = *value;
9285 /* In GCC, normalized significands are in the range [0.5,
9286 1.0). We want them to be [1.0, 2.0) so set the
9287 exponent to 1. */
9288 SET_REAL_EXP (&result, 1);
9289 return build_real (rettype, result);
9290 }
9291 break;
9292 }
9293 }
48e1416a 9294
cb2b9385 9295 return NULL_TREE;
9296}
9297
3838b9ae 9298/* Fold a call to builtin frexp; we can assume the base is 2. */
9299
9300static tree
389dd41b 9301fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 9302{
9303 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9304 return NULL_TREE;
48e1416a 9305
3838b9ae 9306 STRIP_NOPS (arg0);
48e1416a 9307
3838b9ae 9308 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9309 return NULL_TREE;
48e1416a 9310
389dd41b 9311 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 9312
9313 /* Proceed if a valid pointer type was passed in. */
9314 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9315 {
9316 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9317 tree frac, exp;
48e1416a 9318
3838b9ae 9319 switch (value->cl)
9320 {
9321 case rvc_zero:
9322 /* For +-0, return (*exp = 0, +-0). */
9323 exp = integer_zero_node;
9324 frac = arg0;
9325 break;
9326 case rvc_nan:
9327 case rvc_inf:
9328 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 9329 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 9330 case rvc_normal:
9331 {
9332 /* Since the frexp function always expects base 2, and in
9333 GCC normalized significands are already in the range
9334 [0.5, 1.0), we have exactly what frexp wants. */
9335 REAL_VALUE_TYPE frac_rvt = *value;
9336 SET_REAL_EXP (&frac_rvt, 0);
9337 frac = build_real (rettype, frac_rvt);
7002a1c8 9338 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 9339 }
9340 break;
9341 default:
9342 gcc_unreachable ();
9343 }
48e1416a 9344
3838b9ae 9345      /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
389dd41b 9346 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 9347 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9348 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 9349 }
9350
9351 return NULL_TREE;
9352}
9353
7587301b 9354/* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9355 then we can assume the base is two. If it's false, then we have to
9356 check the mode of the TYPE parameter in certain cases. */
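/* Worked example (illustrative only): with both arguments constant,
   ldexp (0.75, 3) folds to the constant 6.0, provided the requested
   adjustment is within the exponent range and the result fits the
   target mode.  For scalbn/scalbln the same folding is done only when
   TYPE's format has radix 2.  */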
9357
9358static tree
389dd41b 9359fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9360 tree type, bool ldexp)
7587301b 9361{
9362 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9363 {
9364 STRIP_NOPS (arg0);
9365 STRIP_NOPS (arg1);
9366
9367 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9368 if (real_zerop (arg0) || integer_zerop (arg1)
9369 || (TREE_CODE (arg0) == REAL_CST
776a7bab 9370 && !real_isfinite (&TREE_REAL_CST (arg0))))
389dd41b 9371 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9372
7587301b 9373 /* If both arguments are constant, then try to evaluate it. */
9374 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9375 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
e913b5cd 9376 && tree_fits_shwi_p (arg1))
7587301b 9377 {
9378 /* Bound the maximum adjustment to twice the range of the
9379 mode's valid exponents. Use abs to ensure the range is
9380 positive as a sanity check. */
48e1416a 9381 const long max_exp_adj = 2 *
7587301b 9382 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9383 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9384
9385 /* Get the user-requested adjustment. */
e913b5cd 9386 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
48e1416a 9387
7587301b 9388 /* The requested adjustment must be inside this range. This
9389	     is a preliminary cap to avoid things like overflow; we
9390 may still fail to compute the result for other reasons. */
9391 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9392 {
9393 REAL_VALUE_TYPE initial_result;
48e1416a 9394
7587301b 9395 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9396
9397 /* Ensure we didn't overflow. */
9398 if (! real_isinf (&initial_result))
9399 {
9400 const REAL_VALUE_TYPE trunc_result
9401 = real_value_truncate (TYPE_MODE (type), initial_result);
48e1416a 9402
7587301b 9403 /* Only proceed if the target mode can hold the
9404 resulting value. */
9405 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9406 return build_real (type, trunc_result);
9407 }
9408 }
9409 }
9410 }
9411
9412 return NULL_TREE;
9413}
9414
ebf8b4f5 9415/* Fold a call to builtin modf. */
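/* Worked examples (illustrative only): with a constant first argument,
   modf (2.5, &iptr) folds to (*iptr = 2.0, 0.5) and
   modf (-2.0, &iptr) folds to (*iptr = -2.0, -0.0); the second case
   shows the negative-zero fractional part handled explicitly below.  */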
9416
9417static tree
389dd41b 9418fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 9419{
9420 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9421 return NULL_TREE;
48e1416a 9422
ebf8b4f5 9423 STRIP_NOPS (arg0);
48e1416a 9424
ebf8b4f5 9425 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9426 return NULL_TREE;
48e1416a 9427
389dd41b 9428 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 9429
9430 /* Proceed if a valid pointer type was passed in. */
9431 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9432 {
9433 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9434 REAL_VALUE_TYPE trunc, frac;
9435
9436 switch (value->cl)
9437 {
9438 case rvc_nan:
9439 case rvc_zero:
9440 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9441 trunc = frac = *value;
9442 break;
9443 case rvc_inf:
9444 /* For +-Inf, return (*arg1 = arg0, +-0). */
9445 frac = dconst0;
9446 frac.sign = value->sign;
9447 trunc = *value;
9448 break;
9449 case rvc_normal:
9450 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9451 real_trunc (&trunc, VOIDmode, value);
9452 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9453 /* If the original number was negative and already
9454 integral, then the fractional part is -0.0. */
9455 if (value->sign && frac.cl == rvc_zero)
9456 frac.sign = value->sign;
9457 break;
9458 }
48e1416a 9459
ebf8b4f5 9460 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 9461 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 9462 build_real (rettype, trunc));
9463 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9464 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 9465 build_real (rettype, frac));
9466 }
48e1416a 9467
ebf8b4f5 9468 return NULL_TREE;
9469}
9470
a65c4d64 9471/* Given a location LOC, an interclass builtin function decl FNDECL
9472   and its single argument ARG, return a folded expression computing
9473   the same value, or NULL_TREE if we either couldn't or didn't want to fold
9474   (the latter happens if there's an RTL instruction available).  */
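/* Sketch of the generic expansions produced below (illustrative only),
   using double as the example type:

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                       & islessequal (fabs (x), DBL_MAX)

   where DBL_MAX and DBL_MIN stand for the largest finite and smallest
   normal values of the argument's mode, built from get_max_float and a
   "0x1p<emin-1>" string respectively.  */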
9475
9476static tree
9477fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9478{
3754d046 9479 machine_mode mode;
a65c4d64 9480
9481 if (!validate_arg (arg, REAL_TYPE))
9482 return NULL_TREE;
9483
9484 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9485 return NULL_TREE;
9486
9487 mode = TYPE_MODE (TREE_TYPE (arg));
9488
9489 /* If there is no optab, try generic code. */
9490 switch (DECL_FUNCTION_CODE (fndecl))
9491 {
9492 tree result;
9493
9494 CASE_FLT_FN (BUILT_IN_ISINF):
9495 {
9496 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
b9a16870 9497 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
a65c4d64 9498 tree const type = TREE_TYPE (arg);
9499 REAL_VALUE_TYPE r;
9500 char buf[128];
9501
9502 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9503 real_from_string (&r, buf);
9504 result = build_call_expr (isgr_fn, 2,
9505 fold_build1_loc (loc, ABS_EXPR, type, arg),
9506 build_real (type, r));
9507 return result;
9508 }
9509 CASE_FLT_FN (BUILT_IN_FINITE):
9510 case BUILT_IN_ISFINITE:
9511 {
9512 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
b9a16870 9513 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
a65c4d64 9514 tree const type = TREE_TYPE (arg);
9515 REAL_VALUE_TYPE r;
9516 char buf[128];
9517
9518 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9519 real_from_string (&r, buf);
9520 result = build_call_expr (isle_fn, 2,
9521 fold_build1_loc (loc, ABS_EXPR, type, arg),
9522 build_real (type, r));
9523 /*result = fold_build2_loc (loc, UNGT_EXPR,
9524 TREE_TYPE (TREE_TYPE (fndecl)),
9525 fold_build1_loc (loc, ABS_EXPR, type, arg),
9526 build_real (type, r));
9527 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9528 TREE_TYPE (TREE_TYPE (fndecl)),
9529 result);*/
9530 return result;
9531 }
9532 case BUILT_IN_ISNORMAL:
9533 {
9534 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9535 islessequal(fabs(x),DBL_MAX). */
b9a16870 9536 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9537 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
a65c4d64 9538 tree const type = TREE_TYPE (arg);
9539 REAL_VALUE_TYPE rmax, rmin;
9540 char buf[128];
9541
9542 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9543 real_from_string (&rmax, buf);
9544 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9545 real_from_string (&rmin, buf);
9546 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9547 result = build_call_expr (isle_fn, 2, arg,
9548 build_real (type, rmax));
9549 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9550 build_call_expr (isge_fn, 2, arg,
9551 build_real (type, rmin)));
9552 return result;
9553 }
9554 default:
9555 break;
9556 }
9557
9558 return NULL_TREE;
9559}
9560
726069ba 9561/* Fold a call to __builtin_isnan(), __builtin_isinf(), __builtin_isinf_sign()
c2f47e15 9562   or __builtin_isfinite().  ARG is the argument for the call.  */
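/* Worked examples (illustrative only): applied to constants,
   __builtin_isinf folds a negative-infinity REAL_CST to -1 and
   __builtin_isnan (42.0) folds to 0; for a non-constant ARG,
   __builtin_isnan (x) is rewritten as the expression x UNORDERED x.  */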
726069ba 9563
9564static tree
389dd41b 9565fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 9566{
726069ba 9567 tree type = TREE_TYPE (TREE_TYPE (fndecl));
726069ba 9568 REAL_VALUE_TYPE r;
9569
c2f47e15 9570 if (!validate_arg (arg, REAL_TYPE))
d43cee80 9571 return NULL_TREE;
726069ba 9572
726069ba 9573 switch (builtin_index)
9574 {
9575 case BUILT_IN_ISINF:
fe994837 9576 if (!HONOR_INFINITIES (arg))
389dd41b 9577 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9578
9579 if (TREE_CODE (arg) == REAL_CST)
9580 {
9581 r = TREE_REAL_CST (arg);
9582 if (real_isinf (&r))
9583 return real_compare (GT_EXPR, &r, &dconst0)
9584 ? integer_one_node : integer_minus_one_node;
9585 else
9586 return integer_zero_node;
9587 }
9588
9589 return NULL_TREE;
9590
c319d56a 9591 case BUILT_IN_ISINF_SIGN:
9592 {
9593 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9594 /* In a boolean context, GCC will fold the inner COND_EXPR to
9595 1. So e.g. "if (isinf_sign(x))" would be folded to just
9596 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9597 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
b9a16870 9598 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 9599 tree tmp = NULL_TREE;
9600
9601 arg = builtin_save_expr (arg);
9602
9603 if (signbit_fn && isinf_fn)
9604 {
389dd41b 9605 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9606 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 9607
389dd41b 9608 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9609 signbit_call, integer_zero_node);
389dd41b 9610 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9611 isinf_call, integer_zero_node);
48e1416a 9612
389dd41b 9613 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 9614 integer_minus_one_node, integer_one_node);
389dd41b 9615 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9616 isinf_call, tmp,
c319d56a 9617 integer_zero_node);
9618 }
9619
9620 return tmp;
9621 }
9622
cde061c1 9623 case BUILT_IN_ISFINITE:
93633022 9624 if (!HONOR_NANS (arg)
fe994837 9625 && !HONOR_INFINITIES (arg))
389dd41b 9626 return omit_one_operand_loc (loc, type, integer_one_node, arg);
726069ba 9627
9628 if (TREE_CODE (arg) == REAL_CST)
9629 {
9630 r = TREE_REAL_CST (arg);
776a7bab 9631 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
726069ba 9632 }
9633
9634 return NULL_TREE;
9635
9636 case BUILT_IN_ISNAN:
93633022 9637 if (!HONOR_NANS (arg))
389dd41b 9638 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9639
9640 if (TREE_CODE (arg) == REAL_CST)
9641 {
9642 r = TREE_REAL_CST (arg);
9643 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9644 }
9645
9646 arg = builtin_save_expr (arg);
389dd41b 9647 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
726069ba 9648
9649 default:
64db345d 9650 gcc_unreachable ();
726069ba 9651 }
9652}
9653
19fbe3a4 9654/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9655 This builtin will generate code to return the appropriate floating
9656 point classification depending on the value of the floating point
9657 number passed in. The possible return values must be supplied as
921b27c0 9658 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
19fbe3a4 9659   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9660   one floating-point argument, which is "type generic". */
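/* For instance (illustrative only; the exact definition is up to the C
   library), fpclassify may be provided as

     #define fpclassify(x) \
       __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                             FP_SUBNORMAL, FP_ZERO, (x))
*/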
9661
9662static tree
9d884767 9663fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
19fbe3a4 9664{
921b27c0 9665 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9666 arg, type, res, tmp;
3754d046 9667 machine_mode mode;
19fbe3a4 9668 REAL_VALUE_TYPE r;
9669 char buf[128];
48e1416a 9670
19fbe3a4 9671 /* Verify the required arguments in the original call. */
9d884767 9672 if (nargs != 6
9673 || !validate_arg (args[0], INTEGER_TYPE)
9674 || !validate_arg (args[1], INTEGER_TYPE)
9675 || !validate_arg (args[2], INTEGER_TYPE)
9676 || !validate_arg (args[3], INTEGER_TYPE)
9677 || !validate_arg (args[4], INTEGER_TYPE)
9678 || !validate_arg (args[5], REAL_TYPE))
19fbe3a4 9679 return NULL_TREE;
48e1416a 9680
9d884767 9681 fp_nan = args[0];
9682 fp_infinite = args[1];
9683 fp_normal = args[2];
9684 fp_subnormal = args[3];
9685 fp_zero = args[4];
9686 arg = args[5];
19fbe3a4 9687 type = TREE_TYPE (arg);
9688 mode = TYPE_MODE (type);
389dd41b 9689 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
19fbe3a4 9690
48e1416a 9691 /* fpclassify(x) ->
19fbe3a4 9692 isnan(x) ? FP_NAN :
921b27c0 9693 (fabs(x) == Inf ? FP_INFINITE :
19fbe3a4 9694 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9695 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
48e1416a 9696
389dd41b 9697 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9698 build_real (type, dconst0));
389dd41b 9699 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9700 tmp, fp_zero, fp_subnormal);
19fbe3a4 9701
9702 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9703 real_from_string (&r, buf);
389dd41b 9704 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9705 arg, build_real (type, r));
9706 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
48e1416a 9707
19fbe3a4 9708 if (HONOR_INFINITIES (mode))
9709 {
9710 real_inf (&r);
389dd41b 9711 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9712 build_real (type, r));
389dd41b 9713 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9714 fp_infinite, res);
19fbe3a4 9715 }
9716
9717 if (HONOR_NANS (mode))
9718 {
389dd41b 9719 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9720 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
19fbe3a4 9721 }
48e1416a 9722
19fbe3a4 9723 return res;
9724}
9725
9bc9f15f 9726/* Fold a call to an unordered comparison function such as
d5019fe8 9727 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 9728 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 9729 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9730 the opposite of the desired result. UNORDERED_CODE is used
9731 for modes that can hold NaNs and ORDERED_CODE is used for
9732 the rest. */
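/* Worked example (illustrative only): BUILT_IN_ISGREATER reaches this
   with UNLE_EXPR/LE_EXPR, so with NaNs honored isgreater (x, y) folds
   to !(x UNLE y), and under -ffinite-math-only it degenerates to
   !(x <= y).  */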
9bc9f15f 9733
9734static tree
389dd41b 9735fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 9736 enum tree_code unordered_code,
9737 enum tree_code ordered_code)
9738{
859f903a 9739 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 9740 enum tree_code code;
6978db0d 9741 tree type0, type1;
9742 enum tree_code code0, code1;
9743 tree cmp_type = NULL_TREE;
9bc9f15f 9744
6978db0d 9745 type0 = TREE_TYPE (arg0);
9746 type1 = TREE_TYPE (arg1);
a0c938f0 9747
6978db0d 9748 code0 = TREE_CODE (type0);
9749 code1 = TREE_CODE (type1);
a0c938f0 9750
6978db0d 9751 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9752 /* Choose the wider of two real types. */
9753 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9754 ? type0 : type1;
9755 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9756 cmp_type = type0;
9757 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9758 cmp_type = type1;
a0c938f0 9759
389dd41b 9760 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9761 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 9762
9763 if (unordered_code == UNORDERED_EXPR)
9764 {
93633022 9765 if (!HONOR_NANS (arg0))
389dd41b 9766 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9767 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 9768 }
9bc9f15f 9769
93633022 9770 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 9771 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9772 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 9773}
9774
0c93c8a9 9775/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9776   arithmetic if it can never overflow, or into internal functions that
9777   return both the result of the arithmetic and an overflow flag in
9778   a complex integer result, or into some other check for overflow. */
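/* Schematically (illustrative only), __builtin_add_overflow (a, b, &r)
   is turned into something like

     tmp = .ADD_OVERFLOW (a, b);    // complex pair {sum, overflow flag}
     r = REALPART_EXPR (tmp);
     (_Bool) IMAGPART_EXPR (tmp);   // value of the whole expression

   chained together with COMPOUND_EXPRs, as built below.  */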
9779
9780static tree
9781fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9782 tree arg0, tree arg1, tree arg2)
9783{
9784 enum internal_fn ifn = IFN_LAST;
9785 tree type = TREE_TYPE (TREE_TYPE (arg2));
9786 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9787 switch (fcode)
9788 {
9789 case BUILT_IN_ADD_OVERFLOW:
9790 case BUILT_IN_SADD_OVERFLOW:
9791 case BUILT_IN_SADDL_OVERFLOW:
9792 case BUILT_IN_SADDLL_OVERFLOW:
9793 case BUILT_IN_UADD_OVERFLOW:
9794 case BUILT_IN_UADDL_OVERFLOW:
9795 case BUILT_IN_UADDLL_OVERFLOW:
9796 ifn = IFN_ADD_OVERFLOW;
9797 break;
9798 case BUILT_IN_SUB_OVERFLOW:
9799 case BUILT_IN_SSUB_OVERFLOW:
9800 case BUILT_IN_SSUBL_OVERFLOW:
9801 case BUILT_IN_SSUBLL_OVERFLOW:
9802 case BUILT_IN_USUB_OVERFLOW:
9803 case BUILT_IN_USUBL_OVERFLOW:
9804 case BUILT_IN_USUBLL_OVERFLOW:
9805 ifn = IFN_SUB_OVERFLOW;
9806 break;
9807 case BUILT_IN_MUL_OVERFLOW:
9808 case BUILT_IN_SMUL_OVERFLOW:
9809 case BUILT_IN_SMULL_OVERFLOW:
9810 case BUILT_IN_SMULLL_OVERFLOW:
9811 case BUILT_IN_UMUL_OVERFLOW:
9812 case BUILT_IN_UMULL_OVERFLOW:
9813 case BUILT_IN_UMULLL_OVERFLOW:
9814 ifn = IFN_MUL_OVERFLOW;
9815 break;
9816 default:
9817 gcc_unreachable ();
9818 }
9819 tree ctype = build_complex_type (type);
9820 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9821 2, arg0, arg1);
9822 tree tgt = save_expr (call);
9823 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9824 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9825 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9826 tree store
9827 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9828 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9829}
9830
c2f47e15 9831/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 9832 This function returns NULL_TREE if no simplification was possible. */
650e4c94 9833
4ee9c684 9834static tree
e80cc485 9835fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 9836{
e9f80ff5 9837 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 9838 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 9839 switch (fcode)
650e4c94 9840 {
c2f47e15 9841 CASE_FLT_FN (BUILT_IN_INF):
9842 case BUILT_IN_INFD32:
9843 case BUILT_IN_INFD64:
9844 case BUILT_IN_INFD128:
389dd41b 9845 return fold_builtin_inf (loc, type, true);
7c2f0500 9846
c2f47e15 9847 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
389dd41b 9848 return fold_builtin_inf (loc, type, false);
7c2f0500 9849
c2f47e15 9850 case BUILT_IN_CLASSIFY_TYPE:
9851 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 9852
c2f47e15 9853 default:
9854 break;
9855 }
9856 return NULL_TREE;
9857}
7c2f0500 9858
c2f47e15 9859/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 9860 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 9861
c2f47e15 9862static tree
e80cc485 9863fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 9864{
9865 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9866 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9867 switch (fcode)
9868 {
650e4c94 9869 case BUILT_IN_CONSTANT_P:
7c2f0500 9870 {
c2f47e15 9871 tree val = fold_builtin_constant_p (arg0);
7c2f0500 9872
7c2f0500 9873 /* Gimplification will pull the CALL_EXPR for the builtin out of
9874 an if condition. When not optimizing, we'll not CSE it back.
9875 To avoid link error types of regressions, return false now. */
9876 if (!val && !optimize)
9877 val = integer_zero_node;
9878
9879 return val;
9880 }
650e4c94 9881
539a3a92 9882 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 9883 return fold_builtin_classify_type (arg0);
539a3a92 9884
650e4c94 9885 case BUILT_IN_STRLEN:
c7cbde74 9886 return fold_builtin_strlen (loc, type, arg0);
650e4c94 9887
4f35b1fc 9888 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 9889 case BUILT_IN_FABSD32:
9890 case BUILT_IN_FABSD64:
9891 case BUILT_IN_FABSD128:
389dd41b 9892 return fold_builtin_fabs (loc, arg0, type);
d1aade50 9893
9894 case BUILT_IN_ABS:
9895 case BUILT_IN_LABS:
9896 case BUILT_IN_LLABS:
9897 case BUILT_IN_IMAXABS:
389dd41b 9898 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 9899
4f35b1fc 9900 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 9901 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9902 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9903 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 9904 break;
36d3581d 9905
4f35b1fc 9906 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 9907 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9908 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 9909 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 9910 break;
36d3581d 9911
4f35b1fc 9912 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 9913 if (validate_arg (arg0, COMPLEX_TYPE)
9914 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9915 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 9916 break;
36d3581d 9917
503733d5 9918 CASE_FLT_FN (BUILT_IN_CCOS):
9af5ce0c 9919 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
48e1416a 9920
503733d5 9921 CASE_FLT_FN (BUILT_IN_CCOSH):
9af5ce0c 9922 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
48e1416a 9923
c2373fdb 9924 CASE_FLT_FN (BUILT_IN_CPROJ):
9af5ce0c 9925 return fold_builtin_cproj (loc, arg0, type);
c2373fdb 9926
239d491a 9927 CASE_FLT_FN (BUILT_IN_CSIN):
9928 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9929 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9930 return do_mpc_arg1 (arg0, type, mpc_sin);
c2f47e15 9931 break;
48e1416a 9932
239d491a 9933 CASE_FLT_FN (BUILT_IN_CSINH):
9934 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9935 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9936 return do_mpc_arg1 (arg0, type, mpc_sinh);
9937 break;
48e1416a 9938
239d491a 9939 CASE_FLT_FN (BUILT_IN_CTAN):
9940 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9941 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9942 return do_mpc_arg1 (arg0, type, mpc_tan);
9943 break;
48e1416a 9944
239d491a 9945 CASE_FLT_FN (BUILT_IN_CTANH):
9946 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9947 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9948 return do_mpc_arg1 (arg0, type, mpc_tanh);
9949 break;
48e1416a 9950
239d491a 9951 CASE_FLT_FN (BUILT_IN_CLOG):
9952 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9953 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9954 return do_mpc_arg1 (arg0, type, mpc_log);
9955 break;
48e1416a 9956
239d491a 9957 CASE_FLT_FN (BUILT_IN_CSQRT):
9958 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9959 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9960 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9961 break;
48e1416a 9962
0e7e6e7f 9963 CASE_FLT_FN (BUILT_IN_CASIN):
9964 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9965 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9966 return do_mpc_arg1 (arg0, type, mpc_asin);
9967 break;
48e1416a 9968
0e7e6e7f 9969 CASE_FLT_FN (BUILT_IN_CACOS):
9970 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9972 return do_mpc_arg1 (arg0, type, mpc_acos);
9973 break;
48e1416a 9974
0e7e6e7f 9975 CASE_FLT_FN (BUILT_IN_CATAN):
9976 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9977 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9978 return do_mpc_arg1 (arg0, type, mpc_atan);
9979 break;
48e1416a 9980
0e7e6e7f 9981 CASE_FLT_FN (BUILT_IN_CASINH):
9982 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9983 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9984 return do_mpc_arg1 (arg0, type, mpc_asinh);
9985 break;
48e1416a 9986
0e7e6e7f 9987 CASE_FLT_FN (BUILT_IN_CACOSH):
9988 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9989 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9990 return do_mpc_arg1 (arg0, type, mpc_acosh);
9991 break;
48e1416a 9992
0e7e6e7f 9993 CASE_FLT_FN (BUILT_IN_CATANH):
9994 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9995 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9996 return do_mpc_arg1 (arg0, type, mpc_atanh);
9997 break;
48e1416a 9998
4f35b1fc 9999 CASE_FLT_FN (BUILT_IN_CABS):
389dd41b 10000 return fold_builtin_cabs (loc, arg0, type, fndecl);
c63f4ad3 10001
abe4dcf6 10002 CASE_FLT_FN (BUILT_IN_CARG):
389dd41b 10003 return fold_builtin_carg (loc, arg0, type);
abe4dcf6 10004
4f35b1fc 10005 CASE_FLT_FN (BUILT_IN_SQRT):
389dd41b 10006 return fold_builtin_sqrt (loc, arg0, type);
805e22b2 10007
4f35b1fc 10008 CASE_FLT_FN (BUILT_IN_CBRT):
389dd41b 10009 return fold_builtin_cbrt (loc, arg0, type);
3bc5c41b 10010
728bac60 10011 CASE_FLT_FN (BUILT_IN_ASIN):
c2f47e15 10012 if (validate_arg (arg0, REAL_TYPE))
10013 return do_mpfr_arg1 (arg0, type, mpfr_asin,
728bac60 10014 &dconstm1, &dconst1, true);
10015 break;
10016
10017 CASE_FLT_FN (BUILT_IN_ACOS):
c2f47e15 10018 if (validate_arg (arg0, REAL_TYPE))
10019 return do_mpfr_arg1 (arg0, type, mpfr_acos,
728bac60 10020 &dconstm1, &dconst1, true);
10021 break;
10022
10023 CASE_FLT_FN (BUILT_IN_ATAN):
c2f47e15 10024 if (validate_arg (arg0, REAL_TYPE))
10025 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
728bac60 10026 break;
10027
10028 CASE_FLT_FN (BUILT_IN_ASINH):
c2f47e15 10029 if (validate_arg (arg0, REAL_TYPE))
10030 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
728bac60 10031 break;
10032
10033 CASE_FLT_FN (BUILT_IN_ACOSH):
c2f47e15 10034 if (validate_arg (arg0, REAL_TYPE))
10035 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
728bac60 10036 &dconst1, NULL, true);
10037 break;
10038
10039 CASE_FLT_FN (BUILT_IN_ATANH):
c2f47e15 10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
728bac60 10042 &dconstm1, &dconst1, false);
10043 break;
10044
4f35b1fc 10045 CASE_FLT_FN (BUILT_IN_SIN):
c2f47e15 10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
728bac60 10048 break;
77e89269 10049
4f35b1fc 10050 CASE_FLT_FN (BUILT_IN_COS):
389dd41b 10051 return fold_builtin_cos (loc, arg0, type, fndecl);
77e89269 10052
728bac60 10053 CASE_FLT_FN (BUILT_IN_TAN):
c2f47e15 10054 return fold_builtin_tan (arg0, type);
d735c391 10055
c5bb2c4b 10056 CASE_FLT_FN (BUILT_IN_CEXP):
389dd41b 10057 return fold_builtin_cexp (loc, arg0, type);
c5bb2c4b 10058
d735c391 10059 CASE_FLT_FN (BUILT_IN_CEXPI):
c2f47e15 10060 if (validate_arg (arg0, REAL_TYPE))
10061 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10062 break;
d92f994c 10063
728bac60 10064 CASE_FLT_FN (BUILT_IN_SINH):
c2f47e15 10065 if (validate_arg (arg0, REAL_TYPE))
10066 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
728bac60 10067 break;
10068
10069 CASE_FLT_FN (BUILT_IN_COSH):
389dd41b 10070 return fold_builtin_cosh (loc, arg0, type, fndecl);
728bac60 10071
10072 CASE_FLT_FN (BUILT_IN_TANH):
c2f47e15 10073 if (validate_arg (arg0, REAL_TYPE))
10074 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
728bac60 10075 break;
10076
29f4cd78 10077 CASE_FLT_FN (BUILT_IN_ERF):
c2f47e15 10078 if (validate_arg (arg0, REAL_TYPE))
10079 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
29f4cd78 10080 break;
10081
10082 CASE_FLT_FN (BUILT_IN_ERFC):
c2f47e15 10083 if (validate_arg (arg0, REAL_TYPE))
10084 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
29f4cd78 10085 break;
10086
32dba52b 10087 CASE_FLT_FN (BUILT_IN_TGAMMA):
c2f47e15 10088 if (validate_arg (arg0, REAL_TYPE))
10089 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
32dba52b 10090 break;
48e1416a 10091
4f35b1fc 10092 CASE_FLT_FN (BUILT_IN_EXP):
389dd41b 10093 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
467214fd 10094
4f35b1fc 10095 CASE_FLT_FN (BUILT_IN_EXP2):
389dd41b 10096 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
467214fd 10097
4f35b1fc 10098 CASE_FLT_FN (BUILT_IN_EXP10):
10099 CASE_FLT_FN (BUILT_IN_POW10):
389dd41b 10100 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
467214fd 10101
29f4cd78 10102 CASE_FLT_FN (BUILT_IN_EXPM1):
c2f47e15 10103 if (validate_arg (arg0, REAL_TYPE))
10104 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
f8dad9b4 10105 break;
48e1416a 10106
4f35b1fc 10107 CASE_FLT_FN (BUILT_IN_LOG):
f8dad9b4 10108 if (validate_arg (arg0, REAL_TYPE))
10109 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10110 break;
467214fd 10111
4f35b1fc 10112 CASE_FLT_FN (BUILT_IN_LOG2):
f8dad9b4 10113 if (validate_arg (arg0, REAL_TYPE))
10114 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10115 break;
467214fd 10116
4f35b1fc 10117 CASE_FLT_FN (BUILT_IN_LOG10):
f8dad9b4 10118 if (validate_arg (arg0, REAL_TYPE))
10119 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10120 break;
29f4cd78 10121
10122 CASE_FLT_FN (BUILT_IN_LOG1P):
c2f47e15 10123 if (validate_arg (arg0, REAL_TYPE))
10124 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
29f4cd78 10125 &dconstm1, NULL, false);
10126 break;
805e22b2 10127
65dd1378 10128 CASE_FLT_FN (BUILT_IN_J0):
10129 if (validate_arg (arg0, REAL_TYPE))
10130 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10131 NULL, NULL, 0);
10132 break;
10133
10134 CASE_FLT_FN (BUILT_IN_J1):
10135 if (validate_arg (arg0, REAL_TYPE))
10136 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10137 NULL, NULL, 0);
10138 break;
6ff9eeff 10139
10140 CASE_FLT_FN (BUILT_IN_Y0):
10141 if (validate_arg (arg0, REAL_TYPE))
10142 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10143 &dconst0, NULL, false);
10144 break;
10145
10146 CASE_FLT_FN (BUILT_IN_Y1):
10147 if (validate_arg (arg0, REAL_TYPE))
10148 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10149 &dconst0, NULL, false);
10150 break;
65dd1378 10151
4f35b1fc 10152 CASE_FLT_FN (BUILT_IN_NAN):
c4503c0a 10153 case BUILT_IN_NAND32:
10154 case BUILT_IN_NAND64:
10155 case BUILT_IN_NAND128:
c2f47e15 10156 return fold_builtin_nan (arg0, type, true);
b0db7939 10157
4f35b1fc 10158 CASE_FLT_FN (BUILT_IN_NANS):
c2f47e15 10159 return fold_builtin_nan (arg0, type, false);
b0db7939 10160
4f35b1fc 10161 CASE_FLT_FN (BUILT_IN_FLOOR):
389dd41b 10162 return fold_builtin_floor (loc, fndecl, arg0);
277f8dd2 10163
4f35b1fc 10164 CASE_FLT_FN (BUILT_IN_CEIL):
389dd41b 10165 return fold_builtin_ceil (loc, fndecl, arg0);
277f8dd2 10166
4f35b1fc 10167 CASE_FLT_FN (BUILT_IN_TRUNC):
389dd41b 10168 return fold_builtin_trunc (loc, fndecl, arg0);
277f8dd2 10169
4f35b1fc 10170 CASE_FLT_FN (BUILT_IN_ROUND):
389dd41b 10171 return fold_builtin_round (loc, fndecl, arg0);
89ab3887 10172
4f35b1fc 10173 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10174 CASE_FLT_FN (BUILT_IN_RINT):
389dd41b 10175 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
6528f4f4 10176
80ff6494 10177 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 10178 CASE_FLT_FN (BUILT_IN_LCEIL):
10179 CASE_FLT_FN (BUILT_IN_LLCEIL):
10180 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 10181 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 10182 CASE_FLT_FN (BUILT_IN_LLFLOOR):
80ff6494 10183 CASE_FLT_FN (BUILT_IN_IROUND):
a0c938f0 10184 CASE_FLT_FN (BUILT_IN_LROUND):
4f35b1fc 10185 CASE_FLT_FN (BUILT_IN_LLROUND):
389dd41b 10186 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
34f17811 10187
80ff6494 10188 CASE_FLT_FN (BUILT_IN_IRINT):
4f35b1fc 10189 CASE_FLT_FN (BUILT_IN_LRINT):
10190 CASE_FLT_FN (BUILT_IN_LLRINT):
389dd41b 10191 return fold_fixed_mathfn (loc, fndecl, arg0);
9ed65c7f 10192
74bdbe96 10193 case BUILT_IN_BSWAP16:
42791117 10194 case BUILT_IN_BSWAP32:
10195 case BUILT_IN_BSWAP64:
c2f47e15 10196 return fold_builtin_bswap (fndecl, arg0);
42791117 10197
4f35b1fc 10198 CASE_INT_FN (BUILT_IN_FFS):
10199 CASE_INT_FN (BUILT_IN_CLZ):
10200 CASE_INT_FN (BUILT_IN_CTZ):
6aaa1f9e 10201 CASE_INT_FN (BUILT_IN_CLRSB):
4f35b1fc 10202 CASE_INT_FN (BUILT_IN_POPCOUNT):
10203 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 10204 return fold_builtin_bitop (fndecl, arg0);
9c8a1629 10205
4f35b1fc 10206 CASE_FLT_FN (BUILT_IN_SIGNBIT):
389dd41b 10207 return fold_builtin_signbit (loc, arg0, type);
27f261ef 10208
cb2b9385 10209 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
389dd41b 10210 return fold_builtin_significand (loc, arg0, type);
cb2b9385 10211
10212 CASE_FLT_FN (BUILT_IN_ILOGB):
10213 CASE_FLT_FN (BUILT_IN_LOGB):
389dd41b 10214 return fold_builtin_logb (loc, arg0, type);
cb2b9385 10215
d49367d4 10216 case BUILT_IN_ISASCII:
389dd41b 10217 return fold_builtin_isascii (loc, arg0);
d49367d4 10218
10219 case BUILT_IN_TOASCII:
389dd41b 10220 return fold_builtin_toascii (loc, arg0);
d49367d4 10221
df1cf42e 10222 case BUILT_IN_ISDIGIT:
389dd41b 10223 return fold_builtin_isdigit (loc, arg0);
467214fd 10224
4f35b1fc 10225 CASE_FLT_FN (BUILT_IN_FINITE):
c4503c0a 10226 case BUILT_IN_FINITED32:
10227 case BUILT_IN_FINITED64:
10228 case BUILT_IN_FINITED128:
cde061c1 10229 case BUILT_IN_ISFINITE:
a65c4d64 10230 {
10231 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10232 if (ret)
10233 return ret;
10234 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10235 }
726069ba 10236
4f35b1fc 10237 CASE_FLT_FN (BUILT_IN_ISINF):
c4503c0a 10238 case BUILT_IN_ISINFD32:
10239 case BUILT_IN_ISINFD64:
10240 case BUILT_IN_ISINFD128:
a65c4d64 10241 {
10242 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10243 if (ret)
10244 return ret;
10245 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10246 }
10247
10248 case BUILT_IN_ISNORMAL:
10249 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
726069ba 10250
c319d56a 10251 case BUILT_IN_ISINF_SIGN:
389dd41b 10252 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
c319d56a 10253
4f35b1fc 10254 CASE_FLT_FN (BUILT_IN_ISNAN):
c4503c0a 10255 case BUILT_IN_ISNAND32:
10256 case BUILT_IN_ISNAND64:
10257 case BUILT_IN_ISNAND128:
389dd41b 10258 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
c2f47e15 10259
663870fc 10260 case BUILT_IN_FREE:
10261 if (integer_zerop (arg0))
10262 return build_empty_stmt (loc);
10263 break;
10264
c2f47e15 10265 default:
10266 break;
10267 }
10268
10269 return NULL_TREE;
10270
10271}
10272
10273/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
e80cc485 10274 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 10275
10276static tree
e80cc485 10277fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 10278{
10279 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10280 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10281
10282 switch (fcode)
10283 {
65dd1378 10284 CASE_FLT_FN (BUILT_IN_JN):
10285 if (validate_arg (arg0, INTEGER_TYPE)
10286 && validate_arg (arg1, REAL_TYPE))
10287 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10288 break;
6ff9eeff 10289
10290 CASE_FLT_FN (BUILT_IN_YN):
10291 if (validate_arg (arg0, INTEGER_TYPE)
10292 && validate_arg (arg1, REAL_TYPE))
10293 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10294 &dconst0, false);
10295 break;
e5407ca6 10296
10297 CASE_FLT_FN (BUILT_IN_DREM):
10298 CASE_FLT_FN (BUILT_IN_REMAINDER):
10299 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10300 && validate_arg (arg1, REAL_TYPE))
e5407ca6 10301 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10302 break;
e84da7c1 10303
10304 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10305 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10306 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10307 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 10308 return do_mpfr_lgamma_r (arg0, arg1, type);
10309 break;
c2f47e15 10310
10311 CASE_FLT_FN (BUILT_IN_ATAN2):
10312 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10313 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10314 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10315 break;
10316
10317 CASE_FLT_FN (BUILT_IN_FDIM):
10318 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10319 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10320 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10321 break;
10322
10323 CASE_FLT_FN (BUILT_IN_HYPOT):
389dd41b 10324 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
c2f47e15 10325
c699fab8 10326 CASE_FLT_FN (BUILT_IN_CPOW):
10327 if (validate_arg (arg0, COMPLEX_TYPE)
10328 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10329 && validate_arg (arg1, COMPLEX_TYPE)
48e1416a 10330 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
652d9409 10331 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
c699fab8 10332 break;
c699fab8 10333
7587301b 10334 CASE_FLT_FN (BUILT_IN_LDEXP):
389dd41b 10335 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
7587301b 10336 CASE_FLT_FN (BUILT_IN_SCALBN):
10337 CASE_FLT_FN (BUILT_IN_SCALBLN):
389dd41b 10338 return fold_builtin_load_exponent (loc, arg0, arg1,
10339 type, /*ldexp=*/false);
7587301b 10340
3838b9ae 10341 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 10342 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 10343
ebf8b4f5 10344 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 10345 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 10346
c2f47e15 10347 case BUILT_IN_STRSTR:
389dd41b 10348 return fold_builtin_strstr (loc, arg0, arg1, type);
c2f47e15 10349
c2f47e15 10350 case BUILT_IN_STRSPN:
389dd41b 10351 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 10352
10353 case BUILT_IN_STRCSPN:
389dd41b 10354 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 10355
10356 case BUILT_IN_STRCHR:
10357 case BUILT_IN_INDEX:
389dd41b 10358 return fold_builtin_strchr (loc, arg0, arg1, type);
c2f47e15 10359
10360 case BUILT_IN_STRRCHR:
10361 case BUILT_IN_RINDEX:
389dd41b 10362 return fold_builtin_strrchr (loc, arg0, arg1, type);
c2f47e15 10363
c2f47e15 10364 case BUILT_IN_STRCMP:
389dd41b 10365 return fold_builtin_strcmp (loc, arg0, arg1);
c2f47e15 10366
10367 case BUILT_IN_STRPBRK:
389dd41b 10368 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 10369
10370 case BUILT_IN_EXPECT:
c83059be 10371 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 10372
10373 CASE_FLT_FN (BUILT_IN_POW):
389dd41b 10374 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
c2f47e15 10375
10376 CASE_FLT_FN (BUILT_IN_POWI):
389dd41b 10377 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
c2f47e15 10378
10379 CASE_FLT_FN (BUILT_IN_COPYSIGN):
389dd41b 10380 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
c2f47e15 10381
10382 CASE_FLT_FN (BUILT_IN_FMIN):
389dd41b 10383 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
c2f47e15 10384
10385 CASE_FLT_FN (BUILT_IN_FMAX):
389dd41b 10386 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
726069ba 10387
9bc9f15f 10388 case BUILT_IN_ISGREATER:
389dd41b 10389 return fold_builtin_unordered_cmp (loc, fndecl,
10390 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 10391 case BUILT_IN_ISGREATEREQUAL:
389dd41b 10392 return fold_builtin_unordered_cmp (loc, fndecl,
10393 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 10394 case BUILT_IN_ISLESS:
389dd41b 10395 return fold_builtin_unordered_cmp (loc, fndecl,
10396 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 10397 case BUILT_IN_ISLESSEQUAL:
389dd41b 10398 return fold_builtin_unordered_cmp (loc, fndecl,
10399 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 10400 case BUILT_IN_ISLESSGREATER:
389dd41b 10401 return fold_builtin_unordered_cmp (loc, fndecl,
10402 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 10403 case BUILT_IN_ISUNORDERED:
389dd41b 10404 return fold_builtin_unordered_cmp (loc, fndecl,
10405 arg0, arg1, UNORDERED_EXPR,
d5019fe8 10406 NOP_EXPR);
9bc9f15f 10407
7c2f0500 10408 /* We do the folding for va_start in the expander. */
10409 case BUILT_IN_VA_START:
10410 break;
f0613857 10411
0a39fd54 10412 case BUILT_IN_OBJECT_SIZE:
c2f47e15 10413 return fold_builtin_object_size (arg0, arg1);
0a39fd54 10414
1cd6e20d 10415 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10416 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10417
10418 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10419 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10420
c2f47e15 10421 default:
10422 break;
10423 }
10424 return NULL_TREE;
10425}
10426
10427/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 10428 and ARG2.
c2f47e15 10429 This function returns NULL_TREE if no simplification was possible. */
10430
10431static tree
389dd41b 10432fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 10433 tree arg0, tree arg1, tree arg2)
c2f47e15 10434{
10435 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10436 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10437 switch (fcode)
10438 {
10439
10440 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 10441 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 10442
10443 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 10444 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 10445 break;
10446
e5407ca6 10447 CASE_FLT_FN (BUILT_IN_REMQUO):
10448 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10449 && validate_arg (arg1, REAL_TYPE)
10450 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 10451 return do_mpfr_remquo (arg0, arg1, arg2);
10452 break;
e5407ca6 10453
c2f47e15 10454 case BUILT_IN_STRNCMP:
389dd41b 10455 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
c2f47e15 10456
7959b13b 10457 case BUILT_IN_MEMCHR:
389dd41b 10458 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
7959b13b 10459
c2f47e15 10460 case BUILT_IN_BCMP:
10461 case BUILT_IN_MEMCMP:
389dd41b 10462      return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 10463
c83059be 10464 case BUILT_IN_EXPECT:
10465 return fold_builtin_expect (loc, arg0, arg1, arg2);
10466
0c93c8a9 10467 case BUILT_IN_ADD_OVERFLOW:
10468 case BUILT_IN_SUB_OVERFLOW:
10469 case BUILT_IN_MUL_OVERFLOW:
10470 case BUILT_IN_SADD_OVERFLOW:
10471 case BUILT_IN_SADDL_OVERFLOW:
10472 case BUILT_IN_SADDLL_OVERFLOW:
10473 case BUILT_IN_SSUB_OVERFLOW:
10474 case BUILT_IN_SSUBL_OVERFLOW:
10475 case BUILT_IN_SSUBLL_OVERFLOW:
10476 case BUILT_IN_SMUL_OVERFLOW:
10477 case BUILT_IN_SMULL_OVERFLOW:
10478 case BUILT_IN_SMULLL_OVERFLOW:
10479 case BUILT_IN_UADD_OVERFLOW:
10480 case BUILT_IN_UADDL_OVERFLOW:
10481 case BUILT_IN_UADDLL_OVERFLOW:
10482 case BUILT_IN_USUB_OVERFLOW:
10483 case BUILT_IN_USUBL_OVERFLOW:
10484 case BUILT_IN_USUBLL_OVERFLOW:
10485 case BUILT_IN_UMUL_OVERFLOW:
10486 case BUILT_IN_UMULL_OVERFLOW:
10487 case BUILT_IN_UMULLL_OVERFLOW:
10488 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10489
650e4c94 10490 default:
10491 break;
10492 }
c2f47e15 10493 return NULL_TREE;
10494}
650e4c94 10495
c2f47e15 10496/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 10497   arguments.  The trailing bool parameter (whether the call's result is
10498   ignored) is currently unused.  This function returns NULL_TREE if no
10499 simplification was possible. */
48e1416a 10500
2165588a 10501tree
e80cc485 10502fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 10503{
10504 tree ret = NULL_TREE;
a7f5bb2d 10505
c2f47e15 10506 switch (nargs)
10507 {
10508 case 0:
e80cc485 10509 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 10510 break;
10511 case 1:
e80cc485 10512 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 10513 break;
10514 case 2:
e80cc485 10515 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 10516 break;
10517 case 3:
e80cc485 10518 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 10519 break;
c2f47e15 10520 default:
e80cc485 10521 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 10522 break;
10523 }
10524 if (ret)
10525 {
75a70cf9 10526 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 10527 SET_EXPR_LOCATION (ret, loc);
c2f47e15 10528 TREE_NO_WARNING (ret) = 1;
10529 return ret;
10530 }
10531 return NULL_TREE;
10532}
10533
0e80b01d 10534/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10535 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10536 of arguments in ARGS to be omitted. OLDNARGS is the number of
10537 elements in ARGS. */
c2f47e15 10538
10539static tree
0e80b01d 10540rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10541 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 10542{
0e80b01d 10543 int nargs = oldnargs - skip + n;
10544 tree *buffer;
c2f47e15 10545
0e80b01d 10546 if (n > 0)
c2f47e15 10547 {
0e80b01d 10548 int i, j;
c2f47e15 10549
0e80b01d 10550 buffer = XALLOCAVEC (tree, nargs);
10551 for (i = 0; i < n; i++)
10552 buffer[i] = va_arg (newargs, tree);
10553 for (j = skip; j < oldnargs; j++, i++)
10554 buffer[i] = args[j];
10555 }
10556 else
10557 buffer = args + skip;
19fbe3a4 10558
0e80b01d 10559 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10560}
c2f47e15 10561
198622c0 10562/* Return true if FNDECL shouldn't be folded right now.
10563 If a built-in function has an inline attribute always_inline
10564   wrapper, defer folding it until after always_inline functions have
10565   been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10566 might not be performed. */
10567
51d2c51e 10568bool
198622c0 10569avoid_folding_inline_builtin (tree fndecl)
10570{
10571 return (DECL_DECLARED_INLINE_P (fndecl)
10572 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10573 && cfun
10574 && !cfun->always_inline_functions_inlined
10575 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10576}
10577
4ee9c684 10578/* A wrapper function for builtin folding that prevents warnings for
491e04ef 10579 "statement without effect" and the like, caused by removing the
4ee9c684 10580 call node earlier than the warning is generated. */
10581
10582tree
389dd41b 10583fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 10584{
c2f47e15 10585 tree ret = NULL_TREE;
10586 tree fndecl = get_callee_fndecl (exp);
10587 if (fndecl
10588 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 10589 && DECL_BUILT_IN (fndecl)
10590 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10591 yet. Defer folding until we see all the arguments
10592 (after inlining). */
10593 && !CALL_EXPR_VA_ARG_PACK (exp))
10594 {
10595 int nargs = call_expr_nargs (exp);
10596
10597 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10598 instead last argument is __builtin_va_arg_pack (). Defer folding
10599 even in that case, until arguments are finalized. */
10600 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10601 {
10602 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10603 if (fndecl2
10604 && TREE_CODE (fndecl2) == FUNCTION_DECL
10605 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10606 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10607 return NULL_TREE;
10608 }
10609
198622c0 10610 if (avoid_folding_inline_builtin (fndecl))
10611 return NULL_TREE;
10612
c2f47e15 10613 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 10614 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10615 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 10616 else
10617 {
9d884767 10618 tree *args = CALL_EXPR_ARGP (exp);
10619 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 10620 if (ret)
389dd41b 10621 return ret;
c2f47e15 10622 }
4ee9c684 10623 }
c2f47e15 10624 return NULL_TREE;
10625}
48e1416a 10626
9d884767 10627/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10628 N arguments are passed in the array ARGARRAY. Return a folded
10629 expression or NULL_TREE if no simplification was possible. */
805e22b2 10630
10631tree
9d884767 10632fold_builtin_call_array (location_t loc, tree,
d01f58f9 10633 tree fn,
10634 int n,
10635 tree *argarray)
7e15618b 10636{
9d884767 10637 if (TREE_CODE (fn) != ADDR_EXPR)
10638 return NULL_TREE;
c2f47e15 10639
9d884767 10640 tree fndecl = TREE_OPERAND (fn, 0);
10641 if (TREE_CODE (fndecl) == FUNCTION_DECL
10642 && DECL_BUILT_IN (fndecl))
10643 {
10644 /* If last argument is __builtin_va_arg_pack (), arguments to this
10645 function are not finalized yet. Defer folding until they are. */
10646 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10647 {
10648 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10649 if (fndecl2
10650 && TREE_CODE (fndecl2) == FUNCTION_DECL
10651 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10652 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10653 return NULL_TREE;
10654 }
10655 if (avoid_folding_inline_builtin (fndecl))
10656 return NULL_TREE;
10657 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10658 return targetm.fold_builtin (fndecl, n, argarray, false);
10659 else
10660 return fold_builtin_n (loc, fndecl, argarray, n, false);
10661 }
c2f47e15 10662
9d884767 10663 return NULL_TREE;
c2f47e15 10664}
10665
af1409ad 10666/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10667 along with N new arguments specified as the "..." parameters. SKIP
10668 is the number of arguments in EXP to be omitted. This function is used
10669 to do varargs-to-varargs transformations. */
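/* For example (illustrative only, with placeholder names): if EXP is a
   call f (a, b, c, d), then rewrite_call_expr (loc, exp, 2, fndecl, 1, x)
   builds the call fndecl (x, c, d): SKIP drops the first two old
   arguments and the single new argument X is prepended.  */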
10670
10671static tree
10672rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10673{
10674 va_list ap;
10675 tree t;
10676
10677 va_start (ap, n);
10678 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10679 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10680 va_end (ap);
c2f47e15 10681
af1409ad 10682 return t;
c2f47e15 10683}
10684
10685/* Validate a single argument ARG against a tree code CODE representing
10686 a type. */
48e1416a 10687
c2f47e15 10688static bool
b7bf20db 10689validate_arg (const_tree arg, enum tree_code code)
c2f47e15 10690{
10691 if (!arg)
10692 return false;
10693 else if (code == POINTER_TYPE)
10694 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 10695 else if (code == INTEGER_TYPE)
10696 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 10697 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 10698}
0eb671f7 10699
75a70cf9 10700/* This function validates the types of a function call argument list
10701 against a specified list of tree_codes. If the last specifier is a 0,
10702   that represents an ellipsis; otherwise the last specifier must be a
10703 VOID_TYPE.
10704
10705 This is the GIMPLE version of validate_arglist. Eventually we want to
10706 completely convert builtins.c to work from GIMPLEs and the tree based
10707 validate_arglist will then be removed. */
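/* Usage sketch (illustrative only):

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   returns true iff CALL has exactly two arguments, the first of some
   real type and the second of some pointer type; replacing the trailing
   VOID_TYPE with 0 would additionally accept extra trailing arguments.  */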
10708
10709bool
1a91d914 10710validate_gimple_arglist (const gcall *call, ...)
75a70cf9 10711{
10712 enum tree_code code;
10713  bool res = false;
10714 va_list ap;
10715 const_tree arg;
10716 size_t i;
10717
10718 va_start (ap, call);
10719 i = 0;
10720
10721 do
10722 {
d62e827b 10723 code = (enum tree_code) va_arg (ap, int);
75a70cf9 10724 switch (code)
10725 {
10726 case 0:
10727	  /* This signifies an ellipsis; any further arguments are all OK.  */
10728 res = true;
10729 goto end;
10730 case VOID_TYPE:
10731	  /* This signifies an endlink; if no arguments remain, return
10732 true, otherwise return false. */
10733 res = (i == gimple_call_num_args (call));
10734 goto end;
10735 default:
10736 /* If no parameters remain or the parameter's code does not
10737 match the specified code, return false. Otherwise continue
10738 checking any remaining arguments. */
10739 arg = gimple_call_arg (call, i++);
10740 if (!validate_arg (arg, code))
10741 goto end;
10742 break;
10743 }
10744 }
10745 while (1);
10746
10747 /* We need gotos here since we can only have one VA_CLOSE in a
10748 function. */
10749 end: ;
10750 va_end (ap);
10751
10752 return res;
10753}
10754
fc2a2dcb 10755/* Default target-specific builtin expander that does nothing. */
10756
10757rtx
aecda0d6 10758default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10759 rtx target ATTRIBUTE_UNUSED,
10760 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 10761 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 10762 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 10763{
10764 return NULL_RTX;
10765}
c7926a82 10766
01537105 10767/* Returns true if EXP represents data that would potentially reside
10768 in a readonly section. */
10769
b9ea678c 10770bool
01537105 10771readonly_data_expr (tree exp)
10772{
10773 STRIP_NOPS (exp);
10774
9ff0637e 10775 if (TREE_CODE (exp) != ADDR_EXPR)
10776 return false;
10777
10778 exp = get_base_address (TREE_OPERAND (exp, 0));
10779 if (!exp)
10780 return false;
10781
10782 /* Make sure we call decl_readonly_section only for trees it
10783 can handle (since it returns true for everything it doesn't
10784 understand). */
491e04ef 10785 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10786 || TREE_CODE (exp) == CONSTRUCTOR
10787 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10788 return decl_readonly_section (exp, 0);
01537105 10789 else
10790 return false;
10791}
4ee9c684 10792
c2f47e15 10793/* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10794 to the call, and TYPE is its return type.
4ee9c684 10795
c2f47e15 10796 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10797 simplified form of the call as a tree.
10798
10799 The simplified form may be a constant or other expression which
10800 computes the same value, but in a more efficient manner (including
10801 calls to other builtin functions).
10802
10803 The call may contain arguments which need to be evaluated, but
10804 which are not useful to determine the result of the call. In
10805 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10806 COMPOUND_EXPR will be an argument which must be evaluated.
10807 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10808 COMPOUND_EXPR in the chain will contain the tree for the simplified
10809 form of the builtin function call. */
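/* Worked examples (illustrative only):
     strstr (s, "")          folds to (the equivalent of) (char *) s;
     strstr (s, "a")         folds to strchr (s, 'a');
     strstr ("hello", "ll")  folds to an offset into the constant string,
                             i.e. "hello" + 2.  */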
10810
10811static tree
389dd41b 10812fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10813{
c2f47e15 10814 if (!validate_arg (s1, POINTER_TYPE)
10815 || !validate_arg (s2, POINTER_TYPE))
10816 return NULL_TREE;
4ee9c684 10817 else
10818 {
4ee9c684 10819 tree fn;
10820 const char *p1, *p2;
10821
10822 p2 = c_getstr (s2);
10823 if (p2 == NULL)
c2f47e15 10824 return NULL_TREE;
4ee9c684 10825
10826 p1 = c_getstr (s1);
10827 if (p1 != NULL)
10828 {
10829 const char *r = strstr (p1, p2);
daa1d5f5 10830 tree tem;
4ee9c684 10831
4ee9c684 10832 if (r == NULL)
779b4c41 10833 return build_int_cst (TREE_TYPE (s1), 0);
c0c67e38 10834
10835 /* Return an offset into the constant string argument. */
2cc66f2a 10836 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10837 return fold_convert_loc (loc, type, tem);
4ee9c684 10838 }
10839
7efa231c 10840 /* The argument is const char *, and the result is char *, so we need
10841 a type conversion here to avoid a warning. */
4ee9c684 10842 if (p2[0] == '\0')
389dd41b 10843 return fold_convert_loc (loc, type, s1);
4ee9c684 10844
10845 if (p2[1] != '\0')
c2f47e15 10846 return NULL_TREE;
4ee9c684 10847
b9a16870 10848 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10849 if (!fn)
c2f47e15 10850 return NULL_TREE;
4ee9c684 10851
10852 /* New argument list transforming strstr(s1, s2) to
10853 strchr(s1, s2[0]). */
7002a1c8 10854 return build_call_expr_loc (loc, fn, 2, s1,
10855 build_int_cst (integer_type_node, p2[0]));
4ee9c684 10856 }
10857}
10858
c2f47e15 10859/* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10860 the call, and TYPE is its return type.
4ee9c684 10861
c2f47e15 10862 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10863 simplified form of the call as a tree.
10864
10865 The simplified form may be a constant or other expression which
10866 computes the same value, but in a more efficient manner (including
10867 calls to other builtin functions).
10868
10869 The call may contain arguments which need to be evaluated, but
10870 which are not useful to determine the result of the call. In
10871 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10872 COMPOUND_EXPR will be an argument which must be evaluated.
10873 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10874 COMPOUND_EXPR in the chain will contain the tree for the simplified
10875 form of the builtin function call. */
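/* Worked examples (illustrative only): strchr ("hello", 'l') folds to
   the offset "hello" + 2, while strchr ("hello", 'z') folds to a null
   pointer constant.  */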
10876
10877static tree
389dd41b 10878fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10879{
c2f47e15 10880 if (!validate_arg (s1, POINTER_TYPE)
10881 || !validate_arg (s2, INTEGER_TYPE))
10882 return NULL_TREE;
4ee9c684 10883 else
10884 {
4ee9c684 10885 const char *p1;
10886
10887 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10888 return NULL_TREE;
4ee9c684 10889
10890 p1 = c_getstr (s1);
10891 if (p1 != NULL)
10892 {
10893 char c;
10894 const char *r;
daa1d5f5 10895 tree tem;
4ee9c684 10896
10897 if (target_char_cast (s2, &c))
c2f47e15 10898 return NULL_TREE;
4ee9c684 10899
10900 r = strchr (p1, c);
10901
10902 if (r == NULL)
779b4c41 10903 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10904
10905 /* Return an offset into the constant string argument. */
2cc66f2a 10906 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10907 return fold_convert_loc (loc, type, tem);
4ee9c684 10908 }
c2f47e15 10909 return NULL_TREE;
4ee9c684 10910 }
10911}
10912
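/* Illustrative example: with both arguments constant, e.g.
   strchr ("hello", 'l'), the search is done at compile time and the
   call folds to an offset into the constant string (or to a null
   pointer if the character does not occur); otherwise the call is
   left for the library.  */
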
c2f47e15 10913/* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10914 the call, and TYPE is its return type.
4ee9c684 10915
c2f47e15 10916 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10917 simplified form of the call as a tree.
10918
10919 The simplified form may be a constant or other expression which
10920 computes the same value, but in a more efficient manner (including
10921 calls to other builtin functions).
10922
10923 The call may contain arguments which need to be evaluated, but
10924 which are not useful to determine the result of the call. In
10925 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10926 COMPOUND_EXPR will be an argument which must be evaluated.
10927 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10928 COMPOUND_EXPR in the chain will contain the tree for the simplified
10929 form of the builtin function call. */
10930
10931static tree
389dd41b 10932fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10933{
c2f47e15 10934 if (!validate_arg (s1, POINTER_TYPE)
10935 || !validate_arg (s2, INTEGER_TYPE))
10936 return NULL_TREE;
4ee9c684 10937 else
10938 {
4ee9c684 10939 tree fn;
10940 const char *p1;
10941
10942 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10943 return NULL_TREE;
4ee9c684 10944
10945 p1 = c_getstr (s1);
10946 if (p1 != NULL)
10947 {
10948 char c;
10949 const char *r;
daa1d5f5 10950 tree tem;
4ee9c684 10951
10952 if (target_char_cast (s2, &c))
c2f47e15 10953 return NULL_TREE;
4ee9c684 10954
10955 r = strrchr (p1, c);
10956
10957 if (r == NULL)
779b4c41 10958 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10959
10960 /* Return an offset into the constant string argument. */
2cc66f2a 10961 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10962 return fold_convert_loc (loc, type, tem);
4ee9c684 10963 }
10964
10965 if (! integer_zerop (s2))
c2f47e15 10966 return NULL_TREE;
4ee9c684 10967
b9a16870 10968 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10969 if (!fn)
c2f47e15 10970 return NULL_TREE;
4ee9c684 10971
10972 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
389dd41b 10973 return build_call_expr_loc (loc, fn, 2, s1, s2);
4ee9c684 10974 }
10975}
10976
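/* Illustrative examples: a fully constant call such as
   strrchr ("hello", 'l') folds to an offset into the constant string,
   and the only other rewrite is strrchr (s, '\0') -> strchr (s, '\0'),
   which is valid because searching for the terminating NUL from either
   end yields the same pointer.  */
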
c2f47e15 10977/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10978 to the call, and TYPE is its return type.
4ee9c684 10979
c2f47e15 10980 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10981 simplified form of the call as a tree.
10982
10983 The simplified form may be a constant or other expression which
10984 computes the same value, but in a more efficient manner (including
10985 calls to other builtin functions).
10986
10987 The call may contain arguments which need to be evaluated, but
10988 which are not useful to determine the result of the call. In
10989 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10990 COMPOUND_EXPR will be an argument which must be evaluated.
10991 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10992 COMPOUND_EXPR in the chain will contain the tree for the simplified
10993 form of the builtin function call. */
10994
10995static tree
389dd41b 10996fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10997{
c2f47e15 10998 if (!validate_arg (s1, POINTER_TYPE)
10999 || !validate_arg (s2, POINTER_TYPE))
11000 return NULL_TREE;
4ee9c684 11001 else
11002 {
4ee9c684 11003 tree fn;
11004 const char *p1, *p2;
11005
11006 p2 = c_getstr (s2);
11007 if (p2 == NULL)
c2f47e15 11008 return NULL_TREE;
4ee9c684 11009
11010 p1 = c_getstr (s1);
11011 if (p1 != NULL)
11012 {
11013 const char *r = strpbrk (p1, p2);
daa1d5f5 11014 tree tem;
4ee9c684 11015
11016 if (r == NULL)
779b4c41 11017 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 11018
11019 /* Return an offset into the constant string argument. */
2cc66f2a 11020 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 11021 return fold_convert_loc (loc, type, tem);
4ee9c684 11022 }
11023
11024 if (p2[0] == '\0')
05abc81b 11025 /* strpbrk(x, "") == NULL.
11026 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 11027 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 11028
11029 if (p2[1] != '\0')
c2f47e15 11030 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 11031
b9a16870 11032 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 11033 if (!fn)
c2f47e15 11034 return NULL_TREE;
4ee9c684 11035
11036 /* New argument list transforming strpbrk(s1, s2) to
11037 strchr(s1, s2[0]). */
7002a1c8 11038 return build_call_expr_loc (loc, fn, 2, s1,
11039 build_int_cst (integer_type_node, p2[0]));
4ee9c684 11040 }
11041}
11042
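/* Illustrative examples of the rewrites performed above:

     strpbrk (s, "")   ->  null pointer (s still evaluated for
                           side effects)
     strpbrk (s, "a")  ->  strchr (s, 'a')

   A fully constant call is evaluated at compile time; anything else
   remains a real strpbrk call.  */
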
c2f47e15 11043/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11044 to the call.
4ee9c684 11045
c2f47e15 11046 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11047 simplified form of the call as a tree.
11048
11049 The simplified form may be a constant or other expression which
11050 computes the same value, but in a more efficient manner (including
11051 calls to other builtin functions).
11052
11053 The call may contain arguments which need to be evaluated, but
11054 which are not useful to determine the result of the call. In
11055 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11056 COMPOUND_EXPR will be an argument which must be evaluated.
11057 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11058 COMPOUND_EXPR in the chain will contain the tree for the simplified
11059 form of the builtin function call. */
11060
11061static tree
389dd41b 11062fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 11063{
c2f47e15 11064 if (!validate_arg (s1, POINTER_TYPE)
11065 || !validate_arg (s2, POINTER_TYPE))
11066 return NULL_TREE;
4ee9c684 11067 else
11068 {
4ee9c684 11069 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11070
11071 /* If both arguments are constants, evaluate at compile-time. */
11072 if (p1 && p2)
11073 {
11074 const size_t r = strspn (p1, p2);
547b938d 11075 return build_int_cst (size_type_node, r);
4ee9c684 11076 }
11077
c2f47e15 11078 /* If either argument is "", return NULL_TREE. */
4ee9c684 11079 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 11080 /* Evaluate and ignore both arguments in case either one has
11081 side-effects. */
389dd41b 11082 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 11083 s1, s2);
c2f47e15 11084 return NULL_TREE;
4ee9c684 11085 }
11086}
11087
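/* Illustrative examples of the two cases handled above:

     strspn ("aab", "ab")  ->  (size_t) 3, computed at compile time
     strspn (s, "")        ->  (size_t) 0, with both arguments still
                               evaluated for side effects

   Any other form remains a library call.  */
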
c2f47e15 11088/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11089 to the call.
4ee9c684 11090
c2f47e15 11091 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11092 simplified form of the call as a tree.
11093
11094 The simplified form may be a constant or other expression which
11095 computes the same value, but in a more efficient manner (including
11096 calls to other builtin functions).
11097
11098 The call may contain arguments which need to be evaluated, but
11099 which are not useful to determine the result of the call. In
11100 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11101 COMPOUND_EXPR will be an argument which must be evaluated.
11102 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11103 COMPOUND_EXPR in the chain will contain the tree for the simplified
11104 form of the builtin function call. */
11105
11106static tree
389dd41b 11107fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 11108{
c2f47e15 11109 if (!validate_arg (s1, POINTER_TYPE)
11110 || !validate_arg (s2, POINTER_TYPE))
11111 return NULL_TREE;
4ee9c684 11112 else
11113 {
4ee9c684 11114 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11115
11116 /* If both arguments are constants, evaluate at compile-time. */
11117 if (p1 && p2)
11118 {
11119 const size_t r = strcspn (p1, p2);
547b938d 11120 return build_int_cst (size_type_node, r);
4ee9c684 11121 }
11122
c2f47e15 11123 /* If the first argument is "", return NULL_TREE. */
4ee9c684 11124 if (p1 && *p1 == '\0')
11125 {
11126 /* Evaluate and ignore argument s2 in case it has
11127 side-effects. */
389dd41b 11128 return omit_one_operand_loc (loc, size_type_node,
39761420 11129 size_zero_node, s2);
4ee9c684 11130 }
11131
11132 /* If the second argument is "", return __builtin_strlen(s1). */
11133 if (p2 && *p2 == '\0')
11134 {
b9a16870 11135 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 11136
11137 /* If the replacement _DECL isn't initialized, don't do the
11138 transformation. */
11139 if (!fn)
c2f47e15 11140 return NULL_TREE;
4ee9c684 11141
389dd41b 11142 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 11143 }
c2f47e15 11144 return NULL_TREE;
4ee9c684 11145 }
11146}
11147
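/* Illustrative examples of the rewrites performed above:

     strcspn ("abc", "c")  ->  (size_t) 2, computed at compile time
     strcspn ("", s)       ->  (size_t) 0, with s still evaluated
     strcspn (s, "")       ->  strlen (s)

   Any other form remains a library call.  */
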
c2f47e15 11148/* Fold the next_arg or va_start call EXP.  Return true if an error or
743b0c6a 11149 warning was produced, false otherwise.  This is done so that we don't
 11150 output the error or warning more than once.  */
75a70cf9 11151
743b0c6a 11152bool
c2f47e15 11153fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 11154{
11155 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 11156 int nargs = call_expr_nargs (exp);
11157 tree arg;
d98fd4a4 11158 /* There is a good chance the current input_location points inside the
 11159 definition of the va_start macro (perhaps on the token for the
 11160 builtin) in a system header, so warnings will not be emitted.
 11161 Use the location in real source code. */
11162 source_location current_location =
11163 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11164 NULL);
4ee9c684 11165
257d99c3 11166 if (!stdarg_p (fntype))
743b0c6a 11167 {
11168 error ("%<va_start%> used in function with fixed args");
11169 return true;
11170 }
c2f47e15 11171
11172 if (va_start_p)
79012a9d 11173 {
c2f47e15 11174 if (nargs != 2)
11175 {
11176 error ("wrong number of arguments to function %<va_start%>");
11177 return true;
11178 }
11179 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 11180 }
 11181 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
 11182 once we have checked the arguments and, if needed, issued a warning. */
c2f47e15 11183 else
4ee9c684 11184 {
c2f47e15 11185 if (nargs == 0)
11186 {
11187 /* Evidently an out of date version of <stdarg.h>; can't validate
11188 va_start's second argument, but can still work as intended. */
d98fd4a4 11189 warning_at (current_location,
7edb1062 11190 OPT_Wvarargs,
11191 "%<__builtin_next_arg%> called without an argument");
c2f47e15 11192 return true;
11193 }
11194 else if (nargs > 1)
a0c938f0 11195 {
c2f47e15 11196 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 11197 return true;
11198 }
c2f47e15 11199 arg = CALL_EXPR_ARG (exp, 0);
11200 }
11201
a8dd994c 11202 if (TREE_CODE (arg) == SSA_NAME)
11203 arg = SSA_NAME_VAR (arg);
11204
c2f47e15 11205 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 11206 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 11207 the arguments and if needed issuing a warning. */
11208 if (!integer_zerop (arg))
11209 {
11210 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 11211
4ee9c684 11212 /* Strip off all nops for the sake of the comparison. This
11213 is not quite the same as STRIP_NOPS. It does more.
 11214 We must also strip off INDIRECT_REF for C++ reference
11215 parameters. */
72dd6141 11216 while (CONVERT_EXPR_P (arg)
4ee9c684 11217 || TREE_CODE (arg) == INDIRECT_REF)
11218 arg = TREE_OPERAND (arg, 0);
11219 if (arg != last_parm)
a0c938f0 11220 {
b08cf617 11221 /* FIXME: Sometimes with the tree optimizers we can end up with
 11222 something other than the last argument even though the user used
 11223 the last argument. We just warn and treat it as if it were the
 11224 last argument, so wrong code may be generated because of
 11225 it. */
d98fd4a4 11226 warning_at (current_location,
7edb1062 11227 OPT_Wvarargs,
d98fd4a4 11228 "second parameter of %<va_start%> not last named argument");
743b0c6a 11229 }
24158ad7 11230
11231 /* Undefined by C99 7.15.1.4p4 (va_start):
11232 "If the parameter parmN is declared with the register storage
11233 class, with a function or array type, or with a type that is
11234 not compatible with the type that results after application of
11235 the default argument promotions, the behavior is undefined."
11236 */
11237 else if (DECL_REGISTER (arg))
d98fd4a4 11238 {
11239 warning_at (current_location,
7edb1062 11240 OPT_Wvarargs,
d98fd4a4 11241 "undefined behaviour when second parameter of "
11242 "%<va_start%> is declared with %<register%> storage");
11243 }
24158ad7 11244
79012a9d 11245 /* We want to verify the second parameter just once before the tree
a0c938f0 11246 optimizers are run and then avoid keeping it in the tree,
11247 as otherwise we could warn even for correct code like:
11248 void foo (int i, ...)
11249 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 11250 if (va_start_p)
11251 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11252 else
11253 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 11254 }
11255 return false;
4ee9c684 11256}
11257
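/* Illustrative example of the checks above: for

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, b); va_end (ap); }

   the call is accepted and its second argument is rewritten to 0 after
   this first verification; va_start (ap, a) instead triggers the
   "not last named argument" warning, and using va_start in a function
   without "..." is rejected with an error.  */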
11258
c2f47e15 11259/* Expand a call EXP to __builtin_object_size. */
0a39fd54 11260
f7715905 11261static rtx
0a39fd54 11262expand_builtin_object_size (tree exp)
11263{
11264 tree ost;
11265 int object_size_type;
11266 tree fndecl = get_callee_fndecl (exp);
0a39fd54 11267
c2f47e15 11268 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 11269 {
b8c23db3 11270 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11271 exp, fndecl);
0a39fd54 11272 expand_builtin_trap ();
11273 return const0_rtx;
11274 }
11275
c2f47e15 11276 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 11277 STRIP_NOPS (ost);
11278
11279 if (TREE_CODE (ost) != INTEGER_CST
11280 || tree_int_cst_sgn (ost) < 0
11281 || compare_tree_int (ost, 3) > 0)
11282 {
b8c23db3 11283 error ("%Klast argument of %D is not integer constant between 0 and 3",
11284 exp, fndecl);
0a39fd54 11285 expand_builtin_trap ();
11286 return const0_rtx;
11287 }
11288
e913b5cd 11289 object_size_type = tree_to_shwi (ost);
0a39fd54 11290
11291 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11292}
11293
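/* Illustrative note: a __builtin_object_size call only reaches this
   expander when it could not be folded to a constant earlier, so it
   expands to the documented "unknown" results, e.g.

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  (size_t) 0.  */
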
11294/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11295 FCODE is the BUILT_IN_* to use.
c2f47e15 11296 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 11297 otherwise try to get the result in TARGET, if convenient (and in
11298 mode MODE if that's convenient). */
11299
11300static rtx
3754d046 11301expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 11302 enum built_in_function fcode)
11303{
0a39fd54 11304 tree dest, src, len, size;
11305
c2f47e15 11306 if (!validate_arglist (exp,
0a39fd54 11307 POINTER_TYPE,
11308 fcode == BUILT_IN_MEMSET_CHK
11309 ? INTEGER_TYPE : POINTER_TYPE,
11310 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 11311 return NULL_RTX;
0a39fd54 11312
c2f47e15 11313 dest = CALL_EXPR_ARG (exp, 0);
11314 src = CALL_EXPR_ARG (exp, 1);
11315 len = CALL_EXPR_ARG (exp, 2);
11316 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11317
e913b5cd 11318 if (! tree_fits_uhwi_p (size))
c2f47e15 11319 return NULL_RTX;
0a39fd54 11320
e913b5cd 11321 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 11322 {
11323 tree fn;
11324
11325 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11326 {
b430e8d9 11327 warning_at (tree_nonartificial_location (exp),
11328 0, "%Kcall to %D will always overflow destination buffer",
11329 exp, get_callee_fndecl (exp));
c2f47e15 11330 return NULL_RTX;
0a39fd54 11331 }
11332
0a39fd54 11333 fn = NULL_TREE;
11334 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11335 mem{cpy,pcpy,move,set} is available. */
11336 switch (fcode)
11337 {
11338 case BUILT_IN_MEMCPY_CHK:
b9a16870 11339 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 11340 break;
11341 case BUILT_IN_MEMPCPY_CHK:
b9a16870 11342 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 11343 break;
11344 case BUILT_IN_MEMMOVE_CHK:
b9a16870 11345 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 11346 break;
11347 case BUILT_IN_MEMSET_CHK:
b9a16870 11348 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 11349 break;
11350 default:
11351 break;
11352 }
11353
11354 if (! fn)
c2f47e15 11355 return NULL_RTX;
0a39fd54 11356
0568e9c1 11357 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 11358 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11359 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11360 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11361 }
11362 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 11363 return NULL_RTX;
0a39fd54 11364 else
11365 {
957d0361 11366 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 11367
11368 /* If DEST is not a pointer type, call the normal function. */
11369 if (dest_align == 0)
c2f47e15 11370 return NULL_RTX;
0a39fd54 11371
11372 /* If SRC and DEST are the same (and not volatile), do nothing. */
11373 if (operand_equal_p (src, dest, 0))
11374 {
11375 tree expr;
11376
11377 if (fcode != BUILT_IN_MEMPCPY_CHK)
11378 {
11379 /* Evaluate and ignore LEN in case it has side-effects. */
11380 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11381 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11382 }
11383
2cc66f2a 11384 expr = fold_build_pointer_plus (dest, len);
0a39fd54 11385 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11386 }
11387
11388 /* __memmove_chk special case. */
11389 if (fcode == BUILT_IN_MEMMOVE_CHK)
11390 {
957d0361 11391 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 11392
11393 if (src_align == 0)
c2f47e15 11394 return NULL_RTX;
0a39fd54 11395
11396 /* If src is categorized for a readonly section we can use
11397 normal __memcpy_chk. */
11398 if (readonly_data_expr (src))
11399 {
b9a16870 11400 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 11401 if (!fn)
c2f47e15 11402 return NULL_RTX;
0568e9c1 11403 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11404 dest, src, len, size);
a65c4d64 11405 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11406 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11407 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11408 }
11409 }
c2f47e15 11410 return NULL_RTX;
0a39fd54 11411 }
11412}
11413
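/* Illustrative example: with a length known to fit the known object
   size, e.g.

     __builtin___memcpy_chk (d, s, 16, 32)

   the code above expands a plain memcpy (d, s, 16); if the length is
   known to exceed the size it warns about the guaranteed overflow and
   lets the caller emit the checking call.  A __memmove_chk whose
   source is in read-only data is likewise retried as __memcpy_chk.  */
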
11414/* Emit warning if a buffer overflow is detected at compile time. */
11415
11416static void
11417maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11418{
c2f47e15 11419 int is_strlen = 0;
0a39fd54 11420 tree len, size;
b430e8d9 11421 location_t loc = tree_nonartificial_location (exp);
0a39fd54 11422
11423 switch (fcode)
11424 {
11425 case BUILT_IN_STRCPY_CHK:
11426 case BUILT_IN_STPCPY_CHK:
11427 /* For __strcat_chk the warning will be emitted only if overflowing
11428 by at least strlen (dest) + 1 bytes. */
11429 case BUILT_IN_STRCAT_CHK:
c2f47e15 11430 len = CALL_EXPR_ARG (exp, 1);
11431 size = CALL_EXPR_ARG (exp, 2);
0a39fd54 11432 is_strlen = 1;
11433 break;
b356dfef 11434 case BUILT_IN_STRNCAT_CHK:
0a39fd54 11435 case BUILT_IN_STRNCPY_CHK:
1063acde 11436 case BUILT_IN_STPNCPY_CHK:
c2f47e15 11437 len = CALL_EXPR_ARG (exp, 2);
11438 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11439 break;
11440 case BUILT_IN_SNPRINTF_CHK:
11441 case BUILT_IN_VSNPRINTF_CHK:
c2f47e15 11442 len = CALL_EXPR_ARG (exp, 1);
11443 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11444 break;
11445 default:
11446 gcc_unreachable ();
11447 }
11448
0a39fd54 11449 if (!len || !size)
11450 return;
11451
e913b5cd 11452 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11453 return;
11454
11455 if (is_strlen)
11456 {
11457 len = c_strlen (len, 1);
e913b5cd 11458 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
0a39fd54 11459 return;
11460 }
b356dfef 11461 else if (fcode == BUILT_IN_STRNCAT_CHK)
11462 {
c2f47e15 11463 tree src = CALL_EXPR_ARG (exp, 1);
e913b5cd 11464 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
b356dfef 11465 return;
11466 src = c_strlen (src, 1);
e913b5cd 11467 if (! src || ! tree_fits_uhwi_p (src))
b356dfef 11468 {
b430e8d9 11469 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11470 exp, get_callee_fndecl (exp));
b356dfef 11471 return;
11472 }
11473 else if (tree_int_cst_lt (src, size))
11474 return;
11475 }
e913b5cd 11476 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
0a39fd54 11477 return;
11478
b430e8d9 11479 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11480 exp, get_callee_fndecl (exp));
0a39fd54 11481}
11482
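/* Illustrative example of the warning above: for

     char buf[4];
     __builtin___strcpy_chk (buf, "hello", sizeof buf);

   the source length 5 is not smaller than the object size 4, so the
   "will always overflow destination buffer" warning is emitted; if
   either quantity is unknown at compile time the check stays silent.  */
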
11483/* Emit warning if a buffer overflow is detected at compile time
11484 in __sprintf_chk/__vsprintf_chk calls. */
11485
11486static void
11487maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11488{
1e4adcfc 11489 tree size, len, fmt;
0a39fd54 11490 const char *fmt_str;
c2f47e15 11491 int nargs = call_expr_nargs (exp);
0a39fd54 11492
11493 /* Verify the required arguments in the original call. */
48e1416a 11494
c2f47e15 11495 if (nargs < 4)
0a39fd54 11496 return;
c2f47e15 11497 size = CALL_EXPR_ARG (exp, 2);
11498 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 11499
e913b5cd 11500 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11501 return;
11502
11503 /* Check whether the format is a literal string constant. */
11504 fmt_str = c_getstr (fmt);
11505 if (fmt_str == NULL)
11506 return;
11507
d4473c84 11508 if (!init_target_chars ())
99eabcc1 11509 return;
11510
0a39fd54 11511 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 11512 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 11513 len = build_int_cstu (size_type_node, strlen (fmt_str));
 11514 /* If the format is "%s" and the first ... argument is a string literal,
11515 we know it too. */
c2f47e15 11516 else if (fcode == BUILT_IN_SPRINTF_CHK
11517 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 11518 {
11519 tree arg;
11520
c2f47e15 11521 if (nargs < 5)
0a39fd54 11522 return;
c2f47e15 11523 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 11524 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11525 return;
11526
11527 len = c_strlen (arg, 1);
e913b5cd 11528 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 11529 return;
11530 }
11531 else
11532 return;
11533
11534 if (! tree_int_cst_lt (len, size))
b430e8d9 11535 warning_at (tree_nonartificial_location (exp),
11536 0, "%Kcall to %D will always overflow destination buffer",
11537 exp, get_callee_fndecl (exp));
0a39fd54 11538}
11539
2c281b15 11540/* Emit a warning if free is called with the address of a variable. */
11541
11542static void
11543maybe_emit_free_warning (tree exp)
11544{
11545 tree arg = CALL_EXPR_ARG (exp, 0);
11546
11547 STRIP_NOPS (arg);
11548 if (TREE_CODE (arg) != ADDR_EXPR)
11549 return;
11550
11551 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 11552 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 11553 return;
11554
11555 if (SSA_VAR_P (arg))
f74ea1c2 11556 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11557 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 11558 else
f74ea1c2 11559 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11560 "%Kattempt to free a non-heap object", exp);
2c281b15 11561}
11562
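/* Illustrative examples of what the check above catches:

     int x;  free (&x);        char a[8];  free (a);

   both produce "attempt to free a non-heap object"; pointers whose
   base is a dereference (and so possibly heap storage) are left
   alone.  */
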
c2f47e15 11563/* Fold a call to __builtin_object_size with arguments PTR and OST,
11564 if possible. */
0a39fd54 11565
f7715905 11566static tree
c2f47e15 11567fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 11568{
a6caa15f 11569 unsigned HOST_WIDE_INT bytes;
0a39fd54 11570 int object_size_type;
11571
c2f47e15 11572 if (!validate_arg (ptr, POINTER_TYPE)
11573 || !validate_arg (ost, INTEGER_TYPE))
11574 return NULL_TREE;
0a39fd54 11575
0a39fd54 11576 STRIP_NOPS (ost);
11577
11578 if (TREE_CODE (ost) != INTEGER_CST
11579 || tree_int_cst_sgn (ost) < 0
11580 || compare_tree_int (ost, 3) > 0)
c2f47e15 11581 return NULL_TREE;
0a39fd54 11582
e913b5cd 11583 object_size_type = tree_to_shwi (ost);
0a39fd54 11584
11585 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11586 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11587 and (size_t) 0 for types 2 and 3. */
11588 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 11589 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 11590
11591 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 11592 {
6da74b21 11593 bytes = compute_builtin_object_size (ptr, object_size_type);
11594 if (wi::fits_to_tree_p (bytes, size_type_node))
11595 return build_int_cstu (size_type_node, bytes);
a6caa15f 11596 }
0a39fd54 11597 else if (TREE_CODE (ptr) == SSA_NAME)
11598 {
0a39fd54 11599 /* If object size is not known yet, delay folding until
 11600 later. Maybe subsequent passes will help determine
11601 it. */
11602 bytes = compute_builtin_object_size (ptr, object_size_type);
a6caa15f 11603 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
6da74b21 11604 && wi::fits_to_tree_p (bytes, size_type_node))
11605 return build_int_cstu (size_type_node, bytes);
0a39fd54 11606 }
11607
a6caa15f 11608 return NULL_TREE;
0a39fd54 11609}
11610
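/* Illustrative example: when the object is visible, e.g.

     char buf[64];
     ... __builtin_object_size (&buf[8], 0) ...

   the fold above yields the size_t constant 56; an SSA_NAME pointer is
   retried in later passes, and a pointer with side effects folds
   straight to the defaults (-1 for types 0 and 1, 0 for types 2
   and 3).  */
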
0e80b01d 11611/* Builtins with folding operations that operate on "..." arguments
11612 need special handling; we need to store the arguments in a convenient
11613 data structure before attempting any folding. Fortunately there are
11614 only a few builtins that fall into this category. FNDECL is the
e80cc485 11615 function, EXP is the CALL_EXPR for the call. */
0e80b01d 11616
11617static tree
e80cc485 11618fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
0e80b01d 11619{
11620 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11621 tree ret = NULL_TREE;
11622
11623 switch (fcode)
11624 {
0e80b01d 11625 case BUILT_IN_FPCLASSIFY:
9d884767 11626 ret = fold_builtin_fpclassify (loc, args, nargs);
0e80b01d 11627 break;
11628
11629 default:
11630 break;
11631 }
11632 if (ret)
11633 {
11634 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11635 SET_EXPR_LOCATION (ret, loc);
11636 TREE_NO_WARNING (ret) = 1;
11637 return ret;
11638 }
11639 return NULL_TREE;
11640}
11641
99eabcc1 11642/* Initialize format string characters in the target charset. */
11643
b9ea678c 11644bool
99eabcc1 11645init_target_chars (void)
11646{
11647 static bool init;
11648 if (!init)
11649 {
11650 target_newline = lang_hooks.to_target_charset ('\n');
11651 target_percent = lang_hooks.to_target_charset ('%');
11652 target_c = lang_hooks.to_target_charset ('c');
11653 target_s = lang_hooks.to_target_charset ('s');
11654 if (target_newline == 0 || target_percent == 0 || target_c == 0
11655 || target_s == 0)
11656 return false;
11657
11658 target_percent_c[0] = target_percent;
11659 target_percent_c[1] = target_c;
11660 target_percent_c[2] = '\0';
11661
11662 target_percent_s[0] = target_percent;
11663 target_percent_s[1] = target_s;
11664 target_percent_s[2] = '\0';
11665
11666 target_percent_s_newline[0] = target_percent;
11667 target_percent_s_newline[1] = target_s;
11668 target_percent_s_newline[2] = target_newline;
11669 target_percent_s_newline[3] = '\0';
a0c938f0 11670
99eabcc1 11671 init = true;
11672 }
11673 return true;
11674}
bffb7645 11675
f0c477f2 11676/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11677 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 11678 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 11679 function assumes that you cleared the MPFR flags and then
11680 calculated M to see if anything subsequently set a flag prior to
11681 entering this function. Return NULL_TREE if any checks fail. */
11682
11683static tree
d4473c84 11684do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 11685{
11686 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11687 overflow/underflow occurred. If -frounding-math, proceed iff the
11688 result of calling FUNC was exact. */
d4473c84 11689 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 11690 && (!flag_rounding_math || !inexact))
11691 {
11692 REAL_VALUE_TYPE rr;
11693
66fa16e6 11694 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 11695 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11696 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11697 but the mpfr_t is not, then we underflowed in the
11698 conversion. */
776a7bab 11699 if (real_isfinite (&rr)
f0c477f2 11700 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11701 {
11702 REAL_VALUE_TYPE rmode;
11703
11704 real_convert (&rmode, TYPE_MODE (type), &rr);
11705 /* Proceed iff the specified mode can hold the value. */
11706 if (real_identical (&rmode, &rr))
11707 return build_real (type, rmode);
11708 }
11709 }
11710 return NULL_TREE;
11711}
11712
239d491a 11713/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11714 number and no overflow/underflow occurred. INEXACT is true if M
11715 was not exactly calculated. TYPE is the tree type for the result.
11716 This function assumes that you cleared the MPFR flags and then
11717 calculated M to see if anything subsequently set a flag prior to
652d9409 11718 entering this function. Return NULL_TREE if any checks fail; if
 11719 FORCE_CONVERT is true, the checks are bypassed. */
239d491a 11720
11721static tree
652d9409 11722do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 11723{
11724 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11725 overflow/underflow occurred. If -frounding-math, proceed iff the
11726 result of calling FUNC was exact. */
652d9409 11727 if (force_convert
11728 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11729 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11730 && (!flag_rounding_math || !inexact)))
239d491a 11731 {
11732 REAL_VALUE_TYPE re, im;
11733
b0e7c4d4 11734 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11735 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 11736 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11737 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11738 but the mpfr_t is not, then we underflowed in the
11739 conversion. */
652d9409 11740 if (force_convert
11741 || (real_isfinite (&re) && real_isfinite (&im)
11742 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11743 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 11744 {
11745 REAL_VALUE_TYPE re_mode, im_mode;
11746
11747 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11748 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11749 /* Proceed iff the specified mode can hold the value. */
652d9409 11750 if (force_convert
11751 || (real_identical (&re_mode, &re)
11752 && real_identical (&im_mode, &im)))
239d491a 11753 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11754 build_real (TREE_TYPE (type), im_mode));
11755 }
11756 }
11757 return NULL_TREE;
11758}
239d491a 11759
bffb7645 11760/* If argument ARG is a REAL_CST, call the one-argument mpfr function
11761 FUNC on it and return the resulting value as a tree with type TYPE.
728bac60 11762 If MIN and/or MAX are not NULL, then the supplied ARG must be
11763 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11764 acceptable values, otherwise they are not. The mpfr precision is
11765 set to the precision of TYPE. We assume that function FUNC returns
11766 zero if the result could be calculated exactly within the requested
11767 precision. */
bffb7645 11768
11769static tree
728bac60 11770do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11771 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11772 bool inclusive)
bffb7645 11773{
11774 tree result = NULL_TREE;
48e1416a 11775
bffb7645 11776 STRIP_NOPS (arg);
11777
bd7d6fa4 11778 /* To proceed, MPFR must exactly represent the target floating point
11779 format, which only happens when the target base equals two. */
11780 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11781 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
bffb7645 11782 {
f0c477f2 11783 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
bffb7645 11784
776a7bab 11785 if (real_isfinite (ra)
f0c477f2 11786 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11787 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
bffb7645 11788 {
e2eb2b7f 11789 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11790 const int prec = fmt->p;
11791 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
debf9994 11792 int inexact;
bffb7645 11793 mpfr_t m;
11794
11795 mpfr_init2 (m, prec);
66fa16e6 11796 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11797 mpfr_clear_flags ();
e2eb2b7f 11798 inexact = func (m, m, rnd);
f0c477f2 11799 result = do_mpfr_ckconv (m, type, inexact);
bffb7645 11800 mpfr_clear (m);
11801 }
11802 }
48e1416a 11803
bffb7645 11804 return result;
11805}
f0c477f2 11806
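/* Illustrative note: callers elsewhere in this file are expected to
   pass MPFR entry points such as mpfr_sin or mpfr_exp here, so that a
   call like sin (1.0) with a constant argument can fold to a REAL_CST
   computed at the precision of TYPE, provided do_mpfr_ckconv accepts
   the result (normal number, no overflow or underflow).  */
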
 11807/* If arguments ARG1 and ARG2 are REAL_CSTs, call the two-argument mpfr
 11808 function FUNC on them and return the resulting value as a tree with type TYPE.
11809 The mpfr precision is set to the precision of TYPE. We assume that
11810 function FUNC returns zero if the result could be calculated
11811 exactly within the requested precision. */
11812
11813static tree
11814do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11815 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11816{
11817 tree result = NULL_TREE;
48e1416a 11818
f0c477f2 11819 STRIP_NOPS (arg1);
11820 STRIP_NOPS (arg2);
11821
bd7d6fa4 11822 /* To proceed, MPFR must exactly represent the target floating point
11823 format, which only happens when the target base equals two. */
11824 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11825 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11826 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
f0c477f2 11827 {
11828 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11829 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11830
776a7bab 11831 if (real_isfinite (ra1) && real_isfinite (ra2))
f0c477f2 11832 {
e2eb2b7f 11833 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11834 const int prec = fmt->p;
11835 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
f0c477f2 11836 int inexact;
11837 mpfr_t m1, m2;
11838
11839 mpfr_inits2 (prec, m1, m2, NULL);
66fa16e6 11840 mpfr_from_real (m1, ra1, GMP_RNDN);
11841 mpfr_from_real (m2, ra2, GMP_RNDN);
d4473c84 11842 mpfr_clear_flags ();
e2eb2b7f 11843 inexact = func (m1, m1, m2, rnd);
f0c477f2 11844 result = do_mpfr_ckconv (m1, type, inexact);
11845 mpfr_clears (m1, m2, NULL);
11846 }
11847 }
48e1416a 11848
f0c477f2 11849 return result;
11850}
d92f994c 11851
9917422b 11852/* If arguments ARG1, ARG2 and ARG3 are REAL_CSTs, call the three-argument
 11853 mpfr function FUNC on them and return the resulting value as a tree with type TYPE.
11854 The mpfr precision is set to the precision of TYPE. We assume that
11855 function FUNC returns zero if the result could be calculated
11856 exactly within the requested precision. */
11857
11858static tree
11859do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11860 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11861{
11862 tree result = NULL_TREE;
48e1416a 11863
9917422b 11864 STRIP_NOPS (arg1);
11865 STRIP_NOPS (arg2);
11866 STRIP_NOPS (arg3);
11867
bd7d6fa4 11868 /* To proceed, MPFR must exactly represent the target floating point
11869 format, which only happens when the target base equals two. */
11870 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11871 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11872 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11873 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
9917422b 11874 {
11875 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11876 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11877 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11878
776a7bab 11879 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
9917422b 11880 {
e2eb2b7f 11881 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11882 const int prec = fmt->p;
11883 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9917422b 11884 int inexact;
11885 mpfr_t m1, m2, m3;
11886
11887 mpfr_inits2 (prec, m1, m2, m3, NULL);
66fa16e6 11888 mpfr_from_real (m1, ra1, GMP_RNDN);
11889 mpfr_from_real (m2, ra2, GMP_RNDN);
11890 mpfr_from_real (m3, ra3, GMP_RNDN);
d4473c84 11891 mpfr_clear_flags ();
e2eb2b7f 11892 inexact = func (m1, m1, m2, m3, rnd);
9917422b 11893 result = do_mpfr_ckconv (m1, type, inexact);
11894 mpfr_clears (m1, m2, m3, NULL);
11895 }
11896 }
48e1416a 11897
9917422b 11898 return result;
11899}
11900
d92f994c 11901/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11902 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
d735c391 11903 If ARG_SINP and ARG_COSP are NULL then the result is returned
11904 as a complex value.
d92f994c 11905 The type is taken from the type of ARG and is used for setting the
11906 precision of the calculation and results. */
11907
11908static tree
11909do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11910{
bd7d6fa4 11911 tree const type = TREE_TYPE (arg);
d92f994c 11912 tree result = NULL_TREE;
48e1416a 11913
d92f994c 11914 STRIP_NOPS (arg);
48e1416a 11915
bd7d6fa4 11916 /* To proceed, MPFR must exactly represent the target floating point
11917 format, which only happens when the target base equals two. */
11918 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11919 && TREE_CODE (arg) == REAL_CST
11920 && !TREE_OVERFLOW (arg))
d92f994c 11921 {
11922 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11923
776a7bab 11924 if (real_isfinite (ra))
d92f994c 11925 {
e2eb2b7f 11926 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11927 const int prec = fmt->p;
11928 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
d92f994c 11929 tree result_s, result_c;
11930 int inexact;
11931 mpfr_t m, ms, mc;
11932
11933 mpfr_inits2 (prec, m, ms, mc, NULL);
66fa16e6 11934 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11935 mpfr_clear_flags ();
e2eb2b7f 11936 inexact = mpfr_sin_cos (ms, mc, m, rnd);
d92f994c 11937 result_s = do_mpfr_ckconv (ms, type, inexact);
11938 result_c = do_mpfr_ckconv (mc, type, inexact);
11939 mpfr_clears (m, ms, mc, NULL);
11940 if (result_s && result_c)
11941 {
d735c391 11942 /* If we are to return in a complex value do so. */
11943 if (!arg_sinp && !arg_cosp)
11944 return build_complex (build_complex_type (type),
11945 result_c, result_s);
11946
d92f994c 11947 /* Dereference the sin/cos pointer arguments. */
11948 arg_sinp = build_fold_indirect_ref (arg_sinp);
11949 arg_cosp = build_fold_indirect_ref (arg_cosp);
 11950 /* Proceed if valid pointer types were passed in. */
11951 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11952 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11953 {
11954 /* Set the values. */
41076ef6 11955 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
35cc02b5 11956 result_s);
d92f994c 11957 TREE_SIDE_EFFECTS (result_s) = 1;
41076ef6 11958 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
35cc02b5 11959 result_c);
d92f994c 11960 TREE_SIDE_EFFECTS (result_c) = 1;
11961 /* Combine the assignments into a compound expr. */
11962 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11963 result_s, result_c));
11964 }
11965 }
11966 }
11967 }
11968 return result;
11969}
65dd1378 11970
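/* Illustrative example: for sincos (1.0, &s, &c) with a constant
   argument, the helper above returns a compound expression that stores
   the two computed constants through the pointers; with ARG_SINP and
   ARG_COSP both NULL it instead returns cos (arg) + i*sin (arg) as a
   single complex constant.  */
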
65dd1378 11971/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11972 two-argument mpfr order N Bessel function FUNC on them and return
11973 the resulting value as a tree with type TYPE. The mpfr precision
11974 is set to the precision of TYPE. We assume that function FUNC
11975 returns zero if the result could be calculated exactly within the
11976 requested precision. */
11977static tree
11978do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11979 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11980 const REAL_VALUE_TYPE *min, bool inclusive)
11981{
11982 tree result = NULL_TREE;
11983
11984 STRIP_NOPS (arg1);
11985 STRIP_NOPS (arg2);
11986
11987 /* To proceed, MPFR must exactly represent the target floating point
11988 format, which only happens when the target base equals two. */
11989 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
e913b5cd 11990 && tree_fits_shwi_p (arg1)
65dd1378 11991 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11992 {
e913b5cd 11993 const HOST_WIDE_INT n = tree_to_shwi (arg1);
65dd1378 11994 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11995
11996 if (n == (long)n
776a7bab 11997 && real_isfinite (ra)
65dd1378 11998 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11999 {
e2eb2b7f 12000 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12001 const int prec = fmt->p;
12002 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
65dd1378 12003 int inexact;
12004 mpfr_t m;
12005
12006 mpfr_init2 (m, prec);
12007 mpfr_from_real (m, ra, GMP_RNDN);
12008 mpfr_clear_flags ();
e2eb2b7f 12009 inexact = func (m, n, m, rnd);
65dd1378 12010 result = do_mpfr_ckconv (m, type, inexact);
12011 mpfr_clear (m);
12012 }
12013 }
48e1416a 12014
65dd1378 12015 return result;
12016}
e5407ca6 12017
12018/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12019 the pointer *(ARG_QUO) and return the result. The type is taken
12020 from the type of ARG0 and is used for setting the precision of the
12021 calculation and results. */
12022
12023static tree
12024do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12025{
12026 tree const type = TREE_TYPE (arg0);
12027 tree result = NULL_TREE;
48e1416a 12028
e5407ca6 12029 STRIP_NOPS (arg0);
12030 STRIP_NOPS (arg1);
48e1416a 12031
e5407ca6 12032 /* To proceed, MPFR must exactly represent the target floating point
12033 format, which only happens when the target base equals two. */
12034 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12035 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12036 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12037 {
12038 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12039 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12040
776a7bab 12041 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 12042 {
e2eb2b7f 12043 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12044 const int prec = fmt->p;
12045 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e5407ca6 12046 tree result_rem;
12047 long integer_quo;
12048 mpfr_t m0, m1;
12049
12050 mpfr_inits2 (prec, m0, m1, NULL);
12051 mpfr_from_real (m0, ra0, GMP_RNDN);
12052 mpfr_from_real (m1, ra1, GMP_RNDN);
12053 mpfr_clear_flags ();
e2eb2b7f 12054 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 12055 /* Remquo is independent of the rounding mode, so pass
12056 inexact=0 to do_mpfr_ckconv(). */
12057 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12058 mpfr_clears (m0, m1, NULL);
12059 if (result_rem)
12060 {
12061 /* MPFR calculates quo in the host's long so it may
12062 return more bits in quo than the target int can hold
12063 if sizeof(host long) > sizeof(target int). This can
12064 happen even for native compilers in LP64 mode. In
 12065 these cases, reduce the quo value modulo the largest
 12066 number that the target int can hold while leaving one
12067 bit for the sign. */
12068 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12069 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12070
12071 /* Dereference the quo pointer argument. */
12072 arg_quo = build_fold_indirect_ref (arg_quo);
12073 /* Proceed iff a valid pointer type was passed in. */
12074 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12075 {
12076 /* Set the value. */
7002a1c8 12077 tree result_quo
12078 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12079 build_int_cst (TREE_TYPE (arg_quo),
12080 integer_quo));
e5407ca6 12081 TREE_SIDE_EFFECTS (result_quo) = 1;
12082 /* Combine the quo assignment with the rem. */
12083 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12084 result_quo, result_rem));
12085 }
12086 }
12087 }
12088 }
12089 return result;
12090}
e84da7c1 12091
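/* Illustrative example: for remquo (5.0, 3.0, &q) with constant
   arguments, the helper above yields a COMPOUND_EXPR that stores the
   rounded quotient 2 into *q (reduced modulo the target int width if
   the host long is wider) and evaluates to the remainder -1.0 as a
   REAL_CST.  */
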
12092/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12093 resulting value as a tree with type TYPE. The mpfr precision is
12094 set to the precision of TYPE. We assume that this mpfr function
12095 returns zero if the result could be calculated exactly within the
12096 requested precision. In addition, the integer pointer represented
12097 by ARG_SG will be dereferenced and set to the appropriate signgam
12098 (-1,1) value. */
12099
12100static tree
12101do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12102{
12103 tree result = NULL_TREE;
12104
12105 STRIP_NOPS (arg);
48e1416a 12106
e84da7c1 12107 /* To proceed, MPFR must exactly represent the target floating point
12108 format, which only happens when the target base equals two. Also
12109 verify ARG is a constant and that ARG_SG is an int pointer. */
12110 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12111 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12112 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12113 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12114 {
12115 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12116
12117 /* In addition to NaN and Inf, the argument cannot be zero or a
12118 negative integer. */
776a7bab 12119 if (real_isfinite (ra)
e84da7c1 12120 && ra->cl != rvc_zero
9af5ce0c 12121 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 12122 {
e2eb2b7f 12123 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12124 const int prec = fmt->p;
12125 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e84da7c1 12126 int inexact, sg;
12127 mpfr_t m;
12128 tree result_lg;
12129
12130 mpfr_init2 (m, prec);
12131 mpfr_from_real (m, ra, GMP_RNDN);
12132 mpfr_clear_flags ();
e2eb2b7f 12133 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 12134 result_lg = do_mpfr_ckconv (m, type, inexact);
12135 mpfr_clear (m);
12136 if (result_lg)
12137 {
12138 tree result_sg;
12139
12140 /* Dereference the arg_sg pointer argument. */
12141 arg_sg = build_fold_indirect_ref (arg_sg);
12142 /* Assign the signgam value into *arg_sg. */
12143 result_sg = fold_build2 (MODIFY_EXPR,
12144 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 12145 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 12146 TREE_SIDE_EFFECTS (result_sg) = 1;
12147 /* Combine the signgam assignment with the lgamma result. */
12148 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12149 result_sg, result_lg));
12150 }
12151 }
12152 }
12153
12154 return result;
12155}
75a70cf9 12156
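/* Illustrative example: lgamma_r (3.0, &sg) with a constant argument
   folds to a COMPOUND_EXPR that stores the sign 1 into *sg and
   evaluates to log (2.0) (about 0.6931) as a REAL_CST; zero and
   negative-integer arguments are left unfolded because lgamma is
   singular there.  */
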
239d491a 12157/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12158 function FUNC on it and return the resulting value as a tree with
12159 type TYPE. The mpfr precision is set to the precision of TYPE. We
12160 assume that function FUNC returns zero if the result could be
12161 calculated exactly within the requested precision. */
12162
12163static tree
12164do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12165{
12166 tree result = NULL_TREE;
48e1416a 12167
239d491a 12168 STRIP_NOPS (arg);
12169
12170 /* To proceed, MPFR must exactly represent the target floating point
12171 format, which only happens when the target base equals two. */
12172 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12173 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12174 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12175 {
12176 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12177 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12178
12179 if (real_isfinite (re) && real_isfinite (im))
12180 {
12181 const struct real_format *const fmt =
12182 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12183 const int prec = fmt->p;
12184 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
44d89feb 12185 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
239d491a 12186 int inexact;
12187 mpc_t m;
48e1416a 12188
239d491a 12189 mpc_init2 (m, prec);
9af5ce0c 12190 mpfr_from_real (mpc_realref (m), re, rnd);
12191 mpfr_from_real (mpc_imagref (m), im, rnd);
239d491a 12192 mpfr_clear_flags ();
44d89feb 12193 inexact = func (m, m, crnd);
652d9409 12194 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
239d491a 12195 mpc_clear (m);
12196 }
12197 }
12198
12199 return result;
12200}
c699fab8 12201
 12202/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
 12203 mpc function FUNC on them and return the resulting value as a tree
12204 with type TYPE. The mpfr precision is set to the precision of
12205 TYPE. We assume that function FUNC returns zero if the result
652d9409 12206 could be calculated exactly within the requested precision. If
12207 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12208 in the arguments and/or results. */
c699fab8 12209
63e89698 12210tree
652d9409 12211do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 12212 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12213{
12214 tree result = NULL_TREE;
48e1416a 12215
c699fab8 12216 STRIP_NOPS (arg0);
12217 STRIP_NOPS (arg1);
12218
12219 /* To proceed, MPFR must exactly represent the target floating point
12220 format, which only happens when the target base equals two. */
12221 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12222 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12223 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12224 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12225 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12226 {
12227 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12228 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12229 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12230 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12231
652d9409 12232 if (do_nonfinite
12233 || (real_isfinite (re0) && real_isfinite (im0)
12234 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 12235 {
12236 const struct real_format *const fmt =
12237 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12238 const int prec = fmt->p;
12239 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12240 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12241 int inexact;
12242 mpc_t m0, m1;
48e1416a 12243
c699fab8 12244 mpc_init2 (m0, prec);
12245 mpc_init2 (m1, prec);
9af5ce0c 12246 mpfr_from_real (mpc_realref (m0), re0, rnd);
12247 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12248 mpfr_from_real (mpc_realref (m1), re1, rnd);
12249 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 12250 mpfr_clear_flags ();
12251 inexact = func (m0, m0, m1, crnd);
652d9409 12252 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 12253 mpc_clear (m0);
12254 mpc_clear (m1);
12255 }
12256 }
12257
12258 return result;
12259}
239d491a 12260
75a70cf9 12261/* A wrapper function for builtin folding that prevents warnings for
12262 "statement without effect" and the like, caused by removing the
12263 call node earlier than the warning is generated. */
12264
12265tree
1a91d914 12266fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 12267{
12268 tree ret = NULL_TREE;
12269 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 12270 location_t loc = gimple_location (stmt);
75a70cf9 12271 if (fndecl
12272 && TREE_CODE (fndecl) == FUNCTION_DECL
12273 && DECL_BUILT_IN (fndecl)
12274 && !gimple_call_va_arg_pack_p (stmt))
12275 {
12276 int nargs = gimple_call_num_args (stmt);
9845fb99 12277 tree *args = (nargs > 0
12278 ? gimple_call_arg_ptr (stmt, 0)
12279 : &error_mark_node);
75a70cf9 12280
198622c0 12281 if (avoid_folding_inline_builtin (fndecl))
12282 return NULL_TREE;
75a70cf9 12283 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12284 {
9845fb99 12285 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 12286 }
12287 else
12288 {
9d884767 12289 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 12290 if (ret)
12291 {
12292 /* Propagate location information from original call to
12293 expansion of builtin. Otherwise things like
12294 maybe_emit_chk_warning, that operate on the expansion
12295 of a builtin, will use the wrong location information. */
12296 if (gimple_has_location (stmt))
12297 {
12298 tree realret = ret;
12299 if (TREE_CODE (ret) == NOP_EXPR)
12300 realret = TREE_OPERAND (ret, 0);
12301 if (CAN_HAVE_LOCATION_P (realret)
12302 && !EXPR_HAS_LOCATION (realret))
389dd41b 12303 SET_EXPR_LOCATION (realret, loc);
75a70cf9 12304 return realret;
12305 }
12306 return ret;
12307 }
12308 }
12309 }
12310 return NULL_TREE;
12311}
7bfefa9d 12312
b9a16870 12313/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 12314 and set ASMSPEC as its user assembler name. DECL must be a
12315 function decl that declares a builtin. */
12316
12317void
12318set_builtin_user_assembler_name (tree decl, const char *asmspec)
12319{
12320 tree builtin;
12321 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12322 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12323 && asmspec != 0);
12324
b9a16870 12325 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 12326 set_user_assembler_name (builtin, asmspec);
7bfefa9d 12327 switch (DECL_FUNCTION_CODE (decl))
12328 {
12329 case BUILT_IN_MEMCPY:
12330 init_block_move_fn (asmspec);
12331 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12332 break;
12333 case BUILT_IN_MEMSET:
12334 init_block_clear_fn (asmspec);
12335 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12336 break;
12337 case BUILT_IN_MEMMOVE:
12338 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12339 break;
12340 case BUILT_IN_MEMCMP:
12341 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12342 break;
12343 case BUILT_IN_ABORT:
12344 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12345 break;
5a80a58b 12346 case BUILT_IN_FFS:
12347 if (INT_TYPE_SIZE < BITS_PER_WORD)
12348 {
12349 set_user_assembler_libfunc ("ffs", asmspec);
12350 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12351 MODE_INT, 0), "ffs");
12352 }
12353 break;
7bfefa9d 12354 default:
12355 break;
12356 }
12357}
a6b74a67 12358
12359/* Return true if DECL is a builtin that expands to a constant or similarly
12360 simple code. */
12361bool
12362is_simple_builtin (tree decl)
12363{
12364 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12365 switch (DECL_FUNCTION_CODE (decl))
12366 {
12367 /* Builtins that expand to constants. */
12368 case BUILT_IN_CONSTANT_P:
12369 case BUILT_IN_EXPECT:
12370 case BUILT_IN_OBJECT_SIZE:
12371 case BUILT_IN_UNREACHABLE:
12372 /* Simple register moves or loads from stack. */
fca0886c 12373 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 12374 case BUILT_IN_RETURN_ADDRESS:
12375 case BUILT_IN_EXTRACT_RETURN_ADDR:
12376 case BUILT_IN_FROB_RETURN_ADDR:
12377 case BUILT_IN_RETURN:
12378 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12379 case BUILT_IN_FRAME_ADDRESS:
12380 case BUILT_IN_VA_END:
12381 case BUILT_IN_STACK_SAVE:
12382 case BUILT_IN_STACK_RESTORE:
12383 /* Exception state returns or moves registers around. */
12384 case BUILT_IN_EH_FILTER:
12385 case BUILT_IN_EH_POINTER:
12386 case BUILT_IN_EH_COPY_VALUES:
12387 return true;
12388
12389 default:
12390 return false;
12391 }
12392
12393 return false;
12394}
12395
 12396/* Return true if DECL is a builtin that is not expensive, i.e. it is
 12397 most probably expanded inline into reasonably simple code. This is a
12398 superset of is_simple_builtin. */
12399bool
12400is_inexpensive_builtin (tree decl)
12401{
12402 if (!decl)
12403 return false;
12404 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12405 return true;
12406 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12407 switch (DECL_FUNCTION_CODE (decl))
12408 {
12409 case BUILT_IN_ABS:
12410 case BUILT_IN_ALLOCA:
581bf1c2 12411 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 12412 case BUILT_IN_BSWAP16:
a6b74a67 12413 case BUILT_IN_BSWAP32:
12414 case BUILT_IN_BSWAP64:
12415 case BUILT_IN_CLZ:
12416 case BUILT_IN_CLZIMAX:
12417 case BUILT_IN_CLZL:
12418 case BUILT_IN_CLZLL:
12419 case BUILT_IN_CTZ:
12420 case BUILT_IN_CTZIMAX:
12421 case BUILT_IN_CTZL:
12422 case BUILT_IN_CTZLL:
12423 case BUILT_IN_FFS:
12424 case BUILT_IN_FFSIMAX:
12425 case BUILT_IN_FFSL:
12426 case BUILT_IN_FFSLL:
12427 case BUILT_IN_IMAXABS:
12428 case BUILT_IN_FINITE:
12429 case BUILT_IN_FINITEF:
12430 case BUILT_IN_FINITEL:
12431 case BUILT_IN_FINITED32:
12432 case BUILT_IN_FINITED64:
12433 case BUILT_IN_FINITED128:
12434 case BUILT_IN_FPCLASSIFY:
12435 case BUILT_IN_ISFINITE:
12436 case BUILT_IN_ISINF_SIGN:
12437 case BUILT_IN_ISINF:
12438 case BUILT_IN_ISINFF:
12439 case BUILT_IN_ISINFL:
12440 case BUILT_IN_ISINFD32:
12441 case BUILT_IN_ISINFD64:
12442 case BUILT_IN_ISINFD128:
12443 case BUILT_IN_ISNAN:
12444 case BUILT_IN_ISNANF:
12445 case BUILT_IN_ISNANL:
12446 case BUILT_IN_ISNAND32:
12447 case BUILT_IN_ISNAND64:
12448 case BUILT_IN_ISNAND128:
12449 case BUILT_IN_ISNORMAL:
12450 case BUILT_IN_ISGREATER:
12451 case BUILT_IN_ISGREATEREQUAL:
12452 case BUILT_IN_ISLESS:
12453 case BUILT_IN_ISLESSEQUAL:
12454 case BUILT_IN_ISLESSGREATER:
12455 case BUILT_IN_ISUNORDERED:
12456 case BUILT_IN_VA_ARG_PACK:
12457 case BUILT_IN_VA_ARG_PACK_LEN:
12458 case BUILT_IN_VA_COPY:
12459 case BUILT_IN_TRAP:
12460 case BUILT_IN_SAVEREGS:
12461 case BUILT_IN_POPCOUNTL:
12462 case BUILT_IN_POPCOUNTLL:
12463 case BUILT_IN_POPCOUNTIMAX:
12464 case BUILT_IN_POPCOUNT:
12465 case BUILT_IN_PARITYL:
12466 case BUILT_IN_PARITYLL:
12467 case BUILT_IN_PARITYIMAX:
12468 case BUILT_IN_PARITY:
12469 case BUILT_IN_LABS:
12470 case BUILT_IN_LLABS:
12471 case BUILT_IN_PREFETCH:
ca4c3545 12472 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 12473 return true;
12474
12475 default:
12476 return is_simple_builtin (decl);
12477 }
12478
12479 return false;
12480}