]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/builtins.c
Merge current set of OpenACC changes from gomp-4_0-branch.
[thirdparty/gcc.git] / gcc / builtins.c
CommitLineData
53800dbe 1/* Expand builtin functions.
d353bf18 2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
20#include "config.h"
21#include "system.h"
805e22b2 22#include "coretypes.h"
23#include "tm.h"
53800dbe 24#include "machmode.h"
25#include "rtl.h"
b20a8bb4 26#include "hash-set.h"
b20a8bb4 27#include "vec.h"
28#include "double-int.h"
29#include "input.h"
30#include "alias.h"
31#include "symtab.h"
32#include "wide-int.h"
33#include "inchash.h"
53800dbe 34#include "tree.h"
b20a8bb4 35#include "fold-const.h"
9ed99284 36#include "stringpool.h"
37#include "stor-layout.h"
38#include "calls.h"
39#include "varasm.h"
40#include "tree-object-size.h"
dae0b5cb 41#include "realmpfr.h"
94ea8568 42#include "predict.h"
94ea8568 43#include "hashtab.h"
94ea8568 44#include "hard-reg-set.h"
94ea8568 45#include "function.h"
46#include "cfgrtl.h"
bc61cadb 47#include "basic-block.h"
48#include "tree-ssa-alias.h"
49#include "internal-fn.h"
50#include "gimple-expr.h"
51#include "is-a.h"
75a70cf9 52#include "gimple.h"
53800dbe 53#include "flags.h"
54#include "regs.h"
53800dbe 55#include "except.h"
53800dbe 56#include "insn-config.h"
d53441c8 57#include "statistics.h"
58#include "real.h"
59#include "fixed-value.h"
60#include "expmed.h"
61#include "dojump.h"
62#include "explow.h"
63#include "emit-rtl.h"
64#include "stmt.h"
53800dbe 65#include "expr.h"
34517c64 66#include "insn-codes.h"
d8fc4d0b 67#include "optabs.h"
68#include "libfuncs.h"
53800dbe 69#include "recog.h"
70#include "output.h"
71#include "typeclass.h"
1dd6c958 72#include "tm_p.h"
fc2a2dcb 73#include "target.h"
63c62881 74#include "langhooks.h"
073c1fd5 75#include "tree-ssanames.h"
76#include "tree-dfa.h"
162719b3 77#include "value-prof.h"
852f689e 78#include "diagnostic-core.h"
3b9c3a16 79#include "builtins.h"
f9acf11a 80#include "asan.h"
d037099f 81#include "cilk.h"
058a1b7a 82#include "ipa-ref.h"
83#include "lto-streamer.h"
84#include "cgraph.h"
85#include "tree-chkp.h"
86#include "rtl-chkp.h"
ca4c3545 87#include "gomp-constants.h"
53800dbe 88
5383fb56 89
239d491a 90static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
726e2588 91
/* Per-target state for built-in expansion; with SWITCHABLE_TARGET the
   active set can be swapped at run time, otherwise the default is the
   only instance.  */
struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

/* Stringize each DEF_BUILTIN entry so built_in_names[code] yields the
   enumerator's name, indexed by enum built_in_function.  */
#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of _DECL trees, make sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;
114
3754d046 115static rtx c_readstr (const char *, machine_mode);
aecda0d6 116static int target_char_cast (tree, char *);
d8ae1baa 117static rtx get_memory_rtx (tree, tree);
aecda0d6 118static int apply_args_size (void);
119static int apply_result_size (void);
d8c9779c 120#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
aecda0d6 121static rtx result_vector (int, rtx);
d8c9779c 122#endif
843d08a9 123static void expand_builtin_update_setjmp_buf (rtx);
aecda0d6 124static void expand_builtin_prefetch (tree);
125static rtx expand_builtin_apply_args (void);
126static rtx expand_builtin_apply_args_1 (void);
127static rtx expand_builtin_apply (rtx, rtx, rtx);
128static void expand_builtin_return (rtx);
129static enum type_class type_to_class (tree);
130static rtx expand_builtin_classify_type (tree);
131static void expand_errno_check (tree, rtx);
132static rtx expand_builtin_mathfn (tree, rtx, rtx);
133static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
6b43bae4 134static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 135static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 136static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 137static rtx expand_builtin_sincos (tree);
f97eea22 138static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 139static rtx expand_builtin_int_roundingfn (tree, rtx);
140static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 141static rtx expand_builtin_next_arg (void);
aecda0d6 142static rtx expand_builtin_va_start (tree);
143static rtx expand_builtin_va_end (tree);
144static rtx expand_builtin_va_copy (tree);
3754d046 145static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
a65c4d64 146static rtx expand_builtin_strcmp (tree, rtx);
3754d046 147static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
148static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
a65c4d64 149static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 150static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
151static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
3754d046 152static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
f21337ef 153static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
48e1416a 154static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
f21337ef 155 machine_mode, int, tree);
a65c4d64 156static rtx expand_builtin_strcpy (tree, rtx);
157static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 158static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
a65c4d64 159static rtx expand_builtin_strncpy (tree, rtx);
3754d046 160static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
161static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 162static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 163static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 164static rtx expand_builtin_bzero (tree);
3754d046 165static rtx expand_builtin_strlen (tree, rtx, machine_mode);
5be42b39 166static rtx expand_builtin_alloca (tree, bool);
3754d046 167static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 168static rtx expand_builtin_frame_address (tree, tree);
389dd41b 169static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 170static rtx expand_builtin_expect (tree, rtx);
171static tree fold_builtin_constant_p (tree);
172static tree fold_builtin_classify_type (tree);
c7cbde74 173static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 174static tree fold_builtin_inf (location_t, tree, int);
aecda0d6 175static tree fold_builtin_nan (tree, tree, int);
389dd41b 176static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
b7bf20db 177static bool validate_arg (const_tree, enum tree_code code);
277f8dd2 178static bool integer_valued_real_p (tree);
389dd41b 179static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
aecda0d6 180static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 181static rtx expand_builtin_signbit (tree, rtx);
389dd41b 182static tree fold_builtin_sqrt (location_t, tree, tree);
183static tree fold_builtin_cbrt (location_t, tree, tree);
184static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
185static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
186static tree fold_builtin_cos (location_t, tree, tree, tree);
187static tree fold_builtin_cosh (location_t, tree, tree, tree);
bffb7645 188static tree fold_builtin_tan (tree, tree);
389dd41b 189static tree fold_builtin_trunc (location_t, tree, tree);
190static tree fold_builtin_floor (location_t, tree, tree);
191static tree fold_builtin_ceil (location_t, tree, tree);
192static tree fold_builtin_round (location_t, tree, tree);
193static tree fold_builtin_int_roundingfn (location_t, tree, tree);
10b9666f 194static tree fold_builtin_bitop (tree, tree);
389dd41b 195static tree fold_builtin_strchr (location_t, tree, tree, tree);
196static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
197static tree fold_builtin_memcmp (location_t, tree, tree, tree);
198static tree fold_builtin_strcmp (location_t, tree, tree);
199static tree fold_builtin_strncmp (location_t, tree, tree, tree);
200static tree fold_builtin_signbit (location_t, tree, tree);
201static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
202static tree fold_builtin_isascii (location_t, tree);
203static tree fold_builtin_toascii (location_t, tree);
204static tree fold_builtin_isdigit (location_t, tree);
205static tree fold_builtin_fabs (location_t, tree, tree);
206static tree fold_builtin_abs (location_t, tree, tree);
207static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 208 enum tree_code);
e80cc485 209static tree fold_builtin_0 (location_t, tree);
210static tree fold_builtin_1 (location_t, tree, tree);
211static tree fold_builtin_2 (location_t, tree, tree, tree);
212static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
213static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 214
215static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
216static tree fold_builtin_strstr (location_t, tree, tree, tree);
217static tree fold_builtin_strrchr (location_t, tree, tree, tree);
389dd41b 218static tree fold_builtin_strspn (location_t, tree, tree);
219static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 220
0a39fd54 221static rtx expand_builtin_object_size (tree);
3754d046 222static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 223 enum built_in_function);
224static void maybe_emit_chk_warning (tree, enum built_in_function);
225static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 226static void maybe_emit_free_warning (tree);
c2f47e15 227static tree fold_builtin_object_size (tree, tree);
99eabcc1 228
/* Target-charset encodings of characters and short format strings used
   when folding printf-family built-ins.  NOTE(review): presumably filled
   in by an init routine (e.g. init_target_chars) not visible in this
   chunk — confirm before relying on their values.  */
unsigned HOST_WIDE_INT target_newline;	/* '\n' in the target charset.  */
unsigned HOST_WIDE_INT target_percent;	/* '%' in the target charset.  */
static unsigned HOST_WIDE_INT target_c;	/* 'c' in the target charset.  */
static unsigned HOST_WIDE_INT target_s;	/* 's' in the target charset.  */
char target_percent_c[3];		/* "%c" in the target charset.  */
char target_percent_s[3];		/* "%s" in the target charset.  */
char target_percent_s_newline[4];	/* "%s\n" in the target charset.  */
728bac60 236static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
237 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
f0c477f2 238static tree do_mpfr_arg2 (tree, tree, tree,
239 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
9917422b 240static tree do_mpfr_arg3 (tree, tree, tree, tree,
241 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
d92f994c 242static tree do_mpfr_sincos (tree, tree, tree);
65dd1378 243static tree do_mpfr_bessel_n (tree, tree, tree,
244 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
245 const REAL_VALUE_TYPE *, bool);
e5407ca6 246static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 247static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 248static void expand_builtin_sync_synchronize (void);
0a39fd54 249
7bfefa9d 250/* Return true if NAME starts with __builtin_ or __sync_. */
251
b29139ad 252static bool
1c47b3e8 253is_builtin_name (const char *name)
b6a5fc45 254{
b6a5fc45 255 if (strncmp (name, "__builtin_", 10) == 0)
256 return true;
257 if (strncmp (name, "__sync_", 7) == 0)
258 return true;
1cd6e20d 259 if (strncmp (name, "__atomic_", 9) == 0)
260 return true;
a89e6c15 261 if (flag_cilkplus
d037099f 262 && (!strcmp (name, "__cilkrts_detach")
263 || !strcmp (name, "__cilkrts_pop_frame")))
264 return true;
b6a5fc45 265 return false;
266}
4ee9c684 267
7bfefa9d 268
269/* Return true if DECL is a function symbol representing a built-in. */
270
271bool
272is_builtin_fn (tree decl)
273{
274 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
275}
276
1c47b3e8 277/* Return true if NODE should be considered for inline expansion regardless
278 of the optimization level. This means whenever a function is invoked with
279 its "internal" name, which normally contains the prefix "__builtin". */
280
281static bool
282called_as_built_in (tree node)
283{
284 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
285 we want the name used to call the function, not the name it
286 will have. */
287 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
288 return is_builtin_name (name);
289}
290
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    /* Labels give no alignment guarantee beyond the default.  */
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      /* Only the type gives a (non-absolute) alignment hint here.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      /* A pointer masked with BIT_AND of a constant advertises the
	 alignment implied by the mask's low zero bits.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		    & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  /* Reduce the bit position modulo the reported alignment.  */
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}
438
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  /* ADDR_P is false: EXP is an actual access, so the callee may rely
     on the access taking place when deriving alignment.  */
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}
450
957d0361 451/* Return the alignment in bits of EXP, an object. */
0c883ef3 452
453unsigned int
957d0361 454get_object_alignment (tree exp)
0c883ef3 455{
456 unsigned HOST_WIDE_INT bitpos = 0;
457 unsigned int align;
458
ceea063b 459 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 460
98ab9e8f 461 /* align and bitpos now specify known low bits of the pointer.
462 ptr & (align - 1) == bitpos. */
463
464 if (bitpos != 0)
465 align = (bitpos & -bitpos);
957d0361 466 return align;
698537d1 467}
468
ceea063b 469/* For a pointer valued expression EXP compute values M and N such that M
470 divides (EXP - N) and such that N < M. If these numbers can be determined,
3482bf13 471 store M in alignp and N in *BITPOSP and return true. Return false if
472 the results are just a conservative approximation.
53800dbe 473
ceea063b 474 If EXP is not a pointer, false is returned too. */
53800dbe 475
ceea063b 476bool
477get_pointer_alignment_1 (tree exp, unsigned int *alignp,
478 unsigned HOST_WIDE_INT *bitposp)
53800dbe 479{
153c3b50 480 STRIP_NOPS (exp);
535e2026 481
153c3b50 482 if (TREE_CODE (exp) == ADDR_EXPR)
3482bf13 483 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
484 alignp, bitposp, true);
153c3b50 485 else if (TREE_CODE (exp) == SSA_NAME
486 && POINTER_TYPE_P (TREE_TYPE (exp)))
53800dbe 487 {
ceea063b 488 unsigned int ptr_align, ptr_misalign;
153c3b50 489 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
ceea063b 490
491 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
492 {
493 *bitposp = ptr_misalign * BITS_PER_UNIT;
494 *alignp = ptr_align * BITS_PER_UNIT;
3482bf13 495 /* We cannot really tell whether this result is an approximation. */
ceea063b 496 return true;
497 }
498 else
69fbc3aa 499 {
500 *bitposp = 0;
ceea063b 501 *alignp = BITS_PER_UNIT;
502 return false;
69fbc3aa 503 }
53800dbe 504 }
0bb8b39a 505 else if (TREE_CODE (exp) == INTEGER_CST)
506 {
507 *alignp = BIGGEST_ALIGNMENT;
f9ae6f95 508 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
0bb8b39a 509 & (BIGGEST_ALIGNMENT - 1));
510 return true;
511 }
153c3b50 512
69fbc3aa 513 *bitposp = 0;
ceea063b 514 *alignp = BITS_PER_UNIT;
515 return false;
53800dbe 516}
517
69fbc3aa 518/* Return the alignment in bits of EXP, a pointer valued expression.
519 The alignment returned is, by default, the alignment of the thing that
520 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
521
522 Otherwise, look at the expression to see if we can do better, i.e., if the
523 expression is actually pointing at an object whose alignment is tighter. */
524
525unsigned int
526get_pointer_alignment (tree exp)
527{
528 unsigned HOST_WIDE_INT bitpos = 0;
529 unsigned int align;
ceea063b 530
531 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 532
533 /* align and bitpos now specify known low bits of the pointer.
534 ptr & (align - 1) == bitpos. */
535
536 if (bitpos != 0)
537 align = (bitpos & -bitpos);
538
539 return align;
540}
541
/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      /* If both arms of the conditional have the same known length,
	 that length is the answer regardless of the condition.  */
      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  /* For a comma expression the string is the second operand.  */
  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;	/* Unrepresentable: forces the bounds check below.  */
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
648
83d79705 649/* Return a char pointer for a C string if it is a string constant
650 or sum of string constant and integer constant. */
651
b9ea678c 652const char *
aecda0d6 653c_getstr (tree src)
83d79705 654{
655 tree offset_node;
83d79705 656
657 src = string_constant (src, &offset_node);
658 if (src == 0)
659 return 0;
660
8c85fcb7 661 if (offset_node == 0)
662 return TREE_STRING_POINTER (src);
e913b5cd 663 else if (!tree_fits_uhwi_p (offset_node)
8c85fcb7 664 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
83d79705 665 return 0;
83d79705 666
e913b5cd 667 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
83d79705 668}
669
/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  /* Number of host wide ints needed to hold the mode's precision,
     rounded up.  */
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  /* CH doubles as a "no NUL seen yet" flag: once a string byte is 0,
     CH stays 0 and the remaining bytes are zero-filled.  */
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      /* Compute J, the byte position within the constant where byte I
	 of the string lands, honoring target byte and word order.  */
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;	/* Now J is a bit offset.  */

      if (ch)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
707
ecc318ff 708/* Cast a target constant CST to target CHAR and if that value fits into
5206b159 709 host char type, return zero and put that value into variable pointed to by
ecc318ff 710 P. */
711
712static int
aecda0d6 713target_char_cast (tree cst, char *p)
ecc318ff 714{
715 unsigned HOST_WIDE_INT val, hostval;
716
c19686c5 717 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 718 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
719 return 1;
720
e913b5cd 721 /* Do not care if it fits or not right here. */
f9ae6f95 722 val = TREE_INT_CST_LOW (cst);
e913b5cd 723
ecc318ff 724 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
725 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
726
727 hostval = val;
728 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
729 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
730
731 if (val != hostval)
732 return 1;
733
734 *p = hostval;
735 return 0;
736}
737
4ee9c684 738/* Similar to save_expr, but assumes that arbitrary code is not executed
739 in between the multiple evaluations. In particular, we assume that a
740 non-addressable local variable will not be modified. */
741
742static tree
743builtin_save_expr (tree exp)
744{
f6c35aa4 745 if (TREE_CODE (exp) == SSA_NAME
746 || (TREE_ADDRESSABLE (exp) == 0
747 && (TREE_CODE (exp) == PARM_DECL
748 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
4ee9c684 749 return exp;
750
751 return save_expr (exp);
752}
753
/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address lives one word past the frame address.  */
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
837
f7c44134 838/* Alias set used for setjmp buffer. */
32c2fdea 839static alias_set_type setjmp_alias_set = -1;
f7c44134 840
6b7f6858 841/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 842 return to RECEIVER_LABEL. This is also called directly by the SJLJ
 843 exception handling code. */
53800dbe 844
/* NOTE(review): buffer layout written here and read back by
   expand_builtin_longjmp: word 0 = frame pointer, word 1 = address of
   RECEIVER_LABEL, words 2.. = machine-dependent stack save area.  */
6b7f6858 845void
aecda0d6 846expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 847{
3754d046 848 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 849 rtx stack_save;
f7c44134 850 rtx mem;
53800dbe 851
f7c44134 852 if (setjmp_alias_set == -1)
 853 setjmp_alias_set = new_alias_set ();
 854
85d654dd 855 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 856
37ae8504 857 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 858
6b7f6858 859 /* We store the frame pointer and the address of receiver_label in
 860 the buffer and use the rest of it for the stack save area, which
 861 is machine-dependent. */
53800dbe 862
f7c44134 863 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 864 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 865 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 866
 /* Beware: the statement below ends with a comma operator ("))," rather
    than ");"), so it and the following set_mem_alias_set call form one
    expression statement.  Harmless, but easy to misread.  */
29c05e22 867 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
 868 GET_MODE_SIZE (Pmode))),
ab6ab77e 869 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 870
 871 emit_move_insn (validize_mem (mem),
6b7f6858 872 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 873
 874 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 875 plus_constant (Pmode, buf_addr,
53800dbe 876 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 877 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 878 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 879
 880 /* If there is further processing to do, do it. */
 881#ifdef HAVE_builtin_setjmp_setup
 882 if (HAVE_builtin_setjmp_setup)
 883 emit_insn (gen_builtin_setjmp_setup (buf_addr));
 884#endif
 885
29f09705 886 /* We have a nonlocal label. */
18d50ae6 887 cfun->has_nonlocal_label = 1;
6b7f6858 888}
53800dbe 889
2c8a1497 890/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 891 also called directly by the SJLJ exception handling code.
 892 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
6b7f6858 893
 894void
aecda0d6 895expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
6b7f6858 896{
82c7907c 897 rtx chain;
 898
4598ade9 899 /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 900 marked as used by this function. */
18b42941 901 emit_use (hard_frame_pointer_rtx);
53800dbe 902
 903 /* Mark the static chain as clobbered here so life information
 904 doesn't get messed up for it. */
82c7907c 905 chain = targetm.calls.static_chain (current_function_decl, true);
 906 if (chain && REG_P (chain))
 907 emit_clobber (chain);
53800dbe 908
 909 /* Now put in the code to restore the frame pointer, and argument
491e04ef 910 pointer, if needed. */
 /* If the target provides a nonlocal_goto pattern, it is responsible
    for restoring the frame pointer itself, so the manual restore below
    is skipped.  */
53800dbe 911#ifdef HAVE_nonlocal_goto
 912 if (! HAVE_nonlocal_goto)
 913#endif
62dcb5c8 914 {
 915 /* First adjust our frame pointer to its actual value. It was
 916 previously set to the start of the virtual area corresponding to
 917 the stacked variables when we branched here and now needs to be
 918 adjusted to the actual hardware fp value.
 919
 920 Assignments to virtual registers are converted by
 921 instantiate_virtual_regs into the corresponding assignment
 922 to the underlying register (fp in this case) that makes
 923 the original assignment true.
 924 So the following insn will actually be decrementing fp by
 925 STARTING_FRAME_OFFSET. */
 926 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
 927
 928 /* Restoring the frame pointer also modifies the hard frame pointer.
 929 Mark it used (so that the previous assignment remains live once
 930 the frame pointer is eliminated) and clobbered (to represent the
 931 implicit update from the assignment). */
 932 emit_use (hard_frame_pointer_rtx);
 933 emit_clobber (hard_frame_pointer_rtx);
 934 }
53800dbe 935
5ae82d58 936#if !HARD_FRAME_POINTER_IS_ARG_POINTER
53800dbe 937 if (fixed_regs[ARG_POINTER_REGNUM])
 938 {
 939#ifdef ELIMINABLE_REGS
4598ade9 940 /* If the argument pointer can be eliminated in favor of the
 941 frame pointer, we don't need to restore it. We assume here
 942 that if such an elimination is present, it can always be used.
 943 This is the case on all known machines; if we don't make this
 944 assumption, we do unnecessary saving on many machines. */
53800dbe 945 size_t i;
e99c3a1d 946 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 947
3098b2d3 948 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 949 if (elim_regs[i].from == ARG_POINTER_REGNUM
 950 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
 951 break;
 952
3098b2d3 953 if (i == ARRAY_SIZE (elim_regs))
53800dbe 954#endif
 955 {
 956 /* Now restore our arg pointer from the address at which it
05927e40 957 was saved in our stack frame. */
27a7a23a 958 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 959 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 960 }
 961 }
 962#endif
 963
 964#ifdef HAVE_builtin_setjmp_receiver
4598ade9 965 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
6b7f6858 966 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
53800dbe 967 else
 968#endif
 969#ifdef HAVE_nonlocal_goto_receiver
 970 if (HAVE_nonlocal_goto_receiver)
 971 emit_insn (gen_nonlocal_goto_receiver ());
 972 else
 973#endif
6b7f6858 974 { /* Nothing */ }
57f6bb94 975
3072d30e 976 /* We must not allow the code we just generated to be reordered by
 977 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 978 happen immediately, not later. */
3072d30e 979 emit_insn (gen_blockage ());
6b7f6858 980}
53800dbe 981
53800dbe 982/* __builtin_longjmp is passed a pointer to an array of five words (not
 983 all will be used on all machines). It operates similarly to the C
 984 library function of the same name, but is more efficient. Much of
2c8a1497 985 the code below is copied from the handling of non-local gotos. */
53800dbe 986
/* NOTE(review): BUF_ADDR must point at a buffer filled in by
   expand_builtin_setjmp_setup; VALUE must be const1_rtx (asserted
   below), since that is what __builtin_setjmp returns on the longjmp
   path.  */
c626df3d 987static void
aecda0d6 988expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 989{
1e0c0b35 990 rtx fp, lab, stack;
 991 rtx_insn *insn, *last;
3754d046 992 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 993
48e1416a 994 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 995 function */
 996 if (SUPPORTS_STACK_ALIGNMENT)
 997 crtl->need_drap = true;
 998
f7c44134 999 if (setjmp_alias_set == -1)
 1000 setjmp_alias_set = new_alias_set ();
 1001
85d654dd 1002 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 1003
53800dbe 1004 buf_addr = force_reg (Pmode, buf_addr);
 1005
82c7907c 1006 /* We require that the user must pass a second argument of 1, because
 1007 that is what builtin_setjmp will return. */
64db345d 1008 gcc_assert (value == const1_rtx);
53800dbe 1009
4712c7d6 1010 last = get_last_insn ();
53800dbe 1011#ifdef HAVE_builtin_longjmp
 1012 if (HAVE_builtin_longjmp)
 1013 emit_insn (gen_builtin_longjmp (buf_addr));
 1014 else
 1015#endif
 1016 {
 /* Buffer layout (established by expand_builtin_setjmp_setup):
    word 0 = saved frame pointer, word 1 = receiver label address,
    words 2.. = stack save area.  */
 1017 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1018 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1019 GET_MODE_SIZE (Pmode)));
 1020
29c05e22 1021 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1022 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1023 set_mem_alias_set (fp, setjmp_alias_set);
 1024 set_mem_alias_set (lab, setjmp_alias_set);
 1025 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1026
 1027 /* Pick up FP, label, and SP from the block and jump. This code is
 1028 from expand_goto in stmt.c; see there for detailed comments. */
03fd9d2c 1029#ifdef HAVE_nonlocal_goto
53800dbe 1030 if (HAVE_nonlocal_goto)
 1031 /* We have to pass a value to the nonlocal_goto pattern that will
 1032 get copied into the static_chain pointer, but it does not matter
 1033 what that value is, because builtin_setjmp does not use it. */
28d202a8 1034 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1035 else
 1036#endif
 1037 {
 1038 lab = copy_to_reg (lab);
 1039
18b42941 1040 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
 1041 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1042
53800dbe 1043 emit_move_insn (hard_frame_pointer_rtx, fp);
e9c97615 1044 emit_stack_restore (SAVE_NONLOCAL, stack);
53800dbe 1045
18b42941 1046 emit_use (hard_frame_pointer_rtx);
 1047 emit_use (stack_pointer_rtx);
53800dbe 1048 emit_indirect_jump (lab);
 1049 }
 1050 }
615166bb 1051
 1052 /* Search backwards and mark the jump insn as a non-local goto.
 1053 Note that this precludes the use of __builtin_longjmp to a
 1054 __builtin_setjmp target in the same function. However, we've
 1055 already cautioned the user that these functions are for
 1056 internal exception handling use only. */
449c0509 1057 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
 1058 {
64db345d 1059 gcc_assert (insn != last);
7d3f6cc7 1060
6d7dc5b9 1061 if (JUMP_P (insn))
449c0509 1062 {
a1ddb869 1063 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1064 break;
 1065 }
6d7dc5b9 1066 else if (CALL_P (insn))
9342ee68 1067 break;
449c0509 1068 }
53800dbe 1069}
1070
0e80b01d 1071static inline bool
1072more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1073{
1074 return (iter->i < iter->n);
1075}
1076
1077/* This function validates the types of a function call argument list
1078 against a specified list of tree_codes. If the last specifier is a 0,
1079 that represents an ellipses, otherwise the last specifier must be a
1080 VOID_TYPE. */
1081
1082static bool
1083validate_arglist (const_tree callexpr, ...)
1084{
1085 enum tree_code code;
1086 bool res = 0;
1087 va_list ap;
1088 const_call_expr_arg_iterator iter;
1089 const_tree arg;
1090
1091 va_start (ap, callexpr);
1092 init_const_call_expr_arg_iterator (callexpr, &iter);
1093
1094 do
1095 {
1096 code = (enum tree_code) va_arg (ap, int);
1097 switch (code)
1098 {
1099 case 0:
1100 /* This signifies an ellipses, any further arguments are all ok. */
1101 res = true;
1102 goto end;
1103 case VOID_TYPE:
1104 /* This signifies an endlink, if no arguments remain, return
1105 true, otherwise return false. */
1106 res = !more_const_call_expr_args_p (&iter);
1107 goto end;
1108 default:
1109 /* If no parameters remain or the parameter's code does not
1110 match the specified code, return false. Otherwise continue
1111 checking any remaining arguments. */
1112 arg = next_const_call_expr_arg (&iter);
1113 if (!validate_arg (arg, code))
1114 goto end;
1115 break;
1116 }
1117 }
1118 while (1);
1119
1120 /* We need gotos here since we can only have one VA_CLOSE in a
1121 function. */
1122 end: ;
1123 va_end (ap);
1124
1125 return res;
1126}
1127
4ee9c684 1128/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
 1129 and the address of the save area. */
 1130
 1131static rtx
c2f47e15 1132expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1133{
 1134 tree t_label, t_save_area;
1e0c0b35 1135 rtx r_label, r_save_area, r_fp, r_sp;
 1136 rtx_insn *insn;
4ee9c684 1137
c2f47e15 1138 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1139 return NULL_RTX;
 1140
c2f47e15 1141 t_label = CALL_EXPR_ARG (exp, 0);
 1142 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1143
8ec3c5c2 1144 r_label = expand_normal (t_label);
3dce56cc 1145 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1146 r_save_area = expand_normal (t_save_area);
3dce56cc 1147 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1148 /* Copy the address of the save location to a register just in case it was
 1149 based on the frame pointer. */
51adbc8a 1150 r_save_area = copy_to_reg (r_save_area);
 /* Save-area layout: word 0 = frame pointer, word 1 = stack pointer
    (in STACK_SAVEAREA_MODE).  */
4ee9c684 1151 r_fp = gen_rtx_MEM (Pmode, r_save_area);
 1152 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1153 plus_constant (Pmode, r_save_area,
 1154 GET_MODE_SIZE (Pmode)));
4ee9c684 1155
18d50ae6 1156 crtl->has_nonlocal_goto = 1;
4ee9c684 1157
03fd9d2c 1158#ifdef HAVE_nonlocal_goto
4ee9c684 1159 /* ??? We no longer need to pass the static chain value, afaik. */
 1160 if (HAVE_nonlocal_goto)
 1161 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
 1162 else
 1163#endif
 1164 {
 1165 r_label = copy_to_reg (r_label);
 1166
18b42941 1167 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
 1168 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1169
d1ff492e 1170 /* Restore frame pointer for containing function. */
4ee9c684 1171 emit_move_insn (hard_frame_pointer_rtx, r_fp);
e9c97615 1172 emit_stack_restore (SAVE_NONLOCAL, r_sp);
491e04ef 1173
4ee9c684 1174 /* USE of hard_frame_pointer_rtx added for consistency;
 1175 not clear if really needed. */
18b42941 1176 emit_use (hard_frame_pointer_rtx);
 1177 emit_use (stack_pointer_rtx);
ad0d0af8 1178
 1179 /* If the architecture is using a GP register, we must
 1180 conservatively assume that the target function makes use of it.
 1181 The prologue of functions with nonlocal gotos must therefore
 1182 initialize the GP register to the appropriate value, and we
 1183 must then make sure that this value is live at the point
 1184 of the jump. (Note that this doesn't necessarily apply
 1185 to targets with a nonlocal_goto pattern; they are free
 1186 to implement it in their own way. Note also that this is
 1187 a no-op if the GP register is a global invariant.) */
 1188 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
 1189 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
18b42941 1190 emit_use (pic_offset_table_rtx);
ad0d0af8 1191
4ee9c684 1192 emit_indirect_jump (r_label);
 1193 }
491e04ef 1194
4ee9c684 1195 /* Search backwards to the jump insn and mark it as a
 1196 non-local goto. */
 1197 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
 1198 {
6d7dc5b9 1199 if (JUMP_P (insn))
4ee9c684 1200 {
a1ddb869 1201 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1202 break;
 1203 }
6d7dc5b9 1204 else if (CALL_P (insn))
4ee9c684 1205 break;
 1206 }
 1207
 1208 return const0_rtx;
 1209}
1210
843d08a9 1211/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
 1212 (not all will be used on all machines) that was passed to __builtin_setjmp.
 1213 It updates the stack pointer in that block to correspond to the current
 1214 stack pointer. */
 1215
 1216static void
 1217expand_builtin_update_setjmp_buf (rtx buf_addr)
 1218{
 /* Only the stack-save area (starting at word 2 of the buffer) is
    rewritten; the saved frame pointer and receiver-label words are
    left untouched.  */
3754d046 1219 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
d1ff492e 1220 rtx stack_save
843d08a9 1221 = gen_rtx_MEM (sa_mode,
 1222 memory_address
 1223 (sa_mode,
29c05e22 1224 plus_constant (Pmode, buf_addr,
 1225 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1226
e9c97615 1227 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1228}
1229
5e3608d8 1230/* Expand a call to __builtin_prefetch. For a target that does not support
 1231 data prefetch, evaluate the memory address argument in case it has side
 1232 effects. */
 1233
 1234static void
c2f47e15 1235expand_builtin_prefetch (tree exp)
5e3608d8 1236{
 1237 tree arg0, arg1, arg2;
c2f47e15 1238 int nargs;
5e3608d8 1239 rtx op0, op1, op2;
 1240
c2f47e15 1241 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1242 return;
 1243
c2f47e15 1244 arg0 = CALL_EXPR_ARG (exp, 0);
 1245
26a5cadb 1246 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
 1247 zero (read) and argument 2 (locality) defaults to 3 (high degree of
 1248 locality). */
c2f47e15 1249 nargs = call_expr_nargs (exp);
 1250 if (nargs > 1)
 1251 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1252 else
c2f47e15 1253 arg1 = integer_zero_node;
 1254 if (nargs > 2)
 1255 arg2 = CALL_EXPR_ARG (exp, 2);
 1256 else
2512209b 1257 arg2 = integer_three_node;
5e3608d8 1258
 1259 /* Argument 0 is an address. */
 1260 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
 1261
 1262 /* Argument 1 (read/write flag) must be a compile-time constant int. */
 1263 if (TREE_CODE (arg1) != INTEGER_CST)
 1264 {
07e3a3d2 1265 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1266 arg1 = integer_zero_node;
5e3608d8 1267 }
8ec3c5c2 1268 op1 = expand_normal (arg1);
5e3608d8 1269 /* Argument 1 must be either zero or one. */
 1270 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
 1271 {
c3ceba8e 1272 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1273 " using zero");
5e3608d8 1274 op1 = const0_rtx;
 1275 }
 1276
 1277 /* Argument 2 (locality) must be a compile-time constant int. */
 1278 if (TREE_CODE (arg2) != INTEGER_CST)
 1279 {
07e3a3d2 1280 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1281 arg2 = integer_zero_node;
 1282 }
8ec3c5c2 1283 op2 = expand_normal (arg2);
5e3608d8 1284 /* Argument 2 must be 0, 1, 2, or 3. */
 1285 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
 1286 {
c3ceba8e 1287 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1288 op2 = const0_rtx;
 1289 }
 1290
 /* Emit the target's prefetch insn when available; if the insn does
    not exist or cannot be expanded, fall through and at most preserve
    the address expression's side effects below.  */
 1291#ifdef HAVE_prefetch
 1292 if (HAVE_prefetch)
 1293 {
8786db1e 1294 struct expand_operand ops[3];
 1295
 1296 create_address_operand (&ops[0], op0);
 1297 create_integer_operand (&ops[1], INTVAL (op1));
 1298 create_integer_operand (&ops[2], INTVAL (op2));
 1299 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
 1300 return;
5e3608d8 1301 }
5e3608d8 1302#endif
0a534ba7 1303
f0ce3b1f 1304 /* Don't do anything with direct references to volatile memory, but
 1305 generate code to handle other side effects. */
e16ceb8e 1306 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1307 emit_insn (op0);
5e3608d8 1308}
1309
f7c44134 1310/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1311 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
 1312 the maximum length of the block of memory that might be accessed or
 1313 NULL if unknown. */
f7c44134 1314
53800dbe 1315static rtx
d8ae1baa 1316get_memory_rtx (tree exp, tree len)
53800dbe 1317{
ad0a178f 1318 tree orig_exp = exp;
 1319 rtx addr, mem;
ad0a178f 1320
 1321 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
 1322 from its expression, for expr->a.b only <variable>.a.b is recorded. */
 1323 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
 1324 exp = TREE_OPERAND (exp, 0);
 1325
 1326 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
 1327 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1328
f7c44134 1329 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1330 First remove any nops. */
72dd6141 1331 while (CONVERT_EXPR_P (exp)
f7c44134 1332 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
 1333 exp = TREE_OPERAND (exp, 0);
 1334
5dd3f78f 1335 /* Build a MEM_REF representing the whole accessed area as a byte blob,
 1336 (as builtin stringops may alias with anything). */
 1337 exp = fold_build2 (MEM_REF,
 1338 build_array_type (char_type_node,
 1339 build_range_type (sizetype,
 1340 size_one_node, len)),
 1341 exp, build_int_cst (ptr_type_node, 0));
 1342
 1343 /* If the MEM_REF has no acceptable address, try to get the base object
 1344 from the original address we got, and build an all-aliasing
 1345 unknown-sized access to that one. */
 1346 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
 1347 set_mem_attributes (mem, exp, 0);
 1348 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
 1349 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
 1350 0))))
eec8e941 1351 {
5dd3f78f 1352 exp = build_fold_addr_expr (exp);
 1353 exp = fold_build2 (MEM_REF,
 1354 build_array_type (char_type_node,
 1355 build_range_type (sizetype,
 1356 size_zero_node,
 1357 NULL)),
 1358 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1359 set_mem_attributes (mem, exp, 0);
eec8e941 1360 }
 /* Alias set 0: the returned MEM may alias anything, matching the
    conservative MEM_REF built above.  */
5dd3f78f 1361 set_mem_alias_set (mem, 0);
53800dbe 1362 return mem;
 1363}
1364\f
1365/* Built-in functions to perform an untyped call and return. */
1366
3b9c3a16 1367#define apply_args_mode \
1368 (this_target_builtins->x_apply_args_mode)
1369#define apply_result_mode \
1370 (this_target_builtins->x_apply_result_mode)
53800dbe 1371
53800dbe 1372/* Return the size required for the block returned by __builtin_apply_args,
 1373 and initialize apply_args_mode. */
 1374
 1375static int
aecda0d6 1376apply_args_size (void)
53800dbe 1377{
 1378 static int size = -1;
 /* -1 above means "not yet computed"; the layout is computed once and
    cached, with apply_args_mode[] filled in as a side effect.  */
58e9ce8f 1379 int align;
 1380 unsigned int regno;
3754d046 1381 machine_mode mode;
53800dbe 1382
 1383 /* The values computed by this function never change. */
 1384 if (size < 0)
 1385 {
 1386 /* The first value is the incoming arg-pointer. */
 1387 size = GET_MODE_SIZE (Pmode);
 1388
 1389 /* The second value is the structure value address unless this is
 1390 passed as an "invisible" first argument. */
6812c89e 1391 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1392 size += GET_MODE_SIZE (Pmode);
 1393
 1394 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 1395 if (FUNCTION_ARG_REGNO_P (regno))
 1396 {
4bac51c9 1397 mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1398
64db345d 1399 gcc_assert (mode != VOIDmode);
53800dbe 1400
 1401 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1402 if (size % align != 0)
 1403 size = CEIL (size, align) * align;
53800dbe 1404 size += GET_MODE_SIZE (mode);
 1405 apply_args_mode[regno] = mode;
 1406 }
 1407 else
 1408 {
 1409 apply_args_mode[regno] = VOIDmode;
53800dbe 1410 }
 1411 }
 1412 return size;
 1413}
1414
 1415/* Return the size required for the block returned by __builtin_apply,
 1416 and initialize apply_result_mode. */
 1417
 1418static int
aecda0d6 1419apply_result_size (void)
53800dbe 1420{
 1421 static int size = -1;
 /* -1 above means "not yet computed"; computed once and cached, with
    apply_result_mode[] filled in as a side effect.  */
 1422 int align, regno;
3754d046 1423 machine_mode mode;
53800dbe 1424
 1425 /* The values computed by this function never change. */
 1426 if (size < 0)
 1427 {
 1428 size = 0;
 1429
 1430 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1431 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1432 {
4bac51c9 1433 mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1434
64db345d 1435 gcc_assert (mode != VOIDmode);
53800dbe 1436
 1437 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1438 if (size % align != 0)
 1439 size = CEIL (size, align) * align;
 1440 size += GET_MODE_SIZE (mode);
 1441 apply_result_mode[regno] = mode;
 1442 }
 1443 else
 1444 apply_result_mode[regno] = VOIDmode;
 1445
 1446 /* Allow targets that use untyped_call and untyped_return to override
 1447 the size so that machine-specific information can be stored here. */
 1448#ifdef APPLY_RESULT_SIZE
 1449 size = APPLY_RESULT_SIZE;
 1450#endif
 1451 }
 1452 return size;
 1453}
1454
 1455#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
 1456/* Create a vector describing the result block RESULT. If SAVEP is true,
 1457 the result block is used to save the values; otherwise it is used to
 1458 restore the values. */
 1459
 1460static rtx
aecda0d6 1461result_vector (int savep, rtx result)
53800dbe 1462{
 1463 int regno, size, align, nelts;
3754d046 1464 machine_mode mode;
53800dbe 1465 rtx reg, mem;
364c0c59 1466 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1467
53800dbe 1468 size = nelts = 0;
 1469 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 1470 if ((mode = apply_result_mode[regno]) != VOIDmode)
 1471 {
 1472 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1473 if (size % align != 0)
 1474 size = CEIL (size, align) * align;
 /* When restoring, map to the incoming (callee-view) register
    numbers via INCOMING_REGNO.  */
 1475 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1476 mem = adjust_address (result, mode, size);
53800dbe 1477 savevec[nelts++] = (savep
 1478 ? gen_rtx_SET (VOIDmode, mem, reg)
 1479 : gen_rtx_SET (VOIDmode, reg, mem));
 1480 size += GET_MODE_SIZE (mode);
 1481 }
 1482 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
 1483}
 1484#endif /* HAVE_untyped_call or HAVE_untyped_return */
1485
 1486/* Save the state required to perform an untyped call with the same
 1487 arguments as were passed to the current function. */
 1488
 1489static rtx
aecda0d6 1490expand_builtin_apply_args_1 (void)
53800dbe 1491{
 /* Block layout matches apply_args_size: word 0 = incoming arg
    pointer, optional next word = structure value address, then each
    argument register aligned to its mode.  */
1c7e61a7 1492 rtx registers, tem;
53800dbe 1493 int size, align, regno;
3754d046 1494 machine_mode mode;
6812c89e 1495 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1496
 1497 /* Create a block where the arg-pointer, structure value address,
 1498 and argument registers can be saved. */
 1499 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
 1500
 1501 /* Walk past the arg-pointer and structure value address. */
 1502 size = GET_MODE_SIZE (Pmode);
6812c89e 1503 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1504 size += GET_MODE_SIZE (Pmode);
 1505
 1506 /* Save each register used in calling a function to the block. */
 1507 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
 1508 if ((mode = apply_args_mode[regno]) != VOIDmode)
 1509 {
53800dbe 1510 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
 1511 if (size % align != 0)
 1512 size = CEIL (size, align) * align;
 1513
 1514 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
 1515
e513d163 1516 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1517 size += GET_MODE_SIZE (mode);
 1518 }
 1519
 1520 /* Save the arg pointer to the block. */
27a7a23a 1521 tem = copy_to_reg (crtl->args.internal_arg_pointer);
f083a92b 1522#ifdef STACK_GROWS_DOWNWARD
1c7e61a7 1523 /* We need the pointer as the caller actually passed them to us, not
9d4b544c 1524 as we might have pretended they were passed. Make sure it's a valid
 1525 operand, as emit_move_insn isn't expected to handle a PLUS. */
 1526 tem
29c05e22 1527 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
9d4b544c 1528 NULL_RTX);
1c7e61a7 1529#endif
 1530 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1531
53800dbe 1532 size = GET_MODE_SIZE (Pmode);
 1533
 1534 /* Save the structure value address unless this is passed as an
 1535 "invisible" first argument. */
45550790 1536 if (struct_incoming_value)
53800dbe 1537 {
e513d163 1538 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1539 copy_to_reg (struct_incoming_value));
53800dbe 1540 size += GET_MODE_SIZE (Pmode);
 1541 }
 1542
 1543 /* Return the address of the block. */
 1544 return copy_addr_to_reg (XEXP (registers, 0));
 1545}
1546
 1547/* __builtin_apply_args returns block of memory allocated on
 1548 the stack into which is stored the arg pointer, structure
 1549 value address, static chain, and all the registers that might
 1550 possibly be used in performing a function call. The code is
 1551 moved to the start of the function so the incoming values are
 1552 saved. */
27d0c333 1553
53800dbe 1554static rtx
aecda0d6 1555expand_builtin_apply_args (void)
53800dbe 1556{
 1557 /* Don't do __builtin_apply_args more than once in a function.
 1558 Save the result of the first call and reuse it. */
 1559 if (apply_args_value != 0)
 1560 return apply_args_value;
 1561 {
 1562 /* When this function is called, it means that registers must be
 1563 saved on entry to this function. So we migrate the
 1564 call to the first insn of this function. */
 1565 rtx temp;
 1566 rtx seq;
 1567
 /* Build the save code in a detached sequence so it can be moved to
    the function entry below.  */
 1568 start_sequence ();
 1569 temp = expand_builtin_apply_args_1 ();
 1570 seq = get_insns ();
 1571 end_sequence ();
 1572
 1573 apply_args_value = temp;
 1574
31d3e01c 1575 /* Put the insns after the NOTE that starts the function.
 1576 If this is inside a start_sequence, make the outer-level insn
53800dbe 1577 chain current, so the code is placed at the start of the
0ef1a651 1578 function. If internal_arg_pointer is a non-virtual pseudo,
 1579 it needs to be placed after the function that initializes
 1580 that pseudo. */
53800dbe 1581 push_topmost_sequence ();
0ef1a651 1582 if (REG_P (crtl->args.internal_arg_pointer)
 1583 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
 1584 emit_insn_before (seq, parm_birth_insn);
 1585 else
 1586 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1587 pop_topmost_sequence ();
 1588 return temp;
 1589 }
 1590}
1591
1592/* Perform an untyped call and save the state required to perform an
1593 untyped return of whatever value was returned by the given function. */
1594
1595static rtx
aecda0d6 1596expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1597{
1598 int size, align, regno;
3754d046 1599 machine_mode mode;
1e0c0b35 1600 rtx incoming_args, result, reg, dest, src;
1601 rtx_call_insn *call_insn;
53800dbe 1602 rtx old_stack_level = 0;
1603 rtx call_fusage = 0;
6812c89e 1604 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1605
85d654dd 1606 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1607
53800dbe 1608 /* Create a block where the return registers can be saved. */
1609 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1610
53800dbe 1611 /* Fetch the arg pointer from the ARGUMENTS block. */
1612 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1613 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
53800dbe 1614#ifndef STACK_GROWS_DOWNWARD
ad99e708 1615 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1616 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1617#endif
1618
04a46d40 1619 /* Push a new argument block and copy the arguments. Do not allow
1620 the (potential) memcpy call below to interfere with our stack
1621 manipulations. */
53800dbe 1622 do_pending_stack_adjust ();
04a46d40 1623 NO_DEFER_POP;
53800dbe 1624
2358393e 1625 /* Save the stack with nonlocal if available. */
53800dbe 1626#ifdef HAVE_save_stack_nonlocal
1627 if (HAVE_save_stack_nonlocal)
e9c97615 1628 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1629 else
1630#endif
e9c97615 1631 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1632
59647703 1633 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1634 arguments to the outgoing arguments address. We can pass TRUE
1635 as the 4th argument because we just saved the stack pointer
1636 and will restore it right after the call. */
5be42b39 1637 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1638
1639 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1640 may have already set current_function_calls_alloca to true.
1641 current_function_calls_alloca won't be set if argsize is zero,
1642 so we have to guarantee need_drap is true here. */
1643 if (SUPPORTS_STACK_ALIGNMENT)
1644 crtl->need_drap = true;
1645
59647703 1646 dest = virtual_outgoing_args_rtx;
1647#ifndef STACK_GROWS_DOWNWARD
971ba038 1648 if (CONST_INT_P (argsize))
29c05e22 1649 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
59647703 1650 else
1651 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1652#endif
2a631e19 1653 dest = gen_rtx_MEM (BLKmode, dest);
1654 set_mem_align (dest, PARM_BOUNDARY);
1655 src = gen_rtx_MEM (BLKmode, incoming_args);
1656 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1657 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1658
1659 /* Refer to the argument block. */
1660 apply_args_size ();
1661 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1662 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1663
1664 /* Walk past the arg-pointer and structure value address. */
1665 size = GET_MODE_SIZE (Pmode);
45550790 1666 if (struct_value)
53800dbe 1667 size += GET_MODE_SIZE (Pmode);
1668
1669 /* Restore each of the registers previously saved. Make USE insns
1670 for each of these registers for use in making the call. */
1671 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1672 if ((mode = apply_args_mode[regno]) != VOIDmode)
1673 {
1674 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1675 if (size % align != 0)
1676 size = CEIL (size, align) * align;
1677 reg = gen_rtx_REG (mode, regno);
e513d163 1678 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1679 use_reg (&call_fusage, reg);
1680 size += GET_MODE_SIZE (mode);
1681 }
1682
1683 /* Restore the structure value address unless this is passed as an
1684 "invisible" first argument. */
1685 size = GET_MODE_SIZE (Pmode);
45550790 1686 if (struct_value)
53800dbe 1687 {
1688 rtx value = gen_reg_rtx (Pmode);
e513d163 1689 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1690 emit_move_insn (struct_value, value);
8ad4c111 1691 if (REG_P (struct_value))
45550790 1692 use_reg (&call_fusage, struct_value);
53800dbe 1693 size += GET_MODE_SIZE (Pmode);
1694 }
1695
1696 /* All arguments and registers used for the call are set up by now! */
82c7907c 1697 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1698
1699 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1700 and we don't want to load it into a register as an optimization,
1701 because prepare_call_address already did it if it should be done. */
1702 if (GET_CODE (function) != SYMBOL_REF)
1703 function = memory_address (FUNCTION_MODE, function);
1704
1705 /* Generate the actual call instruction and save the return value. */
1706#ifdef HAVE_untyped_call
1707 if (HAVE_untyped_call)
1708 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1709 result, result_vector (1, result)));
1710 else
1711#endif
1712#ifdef HAVE_call_value
1713 if (HAVE_call_value)
1714 {
1715 rtx valreg = 0;
1716
1717 /* Locate the unique return register. It is not possible to
1718 express a call that sets more than one return register using
1719 call_value; use untyped_call for that. In fact, untyped_call
1720 only needs to save the return registers in the given block. */
1721 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1722 if ((mode = apply_result_mode[regno]) != VOIDmode)
1723 {
64db345d 1724 gcc_assert (!valreg); /* HAVE_untyped_call required. */
7d3f6cc7 1725
53800dbe 1726 valreg = gen_rtx_REG (mode, regno);
1727 }
1728
2ed6c343 1729 emit_call_insn (GEN_CALL_VALUE (valreg,
53800dbe 1730 gen_rtx_MEM (FUNCTION_MODE, function),
1731 const0_rtx, NULL_RTX, const0_rtx));
1732
e513d163 1733 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1734 }
1735 else
1736#endif
64db345d 1737 gcc_unreachable ();
53800dbe 1738
d5f9786f 1739 /* Find the CALL insn we just emitted, and attach the register usage
1740 information. */
1741 call_insn = last_call_insn ();
1742 add_function_usage_to (call_insn, call_fusage);
53800dbe 1743
1744 /* Restore the stack. */
1745#ifdef HAVE_save_stack_nonlocal
1746 if (HAVE_save_stack_nonlocal)
e9c97615 1747 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1748 else
1749#endif
e9c97615 1750 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1751 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1752
04a46d40 1753 OK_DEFER_POP;
1754
53800dbe 1755 /* Return the address of the result block. */
85d654dd 1756 result = copy_addr_to_reg (XEXP (result, 0));
1757 return convert_memory_address (ptr_mode, result);
53800dbe 1758}
1759
/* Perform an untyped return: reload every possible return register from
   the block of memory at RESULT (as laid out by apply_result_size) and
   jump straight to the function's return point.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  /* RESULT arrives in ptr_mode; widen/narrow it to a real address.  */
  result = convert_memory_address (Pmode, result);

  /* apply_result_size () populates apply_result_mode[] as a side effect;
     the return value itself is not needed here.  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  /* If the target has a dedicated untyped_return pattern, let it do the
     whole job in one insn.  */
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned for MODE before loading from the block.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	/* Accumulate a USE for each restored register so the restores
	   are not deleted as dead before the return.  */
	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
1809
539a3a92 1810/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1811
539a3a92 1812static enum type_class
aecda0d6 1813type_to_class (tree type)
539a3a92 1814{
1815 switch (TREE_CODE (type))
1816 {
1817 case VOID_TYPE: return void_type_class;
1818 case INTEGER_TYPE: return integer_type_class;
539a3a92 1819 case ENUMERAL_TYPE: return enumeral_type_class;
1820 case BOOLEAN_TYPE: return boolean_type_class;
1821 case POINTER_TYPE: return pointer_type_class;
1822 case REFERENCE_TYPE: return reference_type_class;
1823 case OFFSET_TYPE: return offset_type_class;
1824 case REAL_TYPE: return real_type_class;
1825 case COMPLEX_TYPE: return complex_type_class;
1826 case FUNCTION_TYPE: return function_type_class;
1827 case METHOD_TYPE: return method_type_class;
1828 case RECORD_TYPE: return record_type_class;
1829 case UNION_TYPE:
1830 case QUAL_UNION_TYPE: return union_type_class;
1831 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1832 ? string_type_class : array_type_class);
539a3a92 1833 case LANG_TYPE: return lang_type_class;
1834 default: return no_type_class;
1835 }
1836}
bf8e3599 1837
c2f47e15 1838/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1839
53800dbe 1840static rtx
c2f47e15 1841expand_builtin_classify_type (tree exp)
53800dbe 1842{
c2f47e15 1843 if (call_expr_nargs (exp))
1844 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1845 return GEN_INT (no_type_class);
1846}
1847
/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  It expands to the
   three case labels and sets the locals FCODE/FCODEF/FCODEL (declared
   in the enclosing function) to the double/float/long-double codes.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix, for the
   reentrant variants (e.g. lgamma_r / lgammaf_r / lgammal_r).  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;
07976da7 1861
/* Return mathematic function equivalent to FN but operating directly on TYPE,
   if available.  If IMPLICIT is true use the implicit builtin declaration,
   otherwise use the explicit declaration.  If we can't do the conversion,
   return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  /* FCODE/FCODEF/FCODEL are assigned by the CASE_MATHFN macros below
     to the double/float/long-double codes of the recognized family;
     FCODE2 is the one selected to match TYPE.  */
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
    CASE_MATHFN (BUILT_IN_ACOS)
    CASE_MATHFN (BUILT_IN_ACOSH)
    CASE_MATHFN (BUILT_IN_ASIN)
    CASE_MATHFN (BUILT_IN_ASINH)
    CASE_MATHFN (BUILT_IN_ATAN)
    CASE_MATHFN (BUILT_IN_ATAN2)
    CASE_MATHFN (BUILT_IN_ATANH)
    CASE_MATHFN (BUILT_IN_CBRT)
    CASE_MATHFN (BUILT_IN_CEIL)
    CASE_MATHFN (BUILT_IN_CEXPI)
    CASE_MATHFN (BUILT_IN_COPYSIGN)
    CASE_MATHFN (BUILT_IN_COS)
    CASE_MATHFN (BUILT_IN_COSH)
    CASE_MATHFN (BUILT_IN_DREM)
    CASE_MATHFN (BUILT_IN_ERF)
    CASE_MATHFN (BUILT_IN_ERFC)
    CASE_MATHFN (BUILT_IN_EXP)
    CASE_MATHFN (BUILT_IN_EXP10)
    CASE_MATHFN (BUILT_IN_EXP2)
    CASE_MATHFN (BUILT_IN_EXPM1)
    CASE_MATHFN (BUILT_IN_FABS)
    CASE_MATHFN (BUILT_IN_FDIM)
    CASE_MATHFN (BUILT_IN_FLOOR)
    CASE_MATHFN (BUILT_IN_FMA)
    CASE_MATHFN (BUILT_IN_FMAX)
    CASE_MATHFN (BUILT_IN_FMIN)
    CASE_MATHFN (BUILT_IN_FMOD)
    CASE_MATHFN (BUILT_IN_FREXP)
    CASE_MATHFN (BUILT_IN_GAMMA)
    CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
    CASE_MATHFN (BUILT_IN_HUGE_VAL)
    CASE_MATHFN (BUILT_IN_HYPOT)
    CASE_MATHFN (BUILT_IN_ILOGB)
    CASE_MATHFN (BUILT_IN_ICEIL)
    CASE_MATHFN (BUILT_IN_IFLOOR)
    CASE_MATHFN (BUILT_IN_INF)
    CASE_MATHFN (BUILT_IN_IRINT)
    CASE_MATHFN (BUILT_IN_IROUND)
    CASE_MATHFN (BUILT_IN_ISINF)
    CASE_MATHFN (BUILT_IN_J0)
    CASE_MATHFN (BUILT_IN_J1)
    CASE_MATHFN (BUILT_IN_JN)
    CASE_MATHFN (BUILT_IN_LCEIL)
    CASE_MATHFN (BUILT_IN_LDEXP)
    CASE_MATHFN (BUILT_IN_LFLOOR)
    CASE_MATHFN (BUILT_IN_LGAMMA)
    CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
    CASE_MATHFN (BUILT_IN_LLCEIL)
    CASE_MATHFN (BUILT_IN_LLFLOOR)
    CASE_MATHFN (BUILT_IN_LLRINT)
    CASE_MATHFN (BUILT_IN_LLROUND)
    CASE_MATHFN (BUILT_IN_LOG)
    CASE_MATHFN (BUILT_IN_LOG10)
    CASE_MATHFN (BUILT_IN_LOG1P)
    CASE_MATHFN (BUILT_IN_LOG2)
    CASE_MATHFN (BUILT_IN_LOGB)
    CASE_MATHFN (BUILT_IN_LRINT)
    CASE_MATHFN (BUILT_IN_LROUND)
    CASE_MATHFN (BUILT_IN_MODF)
    CASE_MATHFN (BUILT_IN_NAN)
    CASE_MATHFN (BUILT_IN_NANS)
    CASE_MATHFN (BUILT_IN_NEARBYINT)
    CASE_MATHFN (BUILT_IN_NEXTAFTER)
    CASE_MATHFN (BUILT_IN_NEXTTOWARD)
    CASE_MATHFN (BUILT_IN_POW)
    CASE_MATHFN (BUILT_IN_POWI)
    CASE_MATHFN (BUILT_IN_POW10)
    CASE_MATHFN (BUILT_IN_REMAINDER)
    CASE_MATHFN (BUILT_IN_REMQUO)
    CASE_MATHFN (BUILT_IN_RINT)
    CASE_MATHFN (BUILT_IN_ROUND)
    CASE_MATHFN (BUILT_IN_SCALB)
    CASE_MATHFN (BUILT_IN_SCALBLN)
    CASE_MATHFN (BUILT_IN_SCALBN)
    CASE_MATHFN (BUILT_IN_SIGNBIT)
    CASE_MATHFN (BUILT_IN_SIGNIFICAND)
    CASE_MATHFN (BUILT_IN_SIN)
    CASE_MATHFN (BUILT_IN_SINCOS)
    CASE_MATHFN (BUILT_IN_SINH)
    CASE_MATHFN (BUILT_IN_SQRT)
    CASE_MATHFN (BUILT_IN_TAN)
    CASE_MATHFN (BUILT_IN_TANH)
    CASE_MATHFN (BUILT_IN_TGAMMA)
    CASE_MATHFN (BUILT_IN_TRUNC)
    CASE_MATHFN (BUILT_IN_Y0)
    CASE_MATHFN (BUILT_IN_Y1)
    CASE_MATHFN (BUILT_IN_YN)

    default:
      /* FN is not a recognized math builtin family.  */
      return NULL_TREE;
    }

  /* Pick the family member whose operand type matches TYPE.  Only the
     three standard float types are handled; anything else fails.  */
  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}
1980
c319d56a 1981/* Like mathfn_built_in_1(), but always use the implicit array. */
1982
1983tree
1984mathfn_built_in (tree type, enum built_in_function fn)
1985{
1986 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1987}
1988
/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  A self-comparison with EQ
     is true unless the value is NaN, so the jump to LAB skips the
     errno update for ordinary results.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL_RTX, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      /* Fall back to a plain store through the symbol "errno".  */
      rtx errno_rtx
	  = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}
2032
/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* Select the optab for this builtin and record whether the expansion
     must preserve errno semantics (checked via expand_errno_check).  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      /* sqrt only sets EDOM for negative arguments; skip the errno
	 check when the argument is provably non-negative.  */
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2158
/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  /* scalbn/scalbln/ldexp take an integer second argument; everything
     else handled here takes two real arguments.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
      /* FALLTHRU */
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      /* scalb is only equivalent to the optab when the float radix
	 is 2; otherwise leave it to the library.  */
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* The errno check expands to extra code; punt when optimizing for
     size.  */
  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2268
/* Expand a call to the builtin trinary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list, so each argument is expanded
     exactly once even if it has side effects.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call to the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}
2342
/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  /* First try the combined sincos optab for either function.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if sincos insn is available, otherwise fallback
     to sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  /* sincos produces two values; request only the one this
	     builtin needs and discard the other.  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}
2443
/* Given an interclass math builtin decl FNDECL and it's argument ARG
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* BUILTIN_OPTAB stays unknown_optab (null) for the no-optab cases
     above; only query the handler when one was selected.  */
  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}
2487
/* Expand a call to one of the builtin math functions that operate on
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      /* Keep the unstabilized argument so it can be restored if the
	 expansion attempt is abandoned below.  */
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      /* The insn could not be emitted; undo everything and fall back
	 to a normal call (return NULL_RTX to the caller).  */
      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}
2538
/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* sincos (x, &sin_result, &cos_result).  */
  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  /* Build MEM_REFs that dereference SINP and COSP using a may-alias
     pointer type, so the stores below get conservative alias info.  */
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
2592
/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, fall back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from sincos, cexp or if we have either of them.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      /* Select the sincos variant matching the cexpi variant's type.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      /* Allocate stack temporaries for sincos to store into, and build
	 tree-level pointers to them for the call's out arguments.  */
      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      /* cexpi (x) == cexp (0 + x*i), so build the complex argument.  */
      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type: cos (op2) is the real part,
     sin (op1) the imaginary part.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
2701
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  /* The CALL_EXPR callee is the address of the function decl.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
a65c4d64 2720
/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  /* Map the builtin to its conversion optab and, should the optab
     expansion fail, the plain floating-point rounding fallback.  */
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
2858
/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Only try inline expansion when errno need not be maintained;
     otherwise fall through to the library-call paths below.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call to the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      /* The fallback returns a long; convert to the mode the original
	 builtin was supposed to produce.  */
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}
2960
/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
   a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  /* Expand both operands and coerce them into the modes the libcall
     expects (the float mode and int mode respectively).  */
  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  /* powi has no side effects beyond its value, hence LCT_CONST.  */
  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
3002
/* Expand expression EXP which is a call to the strlen builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call, otherwise
   try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strlen (tree exp, rtx target,
		       machine_mode target_mode)
{
  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      struct expand_operand ops[4];
      rtx pat;
      tree len;
      tree src = CALL_EXPR_ARG (exp, 0);
      rtx src_reg;
      rtx_insn *before_strlen;
      machine_mode insn_mode = target_mode;
      enum insn_code icode = CODE_FOR_nothing;
      unsigned int align;

      /* If the length can be computed at compile-time, return it.  */
      len = c_strlen (src, 0);
      if (len)
	return expand_expr (len, target, target_mode, EXPAND_NORMAL);

      /* If the length can be computed at compile-time and is constant
	 integer, but there are side-effects in src, evaluate
	 src for side-effects, then return len.
	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
	 can be optimized into: i++; x = 3;  */
      len = c_strlen (src, 1);
      if (len && TREE_CODE (len) == INTEGER_CST)
	{
	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
	}

      align = get_pointer_alignment (src) / BITS_PER_UNIT;

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return NULL_RTX;

      /* Bail out if we can't compute strlen in the right mode.
	 Try successively wider modes until one has a strlen insn.  */
      while (insn_mode != VOIDmode)
	{
	  icode = optab_handler (strlen_optab, insn_mode);
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return NULL_RTX;

      /* Make a place to hold the source address.  We will not expand
	 the actual source until we are sure that the expansion will
	 not fail -- there are trees that cannot be expanded twice.  */
      src_reg = gen_reg_rtx (Pmode);

      /* Mark the beginning of the strlen sequence so we can emit the
	 source operand later.  */
      before_strlen = get_last_insn ();

      create_output_operand (&ops[0], target, insn_mode);
      create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
      create_integer_operand (&ops[2], 0);
      create_integer_operand (&ops[3], align);
      if (!maybe_expand_insn (icode, 4, ops))
	return NULL_RTX;

      /* Now that we are assured of success, expand the source.  */
      start_sequence ();
      pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
      if (pat != src_reg)
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (pat) != Pmode)
	    pat = convert_to_mode (Pmode, pat,
				   POINTERS_EXTEND_UNSIGNED);
#endif
	  emit_move_insn (src_reg, pat);
	}
      pat = get_insns ();
      end_sequence ();

      /* Splice the source-address computation in front of the strlen
	 insn we emitted above.  */
      if (before_strlen)
	emit_insn_after (pat, before_strlen);
      else
	emit_insn_before (pat, get_insns ());

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (ops[0].value) == target_mode)
	target = ops[0].value;
      else if (target != 0)
	convert_move (target, ops[0].value, 0);
      else
	target = convert_to_mode (target_mode, ops[0].value, 0);

      return target;
    }
}
3107
6840589f 3108/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3109 bytes from constant string DATA + OFFSET and return it as target
3110 constant. */
3111
3112static rtx
aecda0d6 3113builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3114 machine_mode mode)
6840589f 3115{
3116 const char *str = (const char *) data;
3117
64db345d 3118 gcc_assert (offset >= 0
3119 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3120 <= strlen (str) + 1));
6840589f 3121
3122 return c_readstr (str + offset, mode);
3123}
3124
/* LEN specifies the length of the block of memcpy/memset operation.
   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
   In some cases we can make very likely guess on max size, then we
   set it into PROBABLE_MAX_SIZE.  */

static void
determine_block_size (tree len, rtx len_rtx,
		      unsigned HOST_WIDE_INT *min_size,
		      unsigned HOST_WIDE_INT *max_size,
		      unsigned HOST_WIDE_INT *probable_max_size)
{
  if (CONST_INT_P (len_rtx))
    {
      /* Constant length: all three bounds collapse to the same value.  */
      *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
      return;
    }
  else
    {
      wide_int min, max;
      enum value_range_type range_type = VR_UNDEFINED;

      /* Determine bounds from the type.  */
      if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
      else
	*min_size = 0;
      if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
	*probable_max_size = *max_size
	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
      else
	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));

      /* Refine the type-derived bounds with SSA value-range info.  */
      if (TREE_CODE (len) == SSA_NAME)
	range_type = get_range_info (len, &min, &max);
      if (range_type == VR_RANGE)
	{
	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
	    *min_size = min.to_uhwi ();
	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
	    *probable_max_size = *max_size = max.to_uhwi ();
	}
      else if (range_type == VR_ANTI_RANGE)
	{
	  /* Anti range 0...N lets us determine minimal size as N+1.  */
	  if (min == 0)
	    {
	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
		*min_size = max.to_uhwi () + 1;
	    }
	  /* Code like

	     int n;
	     if (n < 100)
	       memcpy (a, b, n)

	     Produce anti range allowing negative values of N.  We still
	     can use the information and make a guess that N is not negative.
	     */
	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
	    *probable_max_size = min.to_uhwi () - 1;
	}
    }
  gcc_checking_assert (*max_size <=
		       (unsigned HOST_WIDE_INT)
		       GET_MODE_MASK (GET_MODE (len_rtx)));
}
3191
/* Helper function to do the actual work for expand_builtin_memcpy.
   DEST, SRC and LEN are the memcpy arguments; TARGET is the preferred
   result location and EXP the original call expression (used for
   tail-call info).  Returns the destination address RTX, or NULL_RTX
   if a normal library call should be emitted instead.  */

static rtx
expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
{
  const char *src_str;
  unsigned int src_align = get_pointer_alignment (src);
  unsigned int dest_align = get_pointer_alignment (dest);
  rtx dest_mem, src_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  /* If DEST is not a pointer type, call the normal function.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* If either SRC is not a pointer type, don't do this
     operation in-line.  */
  if (src_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to guess alignment and size.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;
  dest_mem = get_memory_rtx (dest, len);
  set_mem_align (dest_mem, dest_align);
  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  src_str = c_getstr (src);

  /* If SRC is a string constant and block move would be done
     by pieces, we can avoid loading the string from memory
     and only store the computed constants.  */
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
      && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
			      CONST_CAST (char *, src_str),
			      dest_align, false))
    {
      dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				  builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false, 0);
      dest_mem = force_operand (XEXP (dest_mem, 0), target);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  src_mem = get_memory_rtx (src, len);
  set_mem_align (src_mem, src_align);

  /* Copy word part most expediently.  */
  dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
				     CALL_EXPR_TAILCALL (exp)
				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				     expected_align, expected_size,
				     min_size, max_size, probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), target);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;
}
3266
c2f47e15 3267/* Expand a call EXP to the memcpy builtin.
3268 Return NULL_RTX if we failed, the caller should emit a normal call,
3b824fa6 3269 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3270 mode MODE if that's convenient). */
c2f47e15 3271
53800dbe 3272static rtx
a65c4d64 3273expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3274{
c2f47e15 3275 if (!validate_arglist (exp,
3276 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3277 return NULL_RTX;
53800dbe 3278 else
3279 {
c2f47e15 3280 tree dest = CALL_EXPR_ARG (exp, 0);
3281 tree src = CALL_EXPR_ARG (exp, 1);
3282 tree len = CALL_EXPR_ARG (exp, 2);
f21337ef 3283 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3284 }
3285}
6840589f 3286
/* Expand an instrumented call EXP to the memcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_memcpy_with_bounds (tree exp, rtx target)
{
  /* Instrumented calls carry pointer-bounds arguments interleaved
     with the ordinary memcpy arguments.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* Arguments 1 and 3 are the bounds of DEST and SRC respectively.  */
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
3317
c2f47e15 3318/* Expand a call EXP to the mempcpy builtin.
3319 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3320 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3321 mode MODE if that's convenient). If ENDP is 0 return the
3322 destination pointer, if ENDP is 1 return the end pointer ala
3323 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3324 stpcpy. */
647661c6 3325
3326static rtx
3754d046 3327expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3328{
c2f47e15 3329 if (!validate_arglist (exp,
3330 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3331 return NULL_RTX;
3332 else
3333 {
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 1);
3336 tree len = CALL_EXPR_ARG (exp, 2);
3337 return expand_builtin_mempcpy_args (dest, src, len,
f21337ef 3338 target, mode, /*endp=*/ 1,
3339 exp);
3340 }
3341}
3342
/* Expand an instrumented call EXP to the mempcpy builtin.
   Return NULL_RTX if we failed, the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
{
  /* Instrumented calls carry pointer-bounds arguments interleaved
     with the ordinary mempcpy arguments.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
			 INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;
  else
    {
      /* Arguments 1 and 3 are the bounds of DEST and SRC respectively.  */
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 2);
      tree len = CALL_EXPR_ARG (exp, 4);
      rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
					     mode, 1, exp);

      /* Return src bounds with the result.  */
      if (res)
	{
	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
			       expand_normal (CALL_EXPR_ARG (exp, 1)));
	  res = chkp_join_splitted_slot (res, bnd);
	}
      return res;
    }
}
3374
/* Helper function to do the actual work for expand_builtin_mempcpy.  The
   arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_mempcpy.  */

static rtx
expand_builtin_mempcpy_args (tree dest, tree src, tree len,
			     rtx target, machine_mode mode, int endp,
			     tree orig_exp)
{
  tree fndecl = get_callee_fndecl (orig_exp);

  /* If return value is ignored, transform mempcpy into memcpy.  The
     instrumented (CHKP) variant maps to its own memcpy counterpart.  */
  if (target == const0_rtx
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
      && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else if (target == const0_rtx
	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
      tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
					   dest, src, len);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      const char *src_str;
      unsigned int src_align = get_pointer_alignment (src);
      unsigned int dest_align = get_pointer_alignment (dest);
      rtx dest_mem, src_mem, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do this
	 operation in-line.  */
      if (dest_align == 0 || src_align == 0)
	return NULL_RTX;

      /* If LEN is not constant, call the normal function.  */
      if (! tree_fits_uhwi_p (len))
	return NULL_RTX;

      len_rtx = expand_normal (len);
      src_str = c_getstr (src);

      /* If SRC is a string constant and block move would be done
	 by pieces, we can avoid loading the string from memory
	 and only stored the computed constants.  */
      if (src_str
	  && CONST_INT_P (len_rtx)
	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
				  CONST_CAST (char *, src_str),
				  dest_align, false))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  /* ENDP selects which pointer store_by_pieces returns:
	     0 = dest, 1 = dest+len, 2 = dest+len-1.  */
	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
				      builtin_memcpy_read_str,
				      CONST_CAST (char *, src_str),
				      dest_align, false, endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* Otherwise, for a constant length, emit an inline move when the
	 target can do it in pieces at the known alignment.  */
      if (CONST_INT_P (len_rtx)
	  && can_move_by_pieces (INTVAL (len_rtx),
				 MIN (dest_align, src_align)))
	{
	  dest_mem = get_memory_rtx (dest, len);
	  set_mem_align (dest_mem, dest_align);
	  src_mem = get_memory_rtx (src, len);
	  set_mem_align (src_mem, src_align);
	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
				     MIN (dest_align, src_align), endp);
	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}

      /* No inline strategy applies; let the caller emit a library call.  */
      return NULL_RTX;
    }
}
3464
/* Targets without a movstr pattern get a disabled stub so the code
   below compiles unconditionally.  */
#ifndef HAVE_movstr
# define HAVE_movstr 0
# define CODE_FOR_movstr CODE_FOR_nothing
#endif

/* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
   we failed, the caller should emit a normal call, otherwise try to
   get the result in TARGET, if convenient.  If ENDP is 0 return the
   destination pointer, if ENDP is 1 return the end pointer ala
   mempcpy, and if ENDP is 2 return the end pointer minus one ala
   stpcpy.  */

static rtx
expand_movstr (tree dest, tree src, rtx target, int endp)
{
  struct expand_operand ops[3];
  rtx dest_mem;
  rtx src_mem;

  if (!HAVE_movstr)
    return NULL_RTX;

  dest_mem = get_memory_rtx (dest, NULL);
  src_mem = get_memory_rtx (src, NULL);
  if (!endp)
    {
      /* strcpy-style result: capture the original destination address
	 before the insn, since the pattern's output is the end pointer.  */
      target = force_reg (Pmode, XEXP (dest_mem, 0));
      dest_mem = replace_equiv_address (dest_mem, target);
    }

  create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
  create_fixed_operand (&ops[1], dest_mem);
  create_fixed_operand (&ops[2], src_mem);
  if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
    return NULL_RTX;

  if (endp && target != const0_rtx)
    {
      target = ops[0].value;
      /* movstr is supposed to set end to the address of the NUL
	 terminator.  If the caller requested a mempcpy-like return value,
	 adjust it.  */
      if (endp == 1)
	{
	  rtx tem = plus_constant (GET_MODE (target),
				   gen_lowpart (GET_MODE (target), target), 1);
	  emit_move_insn (target, force_operand (tem, NULL_RTX));
	}
    }
  return target;
}
3516
48e1416a 3517/* Expand expression EXP, which is a call to the strcpy builtin. Return
3518 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 3519 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3520 convenient). */
902de8ed 3521
53800dbe 3522static rtx
a65c4d64 3523expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3524{
c2f47e15 3525 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3526 {
3527 tree dest = CALL_EXPR_ARG (exp, 0);
3528 tree src = CALL_EXPR_ARG (exp, 1);
a65c4d64 3529 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3530 }
3531 return NULL_RTX;
3532}
3533
3534/* Helper function to do the actual work for expand_builtin_strcpy. The
3535 arguments to the builtin_strcpy call DEST and SRC are broken out
3536 so that this can also be called without constructing an actual CALL_EXPR.
3537 The other arguments and return value are the same as for
3538 expand_builtin_strcpy. */
3539
3540static rtx
a65c4d64 3541expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3542{
c2f47e15 3543 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3544}
3545
/* Expand a call EXP to the stpcpy builtin.
   Return NULL_RTX if we failed the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).  */

static rtx
expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
{
  tree dst, src;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  /* If return value is ignored, transform stpcpy into strcpy.  */
  if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
      return expand_expr (result, target, mode, EXPAND_NORMAL);
    }
  else
    {
      tree len, lenp1;
      rtx ret;

      /* Ensure we get an actual string whose length can be evaluated at
	 compile-time, not an expression containing a string.  This is
	 because the latter will potentially produce pessimized code
	 when used to produce the return value.  */
      if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
	return expand_movstr (dst, src, target, /*endp=*/2);

      /* First strategy: stpcpy (d, s) == mempcpy (d, s, strlen (s) + 1) - 1,
	 which ENDP == 2 encodes.  */
      lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
      ret = expand_builtin_mempcpy_args (dst, src, lenp1,
					 target, mode, /*endp=*/2,
					 exp);

      if (ret)
	return ret;

      /* Second strategy: expand as strcpy and add the known length to
	 the returned destination pointer.  */
      if (TREE_CODE (len) == INTEGER_CST)
	{
	  rtx len_rtx = expand_normal (len);

	  if (CONST_INT_P (len_rtx))
	    {
	      ret = expand_builtin_strcpy_args (dst, src, target);

	      if (ret)
		{
		  if (! target)
		    {
		      if (mode != VOIDmode)
			target = gen_reg_rtx (mode);
		      else
			target = gen_reg_rtx (GET_MODE (ret));
		    }
		  if (GET_MODE (target) != GET_MODE (ret))
		    ret = gen_lowpart (GET_MODE (target), ret);

		  /* dest + strlen (src) is the address of the NUL, which
		     is what stpcpy must return.  */
		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
		  gcc_assert (ret);

		  return target;
		}
	    }
	}

      /* Last resort: a movstr insn with ENDP == 2 semantics.  */
      return expand_movstr (dst, src, target, /*endp=*/2);
    }
}
3622
6840589f 3623/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3624 bytes from constant string DATA + OFFSET and return it as target
3625 constant. */
3626
09879952 3627rtx
aecda0d6 3628builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3629 machine_mode mode)
6840589f 3630{
3631 const char *str = (const char *) data;
3632
3633 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3634 return const0_rtx;
3635
3636 return c_readstr (str + offset, mode);
3637}
3638
/* Expand expression EXP, which is a call to the strncpy builtin.  Return
   NULL_RTX if we failed the caller should emit a normal call.  */

static rtx
expand_builtin_strncpy (tree exp, rtx target)
{
  location_t loc = EXPR_LOCATION (exp);

  if (validate_arglist (exp,
			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    {
      tree dest = CALL_EXPR_ARG (exp, 0);
      tree src = CALL_EXPR_ARG (exp, 1);
      tree len = CALL_EXPR_ARG (exp, 2);
      /* Length of the source string, if it is a known constant string.  */
      tree slen = c_strlen (src, 1);

      /* We must be passed a constant len and src parameter.  */
      if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
	return NULL_RTX;

      /* Include the terminating NUL in the source length.  */
      slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

      /* We're required to pad with trailing zeros if the requested
	 len is greater than strlen(s2)+1.  In that case try to
	 use store_by_pieces, if it fails, punt.  */
      if (tree_int_cst_lt (slen, len))
	{
	  unsigned int dest_align = get_pointer_alignment (dest);
	  const char *p = c_getstr (src);
	  rtx dest_mem;

	  /* builtin_strncpy_read_str supplies the zero padding past the
	     end of P, so a single store_by_pieces covers copy + pad.  */
	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
	      || !can_store_by_pieces (tree_to_uhwi (len),
				       builtin_strncpy_read_str,
				       CONST_CAST (char *, p),
				       dest_align, false))
	    return NULL_RTX;

	  dest_mem = get_memory_rtx (dest, len);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_strncpy_read_str,
			   CONST_CAST (char *, p), dest_align, false, 0);
	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
	  return dest_mem;
	}
      /* NOTE(review): when len <= slen no padding is needed; that case
	 falls through to the library call below — presumably handled
	 elsewhere (folding to memcpy); confirm against callers.  */
    }
  return NULL_RTX;
}
3688
ecc318ff 3689/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3690 bytes from constant string DATA + OFFSET and return it as target
3691 constant. */
3692
f656b751 3693rtx
aecda0d6 3694builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3695 machine_mode mode)
ecc318ff 3696{
3697 const char *c = (const char *) data;
364c0c59 3698 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3699
3700 memset (p, *c, GET_MODE_SIZE (mode));
3701
3702 return c_readstr (p, mode);
3703}
3704
a7ec6974 3705/* Callback routine for store_by_pieces. Return the RTL of a register
3706 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3707 char value given in the RTL register data. For example, if mode is
3708 4 bytes wide, return the RTL for 0x01010101*data. */
3709
3710static rtx
aecda0d6 3711builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3712 machine_mode mode)
a7ec6974 3713{
3714 rtx target, coeff;
3715 size_t size;
3716 char *p;
3717
3718 size = GET_MODE_SIZE (mode);
f0ce3b1f 3719 if (size == 1)
3720 return (rtx) data;
a7ec6974 3721
364c0c59 3722 p = XALLOCAVEC (char, size);
a7ec6974 3723 memset (p, 1, size);
3724 coeff = c_readstr (p, mode);
3725
f0ce3b1f 3726 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3727 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3728 return force_reg (mode, target);
3729}
3730
48e1416a 3731/* Expand expression EXP, which is a call to the memset builtin. Return
3732 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 3733 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3734 convenient). */
902de8ed 3735
53800dbe 3736static rtx
3754d046 3737expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3738{
c2f47e15 3739 if (!validate_arglist (exp,
3740 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3741 return NULL_RTX;
53800dbe 3742 else
3743 {
c2f47e15 3744 tree dest = CALL_EXPR_ARG (exp, 0);
3745 tree val = CALL_EXPR_ARG (exp, 1);
3746 tree len = CALL_EXPR_ARG (exp, 2);
3747 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3748 }
3749}
53800dbe 3750
f21337ef 3751/* Expand expression EXP, which is an instrumented call to the memset builtin.
3752 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3753 try to get the result in TARGET, if convenient (and in mode MODE if that's
3754 convenient). */
3755
3756static rtx
3757expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3758{
3759 if (!validate_arglist (exp,
3760 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3761 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3762 return NULL_RTX;
3763 else
3764 {
3765 tree dest = CALL_EXPR_ARG (exp, 0);
3766 tree val = CALL_EXPR_ARG (exp, 2);
3767 tree len = CALL_EXPR_ARG (exp, 3);
3768 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3769
3770 /* Return src bounds with the result. */
3771 if (res)
3772 {
17d388d8 3773 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3774 expand_normal (CALL_EXPR_ARG (exp, 1)));
3775 res = chkp_join_splitted_slot (res, bnd);
3776 }
3777 return res;
3778 }
3779}
3780
/* Helper function to do the actual work for expand_builtin_memset.  The
   arguments to the builtin_memset call DEST, VAL, and LEN are broken out
   so that this can also be called without constructing an actual CALL_EXPR.
   The other arguments and return value are the same as for
   expand_builtin_memset.  */

static rtx
expand_builtin_memset_args (tree dest, tree val, tree len,
			    rtx target, machine_mode mode, tree orig_exp)
{
  tree fndecl, fn;
  enum built_in_function fcode;
  machine_mode val_mode;
  char c;
  unsigned int dest_align;
  rtx dest_mem, dest_addr, len_rtx;
  HOST_WIDE_INT expected_size = -1;
  unsigned int expected_align = 0;
  unsigned HOST_WIDE_INT min_size;
  unsigned HOST_WIDE_INT max_size;
  unsigned HOST_WIDE_INT probable_max_size;

  dest_align = get_pointer_alignment (dest);

  /* If DEST is not a pointer type, don't do this operation in-line.  */
  if (dest_align == 0)
    return NULL_RTX;

  /* Use profile feedback, when available, to refine the expected
     alignment and block size.  */
  if (currently_expanding_gimple_stmt)
    stringop_block_profile (currently_expanding_gimple_stmt,
			    &expected_align, &expected_size);

  if (expected_align < dest_align)
    expected_align = dest_align;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Evaluate and ignore VAL in case it has side-effects.  */
      expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
      return expand_expr (dest, target, mode, EXPAND_NORMAL);
    }

  /* Stabilize the arguments in case we fail.  */
  dest = builtin_save_expr (dest);
  val = builtin_save_expr (val);
  len = builtin_save_expr (len);

  len_rtx = expand_normal (len);
  determine_block_size (len, len_rtx, &min_size, &max_size,
			&probable_max_size);
  dest_mem = get_memory_rtx (dest, len);
  val_mode = TYPE_MODE (unsigned_char_type_node);

  /* Case 1: the fill value is not known at compile time.  */
  if (TREE_CODE (val) != INTEGER_CST)
    {
      rtx val_rtx;

      val_rtx = expand_normal (val);
      val_rtx = convert_to_mode (val_mode, val_rtx, 0);

      /* Assume that we can memset by pieces if we can store
       * the coefficients by pieces (in the required modes).
       * We can't pass builtin_memset_gen_str as that emits RTL.  */
      c = 1;
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	{
	  val_rtx = force_reg (val_mode, val_rtx);
	  store_by_pieces (dest_mem, tree_to_uhwi (len),
			   builtin_memset_gen_str, val_rtx, dest_align,
			   true, 0);
	}
      else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  if (target_char_cast (val, &c))
    goto do_libcall;

  /* Case 2: a known non-zero fill byte.  */
  if (c)
    {
      if (tree_fits_uhwi_p (len)
	  && can_store_by_pieces (tree_to_uhwi (len),
				  builtin_memset_read_str, &c, dest_align,
				  true))
	store_by_pieces (dest_mem, tree_to_uhwi (len),
			 builtin_memset_read_str, &c, dest_align, true, 0);
      else if (!set_storage_via_setmem (dest_mem, len_rtx,
					gen_int_mode (c, val_mode),
					dest_align, expected_align,
					expected_size, min_size, max_size,
					probable_max_size))
	goto do_libcall;

      dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_mem = convert_memory_address (ptr_mode, dest_mem);
      return dest_mem;
    }

  /* Case 3: fill byte is zero — use the block-clear expander.  */
  set_mem_align (dest_mem, dest_align);
  dest_addr = clear_storage_hints (dest_mem, len_rtx,
				   CALL_EXPR_TAILCALL (orig_exp)
				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
				   expected_align, expected_size,
				   min_size, max_size,
				   probable_max_size);

  if (dest_addr == 0)
    {
      dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
      dest_addr = convert_memory_address (ptr_mode, dest_addr);
    }

  return dest_addr;

  /* Fallback: emit an explicit library call, preserving the original
     callee (memset, its CHKP variant, or bzero) and tail-call flag.  */
 do_libcall:
  fndecl = get_callee_fndecl (orig_exp);
  fcode = DECL_FUNCTION_CODE (fndecl);
  if (fcode == BUILT_IN_MEMSET
      || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
				dest, val, len);
  else if (fcode == BUILT_IN_BZERO)
    fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
				dest, len);
  else
    gcc_unreachable ();
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
  return expand_call (fn, target, target == const0_rtx);
}
3922
48e1416a 3923/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 3924 NULL_RTX if we failed the caller should emit a normal call. */
27d0c333 3925
ffc83088 3926static rtx
0b25db21 3927expand_builtin_bzero (tree exp)
ffc83088 3928{
c2f47e15 3929 tree dest, size;
389dd41b 3930 location_t loc = EXPR_LOCATION (exp);
ffc83088 3931
c2f47e15 3932 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 3933 return NULL_RTX;
ffc83088 3934
c2f47e15 3935 dest = CALL_EXPR_ARG (exp, 0);
3936 size = CALL_EXPR_ARG (exp, 1);
bf8e3599 3937
7369e7ba 3938 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 3939 memset(ptr x, int 0, size_t y). This is done this way
3940 so that if it isn't expanded inline, we fallback to
3941 calling bzero instead of memset. */
bf8e3599 3942
c2f47e15 3943 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 3944 fold_convert_loc (loc,
3945 size_type_node, size),
c2f47e15 3946 const0_rtx, VOIDmode, exp);
ffc83088 3947}
3948
/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in mode
   MODE, if that's convenient).  */

static rtx
expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
		       ATTRIBUTE_UNUSED machine_mode mode)
{
  location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);

  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
     implementing memcmp because it will stop if it encounters two
     zero bytes.  */
#if defined HAVE_cmpmemsi
  {
    rtx arg1_rtx, arg2_rtx, arg3_rtx;
    rtx result;
    rtx insn;
    tree arg1 = CALL_EXPR_ARG (exp, 0);
    tree arg2 = CALL_EXPR_ARG (exp, 1);
    tree len = CALL_EXPR_ARG (exp, 2);

    unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
    unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
    machine_mode insn_mode;

    if (HAVE_cmpmemsi)
      insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
    else
      return NULL_RTX;

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return NULL_RTX;

    /* Make a place to write the result of the instruction.  Reuse TARGET
       only when it is already a pseudo of the insn's output mode.  */
    result = target;
    if (! (result != 0
	   && REG_P (result) && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    arg1_rtx = get_memory_rtx (arg1, len);
    arg2_rtx = get_memory_rtx (arg2, len);
    arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

    /* Set MEM_SIZE as appropriate.  */
    if (CONST_INT_P (arg3_rtx))
      {
	set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
	set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
      }

    if (HAVE_cmpmemsi)
      insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
			   GEN_INT (MIN (arg1_align, arg2_align)));
    else
      gcc_unreachable ();

    if (insn)
      emit_insn (insn);
    else
      /* The pattern declined to match; fall back to a memcmp libcall.  */
      emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
			       TYPE_MODE (integer_type_node), 3,
			       XEXP (arg1_rtx, 0), Pmode,
			       XEXP (arg2_rtx, 0), Pmode,
			       convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
						TYPE_UNSIGNED (sizetype)),
			       TYPE_MODE (sizetype));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
#endif /* HAVE_cmpmemsi.  */

  return NULL_RTX;
}
4040
/* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
   if we failed the caller should emit a normal call, otherwise try to get
   the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
  if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
      || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
    {
      rtx arg1_rtx, arg2_rtx;
      rtx result, insn = NULL_RTX;
      tree fndecl, fn;
      tree arg1 = CALL_EXPR_ARG (exp, 0);
      tree arg2 = CALL_EXPR_ARG (exp, 1);

      unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
      unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

      /* If we don't have POINTER_TYPE, call the function.  */
      if (arg1_align == 0 || arg2_align == 0)
	return NULL_RTX;

      /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
      arg1 = builtin_save_expr (arg1);
      arg2 = builtin_save_expr (arg2);

      arg1_rtx = get_memory_rtx (arg1, NULL);
      arg2_rtx = get_memory_rtx (arg2, NULL);

#ifdef HAVE_cmpstrsi
      /* Try to call cmpstrsi.  */
      if (HAVE_cmpstrsi)
	{
	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
			       GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif
#ifdef HAVE_cmpstrnsi
      /* Try to determine at least one length and call cmpstrnsi.  */
      if (!insn && HAVE_cmpstrnsi)
	{
	  tree len;
	  rtx arg3_rtx;

	  machine_mode insn_mode
	    = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
	  tree len1 = c_strlen (arg1, 1);
	  tree len2 = c_strlen (arg2, 1);

	  /* Include the terminating NUL in each known length.  */
	  if (len1)
	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
	  if (len2)
	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

	  /* If we don't have a constant length for the first, use the length
	     of the second, if we know it.  We don't require a constant for
	     this case; some cost analysis could be done if both are available
	     but neither is constant.  For now, assume they're equally cheap,
	     unless one has side effects.  If both strings have constant lengths,
	     use the smaller.  */

	  if (!len1)
	    len = len2;
	  else if (!len2)
	    len = len1;
	  else if (TREE_SIDE_EFFECTS (len1))
	    len = len2;
	  else if (TREE_SIDE_EFFECTS (len2))
	    len = len1;
	  else if (TREE_CODE (len1) != INTEGER_CST)
	    len = len2;
	  else if (TREE_CODE (len2) != INTEGER_CST)
	    len = len1;
	  else if (tree_int_cst_lt (len1, len2))
	    len = len1;
	  else
	    len = len2;

	  /* If both arguments have side effects, we cannot optimize.  */
	  if (!len || TREE_SIDE_EFFECTS (len))
	    goto do_libcall;

	  arg3_rtx = expand_normal (len);

	  /* Make a place to write the result of the instruction.  */
	  result = target;
	  if (! (result != 0
		 && REG_P (result) && GET_MODE (result) == insn_mode
		 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	    result = gen_reg_rtx (insn_mode);

	  insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
				GEN_INT (MIN (arg1_align, arg2_align)));
	}
#endif

      if (insn)
	{
	  machine_mode mode;
	  emit_insn (insn);

	  /* Return the value in the proper mode for this function.  */
	  mode = TYPE_MODE (TREE_TYPE (exp));
	  if (GET_MODE (result) == mode)
	    return result;
	  if (target == 0)
	    return convert_to_mode (mode, result, 0);
	  convert_move (target, result, 0);
	  return target;
	}

      /* Expand the library call ourselves using a stabilized argument
	 list to avoid re-evaluating the function's arguments twice.  */
#ifdef HAVE_cmpstrnsi
    do_libcall:
#endif
      fndecl = get_callee_fndecl (exp);
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_call (fn, target, target == const0_rtx);
    }
#endif
  return NULL_RTX;
}
53800dbe 4181
48e1416a 4182/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 4183 NULL_RTX if we failed the caller should emit a normal call, otherwise try to get
ed09096d 4184 the result in TARGET, if convenient. */
27d0c333 4185
ed09096d 4186static rtx
a65c4d64 4187expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4188 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4189{
a65c4d64 4190 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 4191
c2f47e15 4192 if (!validate_arglist (exp,
4193 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4194 return NULL_RTX;
ed09096d 4195
6e34e617 4196 /* If c_strlen can determine an expression for one of the string
6ac5504b 4197 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4198 using length MIN(strlen(string)+1, arg3). */
6ac5504b 4199#ifdef HAVE_cmpstrnsi
4200 if (HAVE_cmpstrnsi)
7a3f89b5 4201 {
4202 tree len, len1, len2;
4203 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4204 rtx result, insn;
0b25db21 4205 tree fndecl, fn;
c2f47e15 4206 tree arg1 = CALL_EXPR_ARG (exp, 0);
4207 tree arg2 = CALL_EXPR_ARG (exp, 1);
4208 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 4209
957d0361 4210 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4211 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3754d046 4212 machine_mode insn_mode
6ac5504b 4213 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
bf8e3599 4214
681fab1e 4215 len1 = c_strlen (arg1, 1);
4216 len2 = c_strlen (arg2, 1);
7a3f89b5 4217
4218 if (len1)
389dd41b 4219 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 4220 if (len2)
389dd41b 4221 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 4222
4223 /* If we don't have a constant length for the first, use the length
4224 of the second, if we know it. We don't require a constant for
4225 this case; some cost analysis could be done if both are available
4226 but neither is constant. For now, assume they're equally cheap,
4227 unless one has side effects. If both strings have constant lengths,
4228 use the smaller. */
4229
4230 if (!len1)
4231 len = len2;
4232 else if (!len2)
4233 len = len1;
4234 else if (TREE_SIDE_EFFECTS (len1))
4235 len = len2;
4236 else if (TREE_SIDE_EFFECTS (len2))
4237 len = len1;
4238 else if (TREE_CODE (len1) != INTEGER_CST)
4239 len = len2;
4240 else if (TREE_CODE (len2) != INTEGER_CST)
4241 len = len1;
4242 else if (tree_int_cst_lt (len1, len2))
4243 len = len1;
4244 else
4245 len = len2;
6e34e617 4246
7a3f89b5 4247 /* If both arguments have side effects, we cannot optimize. */
4248 if (!len || TREE_SIDE_EFFECTS (len))
c2f47e15 4249 return NULL_RTX;
bf8e3599 4250
7a3f89b5 4251 /* The actual new length parameter is MIN(len,arg3). */
389dd41b 4252 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4253 fold_convert_loc (loc, TREE_TYPE (len), arg3));
7a3f89b5 4254
4255 /* If we don't have POINTER_TYPE, call the function. */
4256 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4257 return NULL_RTX;
7a3f89b5 4258
4259 /* Make a place to write the result of the instruction. */
4260 result = target;
4261 if (! (result != 0
8ad4c111 4262 && REG_P (result) && GET_MODE (result) == insn_mode
7a3f89b5 4263 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4264 result = gen_reg_rtx (insn_mode);
4265
a65c4d64 4266 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4267 arg1 = builtin_save_expr (arg1);
4268 arg2 = builtin_save_expr (arg2);
4269 len = builtin_save_expr (len);
27d0c333 4270
a65c4d64 4271 arg1_rtx = get_memory_rtx (arg1, len);
4272 arg2_rtx = get_memory_rtx (arg2, len);
4273 arg3_rtx = expand_normal (len);
4274 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4275 GEN_INT (MIN (arg1_align, arg2_align)));
4276 if (insn)
4277 {
4278 emit_insn (insn);
49f0327b 4279
a65c4d64 4280 /* Return the value in the proper mode for this function. */
4281 mode = TYPE_MODE (TREE_TYPE (exp));
4282 if (GET_MODE (result) == mode)
4283 return result;
4284 if (target == 0)
4285 return convert_to_mode (mode, result, 0);
4286 convert_move (target, result, 0);
4287 return target;
4288 }
27d0c333 4289
a65c4d64 4290 /* Expand the library call ourselves using a stabilized argument
4291 list to avoid re-evaluating the function's arguments twice. */
4292 fndecl = get_callee_fndecl (exp);
0568e9c1 4293 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4294 arg1, arg2, len);
a65c4d64 4295 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4296 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4297 return expand_call (fn, target, target == const0_rtx);
4298 }
4299#endif
c2f47e15 4300 return NULL_RTX;
49f0327b 4301}
4302
a66c9326 4303/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4304 if that's convenient. */
902de8ed 4305
a66c9326 4306rtx
aecda0d6 4307expand_builtin_saveregs (void)
53800dbe 4308{
1e0c0b35 4309 rtx val;
4310 rtx_insn *seq;
53800dbe 4311
4312 /* Don't do __builtin_saveregs more than once in a function.
4313 Save the result of the first call and reuse it. */
4314 if (saveregs_value != 0)
4315 return saveregs_value;
53800dbe 4316
a66c9326 4317 /* When this function is called, it means that registers must be
4318 saved on entry to this function. So we migrate the call to the
4319 first insn of this function. */
4320
4321 start_sequence ();
53800dbe 4322
a66c9326 4323 /* Do whatever the machine needs done in this case. */
45550790 4324 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4325
a66c9326 4326 seq = get_insns ();
4327 end_sequence ();
53800dbe 4328
a66c9326 4329 saveregs_value = val;
53800dbe 4330
31d3e01c 4331 /* Put the insns after the NOTE that starts the function. If this
4332 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4333 the code is placed at the start of the function. */
4334 push_topmost_sequence ();
0ec80471 4335 emit_insn_after (seq, entry_of_function ());
a66c9326 4336 pop_topmost_sequence ();
4337
4338 return val;
53800dbe 4339}
4340
79012a9d 4341/* Expand a call to __builtin_next_arg. */
27d0c333 4342
53800dbe 4343static rtx
79012a9d 4344expand_builtin_next_arg (void)
53800dbe 4345{
79012a9d 4346 /* Checking arguments is already done in fold_builtin_next_arg
4347 that must be called before this function. */
940ddc5c 4348 return expand_binop (ptr_mode, add_optab,
abe32cce 4349 crtl->args.internal_arg_pointer,
4350 crtl->args.arg_offset_rtx,
53800dbe 4351 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4352}
4353
a66c9326 4354/* Make it easier for the backends by protecting the valist argument
4355 from multiple evaluations. */
4356
4357static tree
389dd41b 4358stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4359{
5f57a8b1 4360 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4361
182cf5a9 4362 /* The current way of determining the type of valist is completely
4363 bogus. We should have the information on the va builtin instead. */
4364 if (!vatype)
4365 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4366
4367 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4368 {
2d47cc32 4369 if (TREE_SIDE_EFFECTS (valist))
4370 valist = save_expr (valist);
11a61dea 4371
2d47cc32 4372 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4373 vatype, but it's possible we've actually been given an array
4374 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4375 So fix it. */
4376 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4377 {
5f57a8b1 4378 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4379 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4380 }
a66c9326 4381 }
11a61dea 4382 else
a66c9326 4383 {
182cf5a9 4384 tree pt = build_pointer_type (vatype);
11a61dea 4385
2d47cc32 4386 if (! needs_lvalue)
4387 {
11a61dea 4388 if (! TREE_SIDE_EFFECTS (valist))
4389 return valist;
bf8e3599 4390
389dd41b 4391 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4392 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4393 }
2d47cc32 4394
11a61dea 4395 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4396 valist = save_expr (valist);
182cf5a9 4397 valist = fold_build2_loc (loc, MEM_REF,
4398 vatype, valist, build_int_cst (pt, 0));
a66c9326 4399 }
4400
4401 return valist;
4402}
4403
2e15d750 4404/* The "standard" definition of va_list is void*. */
4405
4406tree
4407std_build_builtin_va_list (void)
4408{
4409 return ptr_type_node;
4410}
4411
5f57a8b1 4412/* The "standard" abi va_list is va_list_type_node. */
4413
4414tree
4415std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4416{
4417 return va_list_type_node;
4418}
4419
4420/* The "standard" type of va_list is va_list_type_node. */
4421
4422tree
4423std_canonical_va_list_type (tree type)
4424{
4425 tree wtype, htype;
4426
4427 if (INDIRECT_REF_P (type))
4428 type = TREE_TYPE (type);
9af5ce0c 4429 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
5f57a8b1 4430 type = TREE_TYPE (type);
5f57a8b1 4431 wtype = va_list_type_node;
4432 htype = type;
7b36f9ab 4433 /* Treat structure va_list types. */
4434 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4435 htype = TREE_TYPE (htype);
4436 else if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4437 {
4438 /* If va_list is an array type, the argument may have decayed
4439 to a pointer type, e.g. by being passed to another function.
4440 In that case, unwrap both types so that we can compare the
4441 underlying records. */
4442 if (TREE_CODE (htype) == ARRAY_TYPE
4443 || POINTER_TYPE_P (htype))
4444 {
4445 wtype = TREE_TYPE (wtype);
4446 htype = TREE_TYPE (htype);
4447 }
4448 }
4449 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4450 return va_list_type_node;
4451
4452 return NULL_TREE;
4453}
4454
a66c9326 4455/* The "standard" implementation of va_start: just assign `nextarg' to
4456 the variable. */
27d0c333 4457
a66c9326 4458void
aecda0d6 4459std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4460{
f03c17bc 4461 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4462 convert_move (va_r, nextarg, 0);
058a1b7a 4463
4464 /* We do not have any valid bounds for the pointer, so
4465 just store zero bounds for it. */
4466 if (chkp_function_instrumented_p (current_function_decl))
4467 chkp_expand_bounds_reset_for_mem (valist,
4468 make_tree (TREE_TYPE (valist),
4469 nextarg));
a66c9326 4470}
4471
c2f47e15 4472/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4473
a66c9326 4474static rtx
c2f47e15 4475expand_builtin_va_start (tree exp)
a66c9326 4476{
4477 rtx nextarg;
c2f47e15 4478 tree valist;
389dd41b 4479 location_t loc = EXPR_LOCATION (exp);
a66c9326 4480
c2f47e15 4481 if (call_expr_nargs (exp) < 2)
cb166087 4482 {
389dd41b 4483 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4484 return const0_rtx;
4485 }
a66c9326 4486
c2f47e15 4487 if (fold_builtin_next_arg (exp, true))
79012a9d 4488 return const0_rtx;
7c2f0500 4489
79012a9d 4490 nextarg = expand_builtin_next_arg ();
389dd41b 4491 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4492
8a58ed0a 4493 if (targetm.expand_builtin_va_start)
4494 targetm.expand_builtin_va_start (valist, nextarg);
4495 else
4496 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4497
4498 return const0_rtx;
4499}
4500
c2f47e15 4501/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4502
a66c9326 4503static rtx
c2f47e15 4504expand_builtin_va_end (tree exp)
a66c9326 4505{
c2f47e15 4506 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4507
8a15c04a 4508 /* Evaluate for side effects, if needed. I hate macros that don't
4509 do that. */
4510 if (TREE_SIDE_EFFECTS (valist))
4511 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4512
4513 return const0_rtx;
4514}
4515
c2f47e15 4516/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4517 builtin rather than just as an assignment in stdarg.h because of the
4518 nastiness of array-type va_list types. */
f7c44134 4519
a66c9326 4520static rtx
c2f47e15 4521expand_builtin_va_copy (tree exp)
a66c9326 4522{
4523 tree dst, src, t;
389dd41b 4524 location_t loc = EXPR_LOCATION (exp);
a66c9326 4525
c2f47e15 4526 dst = CALL_EXPR_ARG (exp, 0);
4527 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4528
389dd41b 4529 dst = stabilize_va_list_loc (loc, dst, 1);
4530 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4531
5f57a8b1 4532 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4533
4534 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4535 {
5f57a8b1 4536 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4537 TREE_SIDE_EFFECTS (t) = 1;
4538 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4539 }
4540 else
4541 {
11a61dea 4542 rtx dstb, srcb, size;
4543
4544 /* Evaluate to pointers. */
4545 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4546 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4547 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4548 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4549
85d654dd 4550 dstb = convert_memory_address (Pmode, dstb);
4551 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4552
11a61dea 4553 /* "Dereference" to BLKmode memories. */
4554 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4555 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4556 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4557 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4558 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4559 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4560
4561 /* Copy. */
0378dbdc 4562 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4563 }
4564
4565 return const0_rtx;
4566}
4567
53800dbe 4568/* Expand a call to one of the builtin functions __builtin_frame_address or
4569 __builtin_return_address. */
27d0c333 4570
53800dbe 4571static rtx
c2f47e15 4572expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4573{
53800dbe 4574 /* The argument must be a nonnegative integer constant.
4575 It counts the number of frames to scan up the stack.
4576 The value is the return address saved in that frame. */
c2f47e15 4577 if (call_expr_nargs (exp) == 0)
53800dbe 4578 /* Warning about missing arg was already issued. */
4579 return const0_rtx;
e913b5cd 4580 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4581 {
4582 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
07e3a3d2 4583 error ("invalid argument to %<__builtin_frame_address%>");
53800dbe 4584 else
07e3a3d2 4585 error ("invalid argument to %<__builtin_return_address%>");
53800dbe 4586 return const0_rtx;
4587 }
4588 else
4589 {
27d0c333 4590 rtx tem
4591 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
e913b5cd 4592 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
53800dbe 4593
4594 /* Some ports cannot access arbitrary stack frames. */
4595 if (tem == NULL)
4596 {
4597 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
c3ceba8e 4598 warning (0, "unsupported argument to %<__builtin_frame_address%>");
53800dbe 4599 else
c3ceba8e 4600 warning (0, "unsupported argument to %<__builtin_return_address%>");
53800dbe 4601 return const0_rtx;
4602 }
4603
4604 /* For __builtin_frame_address, return what we've got. */
4605 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4606 return tem;
4607
8ad4c111 4608 if (!REG_P (tem)
53800dbe 4609 && ! CONSTANT_P (tem))
99182918 4610 tem = copy_addr_to_reg (tem);
53800dbe 4611 return tem;
4612 }
4613}
4614
990495a7 4615/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5be42b39 4616 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4617 is the same as for allocate_dynamic_stack_space. */
15c6cf6b 4618
53800dbe 4619static rtx
5be42b39 4620expand_builtin_alloca (tree exp, bool cannot_accumulate)
53800dbe 4621{
4622 rtx op0;
15c6cf6b 4623 rtx result;
581bf1c2 4624 bool valid_arglist;
4625 unsigned int align;
4626 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4627 == BUILT_IN_ALLOCA_WITH_ALIGN);
53800dbe 4628
581bf1c2 4629 valid_arglist
4630 = (alloca_with_align
4631 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4632 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4633
4634 if (!valid_arglist)
c2f47e15 4635 return NULL_RTX;
53800dbe 4636
4637 /* Compute the argument. */
c2f47e15 4638 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4639
581bf1c2 4640 /* Compute the alignment. */
4641 align = (alloca_with_align
f9ae6f95 4642 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4643 : BIGGEST_ALIGNMENT);
4644
53800dbe 4645 /* Allocate the desired space. */
581bf1c2 4646 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
85d654dd 4647 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4648
4649 return result;
53800dbe 4650}
4651
74bdbe96 4652/* Expand a call to bswap builtin in EXP.
4653 Return NULL_RTX if a normal call should be emitted rather than expanding the
4654 function in-line. If convenient, the result should be placed in TARGET.
4655 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4656
4657static rtx
3754d046 4658expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4659 rtx subtarget)
42791117 4660{
42791117 4661 tree arg;
4662 rtx op0;
4663
c2f47e15 4664 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4665 return NULL_RTX;
42791117 4666
c2f47e15 4667 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4668 op0 = expand_expr (arg,
4669 subtarget && GET_MODE (subtarget) == target_mode
4670 ? subtarget : NULL_RTX,
4671 target_mode, EXPAND_NORMAL);
4672 if (GET_MODE (op0) != target_mode)
4673 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4674
74bdbe96 4675 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4676
4677 gcc_assert (target);
4678
74bdbe96 4679 return convert_to_mode (target_mode, target, 1);
42791117 4680}
4681
c2f47e15 4682/* Expand a call to a unary builtin in EXP.
4683 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4684 function in-line. If convenient, the result should be placed in TARGET.
4685 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4686
53800dbe 4687static rtx
3754d046 4688expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4689 rtx subtarget, optab op_optab)
53800dbe 4690{
4691 rtx op0;
c2f47e15 4692
4693 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4694 return NULL_RTX;
53800dbe 4695
4696 /* Compute the argument. */
f97eea22 4697 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4698 (subtarget
4699 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4700 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4701 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4702 /* Compute op, into TARGET if possible.
53800dbe 4703 Set TARGET to wherever the result comes back. */
c2f47e15 4704 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4705 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4706 gcc_assert (target);
7d3f6cc7 4707
efb070c8 4708 return convert_to_mode (target_mode, target, 0);
53800dbe 4709}
89cfe6e5 4710
48e1416a 4711/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4712 as the builtin_expect semantic should've been already executed by
4713 tree branch prediction pass. */
89cfe6e5 4714
4715static rtx
c2f47e15 4716expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4717{
1e4adcfc 4718 tree arg;
89cfe6e5 4719
c2f47e15 4720 if (call_expr_nargs (exp) < 2)
89cfe6e5 4721 return const0_rtx;
c2f47e15 4722 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4723
c2f47e15 4724 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4725 /* When guessing was done, the hints should be already stripped away. */
07311427 4726 gcc_assert (!flag_guess_branch_prob
852f689e 4727 || optimize == 0 || seen_error ());
89cfe6e5 4728 return target;
4729}
689df48e 4730
fca0886c 4731/* Expand a call to __builtin_assume_aligned. We just return our first
4732 argument as the builtin_assume_aligned semantic should've been already
4733 executed by CCP. */
4734
4735static rtx
4736expand_builtin_assume_aligned (tree exp, rtx target)
4737{
4738 if (call_expr_nargs (exp) < 2)
4739 return const0_rtx;
4740 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4741 EXPAND_NORMAL);
4742 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4743 && (call_expr_nargs (exp) < 3
4744 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4745 return target;
4746}
4747
c22de3f0 4748void
aecda0d6 4749expand_builtin_trap (void)
a0ef1725 4750{
4751#ifdef HAVE_trap
4752 if (HAVE_trap)
f73960eb 4753 {
4754 rtx insn = emit_insn (gen_trap ());
4755 /* For trap insns when not accumulating outgoing args force
4756 REG_ARGS_SIZE note to prevent crossjumping of calls with
4757 different args sizes. */
4758 if (!ACCUMULATE_OUTGOING_ARGS)
4759 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4760 }
a0ef1725 4761 else
4762#endif
4763 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4764 emit_barrier ();
4765}
78a74442 4766
/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow does never reach __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}
4777
c2f47e15 4778/* Expand EXP, a call to fabs, fabsf or fabsl.
4779 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 4780 the function inline. If convenient, the result should be placed
4781 in TARGET. SUBTARGET may be used as the target for computing
4782 the operand. */
4783
4784static rtx
c2f47e15 4785expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 4786{
3754d046 4787 machine_mode mode;
78a74442 4788 tree arg;
4789 rtx op0;
4790
c2f47e15 4791 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4792 return NULL_RTX;
78a74442 4793
c2f47e15 4794 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 4795 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 4796 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 4797 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 4798 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4799}
4800
c2f47e15 4801/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 4802 Return NULL is a normal call should be emitted rather than expanding the
4803 function inline. If convenient, the result should be placed in TARGET.
4804 SUBTARGET may be used as the target for computing the operand. */
4805
4806static rtx
c2f47e15 4807expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 4808{
4809 rtx op0, op1;
4810 tree arg;
4811
c2f47e15 4812 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4813 return NULL_RTX;
270436f3 4814
c2f47e15 4815 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 4816 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 4817
c2f47e15 4818 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 4819 op1 = expand_normal (arg);
270436f3 4820
4821 return expand_copysign (op0, op1, target);
4822}
4823
ac8fb6db 4824/* Expand a call to __builtin___clear_cache. */
4825
4826static rtx
4827expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4828{
4829#ifndef HAVE_clear_cache
4830#ifdef CLEAR_INSN_CACHE
4831 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4832 does something. Just do the default expansion to a call to
4833 __clear_cache(). */
4834 return NULL_RTX;
4835#else
4836 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4837 does nothing. There is no need to call it. Do nothing. */
4838 return const0_rtx;
4839#endif /* CLEAR_INSN_CACHE */
4840#else
4841 /* We have a "clear_cache" insn, and it will handle everything. */
4842 tree begin, end;
4843 rtx begin_rtx, end_rtx;
ac8fb6db 4844
4845 /* We must not expand to a library call. If we did, any
4846 fallback library function in libgcc that might contain a call to
4847 __builtin___clear_cache() would recurse infinitely. */
4848 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4849 {
4850 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4851 return const0_rtx;
4852 }
4853
4854 if (HAVE_clear_cache)
4855 {
8786db1e 4856 struct expand_operand ops[2];
ac8fb6db 4857
4858 begin = CALL_EXPR_ARG (exp, 0);
4859 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4860
4861 end = CALL_EXPR_ARG (exp, 1);
4862 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4863
8786db1e 4864 create_address_operand (&ops[0], begin_rtx);
4865 create_address_operand (&ops[1], end_rtx);
4866 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4867 return const0_rtx;
ac8fb6db 4868 }
4869 return const0_rtx;
4870#endif /* HAVE_clear_cache */
4871}
4872
4ee9c684 4873/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4874
4875static rtx
4876round_trampoline_addr (rtx tramp)
4877{
4878 rtx temp, addend, mask;
4879
4880 /* If we don't need too much alignment, we'll have been guaranteed
4881 proper alignment by get_trampoline_type. */
4882 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4883 return tramp;
4884
4885 /* Round address up to desired boundary. */
4886 temp = gen_reg_rtx (Pmode);
0359f9f5 4887 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4888 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 4889
4890 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4891 temp, 0, OPTAB_LIB_WIDEN);
4892 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4893 temp, 0, OPTAB_LIB_WIDEN);
4894
4895 return tramp;
4896}
4897
4898static rtx
c307f106 4899expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 4900{
4901 tree t_tramp, t_func, t_chain;
82c7907c 4902 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 4903
c2f47e15 4904 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 4905 POINTER_TYPE, VOID_TYPE))
4906 return NULL_RTX;
4907
c2f47e15 4908 t_tramp = CALL_EXPR_ARG (exp, 0);
4909 t_func = CALL_EXPR_ARG (exp, 1);
4910 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 4911
8ec3c5c2 4912 r_tramp = expand_normal (t_tramp);
82c7907c 4913 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4914 MEM_NOTRAP_P (m_tramp) = 1;
4915
c307f106 4916 /* If ONSTACK, the TRAMP argument should be the address of a field
4917 within the local function's FRAME decl. Either way, let's see if
4918 we can fill in the MEM_ATTRs for this memory. */
82c7907c 4919 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 4920 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 4921
c307f106 4922 /* Creator of a heap trampoline is responsible for making sure the
4923 address is aligned to at least STACK_BOUNDARY. Normally malloc
4924 will ensure this anyhow. */
82c7907c 4925 tmp = round_trampoline_addr (r_tramp);
4926 if (tmp != r_tramp)
4927 {
4928 m_tramp = change_address (m_tramp, BLKmode, tmp);
4929 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 4930 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 4931 }
4932
4933 /* The FUNC argument should be the address of the nested function.
4934 Extract the actual function decl to pass to the hook. */
4935 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4936 t_func = TREE_OPERAND (t_func, 0);
4937 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4938
8ec3c5c2 4939 r_chain = expand_normal (t_chain);
4ee9c684 4940
4941 /* Generate insns to initialize the trampoline. */
82c7907c 4942 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 4943
c307f106 4944 if (onstack)
4945 {
4946 trampolines_created = 1;
8bc8a8f4 4947
c307f106 4948 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4949 "trampoline generated for nested function %qD", t_func);
4950 }
8bc8a8f4 4951
4ee9c684 4952 return const0_rtx;
4953}
4954
4955static rtx
c2f47e15 4956expand_builtin_adjust_trampoline (tree exp)
4ee9c684 4957{
4958 rtx tramp;
4959
c2f47e15 4960 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 4961 return NULL_RTX;
4962
c2f47e15 4963 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 4964 tramp = round_trampoline_addr (tramp);
82c7907c 4965 if (targetm.calls.trampoline_adjust_address)
4966 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 4967
4968 return tramp;
4969}
4970
93f564d6 4971/* Expand the call EXP to the built-in signbit, signbitf or signbitl
4972 function. The function first checks whether the back end provides
4973 an insn to implement signbit for the respective mode. If not, it
4974 checks whether the floating point format of the value is such that
4975 the sign bit can be extracted. If that is not the case, the
4976 function returns NULL_RTX to indicate that a normal call should be
4977 emitted rather than expanding the function in-line. EXP is the
4978 expression that is a call to the builtin function; if convenient,
4979 the result should be placed in TARGET. */
27f261ef 4980static rtx
4981expand_builtin_signbit (tree exp, rtx target)
4982{
4983 const struct real_format *fmt;
3754d046 4984 machine_mode fmode, imode, rmode;
c2f47e15 4985 tree arg;
ca4f1f5b 4986 int word, bitpos;
27eda240 4987 enum insn_code icode;
27f261ef 4988 rtx temp;
389dd41b 4989 location_t loc = EXPR_LOCATION (exp);
27f261ef 4990
c2f47e15 4991 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4992 return NULL_RTX;
27f261ef 4993
c2f47e15 4994 arg = CALL_EXPR_ARG (exp, 0);
27f261ef 4995 fmode = TYPE_MODE (TREE_TYPE (arg));
4996 rmode = TYPE_MODE (TREE_TYPE (exp));
4997 fmt = REAL_MODE_FORMAT (fmode);
4998
93f564d6 4999 arg = builtin_save_expr (arg);
5000
5001 /* Expand the argument yielding a RTX expression. */
5002 temp = expand_normal (arg);
5003
5004 /* Check if the back end provides an insn that handles signbit for the
5005 argument's mode. */
d6bf3b14 5006 icode = optab_handler (signbit_optab, fmode);
27eda240 5007 if (icode != CODE_FOR_nothing)
93f564d6 5008 {
1e0c0b35 5009 rtx_insn *last = get_last_insn ();
93f564d6 5010 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 5011 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5012 return target;
5013 delete_insns_since (last);
93f564d6 5014 }
5015
27f261ef 5016 /* For floating point formats without a sign bit, implement signbit
5017 as "ARG < 0.0". */
8d564692 5018 bitpos = fmt->signbit_ro;
ca4f1f5b 5019 if (bitpos < 0)
27f261ef 5020 {
5021 /* But we can't do this if the format supports signed zero. */
5022 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
c2f47e15 5023 return NULL_RTX;
27f261ef 5024
389dd41b 5025 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5026 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5027 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5028 }
5029
ca4f1f5b 5030 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5031 {
ca4f1f5b 5032 imode = int_mode_for_mode (fmode);
5033 if (imode == BLKmode)
c2f47e15 5034 return NULL_RTX;
ca4f1f5b 5035 temp = gen_lowpart (imode, temp);
24fd4260 5036 }
5037 else
5038 {
ca4f1f5b 5039 imode = word_mode;
5040 /* Handle targets with different FP word orders. */
5041 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5042 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5043 else
a0c938f0 5044 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5045 temp = operand_subword_force (temp, word, fmode);
5046 bitpos = bitpos % BITS_PER_WORD;
5047 }
5048
44b0f1d0 5049 /* Force the intermediate word_mode (or narrower) result into a
5050 register. This avoids attempting to create paradoxical SUBREGs
5051 of floating point modes below. */
5052 temp = force_reg (imode, temp);
5053
ca4f1f5b 5054 /* If the bitpos is within the "result mode" lowpart, the operation
5055 can be implement with a single bitwise AND. Otherwise, we need
5056 a right shift and an AND. */
5057
5058 if (bitpos < GET_MODE_BITSIZE (rmode))
5059 {
796b6678 5060 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5061
4a46f016 5062 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5063 temp = gen_lowpart (rmode, temp);
24fd4260 5064 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5065 immed_wide_int_const (mask, rmode),
ca4f1f5b 5066 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5067 }
ca4f1f5b 5068 else
5069 {
5070 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5071 significant bit, then truncate the result to the desired mode
ca4f1f5b 5072 and mask just this bit. */
f5ff0b21 5073 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5074 temp = gen_lowpart (rmode, temp);
5075 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5076 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5077 }
5078
27f261ef 5079 return temp;
5080}
73673831 5081
5082/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5083 call. EXP is the call. FN is the
73673831 5084 identificator of the actual function. IGNORE is nonzero if the
5085 value is to be ignored. */
5086
5087static rtx
c2f47e15 5088expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5089{
5090 tree id, decl;
5091 tree call;
5092
5093 /* If we are not profiling, just call the function. */
5094 if (!profile_arc_flag)
5095 return NULL_RTX;
5096
5097 /* Otherwise call the wrapper. This should be equivalent for the rest of
5098 compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5099 code necessary for keeping the profiling sane. */
73673831 5100
5101 switch (DECL_FUNCTION_CODE (fn))
5102 {
5103 case BUILT_IN_FORK:
5104 id = get_identifier ("__gcov_fork");
5105 break;
5106
5107 case BUILT_IN_EXECL:
5108 id = get_identifier ("__gcov_execl");
5109 break;
5110
5111 case BUILT_IN_EXECV:
5112 id = get_identifier ("__gcov_execv");
5113 break;
5114
5115 case BUILT_IN_EXECLP:
5116 id = get_identifier ("__gcov_execlp");
5117 break;
5118
5119 case BUILT_IN_EXECLE:
5120 id = get_identifier ("__gcov_execle");
5121 break;
5122
5123 case BUILT_IN_EXECVP:
5124 id = get_identifier ("__gcov_execvp");
5125 break;
5126
5127 case BUILT_IN_EXECVE:
5128 id = get_identifier ("__gcov_execve");
5129 break;
5130
5131 default:
64db345d 5132 gcc_unreachable ();
73673831 5133 }
5134
e60a6f7b 5135 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5136 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5137 DECL_EXTERNAL (decl) = 1;
5138 TREE_PUBLIC (decl) = 1;
5139 DECL_ARTIFICIAL (decl) = 1;
5140 TREE_NOTHROW (decl) = 1;
e82d310b 5141 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5142 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5143 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5144 return expand_call (call, target, ignore);
c2f47e15 5145 }
48e1416a 5146
b6a5fc45 5147
5148\f
3e272de8 5149/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5150 the pointer in these functions is void*, the tree optimizers may remove
5151 casts. The mode computed in expand_builtin isn't reliable either, due
5152 to __sync_bool_compare_and_swap.
5153
5154 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5155 group of builtins. This gives us log2 of the mode size. */
5156
3754d046 5157static inline machine_mode
3e272de8 5158get_builtin_sync_mode (int fcode_diff)
5159{
ad3a13b5 5160 /* The size is not negotiable, so ask not to get BLKmode in return
5161 if the target indicates that a smaller size would be better. */
5162 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 5163}
5164
041e0215 5165/* Expand the memory expression LOC and return the appropriate memory operand
5166 for the builtin_sync operations. */
5167
5168static rtx
3754d046 5169get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5170{
5171 rtx addr, mem;
5172
7f4d56ad 5173 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5174 addr = convert_memory_address (Pmode, addr);
041e0215 5175
5176 /* Note that we explicitly do not want any alias information for this
5177 memory, so that we kill all other live memories. Otherwise we don't
5178 satisfy the full barrier semantics of the intrinsic. */
5179 mem = validize_mem (gen_rtx_MEM (mode, addr));
5180
153c3b50 5181 /* The alignment needs to be at least according to that of the mode. */
5182 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5183 get_pointer_alignment (loc)));
c94cfd1c 5184 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5185 MEM_VOLATILE_P (mem) = 1;
5186
5187 return mem;
5188}
5189
1cd6e20d 5190/* Make sure an argument is in the right mode.
5191 EXP is the tree argument.
5192 MODE is the mode it should be in. */
5193
5194static rtx
3754d046 5195expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5196{
5197 rtx val;
3754d046 5198 machine_mode old_mode;
1cd6e20d 5199
5200 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5201 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5202 of CONST_INTs, where we know the old_mode only from the call argument. */
5203
5204 old_mode = GET_MODE (val);
5205 if (old_mode == VOIDmode)
5206 old_mode = TYPE_MODE (TREE_TYPE (exp));
5207 val = convert_modes (mode, old_mode, val, 1);
5208 return val;
5209}
5210
5211
b6a5fc45 5212/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5213 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5214 that corresponds to the arithmetic or logical operation from the name;
5215 an exception here is that NOT actually means NAND. TARGET is an optional
5216 place for us to store the results; AFTER is true if this is the
1cd6e20d 5217 fetch_and_xxx form. */
b6a5fc45 5218
5219static rtx
3754d046 5220expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5221 enum rtx_code code, bool after,
1cd6e20d 5222 rtx target)
b6a5fc45 5223{
041e0215 5224 rtx val, mem;
e60a6f7b 5225 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5226
cf73e559 5227 if (code == NOT && warn_sync_nand)
5228 {
5229 tree fndecl = get_callee_fndecl (exp);
5230 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5231
5232 static bool warned_f_a_n, warned_n_a_f;
5233
5234 switch (fcode)
5235 {
2797f13a 5236 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5237 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5238 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5239 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5240 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5241 if (warned_f_a_n)
5242 break;
5243
b9a16870 5244 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5245 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5246 warned_f_a_n = true;
5247 break;
5248
2797f13a 5249 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5250 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5251 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5252 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5253 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5254 if (warned_n_a_f)
5255 break;
5256
b9a16870 5257 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5258 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5259 warned_n_a_f = true;
5260 break;
5261
5262 default:
5263 gcc_unreachable ();
5264 }
5265 }
5266
b6a5fc45 5267 /* Expand the operands. */
c2f47e15 5268 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5269 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5270
1cd6e20d 5271 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5272 after);
b6a5fc45 5273}
5274
5275/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5276 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5277 true if this is the boolean form. TARGET is a place for us to store the
5278 results; this is NOT optional if IS_BOOL is true. */
5279
5280static rtx
3754d046 5281expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5282 bool is_bool, rtx target)
b6a5fc45 5283{
041e0215 5284 rtx old_val, new_val, mem;
ba885f6a 5285 rtx *pbool, *poval;
b6a5fc45 5286
5287 /* Expand the operands. */
c2f47e15 5288 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5289 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5290 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5291
ba885f6a 5292 pbool = poval = NULL;
5293 if (target != const0_rtx)
5294 {
5295 if (is_bool)
5296 pbool = &target;
5297 else
5298 poval = &target;
5299 }
5300 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5301 false, MEMMODEL_SEQ_CST,
5302 MEMMODEL_SEQ_CST))
1cd6e20d 5303 return NULL_RTX;
c2f47e15 5304
1cd6e20d 5305 return target;
b6a5fc45 5306}
5307
5308/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5309 general form is actually an atomic exchange, and some targets only
5310 support a reduced form with the second argument being a constant 1.
48e1416a 5311 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5312 the results. */
b6a5fc45 5313
5314static rtx
3754d046 5315expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5316 rtx target)
b6a5fc45 5317{
041e0215 5318 rtx val, mem;
b6a5fc45 5319
5320 /* Expand the operands. */
c2f47e15 5321 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5322 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5323
7821cde1 5324 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5325}
5326
5327/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5328
5329static void
3754d046 5330expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5331{
5332 rtx mem;
5333
5334 /* Expand the operands. */
5335 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5336
8808bf16 5337 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
1cd6e20d 5338}
5339
5340/* Given an integer representing an ``enum memmodel'', verify its
5341 correctness and return the memory model enum. */
5342
5343static enum memmodel
5344get_memmodel (tree exp)
5345{
5346 rtx op;
7f738025 5347 unsigned HOST_WIDE_INT val;
1cd6e20d 5348
5349 /* If the parameter is not a constant, it's a run time value so we'll just
5350 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5351 if (TREE_CODE (exp) != INTEGER_CST)
5352 return MEMMODEL_SEQ_CST;
5353
5354 op = expand_normal (exp);
7f738025 5355
5356 val = INTVAL (op);
5357 if (targetm.memmodel_check)
5358 val = targetm.memmodel_check (val);
5359 else if (val & ~MEMMODEL_MASK)
5360 {
5361 warning (OPT_Winvalid_memory_model,
5362 "Unknown architecture specifier in memory model to builtin.");
5363 return MEMMODEL_SEQ_CST;
5364 }
5365
9af5ce0c 5366 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
1cd6e20d 5367 {
5368 warning (OPT_Winvalid_memory_model,
5369 "invalid memory model argument to builtin");
5370 return MEMMODEL_SEQ_CST;
5371 }
7f738025 5372
3070f133 5373 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5374 be conservative and promote consume to acquire. */
5375 if (val == MEMMODEL_CONSUME)
5376 val = MEMMODEL_ACQUIRE;
5377
7f738025 5378 return (enum memmodel) val;
1cd6e20d 5379}
5380
5381/* Expand the __atomic_exchange intrinsic:
5382 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5383 EXP is the CALL_EXPR.
5384 TARGET is an optional place for us to store the results. */
5385
5386static rtx
3754d046 5387expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5388{
5389 rtx val, mem;
5390 enum memmodel model;
5391
5392 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5393
5394 if (!flag_inline_atomics)
5395 return NULL_RTX;
5396
5397 /* Expand the operands. */
5398 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5399 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5400
7821cde1 5401 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5402}
5403
5404/* Expand the __atomic_compare_exchange intrinsic:
5405 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5406 TYPE desired, BOOL weak,
5407 enum memmodel success,
5408 enum memmodel failure)
5409 EXP is the CALL_EXPR.
5410 TARGET is an optional place for us to store the results. */
5411
5412static rtx
3754d046 5413expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5414 rtx target)
5415{
1e0c0b35 5416 rtx expect, desired, mem, oldval;
5417 rtx_code_label *label;
1cd6e20d 5418 enum memmodel success, failure;
5419 tree weak;
5420 bool is_weak;
5421
5422 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5423 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5424
086f4e33 5425 if (failure > success)
5426 {
5427 warning (OPT_Winvalid_memory_model,
5428 "failure memory model cannot be stronger than success memory "
5429 "model for %<__atomic_compare_exchange%>");
5430 success = MEMMODEL_SEQ_CST;
5431 }
5432
7f738025 5433 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5434 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
1cd6e20d 5435 {
086f4e33 5436 warning (OPT_Winvalid_memory_model,
5437 "invalid failure memory model for "
5438 "%<__atomic_compare_exchange%>");
5439 failure = MEMMODEL_SEQ_CST;
5440 success = MEMMODEL_SEQ_CST;
1cd6e20d 5441 }
5442
086f4e33 5443
1cd6e20d 5444 if (!flag_inline_atomics)
5445 return NULL_RTX;
5446
5447 /* Expand the operands. */
5448 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5449
5450 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5451 expect = convert_memory_address (Pmode, expect);
c401b131 5452 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5453 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5454
5455 weak = CALL_EXPR_ARG (exp, 3);
5456 is_weak = false;
e913b5cd 5457 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5458 is_weak = true;
5459
d86e3752 5460 if (target == const0_rtx)
5461 target = NULL;
d86e3752 5462
3c29a9ea 5463 /* Lest the rtl backend create a race condition with an imporoper store
5464 to memory, always create a new pseudo for OLDVAL. */
5465 oldval = NULL;
5466
5467 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5468 is_weak, success, failure))
1cd6e20d 5469 return NULL_RTX;
5470
d86e3752 5471 /* Conditionally store back to EXPECT, lest we create a race condition
5472 with an improper store to memory. */
5473 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5474 the normal case where EXPECT is totally private, i.e. a register. At
5475 which point the store can be unconditional. */
5476 label = gen_label_rtx ();
5477 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5478 emit_move_insn (expect, oldval);
5479 emit_label (label);
c401b131 5480
1cd6e20d 5481 return target;
5482}
5483
5484/* Expand the __atomic_load intrinsic:
5485 TYPE __atomic_load (TYPE *object, enum memmodel)
5486 EXP is the CALL_EXPR.
5487 TARGET is an optional place for us to store the results. */
5488
5489static rtx
3754d046 5490expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5491{
5492 rtx mem;
5493 enum memmodel model;
5494
5495 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7f738025 5496 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5497 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
1cd6e20d 5498 {
086f4e33 5499 warning (OPT_Winvalid_memory_model,
5500 "invalid memory model for %<__atomic_load%>");
5501 model = MEMMODEL_SEQ_CST;
1cd6e20d 5502 }
5503
5504 if (!flag_inline_atomics)
5505 return NULL_RTX;
5506
5507 /* Expand the operand. */
5508 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5509
5510 return expand_atomic_load (target, mem, model);
5511}
5512
5513
5514/* Expand the __atomic_store intrinsic:
5515 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5516 EXP is the CALL_EXPR.
5517 TARGET is an optional place for us to store the results. */
5518
5519static rtx
3754d046 5520expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5521{
5522 rtx mem, val;
5523 enum memmodel model;
5524
5525 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7f738025 5526 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5527 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5528 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
1cd6e20d 5529 {
086f4e33 5530 warning (OPT_Winvalid_memory_model,
5531 "invalid memory model for %<__atomic_store%>");
5532 model = MEMMODEL_SEQ_CST;
1cd6e20d 5533 }
5534
5535 if (!flag_inline_atomics)
5536 return NULL_RTX;
5537
5538 /* Expand the operands. */
5539 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5540 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5541
8808bf16 5542 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5543}
5544
5545/* Expand the __atomic_fetch_XXX intrinsic:
5546 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5547 EXP is the CALL_EXPR.
5548 TARGET is an optional place for us to store the results.
5549 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5550 FETCH_AFTER is true if returning the result of the operation.
5551 FETCH_AFTER is false if returning the value before the operation.
5552 IGNORE is true if the result is not used.
5553 EXT_CALL is the correct builtin for an external call if this cannot be
5554 resolved to an instruction sequence. */
5555
5556static rtx
3754d046 5557expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 5558 enum rtx_code code, bool fetch_after,
5559 bool ignore, enum built_in_function ext_call)
5560{
5561 rtx val, mem, ret;
5562 enum memmodel model;
5563 tree fndecl;
5564 tree addr;
5565
5566 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5567
5568 /* Expand the operands. */
5569 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5570 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5571
5572 /* Only try generating instructions if inlining is turned on. */
5573 if (flag_inline_atomics)
5574 {
5575 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5576 if (ret)
5577 return ret;
5578 }
5579
5580 /* Return if a different routine isn't needed for the library call. */
5581 if (ext_call == BUILT_IN_NONE)
5582 return NULL_RTX;
5583
5584 /* Change the call to the specified function. */
5585 fndecl = get_callee_fndecl (exp);
5586 addr = CALL_EXPR_FN (exp);
5587 STRIP_NOPS (addr);
5588
5589 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 5590 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 5591
5592 /* Expand the call here so we can emit trailing code. */
5593 ret = expand_call (exp, target, ignore);
5594
5595 /* Replace the original function just in case it matters. */
5596 TREE_OPERAND (addr, 0) = fndecl;
5597
5598 /* Then issue the arithmetic correction to return the right result. */
5599 if (!ignore)
c449f851 5600 {
5601 if (code == NOT)
5602 {
5603 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5604 OPTAB_LIB_WIDEN);
5605 ret = expand_simple_unop (mode, NOT, ret, target, true);
5606 }
5607 else
5608 ret = expand_simple_binop (mode, code, ret, val, target, true,
5609 OPTAB_LIB_WIDEN);
5610 }
1cd6e20d 5611 return ret;
5612}
5613
10b744a3 5614
7821cde1 5615#ifndef HAVE_atomic_clear
5616# define HAVE_atomic_clear 0
5617# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5618#endif
5619
10b744a3 5620/* Expand an atomic clear operation.
5621 void _atomic_clear (BOOL *obj, enum memmodel)
5622 EXP is the call expression. */
5623
5624static rtx
5625expand_builtin_atomic_clear (tree exp)
5626{
3754d046 5627 machine_mode mode;
10b744a3 5628 rtx mem, ret;
5629 enum memmodel model;
5630
5631 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5632 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5633 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5634
086f4e33 5635 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME
5636 || (model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
7f738025 5637 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
10b744a3 5638 {
086f4e33 5639 warning (OPT_Winvalid_memory_model,
5640 "invalid memory model for %<__atomic_store%>");
5641 model = MEMMODEL_SEQ_CST;
10b744a3 5642 }
5643
7821cde1 5644 if (HAVE_atomic_clear)
5645 {
5646 emit_insn (gen_atomic_clear (mem, model));
5647 return const0_rtx;
5648 }
5649
10b744a3 5650 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5651 Failing that, a store is issued by __atomic_store. The only way this can
5652 fail is if the bool type is larger than a word size. Unlikely, but
5653 handle it anyway for completeness. Assume a single threaded model since
5654 there is no atomic support in this case, and no barriers are required. */
5655 ret = expand_atomic_store (mem, const0_rtx, model, true);
5656 if (!ret)
5657 emit_move_insn (mem, const0_rtx);
5658 return const0_rtx;
5659}
5660
5661/* Expand an atomic test_and_set operation.
5662 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5663 EXP is the call expression. */
5664
5665static rtx
7821cde1 5666expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 5667{
7821cde1 5668 rtx mem;
10b744a3 5669 enum memmodel model;
3754d046 5670 machine_mode mode;
10b744a3 5671
5672 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5673 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5674 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5675
7821cde1 5676 return expand_atomic_test_and_set (target, mem, model);
10b744a3 5677}
5678
5679
1cd6e20d 5680/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5681 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5682
5683static tree
5684fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5685{
5686 int size;
3754d046 5687 machine_mode mode;
1cd6e20d 5688 unsigned int mode_align, type_align;
5689
5690 if (TREE_CODE (arg0) != INTEGER_CST)
5691 return NULL_TREE;
b6a5fc45 5692
1cd6e20d 5693 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5694 mode = mode_for_size (size, MODE_INT, 0);
5695 mode_align = GET_MODE_ALIGNMENT (mode);
5696
5697 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5698 type_align = mode_align;
5699 else
5700 {
5701 tree ttype = TREE_TYPE (arg1);
5702
5703 /* This function is usually invoked and folded immediately by the front
5704 end before anything else has a chance to look at it. The pointer
5705 parameter at this point is usually cast to a void *, so check for that
5706 and look past the cast. */
d09ef31a 5707 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
1cd6e20d 5708 && VOID_TYPE_P (TREE_TYPE (ttype)))
5709 arg1 = TREE_OPERAND (arg1, 0);
5710
5711 ttype = TREE_TYPE (arg1);
5712 gcc_assert (POINTER_TYPE_P (ttype));
5713
5714 /* Get the underlying type of the object. */
5715 ttype = TREE_TYPE (ttype);
5716 type_align = TYPE_ALIGN (ttype);
5717 }
5718
5719 /* If the object has smaller alignment, the the lock free routines cannot
5720 be used. */
5721 if (type_align < mode_align)
06308d2a 5722 return boolean_false_node;
1cd6e20d 5723
5724 /* Check if a compare_and_swap pattern exists for the mode which represents
5725 the required size. The pattern is not allowed to fail, so the existence
5726 of the pattern indicates support is present. */
29139cdc 5727 if (can_compare_and_swap_p (mode, true))
06308d2a 5728 return boolean_true_node;
1cd6e20d 5729 else
06308d2a 5730 return boolean_false_node;
1cd6e20d 5731}
5732
5733/* Return true if the parameters to call EXP represent an object which will
5734 always generate lock free instructions. The first argument represents the
5735 size of the object, and the second parameter is a pointer to the object
5736 itself. If NULL is passed for the object, then the result is based on
5737 typical alignment for an object of the specified size. Otherwise return
5738 false. */
5739
5740static rtx
5741expand_builtin_atomic_always_lock_free (tree exp)
5742{
5743 tree size;
5744 tree arg0 = CALL_EXPR_ARG (exp, 0);
5745 tree arg1 = CALL_EXPR_ARG (exp, 1);
5746
5747 if (TREE_CODE (arg0) != INTEGER_CST)
5748 {
5749 error ("non-constant argument 1 to __atomic_always_lock_free");
5750 return const0_rtx;
5751 }
5752
5753 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 5754 if (size == boolean_true_node)
1cd6e20d 5755 return const1_rtx;
5756 return const0_rtx;
5757}
5758
5759/* Return a one or zero if it can be determined that object ARG1 of size ARG
5760 is lock free on this architecture. */
5761
5762static tree
5763fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5764{
5765 if (!flag_inline_atomics)
5766 return NULL_TREE;
5767
5768 /* If it isn't always lock free, don't generate a result. */
06308d2a 5769 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5770 return boolean_true_node;
1cd6e20d 5771
5772 return NULL_TREE;
5773}
5774
5775/* Return true if the parameters to call EXP represent an object which will
5776 always generate lock free instructions. The first argument represents the
5777 size of the object, and the second parameter is a pointer to the object
5778 itself. If NULL is passed for the object, then the result is based on
5779 typical alignment for an object of the specified size. Otherwise return
5780 NULL*/
5781
5782static rtx
5783expand_builtin_atomic_is_lock_free (tree exp)
5784{
5785 tree size;
5786 tree arg0 = CALL_EXPR_ARG (exp, 0);
5787 tree arg1 = CALL_EXPR_ARG (exp, 1);
5788
5789 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5790 {
5791 error ("non-integer argument 1 to __atomic_is_lock_free");
5792 return NULL_RTX;
5793 }
5794
5795 if (!flag_inline_atomics)
5796 return NULL_RTX;
5797
5798 /* If the value is known at compile time, return the RTX for it. */
5799 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 5800 if (size == boolean_true_node)
1cd6e20d 5801 return const1_rtx;
5802
5803 return NULL_RTX;
5804}
5805
1cd6e20d 5806/* Expand the __atomic_thread_fence intrinsic:
5807 void __atomic_thread_fence (enum memmodel)
5808 EXP is the CALL_EXPR. */
5809
5810static void
5811expand_builtin_atomic_thread_fence (tree exp)
5812{
fe54c06b 5813 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5814 expand_mem_thread_fence (model);
1cd6e20d 5815}
5816
5817/* Expand the __atomic_signal_fence intrinsic:
5818 void __atomic_signal_fence (enum memmodel)
5819 EXP is the CALL_EXPR. */
5820
5821static void
5822expand_builtin_atomic_signal_fence (tree exp)
5823{
fe54c06b 5824 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5825 expand_mem_signal_fence (model);
b6a5fc45 5826}
5827
5828/* Expand the __sync_synchronize intrinsic. */
5829
5830static void
2797f13a 5831expand_builtin_sync_synchronize (void)
b6a5fc45 5832{
fe54c06b 5833 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
b6a5fc45 5834}
5835
badaa04c 5836static rtx
5837expand_builtin_thread_pointer (tree exp, rtx target)
5838{
5839 enum insn_code icode;
5840 if (!validate_arglist (exp, VOID_TYPE))
5841 return const0_rtx;
5842 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5843 if (icode != CODE_FOR_nothing)
5844 {
5845 struct expand_operand op;
3ed779c3 5846 /* If the target is not sutitable then create a new target. */
5847 if (target == NULL_RTX
5848 || !REG_P (target)
5849 || GET_MODE (target) != Pmode)
badaa04c 5850 target = gen_reg_rtx (Pmode);
5851 create_output_operand (&op, target, Pmode);
5852 expand_insn (icode, 1, &op);
5853 return target;
5854 }
5855 error ("__builtin_thread_pointer is not supported on this target");
5856 return const0_rtx;
5857}
5858
5859static void
5860expand_builtin_set_thread_pointer (tree exp)
5861{
5862 enum insn_code icode;
5863 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5864 return;
5865 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5866 if (icode != CODE_FOR_nothing)
5867 {
5868 struct expand_operand op;
5869 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5870 Pmode, EXPAND_NORMAL);
6f343c10 5871 create_input_operand (&op, val, Pmode);
badaa04c 5872 expand_insn (icode, 1, &op);
5873 return;
5874 }
5875 error ("__builtin_set_thread_pointer is not supported on this target");
5876}
5877
53800dbe 5878\f
0e80b01d 5879/* Emit code to restore the current value of stack. */
5880
5881static void
5882expand_stack_restore (tree var)
5883{
1e0c0b35 5884 rtx_insn *prev;
5885 rtx sa = expand_normal (var);
0e80b01d 5886
5887 sa = convert_memory_address (Pmode, sa);
5888
5889 prev = get_last_insn ();
5890 emit_stack_restore (SAVE_BLOCK, sa);
5891 fixup_args_size_notes (prev, get_last_insn (), 0);
5892}
5893
5894
5895/* Emit code to save the current value of stack. */
5896
5897static rtx
5898expand_stack_save (void)
5899{
5900 rtx ret = NULL_RTX;
5901
5902 do_pending_stack_adjust ();
5903 emit_stack_save (SAVE_BLOCK, &ret);
5904 return ret;
5905}
5906
ca4c3545 5907
5908/* Expand OpenACC acc_on_device.
5909
5910 This has to happen late (that is, not in early folding; expand_builtin_*,
5911 rather than fold_builtin_*), as we have to act differently for host and
5912 acceleration device (ACCEL_COMPILER conditional). */
5913
5914static rtx
5915expand_builtin_acc_on_device (tree exp, rtx target)
5916{
5917 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5918 return NULL_RTX;
5919
5920 tree arg = CALL_EXPR_ARG (exp, 0);
5921
5922 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5923 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5924 rtx v = expand_normal (arg), v1, v2;
5925#ifdef ACCEL_COMPILER
5926 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5927 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
5928#else
5929 v1 = GEN_INT (GOMP_DEVICE_NONE);
5930 v2 = GEN_INT (GOMP_DEVICE_HOST);
5931#endif
5932 machine_mode target_mode = TYPE_MODE (integer_type_node);
5933 if (!REG_P (target) || GET_MODE (target) != target_mode)
5934 target = gen_reg_rtx (target_mode);
5935 emit_move_insn (target, const1_rtx);
5936 rtx_code_label *done_label = gen_label_rtx ();
5937 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
5938 NULL_RTX, done_label, PROB_EVEN);
5939 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
5940 NULL_RTX, done_label, PROB_EVEN);
5941 emit_move_insn (target, const0_rtx);
5942 emit_label (done_label);
5943
5944 return target;
5945}
5946
5947
53800dbe 5948/* Expand an expression EXP that calls a built-in function,
5949 with result going to TARGET if that's convenient
5950 (and in mode MODE if that's convenient).
5951 SUBTARGET may be used as the target for computing one of EXP's operands.
5952 IGNORE is nonzero if the value is to be ignored. */
5953
5954rtx
3754d046 5955expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 5956 int ignore)
53800dbe 5957{
c6e6ecb1 5958 tree fndecl = get_callee_fndecl (exp);
53800dbe 5959 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 5960 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 5961 int flags;
53800dbe 5962
f9acf11a 5963 /* When ASan is enabled, we don't want to expand some memory/string
5964 builtins and rely on libsanitizer's hooks. This allows us to avoid
5965 redundant checks and be sure, that possible overflow will be detected
5966 by ASan. */
5967
5968 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5969 return expand_call (exp, target, ignore);
5970
8305149e 5971 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
883b2e73 5972 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
bf8e3599 5973
53800dbe 5974 /* When not optimizing, generate calls to library functions for a certain
5975 set of builtins. */
cd9ff771 5976 if (!optimize
b6a5fc45 5977 && !called_as_built_in (fndecl)
73037a1e 5978 && fcode != BUILT_IN_FORK
5979 && fcode != BUILT_IN_EXECL
5980 && fcode != BUILT_IN_EXECV
5981 && fcode != BUILT_IN_EXECLP
5982 && fcode != BUILT_IN_EXECLE
5983 && fcode != BUILT_IN_EXECVP
5984 && fcode != BUILT_IN_EXECVE
2c281b15 5985 && fcode != BUILT_IN_ALLOCA
581bf1c2 5986 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 5987 && fcode != BUILT_IN_FREE
5988 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5989 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5990 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5991 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5992 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5993 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5994 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5995 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5996 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5997 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5998 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5999 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 6000 return expand_call (exp, target, ignore);
53800dbe 6001
8d6d7930 6002 /* The built-in function expanders test for target == const0_rtx
6003 to determine whether the function's result will be ignored. */
6004 if (ignore)
6005 target = const0_rtx;
6006
6007 /* If the result of a pure or const built-in function is ignored, and
6008 none of its arguments are volatile, we can avoid expanding the
6009 built-in call and just evaluate the arguments for side-effects. */
6010 if (target == const0_rtx
67fa4078 6011 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6012 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 6013 {
6014 bool volatilep = false;
6015 tree arg;
c2f47e15 6016 call_expr_arg_iterator iter;
8d6d7930 6017
c2f47e15 6018 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6019 if (TREE_THIS_VOLATILE (arg))
8d6d7930 6020 {
6021 volatilep = true;
6022 break;
6023 }
6024
6025 if (! volatilep)
6026 {
c2f47e15 6027 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6028 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 6029 return const0_rtx;
6030 }
6031 }
6032
f21337ef 6033 /* expand_builtin_with_bounds is supposed to be used for
6034 instrumented builtin calls. */
058a1b7a 6035 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6036
53800dbe 6037 switch (fcode)
6038 {
4f35b1fc 6039 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 6040 case BUILT_IN_FABSD32:
6041 case BUILT_IN_FABSD64:
6042 case BUILT_IN_FABSD128:
c2f47e15 6043 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6044 if (target)
a0c938f0 6045 return target;
78a74442 6046 break;
6047
4f35b1fc 6048 CASE_FLT_FN (BUILT_IN_COPYSIGN):
c2f47e15 6049 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6050 if (target)
6051 return target;
6052 break;
6053
7d3f6cc7 6054 /* Just do a normal library call if we were unable to fold
6055 the values. */
4f35b1fc 6056 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6057 break;
53800dbe 6058
4f35b1fc 6059 CASE_FLT_FN (BUILT_IN_EXP):
6060 CASE_FLT_FN (BUILT_IN_EXP10):
6061 CASE_FLT_FN (BUILT_IN_POW10):
6062 CASE_FLT_FN (BUILT_IN_EXP2):
6063 CASE_FLT_FN (BUILT_IN_EXPM1):
6064 CASE_FLT_FN (BUILT_IN_LOGB):
4f35b1fc 6065 CASE_FLT_FN (BUILT_IN_LOG):
6066 CASE_FLT_FN (BUILT_IN_LOG10):
6067 CASE_FLT_FN (BUILT_IN_LOG2):
6068 CASE_FLT_FN (BUILT_IN_LOG1P):
6069 CASE_FLT_FN (BUILT_IN_TAN):
6070 CASE_FLT_FN (BUILT_IN_ASIN):
6071 CASE_FLT_FN (BUILT_IN_ACOS):
6072 CASE_FLT_FN (BUILT_IN_ATAN):
b3154a1f 6073 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
7f3be425 6074 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6075 because of possible accuracy problems. */
6076 if (! flag_unsafe_math_optimizations)
53800dbe 6077 break;
4f35b1fc 6078 CASE_FLT_FN (BUILT_IN_SQRT):
6079 CASE_FLT_FN (BUILT_IN_FLOOR):
6080 CASE_FLT_FN (BUILT_IN_CEIL):
6081 CASE_FLT_FN (BUILT_IN_TRUNC):
6082 CASE_FLT_FN (BUILT_IN_ROUND):
6083 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6084 CASE_FLT_FN (BUILT_IN_RINT):
53800dbe 6085 target = expand_builtin_mathfn (exp, target, subtarget);
6086 if (target)
6087 return target;
6088 break;
6089
7e0713b1 6090 CASE_FLT_FN (BUILT_IN_FMA):
6091 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6092 if (target)
6093 return target;
6094 break;
6095
a67a90e5 6096 CASE_FLT_FN (BUILT_IN_ILOGB):
6097 if (! flag_unsafe_math_optimizations)
6098 break;
69b779ea 6099 CASE_FLT_FN (BUILT_IN_ISINF):
cde061c1 6100 CASE_FLT_FN (BUILT_IN_FINITE):
6101 case BUILT_IN_ISFINITE:
8a1a9cb7 6102 case BUILT_IN_ISNORMAL:
f97eea22 6103 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6104 if (target)
6105 return target;
6106 break;
6107
80ff6494 6108 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6109 CASE_FLT_FN (BUILT_IN_LCEIL):
6110 CASE_FLT_FN (BUILT_IN_LLCEIL):
6111 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6112 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6113 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6114 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6115 if (target)
6116 return target;
6117 break;
6118
80ff6494 6119 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6120 CASE_FLT_FN (BUILT_IN_LRINT):
6121 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6122 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6123 CASE_FLT_FN (BUILT_IN_LROUND):
6124 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6125 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6126 if (target)
6127 return target;
6128 break;
6129
4f35b1fc 6130 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6131 target = expand_builtin_powi (exp, target);
757c219d 6132 if (target)
6133 return target;
6134 break;
6135
4f35b1fc 6136 CASE_FLT_FN (BUILT_IN_ATAN2):
6137 CASE_FLT_FN (BUILT_IN_LDEXP):
73a954a1 6138 CASE_FLT_FN (BUILT_IN_SCALB):
6139 CASE_FLT_FN (BUILT_IN_SCALBN):
6140 CASE_FLT_FN (BUILT_IN_SCALBLN):
0fd605a5 6141 if (! flag_unsafe_math_optimizations)
6142 break;
ef722005 6143
6144 CASE_FLT_FN (BUILT_IN_FMOD):
6145 CASE_FLT_FN (BUILT_IN_REMAINDER):
6146 CASE_FLT_FN (BUILT_IN_DREM):
0810ff17 6147 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 6148 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6149 if (target)
6150 return target;
6151 break;
6152
d735c391 6153 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6154 target = expand_builtin_cexpi (exp, target);
d735c391 6155 gcc_assert (target);
6156 return target;
6157
4f35b1fc 6158 CASE_FLT_FN (BUILT_IN_SIN):
6159 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6160 if (! flag_unsafe_math_optimizations)
6161 break;
6162 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6163 if (target)
6164 return target;
6165 break;
6166
c3147c1a 6167 CASE_FLT_FN (BUILT_IN_SINCOS):
6168 if (! flag_unsafe_math_optimizations)
6169 break;
6170 target = expand_builtin_sincos (exp);
6171 if (target)
6172 return target;
6173 break;
6174
53800dbe 6175 case BUILT_IN_APPLY_ARGS:
6176 return expand_builtin_apply_args ();
6177
6178 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6179 FUNCTION with a copy of the parameters described by
6180 ARGUMENTS, and ARGSIZE. It returns a block of memory
6181 allocated on the stack into which is stored all the registers
6182 that might possibly be used for returning the result of a
6183 function. ARGUMENTS is the value returned by
6184 __builtin_apply_args. ARGSIZE is the number of bytes of
6185 arguments that must be copied. ??? How should this value be
6186 computed? We'll also need a safe worst case value for varargs
6187 functions. */
6188 case BUILT_IN_APPLY:
c2f47e15 6189 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6190 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6191 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6192 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6193 return const0_rtx;
6194 else
6195 {
53800dbe 6196 rtx ops[3];
6197
c2f47e15 6198 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6199 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6200 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6201
6202 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6203 }
6204
6205 /* __builtin_return (RESULT) causes the function to return the
6206 value described by RESULT. RESULT is address of the block of
6207 memory returned by __builtin_apply. */
6208 case BUILT_IN_RETURN:
c2f47e15 6209 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6210 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6211 return const0_rtx;
6212
6213 case BUILT_IN_SAVEREGS:
a66c9326 6214 return expand_builtin_saveregs ();
53800dbe 6215
48dc2227 6216 case BUILT_IN_VA_ARG_PACK:
6217 /* All valid uses of __builtin_va_arg_pack () are removed during
6218 inlining. */
b8c23db3 6219 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6220 return const0_rtx;
6221
4e1d7ea4 6222 case BUILT_IN_VA_ARG_PACK_LEN:
6223 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6224 inlining. */
b8c23db3 6225 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6226 return const0_rtx;
6227
53800dbe 6228 /* Return the address of the first anonymous stack arg. */
6229 case BUILT_IN_NEXT_ARG:
c2f47e15 6230 if (fold_builtin_next_arg (exp, false))
a0c938f0 6231 return const0_rtx;
79012a9d 6232 return expand_builtin_next_arg ();
53800dbe 6233
ac8fb6db 6234 case BUILT_IN_CLEAR_CACHE:
6235 target = expand_builtin___clear_cache (exp);
6236 if (target)
6237 return target;
6238 break;
6239
53800dbe 6240 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6241 return expand_builtin_classify_type (exp);
53800dbe 6242
6243 case BUILT_IN_CONSTANT_P:
4ee9c684 6244 return const0_rtx;
53800dbe 6245
6246 case BUILT_IN_FRAME_ADDRESS:
6247 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6248 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6249
6250 /* Returns the address of the area where the structure is returned.
6251 0 otherwise. */
6252 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6253 if (call_expr_nargs (exp) != 0
9342ee68 6254 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6255 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6256 return const0_rtx;
53800dbe 6257 else
9342ee68 6258 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6259
6260 case BUILT_IN_ALLOCA:
581bf1c2 6261 case BUILT_IN_ALLOCA_WITH_ALIGN:
990495a7 6262 /* If the allocation stems from the declaration of a variable-sized
6263 object, it cannot accumulate. */
a882d754 6264 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
53800dbe 6265 if (target)
6266 return target;
6267 break;
6268
4ee9c684 6269 case BUILT_IN_STACK_SAVE:
6270 return expand_stack_save ();
6271
6272 case BUILT_IN_STACK_RESTORE:
c2f47e15 6273 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6274 return const0_rtx;
6275
74bdbe96 6276 case BUILT_IN_BSWAP16:
42791117 6277 case BUILT_IN_BSWAP32:
6278 case BUILT_IN_BSWAP64:
74bdbe96 6279 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6280 if (target)
6281 return target;
6282 break;
6283
4f35b1fc 6284 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6285 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6286 subtarget, ffs_optab);
6a08d0ab 6287 if (target)
6288 return target;
6289 break;
6290
4f35b1fc 6291 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6292 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6293 subtarget, clz_optab);
6a08d0ab 6294 if (target)
6295 return target;
6296 break;
6297
4f35b1fc 6298 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6299 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6300 subtarget, ctz_optab);
6a08d0ab 6301 if (target)
6302 return target;
6303 break;
6304
d8492bd3 6305 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6306 target = expand_builtin_unop (target_mode, exp, target,
6307 subtarget, clrsb_optab);
6308 if (target)
6309 return target;
6310 break;
6311
4f35b1fc 6312 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6313 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6314 subtarget, popcount_optab);
6a08d0ab 6315 if (target)
6316 return target;
6317 break;
6318
4f35b1fc 6319 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6320 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6321 subtarget, parity_optab);
53800dbe 6322 if (target)
6323 return target;
6324 break;
6325
6326 case BUILT_IN_STRLEN:
c2f47e15 6327 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6328 if (target)
6329 return target;
6330 break;
6331
6332 case BUILT_IN_STRCPY:
a65c4d64 6333 target = expand_builtin_strcpy (exp, target);
53800dbe 6334 if (target)
6335 return target;
6336 break;
bf8e3599 6337
ed09096d 6338 case BUILT_IN_STRNCPY:
a65c4d64 6339 target = expand_builtin_strncpy (exp, target);
ed09096d 6340 if (target)
6341 return target;
6342 break;
bf8e3599 6343
3b824fa6 6344 case BUILT_IN_STPCPY:
dc369150 6345 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6346 if (target)
6347 return target;
6348 break;
6349
53800dbe 6350 case BUILT_IN_MEMCPY:
a65c4d64 6351 target = expand_builtin_memcpy (exp, target);
3b824fa6 6352 if (target)
6353 return target;
6354 break;
6355
6356 case BUILT_IN_MEMPCPY:
c2f47e15 6357 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6358 if (target)
6359 return target;
6360 break;
6361
6362 case BUILT_IN_MEMSET:
c2f47e15 6363 target = expand_builtin_memset (exp, target, mode);
53800dbe 6364 if (target)
6365 return target;
6366 break;
6367
ffc83088 6368 case BUILT_IN_BZERO:
0b25db21 6369 target = expand_builtin_bzero (exp);
ffc83088 6370 if (target)
6371 return target;
6372 break;
6373
53800dbe 6374 case BUILT_IN_STRCMP:
a65c4d64 6375 target = expand_builtin_strcmp (exp, target);
53800dbe 6376 if (target)
6377 return target;
6378 break;
6379
ed09096d 6380 case BUILT_IN_STRNCMP:
6381 target = expand_builtin_strncmp (exp, target, mode);
6382 if (target)
6383 return target;
6384 break;
6385
071f1696 6386 case BUILT_IN_BCMP:
53800dbe 6387 case BUILT_IN_MEMCMP:
c2f47e15 6388 target = expand_builtin_memcmp (exp, target, mode);
53800dbe 6389 if (target)
6390 return target;
6391 break;
53800dbe 6392
6393 case BUILT_IN_SETJMP:
2c8a1497 6394 /* This should have been lowered to the builtins below. */
6395 gcc_unreachable ();
6396
6397 case BUILT_IN_SETJMP_SETUP:
6398 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6399 and the receiver label. */
c2f47e15 6400 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6401 {
c2f47e15 6402 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6403 VOIDmode, EXPAND_NORMAL);
c2f47e15 6404 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
2c8a1497 6405 rtx label_r = label_rtx (label);
6406
6407 /* This is copied from the handling of non-local gotos. */
6408 expand_builtin_setjmp_setup (buf_addr, label_r);
6409 nonlocal_goto_handler_labels
a4de1c23 6410 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6411 nonlocal_goto_handler_labels);
6412 /* ??? Do not let expand_label treat us as such since we would
6413 not want to be both on the list of non-local labels and on
6414 the list of forced labels. */
6415 FORCED_LABEL (label) = 0;
6416 return const0_rtx;
6417 }
6418 break;
6419
2c8a1497 6420 case BUILT_IN_SETJMP_RECEIVER:
6421 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6422 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6423 {
c2f47e15 6424 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
2c8a1497 6425 rtx label_r = label_rtx (label);
6426
6427 expand_builtin_setjmp_receiver (label_r);
6428 return const0_rtx;
6429 }
6b7f6858 6430 break;
53800dbe 6431
6432 /* __builtin_longjmp is passed a pointer to an array of five words.
6433 It's similar to the C library longjmp function but works with
6434 __builtin_setjmp above. */
6435 case BUILT_IN_LONGJMP:
c2f47e15 6436 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6437 {
c2f47e15 6438 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6439 VOIDmode, EXPAND_NORMAL);
c2f47e15 6440 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6441
6442 if (value != const1_rtx)
6443 {
1e5fcbe2 6444 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6445 return const0_rtx;
6446 }
6447
6448 expand_builtin_longjmp (buf_addr, value);
6449 return const0_rtx;
6450 }
2c8a1497 6451 break;
53800dbe 6452
4ee9c684 6453 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6454 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6455 if (target)
6456 return target;
6457 break;
6458
843d08a9 6459 /* This updates the setjmp buffer that is its argument with the value
6460 of the current stack pointer. */
6461 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6462 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6463 {
6464 rtx buf_addr
c2f47e15 6465 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6466
6467 expand_builtin_update_setjmp_buf (buf_addr);
6468 return const0_rtx;
6469 }
6470 break;
6471
53800dbe 6472 case BUILT_IN_TRAP:
a0ef1725 6473 expand_builtin_trap ();
53800dbe 6474 return const0_rtx;
6475
d2b48f0c 6476 case BUILT_IN_UNREACHABLE:
6477 expand_builtin_unreachable ();
6478 return const0_rtx;
6479
4f35b1fc 6480 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6481 case BUILT_IN_SIGNBITD32:
6482 case BUILT_IN_SIGNBITD64:
6483 case BUILT_IN_SIGNBITD128:
27f261ef 6484 target = expand_builtin_signbit (exp, target);
6485 if (target)
6486 return target;
6487 break;
6488
53800dbe 6489 /* Various hooks for the DWARF 2 __throw routine. */
6490 case BUILT_IN_UNWIND_INIT:
6491 expand_builtin_unwind_init ();
6492 return const0_rtx;
6493 case BUILT_IN_DWARF_CFA:
6494 return virtual_cfa_rtx;
6495#ifdef DWARF2_UNWIND_INFO
f8f023a5 6496 case BUILT_IN_DWARF_SP_COLUMN:
6497 return expand_builtin_dwarf_sp_column ();
695e919b 6498 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6499 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6500 return const0_rtx;
53800dbe 6501#endif
6502 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6503 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6504 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6505 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6506 case BUILT_IN_EH_RETURN:
c2f47e15 6507 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6508 CALL_EXPR_ARG (exp, 1));
53800dbe 6509 return const0_rtx;
df4b504c 6510#ifdef EH_RETURN_DATA_REGNO
6511 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6512 return expand_builtin_eh_return_data_regno (exp);
df4b504c 6513#endif
26093bf4 6514 case BUILT_IN_EXTEND_POINTER:
c2f47e15 6515 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 6516 case BUILT_IN_EH_POINTER:
6517 return expand_builtin_eh_pointer (exp);
6518 case BUILT_IN_EH_FILTER:
6519 return expand_builtin_eh_filter (exp);
6520 case BUILT_IN_EH_COPY_VALUES:
6521 return expand_builtin_eh_copy_values (exp);
26093bf4 6522
7ccc713a 6523 case BUILT_IN_VA_START:
c2f47e15 6524 return expand_builtin_va_start (exp);
a66c9326 6525 case BUILT_IN_VA_END:
c2f47e15 6526 return expand_builtin_va_end (exp);
a66c9326 6527 case BUILT_IN_VA_COPY:
c2f47e15 6528 return expand_builtin_va_copy (exp);
89cfe6e5 6529 case BUILT_IN_EXPECT:
c2f47e15 6530 return expand_builtin_expect (exp, target);
fca0886c 6531 case BUILT_IN_ASSUME_ALIGNED:
6532 return expand_builtin_assume_aligned (exp, target);
5e3608d8 6533 case BUILT_IN_PREFETCH:
c2f47e15 6534 expand_builtin_prefetch (exp);
5e3608d8 6535 return const0_rtx;
6536
4ee9c684 6537 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 6538 return expand_builtin_init_trampoline (exp, true);
6539 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6540 return expand_builtin_init_trampoline (exp, false);
4ee9c684 6541 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 6542 return expand_builtin_adjust_trampoline (exp);
4ee9c684 6543
73673831 6544 case BUILT_IN_FORK:
6545 case BUILT_IN_EXECL:
6546 case BUILT_IN_EXECV:
6547 case BUILT_IN_EXECLP:
6548 case BUILT_IN_EXECLE:
6549 case BUILT_IN_EXECVP:
6550 case BUILT_IN_EXECVE:
c2f47e15 6551 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 6552 if (target)
6553 return target;
6554 break;
53800dbe 6555
2797f13a 6556 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6557 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6558 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6559 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6560 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6561 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 6562 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 6563 if (target)
6564 return target;
6565 break;
6566
2797f13a 6567 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6568 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6569 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6570 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6571 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6572 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 6573 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 6574 if (target)
6575 return target;
6576 break;
6577
2797f13a 6578 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6579 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6580 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6581 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6582 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6583 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 6584 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 6585 if (target)
6586 return target;
6587 break;
6588
2797f13a 6589 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6590 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6591 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6592 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6593 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6594 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 6595 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 6596 if (target)
6597 return target;
6598 break;
6599
2797f13a 6600 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6601 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6602 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6603 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6604 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6605 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 6606 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 6607 if (target)
6608 return target;
6609 break;
6610
2797f13a 6611 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6612 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6613 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6614 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6615 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6616 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 6617 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 6618 if (target)
6619 return target;
6620 break;
6621
2797f13a 6622 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6623 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6624 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6625 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6626 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6627 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 6628 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 6629 if (target)
6630 return target;
6631 break;
6632
2797f13a 6633 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6634 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6635 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6636 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6637 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6638 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 6639 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 6640 if (target)
6641 return target;
6642 break;
6643
2797f13a 6644 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6645 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6646 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6647 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6648 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6649 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 6650 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 6651 if (target)
6652 return target;
6653 break;
6654
2797f13a 6655 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6656 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6657 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6658 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6659 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6660 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 6661 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 6662 if (target)
6663 return target;
6664 break;
6665
2797f13a 6666 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6667 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6668 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6669 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6670 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6671 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 6672 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 6673 if (target)
6674 return target;
6675 break;
6676
2797f13a 6677 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6678 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6679 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6680 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6681 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6682 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 6683 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 6684 if (target)
6685 return target;
6686 break;
6687
2797f13a 6688 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6689 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6690 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6691 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6692 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 6693 if (mode == VOIDmode)
6694 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 6695 if (!target || !register_operand (target, mode))
6696 target = gen_reg_rtx (mode);
3e272de8 6697
2797f13a 6698 mode = get_builtin_sync_mode
6699 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 6700 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 6701 if (target)
6702 return target;
6703 break;
6704
2797f13a 6705 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6706 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6707 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6708 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6709 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6710 mode = get_builtin_sync_mode
6711 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 6712 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 6713 if (target)
6714 return target;
6715 break;
6716
2797f13a 6717 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6718 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6719 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6720 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6721 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6722 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6723 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 6724 if (target)
6725 return target;
6726 break;
6727
2797f13a 6728 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6729 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6730 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6731 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6732 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6733 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6734 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 6735 return const0_rtx;
6736
2797f13a 6737 case BUILT_IN_SYNC_SYNCHRONIZE:
6738 expand_builtin_sync_synchronize ();
b6a5fc45 6739 return const0_rtx;
6740
1cd6e20d 6741 case BUILT_IN_ATOMIC_EXCHANGE_1:
6742 case BUILT_IN_ATOMIC_EXCHANGE_2:
6743 case BUILT_IN_ATOMIC_EXCHANGE_4:
6744 case BUILT_IN_ATOMIC_EXCHANGE_8:
6745 case BUILT_IN_ATOMIC_EXCHANGE_16:
6746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6747 target = expand_builtin_atomic_exchange (mode, exp, target);
6748 if (target)
6749 return target;
6750 break;
6751
6752 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6753 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6754 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6755 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6756 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 6757 {
6758 unsigned int nargs, z;
f1f41a6c 6759 vec<tree, va_gc> *vec;
2c201ad1 6760
6761 mode =
6762 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6763 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6764 if (target)
6765 return target;
6766
6767 /* If this is turned into an external library call, the weak parameter
6768 must be dropped to match the expected parameter list. */
6769 nargs = call_expr_nargs (exp);
f1f41a6c 6770 vec_alloc (vec, nargs - 1);
2c201ad1 6771 for (z = 0; z < 3; z++)
f1f41a6c 6772 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6773 /* Skip the boolean weak parameter. */
6774 for (z = 4; z < 6; z++)
f1f41a6c 6775 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6776 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6777 break;
6778 }
1cd6e20d 6779
6780 case BUILT_IN_ATOMIC_LOAD_1:
6781 case BUILT_IN_ATOMIC_LOAD_2:
6782 case BUILT_IN_ATOMIC_LOAD_4:
6783 case BUILT_IN_ATOMIC_LOAD_8:
6784 case BUILT_IN_ATOMIC_LOAD_16:
6785 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6786 target = expand_builtin_atomic_load (mode, exp, target);
6787 if (target)
6788 return target;
6789 break;
6790
6791 case BUILT_IN_ATOMIC_STORE_1:
6792 case BUILT_IN_ATOMIC_STORE_2:
6793 case BUILT_IN_ATOMIC_STORE_4:
6794 case BUILT_IN_ATOMIC_STORE_8:
6795 case BUILT_IN_ATOMIC_STORE_16:
6796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6797 target = expand_builtin_atomic_store (mode, exp);
6798 if (target)
6799 return const0_rtx;
6800 break;
6801
6802 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6803 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6804 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6805 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6806 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6807 {
6808 enum built_in_function lib;
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6810 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6811 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6812 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6813 ignore, lib);
6814 if (target)
6815 return target;
6816 break;
6817 }
6818 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6819 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6820 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6821 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6822 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6823 {
6824 enum built_in_function lib;
6825 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6826 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6827 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6828 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6829 ignore, lib);
6830 if (target)
6831 return target;
6832 break;
6833 }
6834 case BUILT_IN_ATOMIC_AND_FETCH_1:
6835 case BUILT_IN_ATOMIC_AND_FETCH_2:
6836 case BUILT_IN_ATOMIC_AND_FETCH_4:
6837 case BUILT_IN_ATOMIC_AND_FETCH_8:
6838 case BUILT_IN_ATOMIC_AND_FETCH_16:
6839 {
6840 enum built_in_function lib;
6841 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6842 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6843 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6844 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6845 ignore, lib);
6846 if (target)
6847 return target;
6848 break;
6849 }
6850 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6851 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6852 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6853 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6854 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6855 {
6856 enum built_in_function lib;
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6858 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6859 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6860 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6861 ignore, lib);
6862 if (target)
6863 return target;
6864 break;
6865 }
6866 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6867 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6868 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6869 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6870 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6871 {
6872 enum built_in_function lib;
6873 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6874 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6875 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6876 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6877 ignore, lib);
6878 if (target)
6879 return target;
6880 break;
6881 }
6882 case BUILT_IN_ATOMIC_OR_FETCH_1:
6883 case BUILT_IN_ATOMIC_OR_FETCH_2:
6884 case BUILT_IN_ATOMIC_OR_FETCH_4:
6885 case BUILT_IN_ATOMIC_OR_FETCH_8:
6886 case BUILT_IN_ATOMIC_OR_FETCH_16:
6887 {
6888 enum built_in_function lib;
6889 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6890 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6891 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6892 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6893 ignore, lib);
6894 if (target)
6895 return target;
6896 break;
6897 }
6898 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6899 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6900 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6901 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6902 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6903 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6904 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6905 ignore, BUILT_IN_NONE);
6906 if (target)
6907 return target;
6908 break;
6909
6910 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6911 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6912 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6913 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6914 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6915 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6916 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6917 ignore, BUILT_IN_NONE);
6918 if (target)
6919 return target;
6920 break;
6921
6922 case BUILT_IN_ATOMIC_FETCH_AND_1:
6923 case BUILT_IN_ATOMIC_FETCH_AND_2:
6924 case BUILT_IN_ATOMIC_FETCH_AND_4:
6925 case BUILT_IN_ATOMIC_FETCH_AND_8:
6926 case BUILT_IN_ATOMIC_FETCH_AND_16:
6927 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6928 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6929 ignore, BUILT_IN_NONE);
6930 if (target)
6931 return target;
6932 break;
6933
6934 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6935 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6936 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6937 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6938 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6939 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6940 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6941 ignore, BUILT_IN_NONE);
6942 if (target)
6943 return target;
6944 break;
6945
6946 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6947 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6948 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6949 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6950 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6951 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6952 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6953 ignore, BUILT_IN_NONE);
6954 if (target)
6955 return target;
6956 break;
6957
6958 case BUILT_IN_ATOMIC_FETCH_OR_1:
6959 case BUILT_IN_ATOMIC_FETCH_OR_2:
6960 case BUILT_IN_ATOMIC_FETCH_OR_4:
6961 case BUILT_IN_ATOMIC_FETCH_OR_8:
6962 case BUILT_IN_ATOMIC_FETCH_OR_16:
6963 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6964 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6965 ignore, BUILT_IN_NONE);
6966 if (target)
6967 return target;
6968 break;
10b744a3 6969
6970 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 6971 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 6972
6973 case BUILT_IN_ATOMIC_CLEAR:
6974 return expand_builtin_atomic_clear (exp);
1cd6e20d 6975
6976 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6977 return expand_builtin_atomic_always_lock_free (exp);
6978
6979 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6980 target = expand_builtin_atomic_is_lock_free (exp);
6981 if (target)
6982 return target;
6983 break;
6984
6985 case BUILT_IN_ATOMIC_THREAD_FENCE:
6986 expand_builtin_atomic_thread_fence (exp);
6987 return const0_rtx;
6988
6989 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6990 expand_builtin_atomic_signal_fence (exp);
6991 return const0_rtx;
6992
0a39fd54 6993 case BUILT_IN_OBJECT_SIZE:
6994 return expand_builtin_object_size (exp);
6995
6996 case BUILT_IN_MEMCPY_CHK:
6997 case BUILT_IN_MEMPCPY_CHK:
6998 case BUILT_IN_MEMMOVE_CHK:
6999 case BUILT_IN_MEMSET_CHK:
7000 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7001 if (target)
7002 return target;
7003 break;
7004
7005 case BUILT_IN_STRCPY_CHK:
7006 case BUILT_IN_STPCPY_CHK:
7007 case BUILT_IN_STRNCPY_CHK:
1063acde 7008 case BUILT_IN_STPNCPY_CHK:
0a39fd54 7009 case BUILT_IN_STRCAT_CHK:
b356dfef 7010 case BUILT_IN_STRNCAT_CHK:
0a39fd54 7011 case BUILT_IN_SNPRINTF_CHK:
7012 case BUILT_IN_VSNPRINTF_CHK:
7013 maybe_emit_chk_warning (exp, fcode);
7014 break;
7015
7016 case BUILT_IN_SPRINTF_CHK:
7017 case BUILT_IN_VSPRINTF_CHK:
7018 maybe_emit_sprintf_chk_warning (exp, fcode);
7019 break;
7020
2c281b15 7021 case BUILT_IN_FREE:
f74ea1c2 7022 if (warn_free_nonheap_object)
7023 maybe_emit_free_warning (exp);
2c281b15 7024 break;
7025
badaa04c 7026 case BUILT_IN_THREAD_POINTER:
7027 return expand_builtin_thread_pointer (exp, target);
7028
7029 case BUILT_IN_SET_THREAD_POINTER:
7030 expand_builtin_set_thread_pointer (exp);
7031 return const0_rtx;
7032
d037099f 7033 case BUILT_IN_CILK_DETACH:
7034 expand_builtin_cilk_detach (exp);
7035 return const0_rtx;
7036
7037 case BUILT_IN_CILK_POP_FRAME:
7038 expand_builtin_cilk_pop_frame (exp);
7039 return const0_rtx;
7040
058a1b7a 7041 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7042 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7043 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7044 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7045 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7046 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7047 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7048 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7049 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7050 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7051 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7052 /* We allow user CHKP builtins if Pointer Bounds
7053 Checker is off. */
7054 if (!chkp_function_instrumented_p (current_function_decl))
7055 {
7056 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7057 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7058 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7059 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7060 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7061 return expand_normal (CALL_EXPR_ARG (exp, 0));
7062 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7063 return expand_normal (size_zero_node);
7064 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7065 return expand_normal (size_int (-1));
7066 else
7067 return const0_rtx;
7068 }
7069 /* FALLTHROUGH */
7070
7071 case BUILT_IN_CHKP_BNDMK:
7072 case BUILT_IN_CHKP_BNDSTX:
7073 case BUILT_IN_CHKP_BNDCL:
7074 case BUILT_IN_CHKP_BNDCU:
7075 case BUILT_IN_CHKP_BNDLDX:
7076 case BUILT_IN_CHKP_BNDRET:
7077 case BUILT_IN_CHKP_INTERSECT:
7078 case BUILT_IN_CHKP_NARROW:
7079 case BUILT_IN_CHKP_EXTRACT_LOWER:
7080 case BUILT_IN_CHKP_EXTRACT_UPPER:
7081 /* Software implementation of Pointer Bounds Checker is NYI.
7082 Target support is required. */
7083 error ("Your target platform does not support -fcheck-pointer-bounds");
7084 break;
7085
ca4c3545 7086 case BUILT_IN_ACC_ON_DEVICE:
7087 target = expand_builtin_acc_on_device (exp, target);
7088 if (target)
7089 return target;
7090 break;
7091
92482ee0 7092 default: /* just do library call, if unknown builtin */
146c1b4f 7093 break;
53800dbe 7094 }
7095
7096 /* The switch statement above can drop through to cause the function
7097 to be called normally. */
7098 return expand_call (exp, target, ignore);
7099}
650e4c94 7100
/* Similar to expand_builtin but is used for instrumented calls.
   EXP is the CALL_EXPR to expand, TARGET a suggested place for the
   result RTX, SUBTARGET is unused here, MODE the expected mode of the
   result and IGNORE is nonzero when the value is not used.  Returns
   the RTX holding the expanded value, or the result of a normal call
   when no special expansion applies.  */

rtx
expand_builtin_with_bounds (tree exp, rtx target,
			    rtx subtarget ATTRIBUTE_UNUSED,
			    machine_mode mode, int ignore)
{
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  /* Only instrumented (Pointer Bounds Checker) calls may come here.  */
  gcc_assert (CALL_WITH_BOUNDS_P (exp));

  /* Machine-dependent builtins are delegated to the target hook.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return targetm.expand_builtin (exp, target, subtarget, mode, ignore);

  gcc_assert (fcode > BEGIN_CHKP_BUILTINS
	      && fcode < END_CHKP_BUILTINS);

  switch (fcode)
    {
    case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_memcpy_with_bounds (exp, target);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
      target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
      target = expand_builtin_memset_with_bounds (exp, target, mode);
      if (target)
	return target;
      break;

    default:
      break;
    }

  /* The switch statement above can drop through to cause the function
     to be called normally.  */
  return expand_call (exp, target, ignore);
}
7147
/* Determine whether a tree node represents a call to a built-in
   function.  If the tree T is a call to a built-in function with
   the right number of arguments of the appropriate types, return
   the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
   Otherwise the return value is END_BUILTINS.  */

enum built_in_function
builtin_mathfn_code (const_tree t)
{
  const_tree fndecl, arg, parmlist;
  const_tree argtype, parmtype;
  const_call_expr_arg_iterator iter;

  if (TREE_CODE (t) != CALL_EXPR
      || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
    return END_BUILTINS;

  /* Only normal (non machine-dependent) built-in function decls
     qualify.  */
  fndecl = get_callee_fndecl (t);
  if (fndecl == NULL_TREE
      || TREE_CODE (fndecl) != FUNCTION_DECL
      || ! DECL_BUILT_IN (fndecl)
      || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return END_BUILTINS;

  /* Walk the declared parameter list and the actual arguments in
     lockstep, checking the argument class matches the parameter
     class at every position.  */
  parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  init_const_call_expr_arg_iterator (t, &iter);
  for (; parmlist; parmlist = TREE_CHAIN (parmlist))
    {
      /* If a function doesn't take a variable number of arguments,
	 the last element in the list will have type `void'.  */
      parmtype = TREE_VALUE (parmlist);
      if (VOID_TYPE_P (parmtype))
	{
	  /* Excess actual arguments disqualify the call.  */
	  if (more_const_call_expr_args_p (&iter))
	    return END_BUILTINS;
	  return DECL_FUNCTION_CODE (fndecl);
	}

      /* Too few actual arguments.  */
      if (! more_const_call_expr_args_p (&iter))
	return END_BUILTINS;

      arg = next_const_call_expr_arg (&iter);
      argtype = TREE_TYPE (arg);

      if (SCALAR_FLOAT_TYPE_P (parmtype))
	{
	  if (! SCALAR_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (COMPLEX_FLOAT_TYPE_P (parmtype))
	{
	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (POINTER_TYPE_P (parmtype))
	{
	  if (! POINTER_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else if (INTEGRAL_TYPE_P (parmtype))
	{
	  if (! INTEGRAL_TYPE_P (argtype))
	    return END_BUILTINS;
	}
      else
	return END_BUILTINS;
    }

  /* Variable-length argument list.  */
  return DECL_FUNCTION_CODE (fndecl);
}
7219
c2f47e15 7220/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7221 evaluate to a constant. */
650e4c94 7222
7223static tree
c2f47e15 7224fold_builtin_constant_p (tree arg)
650e4c94 7225{
650e4c94 7226 /* We return 1 for a numeric type that's known to be a constant
7227 value at compile-time or for an aggregate type that's a
7228 literal constant. */
c2f47e15 7229 STRIP_NOPS (arg);
650e4c94 7230
7231 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7232 if (CONSTANT_CLASS_P (arg)
7233 || (TREE_CODE (arg) == CONSTRUCTOR
7234 && TREE_CONSTANT (arg)))
650e4c94 7235 return integer_one_node;
c2f47e15 7236 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7237 {
c2f47e15 7238 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7239 if (TREE_CODE (op) == STRING_CST
7240 || (TREE_CODE (op) == ARRAY_REF
7241 && integer_zerop (TREE_OPERAND (op, 1))
7242 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7243 return integer_one_node;
7244 }
650e4c94 7245
1fb4300c 7246 /* If this expression has side effects, show we don't know it to be a
7247 constant. Likewise if it's a pointer or aggregate type since in
7248 those case we only want literals, since those are only optimized
f97c71a1 7249 when generating RTL, not later.
7250 And finally, if we are compiling an initializer, not code, we
7251 need to return a definite result now; there's not going to be any
7252 more optimization done. */
c2f47e15 7253 if (TREE_SIDE_EFFECTS (arg)
7254 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7255 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7256 || cfun == 0
0b049e15 7257 || folding_initializer
7258 || force_folding_builtin_constant_p)
650e4c94 7259 return integer_zero_node;
7260
c2f47e15 7261 return NULL_TREE;
650e4c94 7262}
7263
76f5a783 7264/* Create builtin_expect with PRED and EXPECTED as its arguments and
7265 return it as a truthvalue. */
4ee9c684 7266
7267static tree
c83059be 7268build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7269 tree predictor)
4ee9c684 7270{
76f5a783 7271 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7272
b9a16870 7273 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7274 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7275 ret_type = TREE_TYPE (TREE_TYPE (fn));
7276 pred_type = TREE_VALUE (arg_types);
7277 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7278
389dd41b 7279 pred = fold_convert_loc (loc, pred_type, pred);
7280 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7281 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7282 predictor);
76f5a783 7283
7284 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7285 build_int_cst (ret_type, 0));
7286}
7287
/* Fold a call to builtin_expect with arguments ARG0 and ARG1.  Return
   NULL_TREE if no simplification is possible.  ARG2 is the optional
   predictor argument forwarded to nested __builtin_expect calls.  */

tree
fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
{
  tree inner, fndecl, inner_arg0;
  enum tree_code code;

  /* Distribute the expected value over short-circuiting operators.
     See through the cast from truthvalue_type_node to long.  */
  inner_arg0 = arg0;
  while (CONVERT_EXPR_P (inner_arg0)
	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
    inner_arg0 = TREE_OPERAND (inner_arg0, 0);

  /* If this is a builtin_expect within a builtin_expect keep the
     inner one.  See through a comparison against a constant.  It
     might have been added to create a truthvalue.  */
  inner = inner_arg0;

  if (COMPARISON_CLASS_P (inner)
      && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
    inner = TREE_OPERAND (inner, 0);

  if (TREE_CODE (inner) == CALL_EXPR
      && (fndecl = get_callee_fndecl (inner))
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
    return arg0;

  /* Push the expectation down into both operands of && / ||.  */
  inner = inner_arg0;
  code = TREE_CODE (inner);
  if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
    {
      tree op0 = TREE_OPERAND (inner, 0);
      tree op1 = TREE_OPERAND (inner, 1);

      op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
      op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
      inner = build2 (code, TREE_TYPE (inner), op0, op1);

      return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
    }

  /* If the argument isn't invariant then there's nothing else we can do.  */
  if (!TREE_CONSTANT (inner_arg0))
    return NULL_TREE;

  /* If we expect that a comparison against the argument will fold to
     a constant return the constant.  In practice, this means a true
     constant or the address of a non-weak symbol.  */
  inner = inner_arg0;
  STRIP_NOPS (inner);
  if (TREE_CODE (inner) == ADDR_EXPR)
    {
      do
	{
	  inner = TREE_OPERAND (inner, 0);
	}
      while (TREE_CODE (inner) == COMPONENT_REF
	     || TREE_CODE (inner) == ARRAY_REF);
      /* The address of a weak symbol may resolve to zero at link
	 time, so it is not a usable compile-time constant.  */
      if ((TREE_CODE (inner) == VAR_DECL
	   || TREE_CODE (inner) == FUNCTION_DECL)
	  && DECL_WEAK (inner))
	return NULL_TREE;
    }

  /* Otherwise, ARG0 already has the proper type for the return value.  */
  return arg0;
}
7360
c2f47e15 7361/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7362
539a3a92 7363static tree
c2f47e15 7364fold_builtin_classify_type (tree arg)
539a3a92 7365{
c2f47e15 7366 if (arg == 0)
7002a1c8 7367 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7368
7002a1c8 7369 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7370}
7371
c2f47e15 7372/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7373
7374static tree
c7cbde74 7375fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7376{
c2f47e15 7377 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7378 return NULL_TREE;
7379 else
7380 {
c2f47e15 7381 tree len = c_strlen (arg, 0);
e6e27594 7382
7383 if (len)
c7cbde74 7384 return fold_convert_loc (loc, type, len);
e6e27594 7385
7386 return NULL_TREE;
7387 }
7388}
7389
92c43e3c 7390/* Fold a call to __builtin_inf or __builtin_huge_val. */
7391
7392static tree
389dd41b 7393fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7394{
aa870c1b 7395 REAL_VALUE_TYPE real;
7396
40f4dbd5 7397 /* __builtin_inff is intended to be usable to define INFINITY on all
7398 targets. If an infinity is not available, INFINITY expands "to a
7399 positive constant of type float that overflows at translation
7400 time", footnote "In this case, using INFINITY will violate the
7401 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7402 Thus we pedwarn to ensure this constraint violation is
7403 diagnosed. */
92c43e3c 7404 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7405 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7406
aa870c1b 7407 real_inf (&real);
7408 return build_real (type, real);
92c43e3c 7409}
7410
c2f47e15 7411/* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
b0db7939 7412
7413static tree
c2f47e15 7414fold_builtin_nan (tree arg, tree type, int quiet)
b0db7939 7415{
7416 REAL_VALUE_TYPE real;
7417 const char *str;
7418
c2f47e15 7419 if (!validate_arg (arg, POINTER_TYPE))
7420 return NULL_TREE;
7421 str = c_getstr (arg);
b0db7939 7422 if (!str)
c2f47e15 7423 return NULL_TREE;
b0db7939 7424
7425 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
c2f47e15 7426 return NULL_TREE;
b0db7939 7427
7428 return build_real (type, real);
7429}
7430
/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   This is a conservative recursive analysis: false means "not known
   to be integer-valued", not "known non-integer".  */

static bool
integer_valued_real_p (tree t)
{
  switch (TREE_CODE (t))
    {
    case FLOAT_EXPR:
      /* Conversion from an integer type is integral by construction.  */
      return true;

    case ABS_EXPR:
    case SAVE_EXPR:
      return integer_valued_real_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      /* The value of these nodes is their second operand.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* These preserve integrality when both operands are integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 0))
	     && integer_valued_real_p (TREE_OPERAND (t, 1));

    case COND_EXPR:
      /* Both selectable arms must be integral.  */
      return integer_valued_real_p (TREE_OPERAND (t, 1))
	     && integer_valued_real_p (TREE_OPERAND (t, 2));

    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (TREE_OPERAND (t, 0));
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return integer_valued_real_p (TREE_OPERAND (t, 0));
	break;
      }

    case CALL_EXPR:
      switch (builtin_mathfn_code (t))
	{
	/* Rounding builtins always produce integral values.  */
	CASE_FLT_FN (BUILT_IN_CEIL):
	CASE_FLT_FN (BUILT_IN_FLOOR):
	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	CASE_FLT_FN (BUILT_IN_RINT):
	CASE_FLT_FN (BUILT_IN_ROUND):
	CASE_FLT_FN (BUILT_IN_TRUNC):
	  return true;

	/* fmin/fmax select one of their operands.  */
	CASE_FLT_FN (BUILT_IN_FMIN):
	CASE_FLT_FN (BUILT_IN_FMAX):
	  return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
		 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));

	default:
	  break;
	}
      break;

    default:
      break;
    }
  return false;
}
7502
/* FNDECL is assumed to be a builtin where truncation can be propagated
   across (for instance floor((double)f) == (double)floorf (f).
   Do the transformation for a call with argument ARG.  Returns the
   simplified tree or NULL_TREE when nothing applies.  */

static tree
fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Integer rounding functions are idempotent.  */
  if (fcode == builtin_mathfn_code (arg))
    return arg;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return arg;

  if (optimize)
    {
      /* Narrow e.g. floor ((double) f) to (double) floorf (f) when a
	 lower-precision variant of the builtin exists.  */
      tree arg0 = strip_float_extensions (arg);
      tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return fold_convert_loc (loc, ftype,
				 build_call_expr_loc (loc, decl, 1,
						      fold_convert_loc (loc,
									newtype,
									arg0)));
    }
  return NULL_TREE;
}
7541
/* FNDECL is assumed to be builtin which can narrow the FP type of
   the argument, for instance lround((double)f) -> lroundf (f).
   Do the transformation for a call with argument ARG.  Also
   canonicalizes the int/long long variants to the long variant when
   the integer types have identical precision.  Returns NULL_TREE when
   no simplification applies.  */

static tree
fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* If argument is already integer valued, and we don't need to worry
     about setting errno, there's no need to perform rounding.  */
  if (! flag_errno_math && integer_valued_real_p (arg))
    return fold_build1_loc (loc, FIX_TRUNC_EXPR,
			TREE_TYPE (TREE_TYPE (fndecl)), arg);

  if (optimize)
    {
      /* Narrow the floating-point argument type when a lower-precision
	 variant of the builtin exists, e.g. lround ((double) f) ->
	 lroundf (f).  */
      tree ftype = TREE_TYPE (arg);
      tree arg0 = strip_float_extensions (arg);
      tree newtype = TREE_TYPE (arg0);
      tree decl;

      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
	  && (decl = mathfn_built_in (newtype, fcode)))
	return build_call_expr_loc (loc, decl, 1,
				fold_convert_loc (loc, newtype, arg0));
    }

  /* Canonicalize iround (x) to lround (x) on ILP32 targets where
     sizeof (int) == sizeof (long).  */
  if (TYPE_PRECISION (integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_ICEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_IFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_IROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_IRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert back to the declared (int) return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  /* Canonicalize llround (x) to lround (x) on LP64 targets where
     sizeof (long long) == sizeof (long).  */
  if (TYPE_PRECISION (long_long_integer_type_node)
      == TYPE_PRECISION (long_integer_type_node))
    {
      tree newfn = NULL_TREE;
      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LLCEIL):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_LLFLOOR):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_LLROUND):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_LLRINT):
	  newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
	  break;

	default:
	  break;
	}

      if (newfn)
	{
	  /* Convert back to the declared (long long) return type.  */
	  tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
	  return fold_convert_loc (loc,
				   TREE_TYPE (TREE_TYPE (fndecl)), newcall);
	}
    }

  return NULL_TREE;
}
7647
/* Fold call to builtin cabs, cabsf or cabsl with argument ARG.  TYPE is the
   return type.  FNDECL is the cabs function decl, used to rebuild the
   call in the cabs(-z)/cabs(conj(z)) simplification.  Return NULL_TREE
   if no simplification can be made.  */

static tree
fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
{
  tree res;

  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST
      && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
			      type, mpfr_hypot)))
    return res;

  if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      /* If either part is zero, cabs is fabs of the other.  */
      if (real_zerop (real))
	return fold_build1_loc (loc, ABS_EXPR, type, imag);
      if (real_zerop (imag))
	return fold_build1_loc (loc, ABS_EXPR, type, real);

      /* cabs(x+xi) -> fabs(x)*sqrt(2).  */
      if (flag_unsafe_math_optimizations
	  && operand_equal_p (real, imag, OEP_PURE_SAME))
	{
	  const REAL_VALUE_TYPE sqrt2_trunc
	    = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
	  STRIP_NOPS (real);
	  return fold_build2_loc (loc, MULT_EXPR, type,
			      fold_build1_loc (loc, ABS_EXPR, type, real),
			      build_real (type, sqrt2_trunc));
	}
    }

  /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z).  */
  if (TREE_CODE (arg) == NEGATE_EXPR
      || TREE_CODE (arg) == CONJ_EXPR)
    return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));

  /* Don't do this when optimizing for size.  */
  if (flag_unsafe_math_optimizations
      && optimize && optimize_function_for_speed_p (cfun))
    {
      tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

      if (sqrtfn != NULL_TREE)
	{
	  tree rpart, ipart, result;

	  /* Save ARG so the real/imag extractions do not duplicate
	     its side effects.  */
	  arg = builtin_save_expr (arg);

	  rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
	  ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);

	  rpart = builtin_save_expr (rpart);
	  ipart = builtin_save_expr (ipart);

	  /* cabs(z) -> sqrt (re*re + im*im).  */
	  result = fold_build2_loc (loc, PLUS_EXPR, type,
				fold_build2_loc (loc, MULT_EXPR, type,
					     rpart, rpart),
				fold_build2_loc (loc, MULT_EXPR, type,
					     ipart, ipart));

	  return build_call_expr_loc (loc, sqrtfn, 1, result);
	}
    }

  return NULL_TREE;
}
7725
c2373fdb 7726/* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7727 complex tree type of the result. If NEG is true, the imaginary
7728 zero is negative. */
7729
7730static tree
7731build_complex_cproj (tree type, bool neg)
7732{
7733 REAL_VALUE_TYPE rinf, rzero = dconst0;
7734
7735 real_inf (&rinf);
7736 rzero.sign = neg;
7737 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7738 build_real (TREE_TYPE (type), rzero));
7739}
7740
/* Fold call to builtin cproj, cprojf or cprojl with argument ARG.  TYPE is the
   return type.  Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cproj (location_t loc, tree arg, tree type)
{
  if (!validate_arg (arg, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
    return NULL_TREE;

  /* If there are no infinities, return arg.  */
  if (! HONOR_INFINITIES (type))
    return non_lvalue_loc (loc, arg);

  /* Calculate the result when the argument is a constant.  */
  if (TREE_CODE (arg) == COMPLEX_CST)
    {
      const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
      const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));

      /* cproj maps every infinity to (inf + copysign(0,imag)*i).  */
      if (real_isinf (real) || real_isinf (imag))
	return build_complex_cproj (type, imag->sign);
      else
	return arg;
    }
  else if (TREE_CODE (arg) == COMPLEX_EXPR)
    {
      tree real = TREE_OPERAND (arg, 0);
      tree imag = TREE_OPERAND (arg, 1);

      STRIP_NOPS (real);
      STRIP_NOPS (imag);

      /* If the real part is inf and the imag part is known to be
	 nonnegative, return (inf + 0i).  Remember side-effects are
	 possible in the imag part.  */
      if (TREE_CODE (real) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (real))
	  && tree_expr_nonnegative_p (imag))
	return omit_one_operand_loc (loc, type,
				     build_complex_cproj (type, false),
				     arg);

      /* If the imag part is inf, return (inf+I*copysign(0,imag)).
	 Remember side-effects are possible in the real part.  */
      if (TREE_CODE (imag) == REAL_CST
	  && real_isinf (TREE_REAL_CST_PTR (imag)))
	return
	  omit_one_operand_loc (loc, type,
				build_complex_cproj (type, TREE_REAL_CST_PTR
						     (imag)->sign), arg);
    }

  return NULL_TREE;
}
7796
/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
   TYPE is the return type.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_sqrt (location_t loc, tree arg, tree type)
{

  enum built_in_function fcode;
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
    return res;

  /* Optimize sqrt(expN(x)) = expN(x*0.5).  */
  fcode = builtin_mathfn_code (arg);
  if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
    {
      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      arg = fold_build2_loc (loc, MULT_EXPR, type,
			 CALL_EXPR_ARG (arg, 0),
			 build_real (type, dconsthalf));
      return build_call_expr_loc (loc, expfn, 1, arg);
    }

  /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)).  */
  if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
    {
      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

      if (powfn)
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  tree tree_root;
	  /* The inner root was either sqrt or cbrt.  */
	  /* This was a conditional expression but it triggered a bug
	     in Sun C 5.5.  */
	  REAL_VALUE_TYPE dconstroot;
	  if (BUILTIN_SQRT_P (fcode))
	    dconstroot = dconsthalf;
	  else
	    dconstroot = dconst_third ();

	  /* Adjust for the outer root: halve the exponent 1/N.  */
	  SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	  tree_root = build_real (type, dconstroot);
	  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	}
    }

  /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5).  */
  if (flag_unsafe_math_optimizations
      && (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL))
    {
      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
      tree arg0 = CALL_EXPR_ARG (arg, 0);
      tree arg1 = CALL_EXPR_ARG (arg, 1);
      tree narg1;
      /* |x| is needed because pow's base may be negative while the
	 sqrt result is always nonnegative.  */
      if (!tree_expr_nonnegative_p (arg0))
	arg0 = build1 (ABS_EXPR, type, arg0);
      narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
			   build_real (type, dconsthalf));
      return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
    }

  return NULL_TREE;
}
7870
/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cbrt (location_t loc, tree arg, tree type)
{
  const enum built_in_function fcode = builtin_mathfn_code (arg);
  tree res;

  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
    return res;

  /* All rewrites below may change rounding/exception behavior, so they
     are only done under -funsafe-math-optimizations.  */
  if (flag_unsafe_math_optimizations)
    {
      /* Optimize cbrt(expN(x)) -> expN(x/3).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  /* ARG is a call to expN; pull out its FUNCTION_DECL.  */
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	  /* 1/3 rounded to the precision of TYPE.  */
	  const REAL_VALUE_TYPE third_trunc =
	    real_value_truncate (TYPE_MODE (type), dconst_third ());
	  arg = fold_build2_loc (loc, MULT_EXPR, type,
				 CALL_EXPR_ARG (arg, 0),
				 build_real (type, third_trunc));
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize cbrt(sqrt(x)) -> pow(x,1/6).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	  if (powfn)
	    {
	      tree arg0 = CALL_EXPR_ARG (arg, 0);
	      tree tree_root;
	      REAL_VALUE_TYPE dconstroot = dconst_third ();

	      /* Halve 1/3 by decrementing the binary exponent,
		 giving 1/6.  */
	      SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
	      dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
	      tree_root = build_real (type, dconstroot);
	      return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
	    }
	}

      /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg0 = CALL_EXPR_ARG (arg, 0);
	  if (tree_expr_nonnegative_p (arg0))
	    {
	      tree powfn = mathfn_built_in (type, BUILT_IN_POW);

	      if (powfn)
		{
		  tree tree_root;
		  REAL_VALUE_TYPE dconstroot;

		  /* 1/9 = (1/3) * (1/3).  */
		  real_arithmetic (&dconstroot, MULT_EXPR,
				   dconst_third_ptr (), dconst_third_ptr ());
		  dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
		  tree_root = build_real (type, dconstroot);
		  return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
		}
	    }
	}

      /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg, 0);
	  tree arg01 = CALL_EXPR_ARG (arg, 1);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      /* Reuse the pow FUNCTION_DECL from the inner call.  */
	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
					     build_real (type, dconstroot));
	      return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
	    }
	}
    }
  return NULL_TREE;
}
7961
c2f47e15 7962/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7963 TYPE is the type of the return value. Return NULL_TREE if no
7964 simplification can be made. */
7965
e6e27594 7966static tree
389dd41b 7967fold_builtin_cos (location_t loc,
7968 tree arg, tree type, tree fndecl)
e6e27594 7969{
e6ab33d8 7970 tree res, narg;
e6e27594 7971
c2f47e15 7972 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7973 return NULL_TREE;
7974
bffb7645 7975 /* Calculate the result when the argument is a constant. */
728bac60 7976 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
bffb7645 7977 return res;
48e1416a 7978
e6e27594 7979 /* Optimize cos(-x) into cos (x). */
e6ab33d8 7980 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7981 return build_call_expr_loc (loc, fndecl, 1, narg);
e6e27594 7982
7983 return NULL_TREE;
7984}
7985
c2f47e15 7986/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7987 Return NULL_TREE if no simplification can be made. */
7988
cacdc1af 7989static tree
389dd41b 7990fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
cacdc1af 7991{
c2f47e15 7992 if (validate_arg (arg, REAL_TYPE))
cacdc1af 7993 {
cacdc1af 7994 tree res, narg;
7995
7996 /* Calculate the result when the argument is a constant. */
7997 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7998 return res;
48e1416a 7999
cacdc1af 8000 /* Optimize cosh(-x) into cosh (x). */
8001 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 8002 return build_call_expr_loc (loc, fndecl, 1, narg);
cacdc1af 8003 }
48e1416a 8004
cacdc1af 8005 return NULL_TREE;
8006}
8007
239d491a 8008/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
8009 argument ARG. TYPE is the type of the return value. Return
8010 NULL_TREE if no simplification can be made. */
8011
8012static tree
965d0f29 8013fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
8014 bool hyper)
239d491a 8015{
8016 if (validate_arg (arg, COMPLEX_TYPE)
8017 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8018 {
8019 tree tmp;
8020
239d491a 8021 /* Calculate the result when the argument is a constant. */
8022 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8023 return tmp;
48e1416a 8024
239d491a 8025 /* Optimize fn(-x) into fn(x). */
8026 if ((tmp = fold_strip_sign_ops (arg)))
389dd41b 8027 return build_call_expr_loc (loc, fndecl, 1, tmp);
239d491a 8028 }
8029
8030 return NULL_TREE;
8031}
8032
c2f47e15 8033/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8034 Return NULL_TREE if no simplification can be made. */
8035
e6e27594 8036static tree
c2f47e15 8037fold_builtin_tan (tree arg, tree type)
e6e27594 8038{
8039 enum built_in_function fcode;
29f4cd78 8040 tree res;
e6e27594 8041
c2f47e15 8042 if (!validate_arg (arg, REAL_TYPE))
e6e27594 8043 return NULL_TREE;
8044
bffb7645 8045 /* Calculate the result when the argument is a constant. */
728bac60 8046 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
bffb7645 8047 return res;
48e1416a 8048
e6e27594 8049 /* Optimize tan(atan(x)) = x. */
8050 fcode = builtin_mathfn_code (arg);
8051 if (flag_unsafe_math_optimizations
8052 && (fcode == BUILT_IN_ATAN
8053 || fcode == BUILT_IN_ATANF
8054 || fcode == BUILT_IN_ATANL))
c2f47e15 8055 return CALL_EXPR_ARG (arg, 0);
e6e27594 8056
8057 return NULL_TREE;
8058}
8059
/* Fold function call to builtin sincos, sincosf, or sincosl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_sincos (location_t loc,
		     tree arg0, tree arg1, tree arg2)
{
  tree type;
  tree res, fn, call;

  /* ARG0 is the angle; ARG1 and ARG2 are the output pointers.  */
  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE))
    return NULL_TREE;

  type = TREE_TYPE (arg0);

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
    return res;

  /* Canonicalize sincos to cexpi.  Only possible when the C library
     provides the C99 complex math functions.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  fn = mathfn_built_in (type, BUILT_IN_CEXPI);
  if (!fn)
    return NULL_TREE;

  /* Wrap the cexpi call in a SAVE_EXPR so it is evaluated only once
     even though its value is used twice below.  */
  call = build_call_expr_loc (loc, fn, 1, arg0);
  call = builtin_save_expr (call);

  /* Store the imaginary part (sin, by Euler's formula) through ARG1
     and the real part (cos) through ARG2, sequenced by a
     COMPOUND_EXPR.  */
  return build2 (COMPOUND_EXPR, void_type_node,
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg1),
			 build1 (IMAGPART_EXPR, type, call)),
		 build2 (MODIFY_EXPR, void_type_node,
			 build_fold_indirect_ref_loc (loc, arg2),
			 build1 (REALPART_EXPR, type, call)));
}
8099
/* Fold function call to builtin cexp, cexpf, or cexpl.  Return
   NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_cexp (location_t loc, tree arg0, tree type)
{
  tree rtype;
  tree realp, imagp, ifn;
  tree res;

  if (!validate_arg (arg0, COMPLEX_TYPE)
      || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
    return res;

  /* The scalar element type of the complex argument.  */
  rtype = TREE_TYPE (TREE_TYPE (arg0));

  /* In case we can figure out the real part of arg0 and it is constant zero
     fold to cexpi.  This needs the C99 complex math functions in libc.  */
  if (!targetm.libc_has_function (function_c99_math_complex))
    return NULL_TREE;
  ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
  if (!ifn)
    return NULL_TREE;

  if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
      && real_zerop (realp))
    {
      /* cexp (0 + i*y) == cexpi (y).  */
      tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
      return build_call_expr_loc (loc, ifn, 1, narg);
    }

  /* In case we can easily decompose real and imaginary parts split cexp
     to exp (r) * cexpi (i).  */
  if (flag_unsafe_math_optimizations
      && realp)
    {
      tree rfn, rcall, icall;

      rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
      if (!rfn)
	return NULL_TREE;

      imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
      if (!imagp)
	return NULL_TREE;

      /* Both call results are used twice when assembling the complex
	 result, so wrap each in a SAVE_EXPR.  */
      icall = build_call_expr_loc (loc, ifn, 1, imagp);
      icall = builtin_save_expr (icall);
      rcall = build_call_expr_loc (loc, rfn, 1, realp);
      rcall = builtin_save_expr (rcall);
      /* Result: COMPLEX_EXPR <exp(r)*REALPART (cexpi(i)),
			       exp(r)*IMAGPART (cexpi(i))>.  */
      return fold_build2_loc (loc, COMPLEX_EXPR, type,
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, REALPART_EXPR,
								rtype, icall)),
			      fold_build2_loc (loc, MULT_EXPR, rtype,
					       rcall,
					       fold_build1_loc (loc, IMAGPART_EXPR,
								rtype, icall)));
    }

  return NULL_TREE;
}
8167
c2f47e15 8168/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8169 Return NULL_TREE if no simplification can be made. */
277f8dd2 8170
8171static tree
389dd41b 8172fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
277f8dd2 8173{
c2f47e15 8174 if (!validate_arg (arg, REAL_TYPE))
8175 return NULL_TREE;
277f8dd2 8176
8177 /* Optimize trunc of constant value. */
f96bd2bf 8178 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8179 {
8180 REAL_VALUE_TYPE r, x;
2426241c 8181 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8182
8183 x = TREE_REAL_CST (arg);
8184 real_trunc (&r, TYPE_MODE (type), &x);
8185 return build_real (type, r);
8186 }
8187
389dd41b 8188 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8189}
8190
c2f47e15 8191/* Fold function call to builtin floor, floorf or floorl with argument ARG.
8192 Return NULL_TREE if no simplification can be made. */
277f8dd2 8193
8194static tree
389dd41b 8195fold_builtin_floor (location_t loc, tree fndecl, tree arg)
277f8dd2 8196{
c2f47e15 8197 if (!validate_arg (arg, REAL_TYPE))
8198 return NULL_TREE;
277f8dd2 8199
8200 /* Optimize floor of constant value. */
f96bd2bf 8201 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8202 {
8203 REAL_VALUE_TYPE x;
8204
8205 x = TREE_REAL_CST (arg);
8206 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8207 {
2426241c 8208 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8209 REAL_VALUE_TYPE r;
8210
8211 real_floor (&r, TYPE_MODE (type), &x);
8212 return build_real (type, r);
8213 }
8214 }
8215
acc2b92e 8216 /* Fold floor (x) where x is nonnegative to trunc (x). */
8217 if (tree_expr_nonnegative_p (arg))
30fe8286 8218 {
8219 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8220 if (truncfn)
389dd41b 8221 return build_call_expr_loc (loc, truncfn, 1, arg);
30fe8286 8222 }
acc2b92e 8223
389dd41b 8224 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8225}
8226
c2f47e15 8227/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8228 Return NULL_TREE if no simplification can be made. */
277f8dd2 8229
8230static tree
389dd41b 8231fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
277f8dd2 8232{
c2f47e15 8233 if (!validate_arg (arg, REAL_TYPE))
8234 return NULL_TREE;
277f8dd2 8235
8236 /* Optimize ceil of constant value. */
f96bd2bf 8237 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8238 {
8239 REAL_VALUE_TYPE x;
8240
8241 x = TREE_REAL_CST (arg);
8242 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8243 {
2426241c 8244 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8245 REAL_VALUE_TYPE r;
8246
8247 real_ceil (&r, TYPE_MODE (type), &x);
8248 return build_real (type, r);
8249 }
8250 }
8251
389dd41b 8252 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8253}
8254
c2f47e15 8255/* Fold function call to builtin round, roundf or roundl with argument ARG.
8256 Return NULL_TREE if no simplification can be made. */
89ab3887 8257
8258static tree
389dd41b 8259fold_builtin_round (location_t loc, tree fndecl, tree arg)
89ab3887 8260{
c2f47e15 8261 if (!validate_arg (arg, REAL_TYPE))
8262 return NULL_TREE;
89ab3887 8263
34f17811 8264 /* Optimize round of constant value. */
f96bd2bf 8265 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
89ab3887 8266 {
8267 REAL_VALUE_TYPE x;
8268
8269 x = TREE_REAL_CST (arg);
8270 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8271 {
2426241c 8272 tree type = TREE_TYPE (TREE_TYPE (fndecl));
89ab3887 8273 REAL_VALUE_TYPE r;
8274
8275 real_round (&r, TYPE_MODE (type), &x);
8276 return build_real (type, r);
8277 }
8278 }
8279
389dd41b 8280 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
89ab3887 8281}
8282
/* Fold function call to builtin lround, lroundf or lroundl (or the
   corresponding long long versions) and other rounding functions.  ARG
   is the argument to the call.  Return NULL_TREE if no simplification
   can be made.  */

static tree
fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
{
  if (!validate_arg (arg, REAL_TYPE))
    return NULL_TREE;

  /* Optimize lround of constant value.  */
  if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
    {
      const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);

      /* Only finite values can be represented in the integer result
	 type, so skip NaNs and infinities.  */
      if (real_isfinite (&x))
	{
	  /* ITYPE is the integer result type, FTYPE the floating
	     argument type.  */
	  tree itype = TREE_TYPE (TREE_TYPE (fndecl));
	  tree ftype = TREE_TYPE (arg);
	  REAL_VALUE_TYPE r;
	  bool fail = false;

	  /* First apply this function's rounding mode in the floating
	     domain...  */
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_IFLOOR):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	      real_floor (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_ICEIL):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	      real_ceil (&r, TYPE_MODE (ftype), &x);
	      break;

	    CASE_FLT_FN (BUILT_IN_IROUND):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	      real_round (&r, TYPE_MODE (ftype), &x);
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  /* ...then convert to the integer type.  FAIL is set when the
	     rounded value does not fit in ITYPE's precision.  */
	  wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
	  if (!fail)
	    return wide_int_to_tree (itype, val);
	}
    }

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x).  */
      if (tree_expr_nonnegative_p (arg))
	return fold_build1_loc (loc, FIX_TRUNC_EXPR,
				TREE_TYPE (TREE_TYPE (fndecl)), arg);
      break;
    default:;
    }

  return fold_fixed_mathfn (loc, fndecl, arg);
}
8350
/* Fold function call to builtin ffs, clz, ctz, popcount and parity
   and their long and long long variants (i.e. ffsl and ffsll).  ARG is
   the argument to the call.  Return NULL_TREE if no simplification can
   be made.  */

static tree
fold_builtin_bitop (tree fndecl, tree arg)
{
  if (!validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize for constant argument.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      tree type = TREE_TYPE (arg);
      int result;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	CASE_INT_FN (BUILT_IN_FFS):
	  result = wi::ffs (arg);
	  break;

	CASE_INT_FN (BUILT_IN_CLZ):
	  /* For a zero argument, use the target-defined value at zero
	     if there is one (the macro stores it in RESULT when it
	     returns nonzero); otherwise fall back to the type
	     precision.  */
	  if (wi::ne_p (arg, 0))
	    result = wi::clz (arg);
	  else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CTZ):
	  /* Likewise for ctz of zero.  */
	  if (wi::ne_p (arg, 0))
	    result = wi::ctz (arg);
	  else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
	    result = TYPE_PRECISION (type);
	  break;

	CASE_INT_FN (BUILT_IN_CLRSB):
	  result = wi::clrsb (arg);
	  break;

	CASE_INT_FN (BUILT_IN_POPCOUNT):
	  result = wi::popcount (arg);
	  break;

	CASE_INT_FN (BUILT_IN_PARITY):
	  result = wi::parity (arg);
	  break;

	default:
	  gcc_unreachable ();
	}

      return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
    }

  return NULL_TREE;
}
8409
/* Fold function call to builtin_bswap and the short, long and long long
   variants.  Return NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_bswap (tree fndecl, tree arg)
{
  if (! validate_arg (arg, INTEGER_TYPE))
    return NULL_TREE;

  /* Optimize constant value.  */
  if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
    {
      /* TYPE is the result type of the builtin.  */
      tree type = TREE_TYPE (TREE_TYPE (fndecl));

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_BSWAP16:
	case BUILT_IN_BSWAP32:
	case BUILT_IN_BSWAP64:
	  {
	    signop sgn = TYPE_SIGN (type);
	    /* Convert ARG to the result type's precision (respecting
	       its signedness) and byte-swap the wide-int value.  */
	    tree result =
	      wide_int_to_tree (type,
				wide_int::from (arg, TYPE_PRECISION (type),
						sgn).bswap ());
	    return result;
	  }
	default:
	  gcc_unreachable ();
	}
    }

  return NULL_TREE;
}
c2f47e15 8443
f0c477f2 8444/* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8445 NULL_TREE if no simplification can be made. */
8446
8447static tree
389dd41b 8448fold_builtin_hypot (location_t loc, tree fndecl,
8449 tree arg0, tree arg1, tree type)
f0c477f2 8450{
e6ab33d8 8451 tree res, narg0, narg1;
f0c477f2 8452
c2f47e15 8453 if (!validate_arg (arg0, REAL_TYPE)
8454 || !validate_arg (arg1, REAL_TYPE))
f0c477f2 8455 return NULL_TREE;
8456
8457 /* Calculate the result when the argument is a constant. */
8458 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8459 return res;
48e1416a 8460
6c95f21c 8461 /* If either argument to hypot has a negate or abs, strip that off.
8462 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
e6ab33d8 8463 narg0 = fold_strip_sign_ops (arg0);
8464 narg1 = fold_strip_sign_ops (arg1);
8465 if (narg0 || narg1)
8466 {
48e1416a 8467 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
c2f47e15 8468 narg1 ? narg1 : arg1);
6c95f21c 8469 }
48e1416a 8470
f0c477f2 8471 /* If either argument is zero, hypot is fabs of the other. */
8472 if (real_zerop (arg0))
389dd41b 8473 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
f0c477f2 8474 else if (real_zerop (arg1))
389dd41b 8475 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
48e1416a 8476
6c95f21c 8477 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8478 if (flag_unsafe_math_optimizations
8479 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
f0c477f2 8480 {
2e7ca27b 8481 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 8482 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
389dd41b 8483 return fold_build2_loc (loc, MULT_EXPR, type,
8484 fold_build1_loc (loc, ABS_EXPR, type, arg0),
2e7ca27b 8485 build_real (type, sqrt2_trunc));
f0c477f2 8486 }
8487
f0c477f2 8488 return NULL_TREE;
8489}
8490
8491
/* Fold a builtin function call to pow, powf, or powl.  Return
   NULL_TREE if no simplification can be made.  */
static tree
fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
{
  tree res;

  if (!validate_arg (arg0, REAL_TYPE)
      || !validate_arg (arg1, REAL_TYPE))
    return NULL_TREE;

  /* Calculate the result when the argument is a constant.  */
  if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
    return res;

  /* Optimize pow(1.0,y) = 1.0, preserving any side effects of y.  */
  if (real_onep (arg0))
    return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);

  if (TREE_CODE (arg1) == REAL_CST
      && !TREE_OVERFLOW (arg1))
    {
      REAL_VALUE_TYPE cint;
      REAL_VALUE_TYPE c;
      HOST_WIDE_INT n;

      c = TREE_REAL_CST (arg1);

      /* Optimize pow(x,0.0) = 1.0.  */
      if (REAL_VALUES_EQUAL (c, dconst0))
	return omit_one_operand_loc (loc, type, build_real (type, dconst1),
				     arg0);

      /* Optimize pow(x,1.0) = x.  */
      if (REAL_VALUES_EQUAL (c, dconst1))
	return arg0;

      /* Optimize pow(x,-1.0) = 1.0/x.  */
      if (REAL_VALUES_EQUAL (c, dconstm1))
	return fold_build2_loc (loc, RDIV_EXPR, type,
				build_real (type, dconst1), arg0);

      /* Optimize pow(x,0.5) = sqrt(x).  */
      if (flag_unsafe_math_optimizations
	  && REAL_VALUES_EQUAL (c, dconsthalf))
	{
	  tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);

	  if (sqrtfn != NULL_TREE)
	    return build_call_expr_loc (loc, sqrtfn, 1, arg0);
	}

      /* Optimize pow(x,1.0/3.0) = cbrt(x).  */
      if (flag_unsafe_math_optimizations)
	{
	  /* 1/3 rounded to TYPE's precision, for comparison with C.  */
	  const REAL_VALUE_TYPE dconstroot
	    = real_value_truncate (TYPE_MODE (type), dconst_third ());

	  if (REAL_VALUES_EQUAL (c, dconstroot))
	    {
	      tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
	      if (cbrtfn != NULL_TREE)
		return build_call_expr_loc (loc, cbrtfn, 1, arg0);
	    }
	}

      /* Check for an integer exponent: C round-trips through
	 HOST_WIDE_INT unchanged.  */
      n = real_to_integer (&c);
      real_from_integer (&cint, VOIDmode, n, SIGNED);
      if (real_identical (&c, &cint))
	{
	  /* Attempt to evaluate pow at compile-time, unless this should
	     raise an exception.  */
	  if (TREE_CODE (arg0) == REAL_CST
	      && !TREE_OVERFLOW (arg0)
	      && (n > 0
		  || (!flag_trapping_math && !flag_errno_math)
		  || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
	    {
	      REAL_VALUE_TYPE x;
	      bool inexact;

	      x = TREE_REAL_CST (arg0);
	      inexact = real_powi (&x, TYPE_MODE (type), &x, n);
	      /* An inexact compile-time result is only usable under
		 unsafe math optimizations.  */
	      if (flag_unsafe_math_optimizations || !inexact)
		return build_real (type, x);
	    }

	  /* Strip sign ops from even integer powers.  */
	  if ((n & 1) == 0 && flag_unsafe_math_optimizations)
	    {
	      tree narg0 = fold_strip_sign_ops (arg0);
	      if (narg0)
		return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
	    }
	}
    }

  if (flag_unsafe_math_optimizations)
    {
      const enum built_in_function fcode = builtin_mathfn_code (arg0);

      /* Optimize pow(expN(x),y) = expN(x*y).  */
      if (BUILTIN_EXPONENT_P (fcode))
	{
	  /* ARG0 is a call to expN; reuse its FUNCTION_DECL.  */
	  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
	  return build_call_expr_loc (loc, expfn, 1, arg);
	}

      /* Optimize pow(sqrt(x),y) = pow(x,y*0.5).  */
      if (BUILTIN_SQRT_P (fcode))
	{
	  tree narg0 = CALL_EXPR_ARG (arg0, 0);
	  tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					build_real (type, dconsthalf));
	  return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
	}

      /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative.  */
      if (BUILTIN_CBRT_P (fcode))
	{
	  tree arg = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg))
	    {
	      const REAL_VALUE_TYPE dconstroot
		= real_value_truncate (TYPE_MODE (type), dconst_third ());
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
					    build_real (type, dconstroot));
	      return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
	    }
	}

      /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative.  */
      if (fcode == BUILT_IN_POW
	  || fcode == BUILT_IN_POWF
	  || fcode == BUILT_IN_POWL)
	{
	  tree arg00 = CALL_EXPR_ARG (arg0, 0);
	  if (tree_expr_nonnegative_p (arg00))
	    {
	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
	      tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
	      return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
	    }
	}
    }

  return NULL_TREE;
}
8643
c2f47e15 8644/* Fold a builtin function call to powi, powif, or powil with argument ARG.
8645 Return NULL_TREE if no simplification can be made. */
b4d0c20c 8646static tree
389dd41b 8647fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
c2f47e15 8648 tree arg0, tree arg1, tree type)
b4d0c20c 8649{
c2f47e15 8650 if (!validate_arg (arg0, REAL_TYPE)
8651 || !validate_arg (arg1, INTEGER_TYPE))
b4d0c20c 8652 return NULL_TREE;
8653
8654 /* Optimize pow(1.0,y) = 1.0. */
8655 if (real_onep (arg0))
389dd41b 8656 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
b4d0c20c 8657
e913b5cd 8658 if (tree_fits_shwi_p (arg1))
b4d0c20c 8659 {
e913b5cd 8660 HOST_WIDE_INT c = tree_to_shwi (arg1);
b4d0c20c 8661
8662 /* Evaluate powi at compile-time. */
8663 if (TREE_CODE (arg0) == REAL_CST
f96bd2bf 8664 && !TREE_OVERFLOW (arg0))
b4d0c20c 8665 {
8666 REAL_VALUE_TYPE x;
8667 x = TREE_REAL_CST (arg0);
8668 real_powi (&x, TYPE_MODE (type), &x, c);
8669 return build_real (type, x);
8670 }
8671
8672 /* Optimize pow(x,0) = 1.0. */
8673 if (c == 0)
389dd41b 8674 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
b4d0c20c 8675 arg0);
8676
8677 /* Optimize pow(x,1) = x. */
8678 if (c == 1)
8679 return arg0;
8680
8681 /* Optimize pow(x,-1) = 1.0/x. */
8682 if (c == -1)
389dd41b 8683 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8684 build_real (type, dconst1), arg0);
b4d0c20c 8685 }
8686
8687 return NULL_TREE;
8688}
8689
/* A subroutine of fold_builtin to fold the various exponent
   functions.  Return NULL_TREE if no simplification can be made.
   FUNC is the corresponding MPFR exponent function.  */

static tree
fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
		       int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  if (validate_arg (arg, REAL_TYPE))
    {
      tree type = TREE_TYPE (TREE_TYPE (fndecl));
      tree res;

      /* Calculate the result when the argument is a constant.  */
      if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
	return res;

      /* Optimize expN(logN(x)) = x.  FUNC identifies which expN is
	 being folded, so only the matching logN family cancels.  */
      if (flag_unsafe_math_optimizations)
	{
	  const enum built_in_function fcode = builtin_mathfn_code (arg);

	  if ((func == mpfr_exp
	       && (fcode == BUILT_IN_LOG
		   || fcode == BUILT_IN_LOGF
		   || fcode == BUILT_IN_LOGL))
	      || (func == mpfr_exp2
		  && (fcode == BUILT_IN_LOG2
		      || fcode == BUILT_IN_LOG2F
		      || fcode == BUILT_IN_LOG2L))
	      || (func == mpfr_exp10
		  && (fcode == BUILT_IN_LOG10
		      || fcode == BUILT_IN_LOG10F
		      || fcode == BUILT_IN_LOG10L)))
	    return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
	}
    }

  return NULL_TREE;
}
8730
7959b13b 8731/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8732 arguments to the call, and TYPE is its return type.
8733 Return NULL_TREE if no simplification can be made. */
8734
8735static tree
389dd41b 8736fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7959b13b 8737{
8738 if (!validate_arg (arg1, POINTER_TYPE)
8739 || !validate_arg (arg2, INTEGER_TYPE)
8740 || !validate_arg (len, INTEGER_TYPE))
8741 return NULL_TREE;
8742 else
8743 {
8744 const char *p1;
8745
8746 if (TREE_CODE (arg2) != INTEGER_CST
e913b5cd 8747 || !tree_fits_uhwi_p (len))
7959b13b 8748 return NULL_TREE;
8749
8750 p1 = c_getstr (arg1);
8751 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8752 {
8753 char c;
8754 const char *r;
8755 tree tem;
8756
8757 if (target_char_cast (arg2, &c))
8758 return NULL_TREE;
8759
e913b5cd 8760 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7959b13b 8761
8762 if (r == NULL)
8763 return build_int_cst (TREE_TYPE (arg1), 0);
8764
2cc66f2a 8765 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
389dd41b 8766 return fold_convert_loc (loc, type, tem);
7959b13b 8767 }
8768 return NULL_TREE;
8769 }
8770}
8771
/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
   Return NULL_TREE if no simplification can be made.  */

static tree
fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
{
  const char *p1, *p2;

  if (!validate_arg (arg1, POINTER_TYPE)
      || !validate_arg (arg2, POINTER_TYPE)
      || !validate_arg (len, INTEGER_TYPE))
    return NULL_TREE;

  /* If the LEN parameter is zero, return zero (keeping the side
     effects of both pointer arguments).  */
  if (integer_zerop (len))
    return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
				  arg1, arg2);

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (arg1, arg2, 0))
    return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);

  p1 = c_getstr (arg1);
  p2 = c_getstr (arg2);

  /* If all arguments are constant, and the value of len is not greater
     than the lengths of arg1 and arg2, evaluate at compile-time.  */
  if (tree_fits_uhwi_p (len) && p1 && p2
      && compare_tree_int (len, strlen (p1) + 1) <= 0
      && compare_tree_int (len, strlen (p2) + 1) <= 0)
    {
      const int r = memcmp (p1, p2, tree_to_uhwi (len));

      /* Normalize the host memcmp result to -1/0/1.  */
      if (r > 0)
	return integer_one_node;
      else if (r < 0)
	return integer_minus_one_node;
      else
	return integer_zero_node;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
  if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
    {
      tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
      tree cst_uchar_ptr_node
	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);

      /* Load one byte through each pointer as const unsigned char,
	 widened to int for the subtraction.  */
      tree ind1
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg1)));
      tree ind2
	= fold_convert_loc (loc, integer_type_node,
			    build1 (INDIRECT_REF, cst_uchar_node,
				    fold_convert_loc (loc,
						      cst_uchar_ptr_node,
						      arg2)));
      return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
    }

  return NULL_TREE;
}
8838
c2f47e15 8839/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8840 Return NULL_TREE if no simplification can be made. */
9c8a1629 8841
8842static tree
389dd41b 8843fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9c8a1629 8844{
9c8a1629 8845 const char *p1, *p2;
8846
c2f47e15 8847 if (!validate_arg (arg1, POINTER_TYPE)
8848 || !validate_arg (arg2, POINTER_TYPE))
8849 return NULL_TREE;
9c8a1629 8850
8851 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8852 if (operand_equal_p (arg1, arg2, 0))
c4fef134 8853 return integer_zero_node;
9c8a1629 8854
8855 p1 = c_getstr (arg1);
8856 p2 = c_getstr (arg2);
8857
8858 if (p1 && p2)
8859 {
9c8a1629 8860 const int i = strcmp (p1, p2);
8861 if (i < 0)
c4fef134 8862 return integer_minus_one_node;
9c8a1629 8863 else if (i > 0)
c4fef134 8864 return integer_one_node;
9c8a1629 8865 else
c4fef134 8866 return integer_zero_node;
8867 }
8868
8869 /* If the second arg is "", return *(const unsigned char*)arg1. */
8870 if (p2 && *p2 == '\0')
8871 {
8872 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8873 tree cst_uchar_ptr_node
8874 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8875
389dd41b 8876 return fold_convert_loc (loc, integer_type_node,
8877 build1 (INDIRECT_REF, cst_uchar_node,
8878 fold_convert_loc (loc,
8879 cst_uchar_ptr_node,
8880 arg1)));
c4fef134 8881 }
8882
8883 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8884 if (p1 && *p1 == '\0')
8885 {
8886 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8887 tree cst_uchar_ptr_node
8888 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8889
389dd41b 8890 tree temp
8891 = fold_convert_loc (loc, integer_type_node,
8892 build1 (INDIRECT_REF, cst_uchar_node,
8893 fold_convert_loc (loc,
8894 cst_uchar_ptr_node,
c4fef134 8895 arg2)));
389dd41b 8896 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9c8a1629 8897 }
8898
c2f47e15 8899 return NULL_TREE;
9c8a1629 8900}
8901
c2f47e15 8902/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8903 Return NULL_TREE if no simplification can be made. */
9c8a1629 8904
8905static tree
389dd41b 8906fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8907{
9c8a1629 8908 const char *p1, *p2;
8909
c2f47e15 8910 if (!validate_arg (arg1, POINTER_TYPE)
8911 || !validate_arg (arg2, POINTER_TYPE)
8912 || !validate_arg (len, INTEGER_TYPE))
8913 return NULL_TREE;
9c8a1629 8914
8915 /* If the LEN parameter is zero, return zero. */
8916 if (integer_zerop (len))
389dd41b 8917 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8918 arg1, arg2);
9c8a1629 8919
8920 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8921 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8922 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9c8a1629 8923
8924 p1 = c_getstr (arg1);
8925 p2 = c_getstr (arg2);
8926
e913b5cd 8927 if (tree_fits_uhwi_p (len) && p1 && p2)
9c8a1629 8928 {
e913b5cd 8929 const int i = strncmp (p1, p2, tree_to_uhwi (len));
c4fef134 8930 if (i > 0)
8931 return integer_one_node;
8932 else if (i < 0)
8933 return integer_minus_one_node;
9c8a1629 8934 else
c4fef134 8935 return integer_zero_node;
8936 }
8937
8938 /* If the second arg is "", and the length is greater than zero,
8939 return *(const unsigned char*)arg1. */
8940 if (p2 && *p2 == '\0'
8941 && TREE_CODE (len) == INTEGER_CST
8942 && tree_int_cst_sgn (len) == 1)
8943 {
8944 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8945 tree cst_uchar_ptr_node
8946 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8947
389dd41b 8948 return fold_convert_loc (loc, integer_type_node,
8949 build1 (INDIRECT_REF, cst_uchar_node,
8950 fold_convert_loc (loc,
8951 cst_uchar_ptr_node,
8952 arg1)));
c4fef134 8953 }
8954
8955 /* If the first arg is "", and the length is greater than zero,
8956 return -*(const unsigned char*)arg2. */
8957 if (p1 && *p1 == '\0'
8958 && TREE_CODE (len) == INTEGER_CST
8959 && tree_int_cst_sgn (len) == 1)
8960 {
8961 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8962 tree cst_uchar_ptr_node
8963 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8964
389dd41b 8965 tree temp = fold_convert_loc (loc, integer_type_node,
8966 build1 (INDIRECT_REF, cst_uchar_node,
8967 fold_convert_loc (loc,
8968 cst_uchar_ptr_node,
8969 arg2)));
8970 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
c4fef134 8971 }
8972
8973 /* If len parameter is one, return an expression corresponding to
8974 (*(const unsigned char*)arg1 - (const unsigned char*)arg2). */
e913b5cd 8975 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8976 {
8977 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8978 tree cst_uchar_ptr_node
8979 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8980
389dd41b 8981 tree ind1 = fold_convert_loc (loc, integer_type_node,
8982 build1 (INDIRECT_REF, cst_uchar_node,
8983 fold_convert_loc (loc,
8984 cst_uchar_ptr_node,
8985 arg1)));
8986 tree ind2 = fold_convert_loc (loc, integer_type_node,
8987 build1 (INDIRECT_REF, cst_uchar_node,
8988 fold_convert_loc (loc,
8989 cst_uchar_ptr_node,
8990 arg2)));
8991 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9c8a1629 8992 }
8993
c2f47e15 8994 return NULL_TREE;
9c8a1629 8995}
8996
c2f47e15 8997/* Fold function call to builtin signbit, signbitf or signbitl with argument
8998 ARG. Return NULL_TREE if no simplification can be made. */
27f261ef 8999
9000static tree
389dd41b 9001fold_builtin_signbit (location_t loc, tree arg, tree type)
27f261ef 9002{
c2f47e15 9003 if (!validate_arg (arg, REAL_TYPE))
27f261ef 9004 return NULL_TREE;
9005
27f261ef 9006 /* If ARG is a compile-time constant, determine the result. */
9007 if (TREE_CODE (arg) == REAL_CST
f96bd2bf 9008 && !TREE_OVERFLOW (arg))
27f261ef 9009 {
9010 REAL_VALUE_TYPE c;
9011
9012 c = TREE_REAL_CST (arg);
385f3f36 9013 return (REAL_VALUE_NEGATIVE (c)
9014 ? build_one_cst (type)
9015 : build_zero_cst (type));
27f261ef 9016 }
9017
9018 /* If ARG is non-negative, the result is always zero. */
9019 if (tree_expr_nonnegative_p (arg))
389dd41b 9020 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
27f261ef 9021
9022 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
fe994837 9023 if (!HONOR_SIGNED_ZEROS (arg))
de67cbb8 9024 return fold_convert (type,
9025 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9026 build_real (TREE_TYPE (arg), dconst0)));
27f261ef 9027
9028 return NULL_TREE;
9029}
9030
c2f47e15 9031/* Fold function call to builtin copysign, copysignf or copysignl with
9032 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9033 be made. */
467214fd 9034
9035static tree
389dd41b 9036fold_builtin_copysign (location_t loc, tree fndecl,
9037 tree arg1, tree arg2, tree type)
467214fd 9038{
c2f47e15 9039 tree tem;
467214fd 9040
c2f47e15 9041 if (!validate_arg (arg1, REAL_TYPE)
9042 || !validate_arg (arg2, REAL_TYPE))
467214fd 9043 return NULL_TREE;
9044
467214fd 9045 /* copysign(X,X) is X. */
9046 if (operand_equal_p (arg1, arg2, 0))
389dd41b 9047 return fold_convert_loc (loc, type, arg1);
467214fd 9048
9049 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9050 if (TREE_CODE (arg1) == REAL_CST
9051 && TREE_CODE (arg2) == REAL_CST
f96bd2bf 9052 && !TREE_OVERFLOW (arg1)
9053 && !TREE_OVERFLOW (arg2))
467214fd 9054 {
9055 REAL_VALUE_TYPE c1, c2;
9056
9057 c1 = TREE_REAL_CST (arg1);
9058 c2 = TREE_REAL_CST (arg2);
749680e2 9059 /* c1.sign := c2.sign. */
467214fd 9060 real_copysign (&c1, &c2);
9061 return build_real (type, c1);
467214fd 9062 }
9063
9064 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9065 Remember to evaluate Y for side-effects. */
9066 if (tree_expr_nonnegative_p (arg2))
389dd41b 9067 return omit_one_operand_loc (loc, type,
9068 fold_build1_loc (loc, ABS_EXPR, type, arg1),
467214fd 9069 arg2);
9070
198d9bbe 9071 /* Strip sign changing operations for the first argument. */
9072 tem = fold_strip_sign_ops (arg1);
9073 if (tem)
389dd41b 9074 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
198d9bbe 9075
467214fd 9076 return NULL_TREE;
9077}
9078
c2f47e15 9079/* Fold a call to builtin isascii with argument ARG. */
d49367d4 9080
9081static tree
389dd41b 9082fold_builtin_isascii (location_t loc, tree arg)
d49367d4 9083{
c2f47e15 9084 if (!validate_arg (arg, INTEGER_TYPE))
9085 return NULL_TREE;
d49367d4 9086 else
9087 {
9088 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 9089 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9090 build_int_cst (integer_type_node,
c90b5d40 9091 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 9092 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 9093 arg, integer_zero_node);
d49367d4 9094 }
9095}
9096
c2f47e15 9097/* Fold a call to builtin toascii with argument ARG. */
d49367d4 9098
9099static tree
389dd41b 9100fold_builtin_toascii (location_t loc, tree arg)
d49367d4 9101{
c2f47e15 9102 if (!validate_arg (arg, INTEGER_TYPE))
9103 return NULL_TREE;
48e1416a 9104
c2f47e15 9105 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 9106 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9107 build_int_cst (integer_type_node, 0x7f));
d49367d4 9108}
9109
c2f47e15 9110/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 9111
9112static tree
389dd41b 9113fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 9114{
c2f47e15 9115 if (!validate_arg (arg, INTEGER_TYPE))
9116 return NULL_TREE;
df1cf42e 9117 else
9118 {
9119 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 9120 /* According to the C standard, isdigit is unaffected by locale.
9121 However, it definitely is affected by the target character set. */
624d37a6 9122 unsigned HOST_WIDE_INT target_digit0
9123 = lang_hooks.to_target_charset ('0');
9124
9125 if (target_digit0 == 0)
9126 return NULL_TREE;
9127
389dd41b 9128 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 9129 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9130 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 9131 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 9132 build_int_cst (unsigned_type_node, 9));
df1cf42e 9133 }
9134}
27f261ef 9135
c2f47e15 9136/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 9137
9138static tree
389dd41b 9139fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 9140{
c2f47e15 9141 if (!validate_arg (arg, REAL_TYPE))
9142 return NULL_TREE;
d1aade50 9143
389dd41b 9144 arg = fold_convert_loc (loc, type, arg);
d1aade50 9145 if (TREE_CODE (arg) == REAL_CST)
9146 return fold_abs_const (arg, type);
389dd41b 9147 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9148}
9149
c2f47e15 9150/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 9151
9152static tree
389dd41b 9153fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 9154{
c2f47e15 9155 if (!validate_arg (arg, INTEGER_TYPE))
9156 return NULL_TREE;
d1aade50 9157
389dd41b 9158 arg = fold_convert_loc (loc, type, arg);
d1aade50 9159 if (TREE_CODE (arg) == INTEGER_CST)
9160 return fold_abs_const (arg, type);
389dd41b 9161 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9162}
9163
b9be572e 9164/* Fold a fma operation with arguments ARG[012]. */
9165
9166tree
9167fold_fma (location_t loc ATTRIBUTE_UNUSED,
9168 tree type, tree arg0, tree arg1, tree arg2)
9169{
9170 if (TREE_CODE (arg0) == REAL_CST
9171 && TREE_CODE (arg1) == REAL_CST
9172 && TREE_CODE (arg2) == REAL_CST)
9173 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9174
9175 return NULL_TREE;
9176}
9177
9178/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9179
9180static tree
9181fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9182{
9183 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9184 && validate_arg (arg1, REAL_TYPE)
9185 && validate_arg (arg2, REAL_TYPE))
b9be572e 9186 {
9187 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9188 if (tem)
9189 return tem;
9190
9191 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9192 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9193 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9194 }
9195 return NULL_TREE;
9196}
9197
d4a43a03 9198/* Fold a call to builtin fmin or fmax. */
9199
9200static tree
389dd41b 9201fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9202 tree type, bool max)
d4a43a03 9203{
c2f47e15 9204 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
d4a43a03 9205 {
d4a43a03 9206 /* Calculate the result when the argument is a constant. */
9207 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9208
9209 if (res)
9210 return res;
9211
61fe3555 9212 /* If either argument is NaN, return the other one. Avoid the
9213 transformation if we get (and honor) a signalling NaN. Using
9214 omit_one_operand() ensures we create a non-lvalue. */
9215 if (TREE_CODE (arg0) == REAL_CST
9216 && real_isnan (&TREE_REAL_CST (arg0))
fe994837 9217 && (! HONOR_SNANS (arg0)
61fe3555 9218 || ! TREE_REAL_CST (arg0).signalling))
389dd41b 9219 return omit_one_operand_loc (loc, type, arg1, arg0);
61fe3555 9220 if (TREE_CODE (arg1) == REAL_CST
9221 && real_isnan (&TREE_REAL_CST (arg1))
fe994837 9222 && (! HONOR_SNANS (arg1)
61fe3555 9223 || ! TREE_REAL_CST (arg1).signalling))
389dd41b 9224 return omit_one_operand_loc (loc, type, arg0, arg1);
61fe3555 9225
d4a43a03 9226 /* Transform fmin/fmax(x,x) -> x. */
9227 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
389dd41b 9228 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9229
d4a43a03 9230 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9231 functions to return the numeric arg if the other one is NaN.
9232 These tree codes don't honor that, so only transform if
9233 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9234 handled, so we don't have to worry about it either. */
9235 if (flag_finite_math_only)
389dd41b 9236 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9237 fold_convert_loc (loc, type, arg0),
9238 fold_convert_loc (loc, type, arg1));
d4a43a03 9239 }
9240 return NULL_TREE;
9241}
9242
abe4dcf6 9243/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9244
9245static tree
389dd41b 9246fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 9247{
239d491a 9248 if (validate_arg (arg, COMPLEX_TYPE)
9249 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 9250 {
9251 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 9252
abe4dcf6 9253 if (atan2_fn)
9254 {
c2f47e15 9255 tree new_arg = builtin_save_expr (arg);
389dd41b 9256 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9257 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9258 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 9259 }
9260 }
48e1416a 9261
abe4dcf6 9262 return NULL_TREE;
9263}
9264
cb2b9385 9265/* Fold a call to builtin logb/ilogb. */
9266
9267static tree
389dd41b 9268fold_builtin_logb (location_t loc, tree arg, tree rettype)
cb2b9385 9269{
9270 if (! validate_arg (arg, REAL_TYPE))
9271 return NULL_TREE;
48e1416a 9272
cb2b9385 9273 STRIP_NOPS (arg);
48e1416a 9274
cb2b9385 9275 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9276 {
9277 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9278
cb2b9385 9279 switch (value->cl)
9280 {
9281 case rvc_nan:
9282 case rvc_inf:
9283 /* If arg is Inf or NaN and we're logb, return it. */
9284 if (TREE_CODE (rettype) == REAL_TYPE)
7695fea9 9285 {
9286 /* For logb(-Inf) we have to return +Inf. */
9287 if (real_isinf (value) && real_isneg (value))
9288 {
9289 REAL_VALUE_TYPE tem;
9290 real_inf (&tem);
9291 return build_real (rettype, tem);
9292 }
9293 return fold_convert_loc (loc, rettype, arg);
9294 }
cb2b9385 9295 /* Fall through... */
9296 case rvc_zero:
9297 /* Zero may set errno and/or raise an exception for logb, also
9298 for ilogb we don't know FP_ILOGB0. */
9299 return NULL_TREE;
9300 case rvc_normal:
9301 /* For normal numbers, proceed iff radix == 2. In GCC,
9302 normalized significands are in the range [0.5, 1.0). We
9303 want the exponent as if they were [1.0, 2.0) so get the
9304 exponent and subtract 1. */
9305 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
389dd41b 9306 return fold_convert_loc (loc, rettype,
7002a1c8 9307 build_int_cst (integer_type_node,
389dd41b 9308 REAL_EXP (value)-1));
cb2b9385 9309 break;
9310 }
9311 }
48e1416a 9312
cb2b9385 9313 return NULL_TREE;
9314}
9315
9316/* Fold a call to builtin significand, if radix == 2. */
9317
9318static tree
389dd41b 9319fold_builtin_significand (location_t loc, tree arg, tree rettype)
cb2b9385 9320{
9321 if (! validate_arg (arg, REAL_TYPE))
9322 return NULL_TREE;
48e1416a 9323
cb2b9385 9324 STRIP_NOPS (arg);
48e1416a 9325
cb2b9385 9326 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9327 {
9328 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9329
cb2b9385 9330 switch (value->cl)
9331 {
9332 case rvc_zero:
9333 case rvc_nan:
9334 case rvc_inf:
9335 /* If arg is +-0, +-Inf or +-NaN, then return it. */
389dd41b 9336 return fold_convert_loc (loc, rettype, arg);
cb2b9385 9337 case rvc_normal:
9338 /* For normal numbers, proceed iff radix == 2. */
9339 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9340 {
9341 REAL_VALUE_TYPE result = *value;
9342 /* In GCC, normalized significands are in the range [0.5,
9343 1.0). We want them to be [1.0, 2.0) so set the
9344 exponent to 1. */
9345 SET_REAL_EXP (&result, 1);
9346 return build_real (rettype, result);
9347 }
9348 break;
9349 }
9350 }
48e1416a 9351
cb2b9385 9352 return NULL_TREE;
9353}
9354
3838b9ae 9355/* Fold a call to builtin frexp, we can assume the base is 2. */
9356
9357static tree
389dd41b 9358fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 9359{
9360 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9361 return NULL_TREE;
48e1416a 9362
3838b9ae 9363 STRIP_NOPS (arg0);
48e1416a 9364
3838b9ae 9365 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9366 return NULL_TREE;
48e1416a 9367
389dd41b 9368 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 9369
9370 /* Proceed if a valid pointer type was passed in. */
9371 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9372 {
9373 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9374 tree frac, exp;
48e1416a 9375
3838b9ae 9376 switch (value->cl)
9377 {
9378 case rvc_zero:
9379 /* For +-0, return (*exp = 0, +-0). */
9380 exp = integer_zero_node;
9381 frac = arg0;
9382 break;
9383 case rvc_nan:
9384 case rvc_inf:
9385 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 9386 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 9387 case rvc_normal:
9388 {
9389 /* Since the frexp function always expects base 2, and in
9390 GCC normalized significands are already in the range
9391 [0.5, 1.0), we have exactly what frexp wants. */
9392 REAL_VALUE_TYPE frac_rvt = *value;
9393 SET_REAL_EXP (&frac_rvt, 0);
9394 frac = build_real (rettype, frac_rvt);
7002a1c8 9395 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 9396 }
9397 break;
9398 default:
9399 gcc_unreachable ();
9400 }
48e1416a 9401
3838b9ae 9402 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 9403 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 9404 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9405 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 9406 }
9407
9408 return NULL_TREE;
9409}
9410
7587301b 9411/* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9412 then we can assume the base is two. If it's false, then we have to
9413 check the mode of the TYPE parameter in certain cases. */
9414
9415static tree
389dd41b 9416fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9417 tree type, bool ldexp)
7587301b 9418{
9419 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9420 {
9421 STRIP_NOPS (arg0);
9422 STRIP_NOPS (arg1);
9423
9424 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9425 if (real_zerop (arg0) || integer_zerop (arg1)
9426 || (TREE_CODE (arg0) == REAL_CST
776a7bab 9427 && !real_isfinite (&TREE_REAL_CST (arg0))))
389dd41b 9428 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9429
7587301b 9430 /* If both arguments are constant, then try to evaluate it. */
9431 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9432 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
e913b5cd 9433 && tree_fits_shwi_p (arg1))
7587301b 9434 {
9435 /* Bound the maximum adjustment to twice the range of the
9436 mode's valid exponents. Use abs to ensure the range is
9437 positive as a sanity check. */
48e1416a 9438 const long max_exp_adj = 2 *
7587301b 9439 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9440 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9441
9442 /* Get the user-requested adjustment. */
e913b5cd 9443 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
48e1416a 9444
7587301b 9445 /* The requested adjustment must be inside this range. This
9446 is a preliminary cap to avoid things like overflow, we
9447 may still fail to compute the result for other reasons. */
9448 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9449 {
9450 REAL_VALUE_TYPE initial_result;
48e1416a 9451
7587301b 9452 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9453
9454 /* Ensure we didn't overflow. */
9455 if (! real_isinf (&initial_result))
9456 {
9457 const REAL_VALUE_TYPE trunc_result
9458 = real_value_truncate (TYPE_MODE (type), initial_result);
48e1416a 9459
7587301b 9460 /* Only proceed if the target mode can hold the
9461 resulting value. */
9462 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9463 return build_real (type, trunc_result);
9464 }
9465 }
9466 }
9467 }
9468
9469 return NULL_TREE;
9470}
9471
ebf8b4f5 9472/* Fold a call to builtin modf. */
9473
9474static tree
389dd41b 9475fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 9476{
9477 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9478 return NULL_TREE;
48e1416a 9479
ebf8b4f5 9480 STRIP_NOPS (arg0);
48e1416a 9481
ebf8b4f5 9482 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9483 return NULL_TREE;
48e1416a 9484
389dd41b 9485 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 9486
9487 /* Proceed if a valid pointer type was passed in. */
9488 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9489 {
9490 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9491 REAL_VALUE_TYPE trunc, frac;
9492
9493 switch (value->cl)
9494 {
9495 case rvc_nan:
9496 case rvc_zero:
9497 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9498 trunc = frac = *value;
9499 break;
9500 case rvc_inf:
9501 /* For +-Inf, return (*arg1 = arg0, +-0). */
9502 frac = dconst0;
9503 frac.sign = value->sign;
9504 trunc = *value;
9505 break;
9506 case rvc_normal:
9507 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9508 real_trunc (&trunc, VOIDmode, value);
9509 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9510 /* If the original number was negative and already
9511 integral, then the fractional part is -0.0. */
9512 if (value->sign && frac.cl == rvc_zero)
9513 frac.sign = value->sign;
9514 break;
9515 }
48e1416a 9516
ebf8b4f5 9517 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 9518 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 9519 build_real (rettype, trunc));
9520 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9521 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 9522 build_real (rettype, frac));
9523 }
48e1416a 9524
ebf8b4f5 9525 return NULL_TREE;
9526}
9527
a65c4d64 9528/* Given a location LOC, an interclass builtin function decl FNDECL
9529 and its single argument ARG, return an folded expression computing
9530 the same, or NULL_TREE if we either couldn't or didn't want to fold
9531 (the latter happen if there's an RTL instruction available). */
9532
9533static tree
9534fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9535{
3754d046 9536 machine_mode mode;
a65c4d64 9537
9538 if (!validate_arg (arg, REAL_TYPE))
9539 return NULL_TREE;
9540
9541 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9542 return NULL_TREE;
9543
9544 mode = TYPE_MODE (TREE_TYPE (arg));
9545
9546 /* If there is no optab, try generic code. */
9547 switch (DECL_FUNCTION_CODE (fndecl))
9548 {
9549 tree result;
9550
9551 CASE_FLT_FN (BUILT_IN_ISINF):
9552 {
9553 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
b9a16870 9554 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
a65c4d64 9555 tree const type = TREE_TYPE (arg);
9556 REAL_VALUE_TYPE r;
9557 char buf[128];
9558
9559 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9560 real_from_string (&r, buf);
9561 result = build_call_expr (isgr_fn, 2,
9562 fold_build1_loc (loc, ABS_EXPR, type, arg),
9563 build_real (type, r));
9564 return result;
9565 }
9566 CASE_FLT_FN (BUILT_IN_FINITE):
9567 case BUILT_IN_ISFINITE:
9568 {
9569 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
b9a16870 9570 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
a65c4d64 9571 tree const type = TREE_TYPE (arg);
9572 REAL_VALUE_TYPE r;
9573 char buf[128];
9574
9575 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9576 real_from_string (&r, buf);
9577 result = build_call_expr (isle_fn, 2,
9578 fold_build1_loc (loc, ABS_EXPR, type, arg),
9579 build_real (type, r));
9580 /*result = fold_build2_loc (loc, UNGT_EXPR,
9581 TREE_TYPE (TREE_TYPE (fndecl)),
9582 fold_build1_loc (loc, ABS_EXPR, type, arg),
9583 build_real (type, r));
9584 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9585 TREE_TYPE (TREE_TYPE (fndecl)),
9586 result);*/
9587 return result;
9588 }
9589 case BUILT_IN_ISNORMAL:
9590 {
9591 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9592 islessequal(fabs(x),DBL_MAX). */
b9a16870 9593 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9594 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
a65c4d64 9595 tree const type = TREE_TYPE (arg);
9596 REAL_VALUE_TYPE rmax, rmin;
9597 char buf[128];
9598
9599 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9600 real_from_string (&rmax, buf);
9601 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9602 real_from_string (&rmin, buf);
9603 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9604 result = build_call_expr (isle_fn, 2, arg,
9605 build_real (type, rmax));
9606 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9607 build_call_expr (isge_fn, 2, arg,
9608 build_real (type, rmin)));
9609 return result;
9610 }
9611 default:
9612 break;
9613 }
9614
9615 return NULL_TREE;
9616}
9617
726069ba 9618/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
c2f47e15 9619 ARG is the argument for the call. */
726069ba 9620
9621static tree
389dd41b 9622fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 9623{
726069ba 9624 tree type = TREE_TYPE (TREE_TYPE (fndecl));
726069ba 9625 REAL_VALUE_TYPE r;
9626
c2f47e15 9627 if (!validate_arg (arg, REAL_TYPE))
d43cee80 9628 return NULL_TREE;
726069ba 9629
726069ba 9630 switch (builtin_index)
9631 {
9632 case BUILT_IN_ISINF:
fe994837 9633 if (!HONOR_INFINITIES (arg))
389dd41b 9634 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9635
9636 if (TREE_CODE (arg) == REAL_CST)
9637 {
9638 r = TREE_REAL_CST (arg);
9639 if (real_isinf (&r))
9640 return real_compare (GT_EXPR, &r, &dconst0)
9641 ? integer_one_node : integer_minus_one_node;
9642 else
9643 return integer_zero_node;
9644 }
9645
9646 return NULL_TREE;
9647
c319d56a 9648 case BUILT_IN_ISINF_SIGN:
9649 {
9650 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9651 /* In a boolean context, GCC will fold the inner COND_EXPR to
9652 1. So e.g. "if (isinf_sign(x))" would be folded to just
9653 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9654 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
b9a16870 9655 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 9656 tree tmp = NULL_TREE;
9657
9658 arg = builtin_save_expr (arg);
9659
9660 if (signbit_fn && isinf_fn)
9661 {
389dd41b 9662 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9663 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 9664
389dd41b 9665 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9666 signbit_call, integer_zero_node);
389dd41b 9667 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9668 isinf_call, integer_zero_node);
48e1416a 9669
389dd41b 9670 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 9671 integer_minus_one_node, integer_one_node);
389dd41b 9672 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9673 isinf_call, tmp,
c319d56a 9674 integer_zero_node);
9675 }
9676
9677 return tmp;
9678 }
9679
cde061c1 9680 case BUILT_IN_ISFINITE:
93633022 9681 if (!HONOR_NANS (arg)
fe994837 9682 && !HONOR_INFINITIES (arg))
389dd41b 9683 return omit_one_operand_loc (loc, type, integer_one_node, arg);
726069ba 9684
9685 if (TREE_CODE (arg) == REAL_CST)
9686 {
9687 r = TREE_REAL_CST (arg);
776a7bab 9688 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
726069ba 9689 }
9690
9691 return NULL_TREE;
9692
9693 case BUILT_IN_ISNAN:
93633022 9694 if (!HONOR_NANS (arg))
389dd41b 9695 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9696
9697 if (TREE_CODE (arg) == REAL_CST)
9698 {
9699 r = TREE_REAL_CST (arg);
9700 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9701 }
9702
9703 arg = builtin_save_expr (arg);
389dd41b 9704 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
726069ba 9705
9706 default:
64db345d 9707 gcc_unreachable ();
726069ba 9708 }
9709}
9710
19fbe3a4 9711/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9712 This builtin will generate code to return the appropriate floating
9713 point classification depending on the value of the floating point
9714 number passed in. The possible return values must be supplied as
921b27c0 9715 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
19fbe3a4 9716 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipses is for exactly
9717 one floating point argument which is "type generic". */
9718
9719static tree
9d884767 9720fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
19fbe3a4 9721{
921b27c0 9722 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9723 arg, type, res, tmp;
3754d046 9724 machine_mode mode;
19fbe3a4 9725 REAL_VALUE_TYPE r;
9726 char buf[128];
48e1416a 9727
19fbe3a4 9728 /* Verify the required arguments in the original call. */
9d884767 9729 if (nargs != 6
9730 || !validate_arg (args[0], INTEGER_TYPE)
9731 || !validate_arg (args[1], INTEGER_TYPE)
9732 || !validate_arg (args[2], INTEGER_TYPE)
9733 || !validate_arg (args[3], INTEGER_TYPE)
9734 || !validate_arg (args[4], INTEGER_TYPE)
9735 || !validate_arg (args[5], REAL_TYPE))
19fbe3a4 9736 return NULL_TREE;
48e1416a 9737
9d884767 9738 fp_nan = args[0];
9739 fp_infinite = args[1];
9740 fp_normal = args[2];
9741 fp_subnormal = args[3];
9742 fp_zero = args[4];
9743 arg = args[5];
19fbe3a4 9744 type = TREE_TYPE (arg);
9745 mode = TYPE_MODE (type);
389dd41b 9746 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
19fbe3a4 9747
48e1416a 9748 /* fpclassify(x) ->
19fbe3a4 9749 isnan(x) ? FP_NAN :
921b27c0 9750 (fabs(x) == Inf ? FP_INFINITE :
19fbe3a4 9751 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9752 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
48e1416a 9753
389dd41b 9754 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9755 build_real (type, dconst0));
389dd41b 9756 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9757 tmp, fp_zero, fp_subnormal);
19fbe3a4 9758
9759 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9760 real_from_string (&r, buf);
389dd41b 9761 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9762 arg, build_real (type, r));
9763 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
48e1416a 9764
19fbe3a4 9765 if (HONOR_INFINITIES (mode))
9766 {
9767 real_inf (&r);
389dd41b 9768 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9769 build_real (type, r));
389dd41b 9770 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9771 fp_infinite, res);
19fbe3a4 9772 }
9773
9774 if (HONOR_NANS (mode))
9775 {
389dd41b 9776 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9777 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
19fbe3a4 9778 }
48e1416a 9779
19fbe3a4 9780 return res;
9781}
9782
9bc9f15f 9783/* Fold a call to an unordered comparison function such as
d5019fe8 9784 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 9785 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 9786 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9787 the opposite of the desired result. UNORDERED_CODE is used
9788 for modes that can hold NaNs and ORDERED_CODE is used for
9789 the rest. */
9bc9f15f 9790
9791static tree
389dd41b 9792fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 9793 enum tree_code unordered_code,
9794 enum tree_code ordered_code)
9795{
859f903a 9796 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 9797 enum tree_code code;
6978db0d 9798 tree type0, type1;
9799 enum tree_code code0, code1;
9800 tree cmp_type = NULL_TREE;
9bc9f15f 9801
6978db0d 9802 type0 = TREE_TYPE (arg0);
9803 type1 = TREE_TYPE (arg1);
a0c938f0 9804
6978db0d 9805 code0 = TREE_CODE (type0);
9806 code1 = TREE_CODE (type1);
a0c938f0 9807
6978db0d 9808 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9809 /* Choose the wider of two real types. */
9810 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9811 ? type0 : type1;
9812 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9813 cmp_type = type0;
9814 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9815 cmp_type = type1;
a0c938f0 9816
389dd41b 9817 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9818 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 9819
9820 if (unordered_code == UNORDERED_EXPR)
9821 {
93633022 9822 if (!HONOR_NANS (arg0))
389dd41b 9823 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9824 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 9825 }
9bc9f15f 9826
93633022 9827 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 9828 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9829 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 9830}
9831
0c93c8a9 9832/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9833 arithmetics if it can never overflow, or into internal functions that
9834 return both result of arithmetics and overflowed boolean flag in
9835 a complex integer result, or some other check for overflow. */
9836
9837static tree
9838fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9839 tree arg0, tree arg1, tree arg2)
9840{
9841 enum internal_fn ifn = IFN_LAST;
9842 tree type = TREE_TYPE (TREE_TYPE (arg2));
9843 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9844 switch (fcode)
9845 {
9846 case BUILT_IN_ADD_OVERFLOW:
9847 case BUILT_IN_SADD_OVERFLOW:
9848 case BUILT_IN_SADDL_OVERFLOW:
9849 case BUILT_IN_SADDLL_OVERFLOW:
9850 case BUILT_IN_UADD_OVERFLOW:
9851 case BUILT_IN_UADDL_OVERFLOW:
9852 case BUILT_IN_UADDLL_OVERFLOW:
9853 ifn = IFN_ADD_OVERFLOW;
9854 break;
9855 case BUILT_IN_SUB_OVERFLOW:
9856 case BUILT_IN_SSUB_OVERFLOW:
9857 case BUILT_IN_SSUBL_OVERFLOW:
9858 case BUILT_IN_SSUBLL_OVERFLOW:
9859 case BUILT_IN_USUB_OVERFLOW:
9860 case BUILT_IN_USUBL_OVERFLOW:
9861 case BUILT_IN_USUBLL_OVERFLOW:
9862 ifn = IFN_SUB_OVERFLOW;
9863 break;
9864 case BUILT_IN_MUL_OVERFLOW:
9865 case BUILT_IN_SMUL_OVERFLOW:
9866 case BUILT_IN_SMULL_OVERFLOW:
9867 case BUILT_IN_SMULLL_OVERFLOW:
9868 case BUILT_IN_UMUL_OVERFLOW:
9869 case BUILT_IN_UMULL_OVERFLOW:
9870 case BUILT_IN_UMULLL_OVERFLOW:
9871 ifn = IFN_MUL_OVERFLOW;
9872 break;
9873 default:
9874 gcc_unreachable ();
9875 }
9876 tree ctype = build_complex_type (type);
9877 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9878 2, arg0, arg1);
9879 tree tgt = save_expr (call);
9880 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9881 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9882 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9883 tree store
9884 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9885 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9886}
9887
c2f47e15 9888/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 9889 This function returns NULL_TREE if no simplification was possible. */
650e4c94 9890
4ee9c684 9891static tree
e80cc485 9892fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 9893{
e9f80ff5 9894 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 9895 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 9896 switch (fcode)
650e4c94 9897 {
c2f47e15 9898 CASE_FLT_FN (BUILT_IN_INF):
9899 case BUILT_IN_INFD32:
9900 case BUILT_IN_INFD64:
9901 case BUILT_IN_INFD128:
389dd41b 9902 return fold_builtin_inf (loc, type, true);
7c2f0500 9903
c2f47e15 9904 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
389dd41b 9905 return fold_builtin_inf (loc, type, false);
7c2f0500 9906
c2f47e15 9907 case BUILT_IN_CLASSIFY_TYPE:
9908 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 9909
c2f47e15 9910 default:
9911 break;
9912 }
9913 return NULL_TREE;
9914}
7c2f0500 9915
c2f47e15 9916/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 9917 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 9918
c2f47e15 9919static tree
e80cc485 9920fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 9921{
9922 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9923 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9924 switch (fcode)
9925 {
650e4c94 9926 case BUILT_IN_CONSTANT_P:
7c2f0500 9927 {
c2f47e15 9928 tree val = fold_builtin_constant_p (arg0);
7c2f0500 9929
7c2f0500 9930 /* Gimplification will pull the CALL_EXPR for the builtin out of
9931 an if condition. When not optimizing, we'll not CSE it back.
9932 To avoid link error types of regressions, return false now. */
9933 if (!val && !optimize)
9934 val = integer_zero_node;
9935
9936 return val;
9937 }
650e4c94 9938
539a3a92 9939 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 9940 return fold_builtin_classify_type (arg0);
539a3a92 9941
650e4c94 9942 case BUILT_IN_STRLEN:
c7cbde74 9943 return fold_builtin_strlen (loc, type, arg0);
650e4c94 9944
4f35b1fc 9945 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 9946 case BUILT_IN_FABSD32:
9947 case BUILT_IN_FABSD64:
9948 case BUILT_IN_FABSD128:
389dd41b 9949 return fold_builtin_fabs (loc, arg0, type);
d1aade50 9950
9951 case BUILT_IN_ABS:
9952 case BUILT_IN_LABS:
9953 case BUILT_IN_LLABS:
9954 case BUILT_IN_IMAXABS:
389dd41b 9955 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 9956
4f35b1fc 9957 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 9958 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9959 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9960 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 9961 break;
36d3581d 9962
4f35b1fc 9963 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 9964 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9965 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9966 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));;
c2f47e15 9967 break;
36d3581d 9968
4f35b1fc 9969 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 9970 if (validate_arg (arg0, COMPLEX_TYPE)
9971 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9972 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 9973 break;
36d3581d 9974
503733d5 9975 CASE_FLT_FN (BUILT_IN_CCOS):
9af5ce0c 9976 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
48e1416a 9977
503733d5 9978 CASE_FLT_FN (BUILT_IN_CCOSH):
9af5ce0c 9979 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
48e1416a 9980
c2373fdb 9981 CASE_FLT_FN (BUILT_IN_CPROJ):
9af5ce0c 9982 return fold_builtin_cproj (loc, arg0, type);
c2373fdb 9983
239d491a 9984 CASE_FLT_FN (BUILT_IN_CSIN):
9985 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9986 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9987 return do_mpc_arg1 (arg0, type, mpc_sin);
c2f47e15 9988 break;
48e1416a 9989
239d491a 9990 CASE_FLT_FN (BUILT_IN_CSINH):
9991 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9992 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9993 return do_mpc_arg1 (arg0, type, mpc_sinh);
9994 break;
48e1416a 9995
239d491a 9996 CASE_FLT_FN (BUILT_IN_CTAN):
9997 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9998 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9999 return do_mpc_arg1 (arg0, type, mpc_tan);
10000 break;
48e1416a 10001
239d491a 10002 CASE_FLT_FN (BUILT_IN_CTANH):
10003 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10004 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 10005 return do_mpc_arg1 (arg0, type, mpc_tanh);
10006 break;
48e1416a 10007
239d491a 10008 CASE_FLT_FN (BUILT_IN_CLOG):
10009 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10010 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 10011 return do_mpc_arg1 (arg0, type, mpc_log);
10012 break;
48e1416a 10013
239d491a 10014 CASE_FLT_FN (BUILT_IN_CSQRT):
10015 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10016 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 10017 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10018 break;
48e1416a 10019
0e7e6e7f 10020 CASE_FLT_FN (BUILT_IN_CASIN):
10021 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10022 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10023 return do_mpc_arg1 (arg0, type, mpc_asin);
10024 break;
48e1416a 10025
0e7e6e7f 10026 CASE_FLT_FN (BUILT_IN_CACOS):
10027 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10028 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10029 return do_mpc_arg1 (arg0, type, mpc_acos);
10030 break;
48e1416a 10031
0e7e6e7f 10032 CASE_FLT_FN (BUILT_IN_CATAN):
10033 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10034 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10035 return do_mpc_arg1 (arg0, type, mpc_atan);
10036 break;
48e1416a 10037
0e7e6e7f 10038 CASE_FLT_FN (BUILT_IN_CASINH):
10039 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10040 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10041 return do_mpc_arg1 (arg0, type, mpc_asinh);
10042 break;
48e1416a 10043
0e7e6e7f 10044 CASE_FLT_FN (BUILT_IN_CACOSH):
10045 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10046 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10047 return do_mpc_arg1 (arg0, type, mpc_acosh);
10048 break;
48e1416a 10049
0e7e6e7f 10050 CASE_FLT_FN (BUILT_IN_CATANH):
10051 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10052 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10053 return do_mpc_arg1 (arg0, type, mpc_atanh);
10054 break;
48e1416a 10055
4f35b1fc 10056 CASE_FLT_FN (BUILT_IN_CABS):
389dd41b 10057 return fold_builtin_cabs (loc, arg0, type, fndecl);
c63f4ad3 10058
abe4dcf6 10059 CASE_FLT_FN (BUILT_IN_CARG):
389dd41b 10060 return fold_builtin_carg (loc, arg0, type);
abe4dcf6 10061
4f35b1fc 10062 CASE_FLT_FN (BUILT_IN_SQRT):
389dd41b 10063 return fold_builtin_sqrt (loc, arg0, type);
805e22b2 10064
4f35b1fc 10065 CASE_FLT_FN (BUILT_IN_CBRT):
389dd41b 10066 return fold_builtin_cbrt (loc, arg0, type);
3bc5c41b 10067
728bac60 10068 CASE_FLT_FN (BUILT_IN_ASIN):
c2f47e15 10069 if (validate_arg (arg0, REAL_TYPE))
10070 return do_mpfr_arg1 (arg0, type, mpfr_asin,
728bac60 10071 &dconstm1, &dconst1, true);
10072 break;
10073
10074 CASE_FLT_FN (BUILT_IN_ACOS):
c2f47e15 10075 if (validate_arg (arg0, REAL_TYPE))
10076 return do_mpfr_arg1 (arg0, type, mpfr_acos,
728bac60 10077 &dconstm1, &dconst1, true);
10078 break;
10079
10080 CASE_FLT_FN (BUILT_IN_ATAN):
c2f47e15 10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
728bac60 10083 break;
10084
10085 CASE_FLT_FN (BUILT_IN_ASINH):
c2f47e15 10086 if (validate_arg (arg0, REAL_TYPE))
10087 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
728bac60 10088 break;
10089
10090 CASE_FLT_FN (BUILT_IN_ACOSH):
c2f47e15 10091 if (validate_arg (arg0, REAL_TYPE))
10092 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
728bac60 10093 &dconst1, NULL, true);
10094 break;
10095
10096 CASE_FLT_FN (BUILT_IN_ATANH):
c2f47e15 10097 if (validate_arg (arg0, REAL_TYPE))
10098 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
728bac60 10099 &dconstm1, &dconst1, false);
10100 break;
10101
4f35b1fc 10102 CASE_FLT_FN (BUILT_IN_SIN):
c2f47e15 10103 if (validate_arg (arg0, REAL_TYPE))
10104 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
728bac60 10105 break;
77e89269 10106
4f35b1fc 10107 CASE_FLT_FN (BUILT_IN_COS):
389dd41b 10108 return fold_builtin_cos (loc, arg0, type, fndecl);
77e89269 10109
728bac60 10110 CASE_FLT_FN (BUILT_IN_TAN):
c2f47e15 10111 return fold_builtin_tan (arg0, type);
d735c391 10112
c5bb2c4b 10113 CASE_FLT_FN (BUILT_IN_CEXP):
389dd41b 10114 return fold_builtin_cexp (loc, arg0, type);
c5bb2c4b 10115
d735c391 10116 CASE_FLT_FN (BUILT_IN_CEXPI):
c2f47e15 10117 if (validate_arg (arg0, REAL_TYPE))
10118 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10119 break;
d92f994c 10120
728bac60 10121 CASE_FLT_FN (BUILT_IN_SINH):
c2f47e15 10122 if (validate_arg (arg0, REAL_TYPE))
10123 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
728bac60 10124 break;
10125
10126 CASE_FLT_FN (BUILT_IN_COSH):
389dd41b 10127 return fold_builtin_cosh (loc, arg0, type, fndecl);
728bac60 10128
10129 CASE_FLT_FN (BUILT_IN_TANH):
c2f47e15 10130 if (validate_arg (arg0, REAL_TYPE))
10131 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
728bac60 10132 break;
10133
29f4cd78 10134 CASE_FLT_FN (BUILT_IN_ERF):
c2f47e15 10135 if (validate_arg (arg0, REAL_TYPE))
10136 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
29f4cd78 10137 break;
10138
10139 CASE_FLT_FN (BUILT_IN_ERFC):
c2f47e15 10140 if (validate_arg (arg0, REAL_TYPE))
10141 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
29f4cd78 10142 break;
10143
32dba52b 10144 CASE_FLT_FN (BUILT_IN_TGAMMA):
c2f47e15 10145 if (validate_arg (arg0, REAL_TYPE))
10146 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
32dba52b 10147 break;
48e1416a 10148
4f35b1fc 10149 CASE_FLT_FN (BUILT_IN_EXP):
389dd41b 10150 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
467214fd 10151
4f35b1fc 10152 CASE_FLT_FN (BUILT_IN_EXP2):
389dd41b 10153 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
467214fd 10154
4f35b1fc 10155 CASE_FLT_FN (BUILT_IN_EXP10):
10156 CASE_FLT_FN (BUILT_IN_POW10):
389dd41b 10157 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
467214fd 10158
29f4cd78 10159 CASE_FLT_FN (BUILT_IN_EXPM1):
c2f47e15 10160 if (validate_arg (arg0, REAL_TYPE))
10161 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
f8dad9b4 10162 break;
48e1416a 10163
4f35b1fc 10164 CASE_FLT_FN (BUILT_IN_LOG):
f8dad9b4 10165 if (validate_arg (arg0, REAL_TYPE))
10166 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10167 break;
467214fd 10168
4f35b1fc 10169 CASE_FLT_FN (BUILT_IN_LOG2):
f8dad9b4 10170 if (validate_arg (arg0, REAL_TYPE))
10171 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10172 break;
467214fd 10173
4f35b1fc 10174 CASE_FLT_FN (BUILT_IN_LOG10):
f8dad9b4 10175 if (validate_arg (arg0, REAL_TYPE))
10176 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10177 break;
29f4cd78 10178
10179 CASE_FLT_FN (BUILT_IN_LOG1P):
c2f47e15 10180 if (validate_arg (arg0, REAL_TYPE))
10181 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
29f4cd78 10182 &dconstm1, NULL, false);
10183 break;
805e22b2 10184
65dd1378 10185 CASE_FLT_FN (BUILT_IN_J0):
10186 if (validate_arg (arg0, REAL_TYPE))
10187 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10188 NULL, NULL, 0);
10189 break;
10190
10191 CASE_FLT_FN (BUILT_IN_J1):
10192 if (validate_arg (arg0, REAL_TYPE))
10193 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10194 NULL, NULL, 0);
10195 break;
6ff9eeff 10196
10197 CASE_FLT_FN (BUILT_IN_Y0):
10198 if (validate_arg (arg0, REAL_TYPE))
10199 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10200 &dconst0, NULL, false);
10201 break;
10202
10203 CASE_FLT_FN (BUILT_IN_Y1):
10204 if (validate_arg (arg0, REAL_TYPE))
10205 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10206 &dconst0, NULL, false);
10207 break;
65dd1378 10208
4f35b1fc 10209 CASE_FLT_FN (BUILT_IN_NAN):
c4503c0a 10210 case BUILT_IN_NAND32:
10211 case BUILT_IN_NAND64:
10212 case BUILT_IN_NAND128:
c2f47e15 10213 return fold_builtin_nan (arg0, type, true);
b0db7939 10214
4f35b1fc 10215 CASE_FLT_FN (BUILT_IN_NANS):
c2f47e15 10216 return fold_builtin_nan (arg0, type, false);
b0db7939 10217
4f35b1fc 10218 CASE_FLT_FN (BUILT_IN_FLOOR):
389dd41b 10219 return fold_builtin_floor (loc, fndecl, arg0);
277f8dd2 10220
4f35b1fc 10221 CASE_FLT_FN (BUILT_IN_CEIL):
389dd41b 10222 return fold_builtin_ceil (loc, fndecl, arg0);
277f8dd2 10223
4f35b1fc 10224 CASE_FLT_FN (BUILT_IN_TRUNC):
389dd41b 10225 return fold_builtin_trunc (loc, fndecl, arg0);
277f8dd2 10226
4f35b1fc 10227 CASE_FLT_FN (BUILT_IN_ROUND):
389dd41b 10228 return fold_builtin_round (loc, fndecl, arg0);
89ab3887 10229
4f35b1fc 10230 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10231 CASE_FLT_FN (BUILT_IN_RINT):
389dd41b 10232 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
6528f4f4 10233
80ff6494 10234 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 10235 CASE_FLT_FN (BUILT_IN_LCEIL):
10236 CASE_FLT_FN (BUILT_IN_LLCEIL):
10237 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 10238 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 10239 CASE_FLT_FN (BUILT_IN_LLFLOOR):
80ff6494 10240 CASE_FLT_FN (BUILT_IN_IROUND):
a0c938f0 10241 CASE_FLT_FN (BUILT_IN_LROUND):
4f35b1fc 10242 CASE_FLT_FN (BUILT_IN_LLROUND):
389dd41b 10243 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
34f17811 10244
80ff6494 10245 CASE_FLT_FN (BUILT_IN_IRINT):
4f35b1fc 10246 CASE_FLT_FN (BUILT_IN_LRINT):
10247 CASE_FLT_FN (BUILT_IN_LLRINT):
389dd41b 10248 return fold_fixed_mathfn (loc, fndecl, arg0);
9ed65c7f 10249
74bdbe96 10250 case BUILT_IN_BSWAP16:
42791117 10251 case BUILT_IN_BSWAP32:
10252 case BUILT_IN_BSWAP64:
c2f47e15 10253 return fold_builtin_bswap (fndecl, arg0);
42791117 10254
4f35b1fc 10255 CASE_INT_FN (BUILT_IN_FFS):
10256 CASE_INT_FN (BUILT_IN_CLZ):
10257 CASE_INT_FN (BUILT_IN_CTZ):
6aaa1f9e 10258 CASE_INT_FN (BUILT_IN_CLRSB):
4f35b1fc 10259 CASE_INT_FN (BUILT_IN_POPCOUNT):
10260 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 10261 return fold_builtin_bitop (fndecl, arg0);
9c8a1629 10262
4f35b1fc 10263 CASE_FLT_FN (BUILT_IN_SIGNBIT):
389dd41b 10264 return fold_builtin_signbit (loc, arg0, type);
27f261ef 10265
cb2b9385 10266 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
389dd41b 10267 return fold_builtin_significand (loc, arg0, type);
cb2b9385 10268
10269 CASE_FLT_FN (BUILT_IN_ILOGB):
10270 CASE_FLT_FN (BUILT_IN_LOGB):
389dd41b 10271 return fold_builtin_logb (loc, arg0, type);
cb2b9385 10272
d49367d4 10273 case BUILT_IN_ISASCII:
389dd41b 10274 return fold_builtin_isascii (loc, arg0);
d49367d4 10275
10276 case BUILT_IN_TOASCII:
389dd41b 10277 return fold_builtin_toascii (loc, arg0);
d49367d4 10278
df1cf42e 10279 case BUILT_IN_ISDIGIT:
389dd41b 10280 return fold_builtin_isdigit (loc, arg0);
467214fd 10281
4f35b1fc 10282 CASE_FLT_FN (BUILT_IN_FINITE):
c4503c0a 10283 case BUILT_IN_FINITED32:
10284 case BUILT_IN_FINITED64:
10285 case BUILT_IN_FINITED128:
cde061c1 10286 case BUILT_IN_ISFINITE:
a65c4d64 10287 {
10288 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10289 if (ret)
10290 return ret;
10291 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10292 }
726069ba 10293
4f35b1fc 10294 CASE_FLT_FN (BUILT_IN_ISINF):
c4503c0a 10295 case BUILT_IN_ISINFD32:
10296 case BUILT_IN_ISINFD64:
10297 case BUILT_IN_ISINFD128:
a65c4d64 10298 {
10299 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10300 if (ret)
10301 return ret;
10302 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10303 }
10304
10305 case BUILT_IN_ISNORMAL:
10306 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
726069ba 10307
c319d56a 10308 case BUILT_IN_ISINF_SIGN:
389dd41b 10309 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
c319d56a 10310
4f35b1fc 10311 CASE_FLT_FN (BUILT_IN_ISNAN):
c4503c0a 10312 case BUILT_IN_ISNAND32:
10313 case BUILT_IN_ISNAND64:
10314 case BUILT_IN_ISNAND128:
389dd41b 10315 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
c2f47e15 10316
663870fc 10317 case BUILT_IN_FREE:
10318 if (integer_zerop (arg0))
10319 return build_empty_stmt (loc);
10320 break;
10321
c2f47e15 10322 default:
10323 break;
10324 }
10325
10326 return NULL_TREE;
10327
10328}
10329
10330/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
e80cc485 10331 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 10332
10333static tree
e80cc485 10334fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 10335{
10336 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10337 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10338
10339 switch (fcode)
10340 {
65dd1378 10341 CASE_FLT_FN (BUILT_IN_JN):
10342 if (validate_arg (arg0, INTEGER_TYPE)
10343 && validate_arg (arg1, REAL_TYPE))
10344 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10345 break;
6ff9eeff 10346
10347 CASE_FLT_FN (BUILT_IN_YN):
10348 if (validate_arg (arg0, INTEGER_TYPE)
10349 && validate_arg (arg1, REAL_TYPE))
10350 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10351 &dconst0, false);
10352 break;
e5407ca6 10353
10354 CASE_FLT_FN (BUILT_IN_DREM):
10355 CASE_FLT_FN (BUILT_IN_REMAINDER):
10356 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10357 && validate_arg (arg1, REAL_TYPE))
e5407ca6 10358 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10359 break;
e84da7c1 10360
10361 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10362 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10363 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10364 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 10365 return do_mpfr_lgamma_r (arg0, arg1, type);
10366 break;
c2f47e15 10367
10368 CASE_FLT_FN (BUILT_IN_ATAN2):
10369 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10370 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10371 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10372 break;
10373
10374 CASE_FLT_FN (BUILT_IN_FDIM):
10375 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10376 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10377 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10378 break;
10379
10380 CASE_FLT_FN (BUILT_IN_HYPOT):
389dd41b 10381 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
c2f47e15 10382
c699fab8 10383 CASE_FLT_FN (BUILT_IN_CPOW):
10384 if (validate_arg (arg0, COMPLEX_TYPE)
10385 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10386 && validate_arg (arg1, COMPLEX_TYPE)
48e1416a 10387 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
652d9409 10388 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
c699fab8 10389 break;
c699fab8 10390
7587301b 10391 CASE_FLT_FN (BUILT_IN_LDEXP):
389dd41b 10392 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
7587301b 10393 CASE_FLT_FN (BUILT_IN_SCALBN):
10394 CASE_FLT_FN (BUILT_IN_SCALBLN):
389dd41b 10395 return fold_builtin_load_exponent (loc, arg0, arg1,
10396 type, /*ldexp=*/false);
7587301b 10397
3838b9ae 10398 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 10399 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 10400
ebf8b4f5 10401 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 10402 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 10403
c2f47e15 10404 case BUILT_IN_STRSTR:
389dd41b 10405 return fold_builtin_strstr (loc, arg0, arg1, type);
c2f47e15 10406
c2f47e15 10407 case BUILT_IN_STRSPN:
389dd41b 10408 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 10409
10410 case BUILT_IN_STRCSPN:
389dd41b 10411 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 10412
10413 case BUILT_IN_STRCHR:
10414 case BUILT_IN_INDEX:
389dd41b 10415 return fold_builtin_strchr (loc, arg0, arg1, type);
c2f47e15 10416
10417 case BUILT_IN_STRRCHR:
10418 case BUILT_IN_RINDEX:
389dd41b 10419 return fold_builtin_strrchr (loc, arg0, arg1, type);
c2f47e15 10420
c2f47e15 10421 case BUILT_IN_STRCMP:
389dd41b 10422 return fold_builtin_strcmp (loc, arg0, arg1);
c2f47e15 10423
10424 case BUILT_IN_STRPBRK:
389dd41b 10425 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 10426
10427 case BUILT_IN_EXPECT:
c83059be 10428 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 10429
10430 CASE_FLT_FN (BUILT_IN_POW):
389dd41b 10431 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
c2f47e15 10432
10433 CASE_FLT_FN (BUILT_IN_POWI):
389dd41b 10434 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
c2f47e15 10435
10436 CASE_FLT_FN (BUILT_IN_COPYSIGN):
389dd41b 10437 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
c2f47e15 10438
10439 CASE_FLT_FN (BUILT_IN_FMIN):
389dd41b 10440 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
c2f47e15 10441
10442 CASE_FLT_FN (BUILT_IN_FMAX):
389dd41b 10443 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
726069ba 10444
9bc9f15f 10445 case BUILT_IN_ISGREATER:
389dd41b 10446 return fold_builtin_unordered_cmp (loc, fndecl,
10447 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 10448 case BUILT_IN_ISGREATEREQUAL:
389dd41b 10449 return fold_builtin_unordered_cmp (loc, fndecl,
10450 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 10451 case BUILT_IN_ISLESS:
389dd41b 10452 return fold_builtin_unordered_cmp (loc, fndecl,
10453 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 10454 case BUILT_IN_ISLESSEQUAL:
389dd41b 10455 return fold_builtin_unordered_cmp (loc, fndecl,
10456 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 10457 case BUILT_IN_ISLESSGREATER:
389dd41b 10458 return fold_builtin_unordered_cmp (loc, fndecl,
10459 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 10460 case BUILT_IN_ISUNORDERED:
389dd41b 10461 return fold_builtin_unordered_cmp (loc, fndecl,
10462 arg0, arg1, UNORDERED_EXPR,
d5019fe8 10463 NOP_EXPR);
9bc9f15f 10464
7c2f0500 10465 /* We do the folding for va_start in the expander. */
10466 case BUILT_IN_VA_START:
10467 break;
f0613857 10468
0a39fd54 10469 case BUILT_IN_OBJECT_SIZE:
c2f47e15 10470 return fold_builtin_object_size (arg0, arg1);
0a39fd54 10471
1cd6e20d 10472 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10473 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10474
10475 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10476 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10477
c2f47e15 10478 default:
10479 break;
10480 }
10481 return NULL_TREE;
10482}
10483
10484/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 10485 and ARG2.
c2f47e15 10486 This function returns NULL_TREE if no simplification was possible. */
10487
10488static tree
389dd41b 10489fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 10490 tree arg0, tree arg1, tree arg2)
c2f47e15 10491{
10492 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10493 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10494 switch (fcode)
10495 {
10496
10497 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 10498 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 10499
10500 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 10501 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 10502 break;
10503
e5407ca6 10504 CASE_FLT_FN (BUILT_IN_REMQUO):
10505 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10506 && validate_arg (arg1, REAL_TYPE)
10507 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 10508 return do_mpfr_remquo (arg0, arg1, arg2);
10509 break;
e5407ca6 10510
c2f47e15 10511 case BUILT_IN_STRNCMP:
389dd41b 10512 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
c2f47e15 10513
7959b13b 10514 case BUILT_IN_MEMCHR:
389dd41b 10515 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
7959b13b 10516
c2f47e15 10517 case BUILT_IN_BCMP:
10518 case BUILT_IN_MEMCMP:
389dd41b 10519 return fold_builtin_memcmp (loc, arg0, arg1, arg2);;
c2f47e15 10520
c83059be 10521 case BUILT_IN_EXPECT:
10522 return fold_builtin_expect (loc, arg0, arg1, arg2);
10523
0c93c8a9 10524 case BUILT_IN_ADD_OVERFLOW:
10525 case BUILT_IN_SUB_OVERFLOW:
10526 case BUILT_IN_MUL_OVERFLOW:
10527 case BUILT_IN_SADD_OVERFLOW:
10528 case BUILT_IN_SADDL_OVERFLOW:
10529 case BUILT_IN_SADDLL_OVERFLOW:
10530 case BUILT_IN_SSUB_OVERFLOW:
10531 case BUILT_IN_SSUBL_OVERFLOW:
10532 case BUILT_IN_SSUBLL_OVERFLOW:
10533 case BUILT_IN_SMUL_OVERFLOW:
10534 case BUILT_IN_SMULL_OVERFLOW:
10535 case BUILT_IN_SMULLL_OVERFLOW:
10536 case BUILT_IN_UADD_OVERFLOW:
10537 case BUILT_IN_UADDL_OVERFLOW:
10538 case BUILT_IN_UADDLL_OVERFLOW:
10539 case BUILT_IN_USUB_OVERFLOW:
10540 case BUILT_IN_USUBL_OVERFLOW:
10541 case BUILT_IN_USUBLL_OVERFLOW:
10542 case BUILT_IN_UMUL_OVERFLOW:
10543 case BUILT_IN_UMULL_OVERFLOW:
10544 case BUILT_IN_UMULLL_OVERFLOW:
10545 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10546
650e4c94 10547 default:
10548 break;
10549 }
c2f47e15 10550 return NULL_TREE;
10551}
650e4c94 10552
c2f47e15 10553/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 10554 arguments. IGNORE is true if the result of the
10555 function call is ignored. This function returns NULL_TREE if no
10556 simplification was possible. */
48e1416a 10557
2165588a 10558tree
e80cc485 10559fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 10560{
10561 tree ret = NULL_TREE;
a7f5bb2d 10562
c2f47e15 10563 switch (nargs)
10564 {
10565 case 0:
e80cc485 10566 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 10567 break;
10568 case 1:
e80cc485 10569 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 10570 break;
10571 case 2:
e80cc485 10572 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 10573 break;
10574 case 3:
e80cc485 10575 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 10576 break;
c2f47e15 10577 default:
e80cc485 10578 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 10579 break;
10580 }
10581 if (ret)
10582 {
75a70cf9 10583 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 10584 SET_EXPR_LOCATION (ret, loc);
c2f47e15 10585 TREE_NO_WARNING (ret) = 1;
10586 return ret;
10587 }
10588 return NULL_TREE;
10589}
10590
0e80b01d 10591/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10592 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10593 of arguments in ARGS to be omitted. OLDNARGS is the number of
10594 elements in ARGS. */
c2f47e15 10595
10596static tree
0e80b01d 10597rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10598 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 10599{
0e80b01d 10600 int nargs = oldnargs - skip + n;
10601 tree *buffer;
c2f47e15 10602
0e80b01d 10603 if (n > 0)
c2f47e15 10604 {
0e80b01d 10605 int i, j;
c2f47e15 10606
0e80b01d 10607 buffer = XALLOCAVEC (tree, nargs);
10608 for (i = 0; i < n; i++)
10609 buffer[i] = va_arg (newargs, tree);
10610 for (j = skip; j < oldnargs; j++, i++)
10611 buffer[i] = args[j];
10612 }
10613 else
10614 buffer = args + skip;
19fbe3a4 10615
0e80b01d 10616 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10617}
c2f47e15 10618
198622c0 10619/* Return true if FNDECL shouldn't be folded right now.
10620 If a built-in function has an inline attribute always_inline
10621 wrapper, defer folding it after always_inline functions have
10622 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
10623 might not be performed. */
10624
51d2c51e 10625bool
198622c0 10626avoid_folding_inline_builtin (tree fndecl)
10627{
10628 return (DECL_DECLARED_INLINE_P (fndecl)
10629 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10630 && cfun
10631 && !cfun->always_inline_functions_inlined
10632 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10633}
10634
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.

   EXP is the CALL_EXPR to fold; IGNORE is true if the call's result is
   unused.  Returns the folded replacement tree, or NULL_TREE when the
   call must be kept as-is (not a foldable builtin, or folding has to
   be deferred).  */

tree
fold_call_expr (location_t loc, tree exp, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = get_callee_fndecl (exp);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
	 yet.  Defer folding until we see all the arguments
	 (after inlining).  */
      && !CALL_EXPR_VA_ARG_PACK (exp))
    {
      int nargs = call_expr_nargs (exp);

      /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
	 instead last argument is __builtin_va_arg_pack ().  Defer folding
	 even in that case, until arguments are finalized.  */
      if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}

      /* Folding an always_inline wrapper (e.g. a fortify wrapper) too
	 early would bypass the checks it exists for.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;

      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-specific builtins are folded by the target hook.  */
	return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
				     CALL_EXPR_ARGP (exp), ignore);
      else
	{
	  tree *args = CALL_EXPR_ARGP (exp);
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    return ret;
	}
    }
  return NULL_TREE;
}
48e1416a 10683
/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
   N arguments are passed in the array ARGARRAY.  Return a folded
   expression or NULL_TREE if no simplification was possible.

   The second (unnamed) parameter is the unused return TYPE.  FN must
   be an ADDR_EXPR of a builtin FUNCTION_DECL for any folding to
   happen; the same va_arg_pack / always_inline deferral rules as in
   fold_call_expr apply.  */

tree
fold_builtin_call_array (location_t loc, tree,
			 tree fn,
			 int n,
			 tree *argarray)
{
  if (TREE_CODE (fn) != ADDR_EXPR)
    return NULL_TREE;

  tree fndecl = TREE_OPERAND (fn, 0);
  if (TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl))
    {
      /* If last argument is __builtin_va_arg_pack (), arguments to this
	 function are not finalized yet.  Defer folding until they are.  */
      if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
	{
	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
	  if (fndecl2
	      && TREE_CODE (fndecl2) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
	    return NULL_TREE;
	}
      /* Don't fold always_inline (e.g. fortify) wrappers early.  */
      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	/* Machine-specific builtins go through the target hook.  */
	return targetm.fold_builtin (fndecl, n, argarray, false);
      else
	return fold_builtin_n (loc, fndecl, argarray, n, false);
    }

  return NULL_TREE;
}
10722
af1409ad 10723/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10724 along with N new arguments specified as the "..." parameters. SKIP
10725 is the number of arguments in EXP to be omitted. This function is used
10726 to do varargs-to-varargs transformations. */
10727
10728static tree
10729rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10730{
10731 va_list ap;
10732 tree t;
10733
10734 va_start (ap, n);
10735 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10736 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10737 va_end (ap);
c2f47e15 10738
af1409ad 10739 return t;
c2f47e15 10740}
10741
10742/* Validate a single argument ARG against a tree code CODE representing
10743 a type. */
48e1416a 10744
c2f47e15 10745static bool
b7bf20db 10746validate_arg (const_tree arg, enum tree_code code)
c2f47e15 10747{
10748 if (!arg)
10749 return false;
10750 else if (code == POINTER_TYPE)
10751 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 10752 else if (code == INTEGER_TYPE)
10753 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 10754 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 10755}
0eb671f7 10756
75a70cf9 10757/* This function validates the types of a function call argument list
10758 against a specified list of tree_codes. If the last specifier is a 0,
10759 that represents an ellipses, otherwise the last specifier must be a
10760 VOID_TYPE.
10761
10762 This is the GIMPLE version of validate_arglist. Eventually we want to
10763 completely convert builtins.c to work from GIMPLEs and the tree based
10764 validate_arglist will then be removed. */
10765
10766bool
1a91d914 10767validate_gimple_arglist (const gcall *call, ...)
75a70cf9 10768{
10769 enum tree_code code;
10770 bool res = 0;
10771 va_list ap;
10772 const_tree arg;
10773 size_t i;
10774
10775 va_start (ap, call);
10776 i = 0;
10777
10778 do
10779 {
d62e827b 10780 code = (enum tree_code) va_arg (ap, int);
75a70cf9 10781 switch (code)
10782 {
10783 case 0:
10784 /* This signifies an ellipses, any further arguments are all ok. */
10785 res = true;
10786 goto end;
10787 case VOID_TYPE:
10788 /* This signifies an endlink, if no arguments remain, return
10789 true, otherwise return false. */
10790 res = (i == gimple_call_num_args (call));
10791 goto end;
10792 default:
10793 /* If no parameters remain or the parameter's code does not
10794 match the specified code, return false. Otherwise continue
10795 checking any remaining arguments. */
10796 arg = gimple_call_arg (call, i++);
10797 if (!validate_arg (arg, code))
10798 goto end;
10799 break;
10800 }
10801 }
10802 while (1);
10803
10804 /* We need gotos here since we can only have one VA_CLOSE in a
10805 function. */
10806 end: ;
10807 va_end (ap);
10808
10809 return res;
10810}
10811
/* Default target-specific builtin expander that does nothing.
   Targets that define no builtins use this as their expand_builtin
   hook; returning NULL_RTX tells the caller that no target-specific
   expansion happened.  */

rtx
default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
			rtx target ATTRIBUTE_UNUSED,
			rtx subtarget ATTRIBUTE_UNUSED,
			machine_mode mode ATTRIBUTE_UNUSED,
			int ignore ATTRIBUTE_UNUSED)
{
  return NULL_RTX;
}
c7926a82 10823
01537105 10824/* Returns true is EXP represents data that would potentially reside
10825 in a readonly section. */
10826
b9ea678c 10827bool
01537105 10828readonly_data_expr (tree exp)
10829{
10830 STRIP_NOPS (exp);
10831
9ff0637e 10832 if (TREE_CODE (exp) != ADDR_EXPR)
10833 return false;
10834
10835 exp = get_base_address (TREE_OPERAND (exp, 0));
10836 if (!exp)
10837 return false;
10838
10839 /* Make sure we call decl_readonly_section only for trees it
10840 can handle (since it returns true for everything it doesn't
10841 understand). */
491e04ef 10842 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10843 || TREE_CODE (exp) == CONSTRUCTOR
10844 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10845 return decl_readonly_section (exp, 0);
01537105 10846 else
10847 return false;
10848}
4ee9c684 10849
c2f47e15 10850/* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10851 to the call, and TYPE is its return type.
4ee9c684 10852
c2f47e15 10853 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10854 simplified form of the call as a tree.
10855
10856 The simplified form may be a constant or other expression which
10857 computes the same value, but in a more efficient manner (including
10858 calls to other builtin functions).
10859
10860 The call may contain arguments which need to be evaluated, but
10861 which are not useful to determine the result of the call. In
10862 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10863 COMPOUND_EXPR will be an argument which must be evaluated.
10864 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10865 COMPOUND_EXPR in the chain will contain the tree for the simplified
10866 form of the builtin function call. */
10867
10868static tree
389dd41b 10869fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10870{
c2f47e15 10871 if (!validate_arg (s1, POINTER_TYPE)
10872 || !validate_arg (s2, POINTER_TYPE))
10873 return NULL_TREE;
4ee9c684 10874 else
10875 {
4ee9c684 10876 tree fn;
10877 const char *p1, *p2;
10878
10879 p2 = c_getstr (s2);
10880 if (p2 == NULL)
c2f47e15 10881 return NULL_TREE;
4ee9c684 10882
10883 p1 = c_getstr (s1);
10884 if (p1 != NULL)
10885 {
10886 const char *r = strstr (p1, p2);
daa1d5f5 10887 tree tem;
4ee9c684 10888
4ee9c684 10889 if (r == NULL)
779b4c41 10890 return build_int_cst (TREE_TYPE (s1), 0);
c0c67e38 10891
10892 /* Return an offset into the constant string argument. */
2cc66f2a 10893 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10894 return fold_convert_loc (loc, type, tem);
4ee9c684 10895 }
10896
7efa231c 10897 /* The argument is const char *, and the result is char *, so we need
10898 a type conversion here to avoid a warning. */
4ee9c684 10899 if (p2[0] == '\0')
389dd41b 10900 return fold_convert_loc (loc, type, s1);
4ee9c684 10901
10902 if (p2[1] != '\0')
c2f47e15 10903 return NULL_TREE;
4ee9c684 10904
b9a16870 10905 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10906 if (!fn)
c2f47e15 10907 return NULL_TREE;
4ee9c684 10908
10909 /* New argument list transforming strstr(s1, s2) to
10910 strchr(s1, s2[0]). */
7002a1c8 10911 return build_call_expr_loc (loc, fn, 2, s1,
10912 build_int_cst (integer_type_node, p2[0]));
4ee9c684 10913 }
10914}
10915
c2f47e15 10916/* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10917 the call, and TYPE is its return type.
4ee9c684 10918
c2f47e15 10919 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10920 simplified form of the call as a tree.
10921
10922 The simplified form may be a constant or other expression which
10923 computes the same value, but in a more efficient manner (including
10924 calls to other builtin functions).
10925
10926 The call may contain arguments which need to be evaluated, but
10927 which are not useful to determine the result of the call. In
10928 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10929 COMPOUND_EXPR will be an argument which must be evaluated.
10930 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10931 COMPOUND_EXPR in the chain will contain the tree for the simplified
10932 form of the builtin function call. */
10933
10934static tree
389dd41b 10935fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10936{
c2f47e15 10937 if (!validate_arg (s1, POINTER_TYPE)
10938 || !validate_arg (s2, INTEGER_TYPE))
10939 return NULL_TREE;
4ee9c684 10940 else
10941 {
4ee9c684 10942 const char *p1;
10943
10944 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10945 return NULL_TREE;
4ee9c684 10946
10947 p1 = c_getstr (s1);
10948 if (p1 != NULL)
10949 {
10950 char c;
10951 const char *r;
daa1d5f5 10952 tree tem;
4ee9c684 10953
10954 if (target_char_cast (s2, &c))
c2f47e15 10955 return NULL_TREE;
4ee9c684 10956
10957 r = strchr (p1, c);
10958
10959 if (r == NULL)
779b4c41 10960 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10961
10962 /* Return an offset into the constant string argument. */
2cc66f2a 10963 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10964 return fold_convert_loc (loc, type, tem);
4ee9c684 10965 }
c2f47e15 10966 return NULL_TREE;
4ee9c684 10967 }
10968}
10969
c2f47e15 10970/* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10971 the call, and TYPE is its return type.
4ee9c684 10972
c2f47e15 10973 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10974 simplified form of the call as a tree.
10975
10976 The simplified form may be a constant or other expression which
10977 computes the same value, but in a more efficient manner (including
10978 calls to other builtin functions).
10979
10980 The call may contain arguments which need to be evaluated, but
10981 which are not useful to determine the result of the call. In
10982 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10983 COMPOUND_EXPR will be an argument which must be evaluated.
10984 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10985 COMPOUND_EXPR in the chain will contain the tree for the simplified
10986 form of the builtin function call. */
10987
10988static tree
389dd41b 10989fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10990{
c2f47e15 10991 if (!validate_arg (s1, POINTER_TYPE)
10992 || !validate_arg (s2, INTEGER_TYPE))
10993 return NULL_TREE;
4ee9c684 10994 else
10995 {
4ee9c684 10996 tree fn;
10997 const char *p1;
10998
10999 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 11000 return NULL_TREE;
4ee9c684 11001
11002 p1 = c_getstr (s1);
11003 if (p1 != NULL)
11004 {
11005 char c;
11006 const char *r;
daa1d5f5 11007 tree tem;
4ee9c684 11008
11009 if (target_char_cast (s2, &c))
c2f47e15 11010 return NULL_TREE;
4ee9c684 11011
11012 r = strrchr (p1, c);
11013
11014 if (r == NULL)
779b4c41 11015 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 11016
11017 /* Return an offset into the constant string argument. */
2cc66f2a 11018 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 11019 return fold_convert_loc (loc, type, tem);
4ee9c684 11020 }
11021
11022 if (! integer_zerop (s2))
c2f47e15 11023 return NULL_TREE;
4ee9c684 11024
b9a16870 11025 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 11026 if (!fn)
c2f47e15 11027 return NULL_TREE;
4ee9c684 11028
11029 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
389dd41b 11030 return build_call_expr_loc (loc, fn, 2, s1, s2);
4ee9c684 11031 }
11032}
11033
c2f47e15 11034/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11035 to the call, and TYPE is its return type.
4ee9c684 11036
c2f47e15 11037 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11038 simplified form of the call as a tree.
11039
11040 The simplified form may be a constant or other expression which
11041 computes the same value, but in a more efficient manner (including
11042 calls to other builtin functions).
11043
11044 The call may contain arguments which need to be evaluated, but
11045 which are not useful to determine the result of the call. In
11046 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11047 COMPOUND_EXPR will be an argument which must be evaluated.
11048 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11049 COMPOUND_EXPR in the chain will contain the tree for the simplified
11050 form of the builtin function call. */
11051
11052static tree
389dd41b 11053fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 11054{
c2f47e15 11055 if (!validate_arg (s1, POINTER_TYPE)
11056 || !validate_arg (s2, POINTER_TYPE))
11057 return NULL_TREE;
4ee9c684 11058 else
11059 {
4ee9c684 11060 tree fn;
11061 const char *p1, *p2;
11062
11063 p2 = c_getstr (s2);
11064 if (p2 == NULL)
c2f47e15 11065 return NULL_TREE;
4ee9c684 11066
11067 p1 = c_getstr (s1);
11068 if (p1 != NULL)
11069 {
11070 const char *r = strpbrk (p1, p2);
daa1d5f5 11071 tree tem;
4ee9c684 11072
11073 if (r == NULL)
779b4c41 11074 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 11075
11076 /* Return an offset into the constant string argument. */
2cc66f2a 11077 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 11078 return fold_convert_loc (loc, type, tem);
4ee9c684 11079 }
11080
11081 if (p2[0] == '\0')
05abc81b 11082 /* strpbrk(x, "") == NULL.
11083 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 11084 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 11085
11086 if (p2[1] != '\0')
c2f47e15 11087 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 11088
b9a16870 11089 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 11090 if (!fn)
c2f47e15 11091 return NULL_TREE;
4ee9c684 11092
11093 /* New argument list transforming strpbrk(s1, s2) to
11094 strchr(s1, s2[0]). */
7002a1c8 11095 return build_call_expr_loc (loc, fn, 2, s1,
11096 build_int_cst (integer_type_node, p2[0]));
4ee9c684 11097 }
11098}
11099
c2f47e15 11100/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11101 to the call.
4ee9c684 11102
c2f47e15 11103 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11104 simplified form of the call as a tree.
11105
11106 The simplified form may be a constant or other expression which
11107 computes the same value, but in a more efficient manner (including
11108 calls to other builtin functions).
11109
11110 The call may contain arguments which need to be evaluated, but
11111 which are not useful to determine the result of the call. In
11112 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11113 COMPOUND_EXPR will be an argument which must be evaluated.
11114 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11115 COMPOUND_EXPR in the chain will contain the tree for the simplified
11116 form of the builtin function call. */
11117
11118static tree
389dd41b 11119fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 11120{
c2f47e15 11121 if (!validate_arg (s1, POINTER_TYPE)
11122 || !validate_arg (s2, POINTER_TYPE))
11123 return NULL_TREE;
4ee9c684 11124 else
11125 {
4ee9c684 11126 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11127
11128 /* If both arguments are constants, evaluate at compile-time. */
11129 if (p1 && p2)
11130 {
11131 const size_t r = strspn (p1, p2);
547b938d 11132 return build_int_cst (size_type_node, r);
4ee9c684 11133 }
11134
c2f47e15 11135 /* If either argument is "", return NULL_TREE. */
4ee9c684 11136 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 11137 /* Evaluate and ignore both arguments in case either one has
11138 side-effects. */
389dd41b 11139 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 11140 s1, s2);
c2f47e15 11141 return NULL_TREE;
4ee9c684 11142 }
11143}
11144
c2f47e15 11145/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11146 to the call.
4ee9c684 11147
c2f47e15 11148 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11149 simplified form of the call as a tree.
11150
11151 The simplified form may be a constant or other expression which
11152 computes the same value, but in a more efficient manner (including
11153 calls to other builtin functions).
11154
11155 The call may contain arguments which need to be evaluated, but
11156 which are not useful to determine the result of the call. In
11157 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11158 COMPOUND_EXPR will be an argument which must be evaluated.
11159 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11160 COMPOUND_EXPR in the chain will contain the tree for the simplified
11161 form of the builtin function call. */
11162
11163static tree
389dd41b 11164fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 11165{
c2f47e15 11166 if (!validate_arg (s1, POINTER_TYPE)
11167 || !validate_arg (s2, POINTER_TYPE))
11168 return NULL_TREE;
4ee9c684 11169 else
11170 {
4ee9c684 11171 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11172
11173 /* If both arguments are constants, evaluate at compile-time. */
11174 if (p1 && p2)
11175 {
11176 const size_t r = strcspn (p1, p2);
547b938d 11177 return build_int_cst (size_type_node, r);
4ee9c684 11178 }
11179
c2f47e15 11180 /* If the first argument is "", return NULL_TREE. */
4ee9c684 11181 if (p1 && *p1 == '\0')
11182 {
11183 /* Evaluate and ignore argument s2 in case it has
11184 side-effects. */
389dd41b 11185 return omit_one_operand_loc (loc, size_type_node,
39761420 11186 size_zero_node, s2);
4ee9c684 11187 }
11188
11189 /* If the second argument is "", return __builtin_strlen(s1). */
11190 if (p2 && *p2 == '\0')
11191 {
b9a16870 11192 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 11193
11194 /* If the replacement _DECL isn't initialized, don't do the
11195 transformation. */
11196 if (!fn)
c2f47e15 11197 return NULL_TREE;
4ee9c684 11198
389dd41b 11199 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 11200 }
c2f47e15 11201 return NULL_TREE;
4ee9c684 11202 }
11203}
11204
c2f47e15 11205/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 11206 produced. False otherwise. This is done so that we don't output the error
11207 or warning twice or three times. */
75a70cf9 11208
743b0c6a 11209bool
c2f47e15 11210fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 11211{
11212 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 11213 int nargs = call_expr_nargs (exp);
11214 tree arg;
d98fd4a4 11215 /* There is good chance the current input_location points inside the
11216 definition of the va_start macro (perhaps on the token for
11217 builtin) in a system header, so warnings will not be emitted.
11218 Use the location in real source code. */
11219 source_location current_location =
11220 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11221 NULL);
4ee9c684 11222
257d99c3 11223 if (!stdarg_p (fntype))
743b0c6a 11224 {
11225 error ("%<va_start%> used in function with fixed args");
11226 return true;
11227 }
c2f47e15 11228
11229 if (va_start_p)
79012a9d 11230 {
c2f47e15 11231 if (va_start_p && (nargs != 2))
11232 {
11233 error ("wrong number of arguments to function %<va_start%>");
11234 return true;
11235 }
11236 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 11237 }
11238 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11239 when we checked the arguments and if needed issued a warning. */
c2f47e15 11240 else
4ee9c684 11241 {
c2f47e15 11242 if (nargs == 0)
11243 {
11244 /* Evidently an out of date version of <stdarg.h>; can't validate
11245 va_start's second argument, but can still work as intended. */
d98fd4a4 11246 warning_at (current_location,
7edb1062 11247 OPT_Wvarargs,
11248 "%<__builtin_next_arg%> called without an argument");
c2f47e15 11249 return true;
11250 }
11251 else if (nargs > 1)
a0c938f0 11252 {
c2f47e15 11253 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 11254 return true;
11255 }
c2f47e15 11256 arg = CALL_EXPR_ARG (exp, 0);
11257 }
11258
a8dd994c 11259 if (TREE_CODE (arg) == SSA_NAME)
11260 arg = SSA_NAME_VAR (arg);
11261
c2f47e15 11262 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 11263 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 11264 the arguments and if needed issuing a warning. */
11265 if (!integer_zerop (arg))
11266 {
11267 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 11268
4ee9c684 11269 /* Strip off all nops for the sake of the comparison. This
11270 is not quite the same as STRIP_NOPS. It does more.
11271 We must also strip off INDIRECT_EXPR for C++ reference
11272 parameters. */
72dd6141 11273 while (CONVERT_EXPR_P (arg)
4ee9c684 11274 || TREE_CODE (arg) == INDIRECT_REF)
11275 arg = TREE_OPERAND (arg, 0);
11276 if (arg != last_parm)
a0c938f0 11277 {
b08cf617 11278 /* FIXME: Sometimes with the tree optimizers we can get the
11279 not the last argument even though the user used the last
11280 argument. We just warn and set the arg to be the last
11281 argument so that we will get wrong-code because of
11282 it. */
d98fd4a4 11283 warning_at (current_location,
7edb1062 11284 OPT_Wvarargs,
d98fd4a4 11285 "second parameter of %<va_start%> not last named argument");
743b0c6a 11286 }
24158ad7 11287
11288 /* Undefined by C99 7.15.1.4p4 (va_start):
11289 "If the parameter parmN is declared with the register storage
11290 class, with a function or array type, or with a type that is
11291 not compatible with the type that results after application of
11292 the default argument promotions, the behavior is undefined."
11293 */
11294 else if (DECL_REGISTER (arg))
d98fd4a4 11295 {
11296 warning_at (current_location,
7edb1062 11297 OPT_Wvarargs,
d98fd4a4 11298 "undefined behaviour when second parameter of "
11299 "%<va_start%> is declared with %<register%> storage");
11300 }
24158ad7 11301
79012a9d 11302 /* We want to verify the second parameter just once before the tree
a0c938f0 11303 optimizers are run and then avoid keeping it in the tree,
11304 as otherwise we could warn even for correct code like:
11305 void foo (int i, ...)
11306 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 11307 if (va_start_p)
11308 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11309 else
11310 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 11311 }
11312 return false;
4ee9c684 11313}
11314
11315
c2f47e15 11316/* Expand a call EXP to __builtin_object_size. */
0a39fd54 11317
f7715905 11318static rtx
0a39fd54 11319expand_builtin_object_size (tree exp)
11320{
11321 tree ost;
11322 int object_size_type;
11323 tree fndecl = get_callee_fndecl (exp);
0a39fd54 11324
c2f47e15 11325 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 11326 {
b8c23db3 11327 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11328 exp, fndecl);
0a39fd54 11329 expand_builtin_trap ();
11330 return const0_rtx;
11331 }
11332
c2f47e15 11333 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 11334 STRIP_NOPS (ost);
11335
11336 if (TREE_CODE (ost) != INTEGER_CST
11337 || tree_int_cst_sgn (ost) < 0
11338 || compare_tree_int (ost, 3) > 0)
11339 {
b8c23db3 11340 error ("%Klast argument of %D is not integer constant between 0 and 3",
11341 exp, fndecl);
0a39fd54 11342 expand_builtin_trap ();
11343 return const0_rtx;
11344 }
11345
e913b5cd 11346 object_size_type = tree_to_shwi (ost);
0a39fd54 11347
11348 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11349}
11350
/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   FCODE is the BUILT_IN_* to use.
   Return NULL_RTX if we failed; the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient (and in
   mode MODE if that's convenient).

   The call's arguments are (dest, src-or-char, len, size) where SIZE
   is the statically known object size from __builtin_object_size.  */

static rtx
expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
			   enum built_in_function fcode)
{
  tree dest, src, len, size;

  /* MEMSET_CHK takes an integer fill value as second argument; the
     others take a source pointer.  */
  if (!validate_arglist (exp,
			 POINTER_TYPE,
			 fcode == BUILT_IN_MEMSET_CHK
			 ? INTEGER_TYPE : POINTER_TYPE,
			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  dest = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);
  len = CALL_EXPR_ARG (exp, 2);
  size = CALL_EXPR_ARG (exp, 3);

  /* Without a compile-time object size there is nothing to check.  */
  if (! tree_fits_uhwi_p (size))
    return NULL_RTX;

  if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
    {
      tree fn;

      /* A known length larger than the known object size is a
	 guaranteed overflow: warn and leave the _chk call alone.  */
      if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
	{
	  warning_at (tree_nonartificial_location (exp),
		      0, "%Kcall to %D will always overflow destination buffer",
		      exp, get_callee_fndecl (exp));
	  return NULL_RTX;
	}

      fn = NULL_TREE;
      /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
	 mem{cpy,pcpy,move,set} is available.  */
      switch (fcode)
	{
	case BUILT_IN_MEMCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
	  break;
	case BUILT_IN_MEMPCPY_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
	  break;
	case BUILT_IN_MEMMOVE_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
	  break;
	case BUILT_IN_MEMSET_CHK:
	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
	  break;
	default:
	  break;
	}

      if (! fn)
	return NULL_RTX;

      /* The check passed (or was unknowable): expand as the plain,
	 unchecked builtin, preserving tail-call status.  */
      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
      return expand_expr (fn, target, mode, EXPAND_NORMAL);
    }
  else if (fcode == BUILT_IN_MEMSET_CHK)
    return NULL_RTX;
  else
    {
      unsigned int dest_align = get_pointer_alignment (dest);

      /* If DEST is not a pointer type, call the normal function.  */
      if (dest_align == 0)
	return NULL_RTX;

      /* If SRC and DEST are the same (and not volatile), do nothing.  */
      if (operand_equal_p (src, dest, 0))
	{
	  tree expr;

	  if (fcode != BUILT_IN_MEMPCPY_CHK)
	    {
	      /* Evaluate and ignore LEN in case it has side-effects.  */
	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
	    }

	  /* mempcpy returns DEST + LEN.  */
	  expr = fold_build_pointer_plus (dest, len);
	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
	}

      /* __memmove_chk special case.  */
      if (fcode == BUILT_IN_MEMMOVE_CHK)
	{
	  unsigned int src_align = get_pointer_alignment (src);

	  if (src_align == 0)
	    return NULL_RTX;

	  /* If src is categorized for a readonly section we can use
	     normal __memcpy_chk.  */
	  if (readonly_data_expr (src))
	    {
	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return NULL_RTX;
	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
					  dest, src, len, size);
	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
	    }
	}
      return NULL_RTX;
    }
}
11470
11471/* Emit warning if a buffer overflow is detected at compile time. */
11472
11473static void
11474maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11475{
c2f47e15 11476 int is_strlen = 0;
0a39fd54 11477 tree len, size;
b430e8d9 11478 location_t loc = tree_nonartificial_location (exp);
0a39fd54 11479
11480 switch (fcode)
11481 {
11482 case BUILT_IN_STRCPY_CHK:
11483 case BUILT_IN_STPCPY_CHK:
11484 /* For __strcat_chk the warning will be emitted only if overflowing
11485 by at least strlen (dest) + 1 bytes. */
11486 case BUILT_IN_STRCAT_CHK:
c2f47e15 11487 len = CALL_EXPR_ARG (exp, 1);
11488 size = CALL_EXPR_ARG (exp, 2);
0a39fd54 11489 is_strlen = 1;
11490 break;
b356dfef 11491 case BUILT_IN_STRNCAT_CHK:
0a39fd54 11492 case BUILT_IN_STRNCPY_CHK:
1063acde 11493 case BUILT_IN_STPNCPY_CHK:
c2f47e15 11494 len = CALL_EXPR_ARG (exp, 2);
11495 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11496 break;
11497 case BUILT_IN_SNPRINTF_CHK:
11498 case BUILT_IN_VSNPRINTF_CHK:
c2f47e15 11499 len = CALL_EXPR_ARG (exp, 1);
11500 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11501 break;
11502 default:
11503 gcc_unreachable ();
11504 }
11505
0a39fd54 11506 if (!len || !size)
11507 return;
11508
e913b5cd 11509 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11510 return;
11511
11512 if (is_strlen)
11513 {
11514 len = c_strlen (len, 1);
e913b5cd 11515 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
0a39fd54 11516 return;
11517 }
b356dfef 11518 else if (fcode == BUILT_IN_STRNCAT_CHK)
11519 {
c2f47e15 11520 tree src = CALL_EXPR_ARG (exp, 1);
e913b5cd 11521 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
b356dfef 11522 return;
11523 src = c_strlen (src, 1);
e913b5cd 11524 if (! src || ! tree_fits_uhwi_p (src))
b356dfef 11525 {
b430e8d9 11526 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11527 exp, get_callee_fndecl (exp));
b356dfef 11528 return;
11529 }
11530 else if (tree_int_cst_lt (src, size))
11531 return;
11532 }
e913b5cd 11533 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
0a39fd54 11534 return;
11535
b430e8d9 11536 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11537 exp, get_callee_fndecl (exp));
0a39fd54 11538}
11539
11540/* Emit warning if a buffer overflow is detected at compile time
11541 in __sprintf_chk/__vsprintf_chk calls. */
11542
11543static void
11544maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11545{
1e4adcfc 11546 tree size, len, fmt;
0a39fd54 11547 const char *fmt_str;
c2f47e15 11548 int nargs = call_expr_nargs (exp);
0a39fd54 11549
11550 /* Verify the required arguments in the original call. */
48e1416a 11551
c2f47e15 11552 if (nargs < 4)
0a39fd54 11553 return;
c2f47e15 11554 size = CALL_EXPR_ARG (exp, 2);
11555 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 11556
e913b5cd 11557 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11558 return;
11559
11560 /* Check whether the format is a literal string constant. */
11561 fmt_str = c_getstr (fmt);
11562 if (fmt_str == NULL)
11563 return;
11564
d4473c84 11565 if (!init_target_chars ())
99eabcc1 11566 return;
11567
0a39fd54 11568 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 11569 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 11570 len = build_int_cstu (size_type_node, strlen (fmt_str));
11571 /* If the format is "%s" and first ... argument is a string literal,
11572 we know it too. */
c2f47e15 11573 else if (fcode == BUILT_IN_SPRINTF_CHK
11574 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 11575 {
11576 tree arg;
11577
c2f47e15 11578 if (nargs < 5)
0a39fd54 11579 return;
c2f47e15 11580 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 11581 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11582 return;
11583
11584 len = c_strlen (arg, 1);
e913b5cd 11585 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 11586 return;
11587 }
11588 else
11589 return;
11590
11591 if (! tree_int_cst_lt (len, size))
b430e8d9 11592 warning_at (tree_nonartificial_location (exp),
11593 0, "%Kcall to %D will always overflow destination buffer",
11594 exp, get_callee_fndecl (exp));
0a39fd54 11595}
11596
2c281b15 11597/* Emit warning if a free is called with address of a variable. */
11598
11599static void
11600maybe_emit_free_warning (tree exp)
11601{
11602 tree arg = CALL_EXPR_ARG (exp, 0);
11603
11604 STRIP_NOPS (arg);
11605 if (TREE_CODE (arg) != ADDR_EXPR)
11606 return;
11607
11608 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 11609 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 11610 return;
11611
11612 if (SSA_VAR_P (arg))
f74ea1c2 11613 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11614 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 11615 else
f74ea1c2 11616 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11617 "%Kattempt to free a non-heap object", exp);
2c281b15 11618}
11619
c2f47e15 11620/* Fold a call to __builtin_object_size with arguments PTR and OST,
11621 if possible. */
0a39fd54 11622
f7715905 11623static tree
c2f47e15 11624fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 11625{
a6caa15f 11626 unsigned HOST_WIDE_INT bytes;
0a39fd54 11627 int object_size_type;
11628
c2f47e15 11629 if (!validate_arg (ptr, POINTER_TYPE)
11630 || !validate_arg (ost, INTEGER_TYPE))
11631 return NULL_TREE;
0a39fd54 11632
0a39fd54 11633 STRIP_NOPS (ost);
11634
11635 if (TREE_CODE (ost) != INTEGER_CST
11636 || tree_int_cst_sgn (ost) < 0
11637 || compare_tree_int (ost, 3) > 0)
c2f47e15 11638 return NULL_TREE;
0a39fd54 11639
e913b5cd 11640 object_size_type = tree_to_shwi (ost);
0a39fd54 11641
11642 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11643 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11644 and (size_t) 0 for types 2 and 3. */
11645 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 11646 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 11647
11648 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 11649 {
6da74b21 11650 bytes = compute_builtin_object_size (ptr, object_size_type);
11651 if (wi::fits_to_tree_p (bytes, size_type_node))
11652 return build_int_cstu (size_type_node, bytes);
a6caa15f 11653 }
0a39fd54 11654 else if (TREE_CODE (ptr) == SSA_NAME)
11655 {
0a39fd54 11656 /* If object size is not known yet, delay folding until
11657 later. Maybe subsequent passes will help determining
11658 it. */
11659 bytes = compute_builtin_object_size (ptr, object_size_type);
a6caa15f 11660 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
6da74b21 11661 && wi::fits_to_tree_p (bytes, size_type_node))
11662 return build_int_cstu (size_type_node, bytes);
0a39fd54 11663 }
11664
a6caa15f 11665 return NULL_TREE;
0a39fd54 11666}
11667
0e80b01d 11668/* Builtins with folding operations that operate on "..." arguments
11669 need special handling; we need to store the arguments in a convenient
11670 data structure before attempting any folding. Fortunately there are
11671 only a few builtins that fall into this category. FNDECL is the
e80cc485 11672 function, EXP is the CALL_EXPR for the call. */
0e80b01d 11673
11674static tree
e80cc485 11675fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
0e80b01d 11676{
11677 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11678 tree ret = NULL_TREE;
11679
11680 switch (fcode)
11681 {
0e80b01d 11682 case BUILT_IN_FPCLASSIFY:
9d884767 11683 ret = fold_builtin_fpclassify (loc, args, nargs);
0e80b01d 11684 break;
11685
11686 default:
11687 break;
11688 }
11689 if (ret)
11690 {
11691 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11692 SET_EXPR_LOCATION (ret, loc);
11693 TREE_NO_WARNING (ret) = 1;
11694 return ret;
11695 }
11696 return NULL_TREE;
11697}
11698
99eabcc1 11699/* Initialize format string characters in the target charset. */
11700
b9ea678c 11701bool
99eabcc1 11702init_target_chars (void)
11703{
11704 static bool init;
11705 if (!init)
11706 {
11707 target_newline = lang_hooks.to_target_charset ('\n');
11708 target_percent = lang_hooks.to_target_charset ('%');
11709 target_c = lang_hooks.to_target_charset ('c');
11710 target_s = lang_hooks.to_target_charset ('s');
11711 if (target_newline == 0 || target_percent == 0 || target_c == 0
11712 || target_s == 0)
11713 return false;
11714
11715 target_percent_c[0] = target_percent;
11716 target_percent_c[1] = target_c;
11717 target_percent_c[2] = '\0';
11718
11719 target_percent_s[0] = target_percent;
11720 target_percent_s[1] = target_s;
11721 target_percent_s[2] = '\0';
11722
11723 target_percent_s_newline[0] = target_percent;
11724 target_percent_s_newline[1] = target_s;
11725 target_percent_s_newline[2] = target_newline;
11726 target_percent_s_newline[3] = '\0';
a0c938f0 11727
99eabcc1 11728 init = true;
11729 }
11730 return true;
11731}
bffb7645 11732
f0c477f2 11733/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11734 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 11735 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 11736 function assumes that you cleared the MPFR flags and then
11737 calculated M to see if anything subsequently set a flag prior to
11738 entering this function. Return NULL_TREE if any checks fail. */
11739
11740static tree
d4473c84 11741do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 11742{
11743 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11744 overflow/underflow occurred. If -frounding-math, proceed iff the
11745 result of calling FUNC was exact. */
d4473c84 11746 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 11747 && (!flag_rounding_math || !inexact))
11748 {
11749 REAL_VALUE_TYPE rr;
11750
66fa16e6 11751 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 11752 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11753 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11754 but the mpft_t is not, then we underflowed in the
11755 conversion. */
776a7bab 11756 if (real_isfinite (&rr)
f0c477f2 11757 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11758 {
11759 REAL_VALUE_TYPE rmode;
11760
11761 real_convert (&rmode, TYPE_MODE (type), &rr);
11762 /* Proceed iff the specified mode can hold the value. */
11763 if (real_identical (&rmode, &rr))
11764 return build_real (type, rmode);
11765 }
11766 }
11767 return NULL_TREE;
11768}
11769
239d491a 11770/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11771 number and no overflow/underflow occurred. INEXACT is true if M
11772 was not exactly calculated. TYPE is the tree type for the result.
11773 This function assumes that you cleared the MPFR flags and then
11774 calculated M to see if anything subsequently set a flag prior to
652d9409 11775 entering this function. Return NULL_TREE if any checks fail, if
11776 FORCE_CONVERT is true, then bypass the checks. */
239d491a 11777
11778static tree
652d9409 11779do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 11780{
11781 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11782 overflow/underflow occurred. If -frounding-math, proceed iff the
11783 result of calling FUNC was exact. */
652d9409 11784 if (force_convert
11785 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11786 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11787 && (!flag_rounding_math || !inexact)))
239d491a 11788 {
11789 REAL_VALUE_TYPE re, im;
11790
b0e7c4d4 11791 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11792 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 11793 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11794 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11795 but the mpft_t is not, then we underflowed in the
11796 conversion. */
652d9409 11797 if (force_convert
11798 || (real_isfinite (&re) && real_isfinite (&im)
11799 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11800 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 11801 {
11802 REAL_VALUE_TYPE re_mode, im_mode;
11803
11804 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11805 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11806 /* Proceed iff the specified mode can hold the value. */
652d9409 11807 if (force_convert
11808 || (real_identical (&re_mode, &re)
11809 && real_identical (&im_mode, &im)))
239d491a 11810 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11811 build_real (TREE_TYPE (type), im_mode));
11812 }
11813 }
11814 return NULL_TREE;
11815}
239d491a 11816
bffb7645 11817/* If argument ARG is a REAL_CST, call the one-argument mpfr function
11818 FUNC on it and return the resulting value as a tree with type TYPE.
728bac60 11819 If MIN and/or MAX are not NULL, then the supplied ARG must be
11820 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11821 acceptable values, otherwise they are not. The mpfr precision is
11822 set to the precision of TYPE. We assume that function FUNC returns
11823 zero if the result could be calculated exactly within the requested
11824 precision. */
bffb7645 11825
11826static tree
728bac60 11827do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11828 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11829 bool inclusive)
bffb7645 11830{
11831 tree result = NULL_TREE;
48e1416a 11832
bffb7645 11833 STRIP_NOPS (arg);
11834
bd7d6fa4 11835 /* To proceed, MPFR must exactly represent the target floating point
11836 format, which only happens when the target base equals two. */
11837 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11838 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
bffb7645 11839 {
f0c477f2 11840 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
bffb7645 11841
776a7bab 11842 if (real_isfinite (ra)
f0c477f2 11843 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11844 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
bffb7645 11845 {
e2eb2b7f 11846 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11847 const int prec = fmt->p;
11848 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
debf9994 11849 int inexact;
bffb7645 11850 mpfr_t m;
11851
11852 mpfr_init2 (m, prec);
66fa16e6 11853 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11854 mpfr_clear_flags ();
e2eb2b7f 11855 inexact = func (m, m, rnd);
f0c477f2 11856 result = do_mpfr_ckconv (m, type, inexact);
bffb7645 11857 mpfr_clear (m);
11858 }
11859 }
48e1416a 11860
bffb7645 11861 return result;
11862}
f0c477f2 11863
11864/* If argument ARG is a REAL_CST, call the two-argument mpfr function
11865 FUNC on it and return the resulting value as a tree with type TYPE.
11866 The mpfr precision is set to the precision of TYPE. We assume that
11867 function FUNC returns zero if the result could be calculated
11868 exactly within the requested precision. */
11869
11870static tree
11871do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11872 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11873{
11874 tree result = NULL_TREE;
48e1416a 11875
f0c477f2 11876 STRIP_NOPS (arg1);
11877 STRIP_NOPS (arg2);
11878
bd7d6fa4 11879 /* To proceed, MPFR must exactly represent the target floating point
11880 format, which only happens when the target base equals two. */
11881 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11882 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11883 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
f0c477f2 11884 {
11885 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11886 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11887
776a7bab 11888 if (real_isfinite (ra1) && real_isfinite (ra2))
f0c477f2 11889 {
e2eb2b7f 11890 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11891 const int prec = fmt->p;
11892 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
f0c477f2 11893 int inexact;
11894 mpfr_t m1, m2;
11895
11896 mpfr_inits2 (prec, m1, m2, NULL);
66fa16e6 11897 mpfr_from_real (m1, ra1, GMP_RNDN);
11898 mpfr_from_real (m2, ra2, GMP_RNDN);
d4473c84 11899 mpfr_clear_flags ();
e2eb2b7f 11900 inexact = func (m1, m1, m2, rnd);
f0c477f2 11901 result = do_mpfr_ckconv (m1, type, inexact);
11902 mpfr_clears (m1, m2, NULL);
11903 }
11904 }
48e1416a 11905
f0c477f2 11906 return result;
11907}
d92f994c 11908
9917422b 11909/* If argument ARG is a REAL_CST, call the three-argument mpfr function
11910 FUNC on it and return the resulting value as a tree with type TYPE.
11911 The mpfr precision is set to the precision of TYPE. We assume that
11912 function FUNC returns zero if the result could be calculated
11913 exactly within the requested precision. */
11914
11915static tree
11916do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11917 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11918{
11919 tree result = NULL_TREE;
48e1416a 11920
9917422b 11921 STRIP_NOPS (arg1);
11922 STRIP_NOPS (arg2);
11923 STRIP_NOPS (arg3);
11924
bd7d6fa4 11925 /* To proceed, MPFR must exactly represent the target floating point
11926 format, which only happens when the target base equals two. */
11927 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11928 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11929 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11930 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
9917422b 11931 {
11932 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11933 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11934 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11935
776a7bab 11936 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
9917422b 11937 {
e2eb2b7f 11938 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11939 const int prec = fmt->p;
11940 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9917422b 11941 int inexact;
11942 mpfr_t m1, m2, m3;
11943
11944 mpfr_inits2 (prec, m1, m2, m3, NULL);
66fa16e6 11945 mpfr_from_real (m1, ra1, GMP_RNDN);
11946 mpfr_from_real (m2, ra2, GMP_RNDN);
11947 mpfr_from_real (m3, ra3, GMP_RNDN);
d4473c84 11948 mpfr_clear_flags ();
e2eb2b7f 11949 inexact = func (m1, m1, m2, m3, rnd);
9917422b 11950 result = do_mpfr_ckconv (m1, type, inexact);
11951 mpfr_clears (m1, m2, m3, NULL);
11952 }
11953 }
48e1416a 11954
9917422b 11955 return result;
11956}
11957
d92f994c 11958/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11959 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
d735c391 11960 If ARG_SINP and ARG_COSP are NULL then the result is returned
11961 as a complex value.
d92f994c 11962 The type is taken from the type of ARG and is used for setting the
11963 precision of the calculation and results. */
11964
11965static tree
11966do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11967{
bd7d6fa4 11968 tree const type = TREE_TYPE (arg);
d92f994c 11969 tree result = NULL_TREE;
48e1416a 11970
d92f994c 11971 STRIP_NOPS (arg);
48e1416a 11972
bd7d6fa4 11973 /* To proceed, MPFR must exactly represent the target floating point
11974 format, which only happens when the target base equals two. */
11975 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11976 && TREE_CODE (arg) == REAL_CST
11977 && !TREE_OVERFLOW (arg))
d92f994c 11978 {
11979 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11980
776a7bab 11981 if (real_isfinite (ra))
d92f994c 11982 {
e2eb2b7f 11983 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11984 const int prec = fmt->p;
11985 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
d92f994c 11986 tree result_s, result_c;
11987 int inexact;
11988 mpfr_t m, ms, mc;
11989
11990 mpfr_inits2 (prec, m, ms, mc, NULL);
66fa16e6 11991 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11992 mpfr_clear_flags ();
e2eb2b7f 11993 inexact = mpfr_sin_cos (ms, mc, m, rnd);
d92f994c 11994 result_s = do_mpfr_ckconv (ms, type, inexact);
11995 result_c = do_mpfr_ckconv (mc, type, inexact);
11996 mpfr_clears (m, ms, mc, NULL);
11997 if (result_s && result_c)
11998 {
d735c391 11999 /* If we are to return in a complex value do so. */
12000 if (!arg_sinp && !arg_cosp)
12001 return build_complex (build_complex_type (type),
12002 result_c, result_s);
12003
d92f994c 12004 /* Dereference the sin/cos pointer arguments. */
12005 arg_sinp = build_fold_indirect_ref (arg_sinp);
12006 arg_cosp = build_fold_indirect_ref (arg_cosp);
12007 /* Proceed if valid pointer type were passed in. */
12008 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
12009 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
12010 {
12011 /* Set the values. */
41076ef6 12012 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
35cc02b5 12013 result_s);
d92f994c 12014 TREE_SIDE_EFFECTS (result_s) = 1;
41076ef6 12015 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
35cc02b5 12016 result_c);
d92f994c 12017 TREE_SIDE_EFFECTS (result_c) = 1;
12018 /* Combine the assignments into a compound expr. */
12019 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12020 result_s, result_c));
12021 }
12022 }
12023 }
12024 }
12025 return result;
12026}
65dd1378 12027
65dd1378 12028/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12029 two-argument mpfr order N Bessel function FUNC on them and return
12030 the resulting value as a tree with type TYPE. The mpfr precision
12031 is set to the precision of TYPE. We assume that function FUNC
12032 returns zero if the result could be calculated exactly within the
12033 requested precision. */
12034static tree
12035do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12036 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12037 const REAL_VALUE_TYPE *min, bool inclusive)
12038{
12039 tree result = NULL_TREE;
12040
12041 STRIP_NOPS (arg1);
12042 STRIP_NOPS (arg2);
12043
12044 /* To proceed, MPFR must exactly represent the target floating point
12045 format, which only happens when the target base equals two. */
12046 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
e913b5cd 12047 && tree_fits_shwi_p (arg1)
65dd1378 12048 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12049 {
e913b5cd 12050 const HOST_WIDE_INT n = tree_to_shwi (arg1);
65dd1378 12051 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12052
12053 if (n == (long)n
776a7bab 12054 && real_isfinite (ra)
65dd1378 12055 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12056 {
e2eb2b7f 12057 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12058 const int prec = fmt->p;
12059 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
65dd1378 12060 int inexact;
12061 mpfr_t m;
12062
12063 mpfr_init2 (m, prec);
12064 mpfr_from_real (m, ra, GMP_RNDN);
12065 mpfr_clear_flags ();
e2eb2b7f 12066 inexact = func (m, n, m, rnd);
65dd1378 12067 result = do_mpfr_ckconv (m, type, inexact);
12068 mpfr_clear (m);
12069 }
12070 }
48e1416a 12071
65dd1378 12072 return result;
12073}
e5407ca6 12074
12075/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12076 the pointer *(ARG_QUO) and return the result. The type is taken
12077 from the type of ARG0 and is used for setting the precision of the
12078 calculation and results. */
12079
12080static tree
12081do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12082{
12083 tree const type = TREE_TYPE (arg0);
12084 tree result = NULL_TREE;
48e1416a 12085
e5407ca6 12086 STRIP_NOPS (arg0);
12087 STRIP_NOPS (arg1);
48e1416a 12088
e5407ca6 12089 /* To proceed, MPFR must exactly represent the target floating point
12090 format, which only happens when the target base equals two. */
12091 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12092 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12093 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12094 {
12095 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12096 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12097
776a7bab 12098 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 12099 {
e2eb2b7f 12100 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12101 const int prec = fmt->p;
12102 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e5407ca6 12103 tree result_rem;
12104 long integer_quo;
12105 mpfr_t m0, m1;
12106
12107 mpfr_inits2 (prec, m0, m1, NULL);
12108 mpfr_from_real (m0, ra0, GMP_RNDN);
12109 mpfr_from_real (m1, ra1, GMP_RNDN);
12110 mpfr_clear_flags ();
e2eb2b7f 12111 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 12112 /* Remquo is independent of the rounding mode, so pass
12113 inexact=0 to do_mpfr_ckconv(). */
12114 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12115 mpfr_clears (m0, m1, NULL);
12116 if (result_rem)
12117 {
12118 /* MPFR calculates quo in the host's long so it may
12119 return more bits in quo than the target int can hold
12120 if sizeof(host long) > sizeof(target int). This can
12121 happen even for native compilers in LP64 mode. In
12122 these cases, modulo the quo value with the largest
12123 number that the target int can hold while leaving one
12124 bit for the sign. */
12125 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12126 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12127
12128 /* Dereference the quo pointer argument. */
12129 arg_quo = build_fold_indirect_ref (arg_quo);
12130 /* Proceed iff a valid pointer type was passed in. */
12131 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12132 {
12133 /* Set the value. */
7002a1c8 12134 tree result_quo
12135 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12136 build_int_cst (TREE_TYPE (arg_quo),
12137 integer_quo));
e5407ca6 12138 TREE_SIDE_EFFECTS (result_quo) = 1;
12139 /* Combine the quo assignment with the rem. */
12140 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12141 result_quo, result_rem));
12142 }
12143 }
12144 }
12145 }
12146 return result;
12147}
e84da7c1 12148
12149/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12150 resulting value as a tree with type TYPE. The mpfr precision is
12151 set to the precision of TYPE. We assume that this mpfr function
12152 returns zero if the result could be calculated exactly within the
12153 requested precision. In addition, the integer pointer represented
12154 by ARG_SG will be dereferenced and set to the appropriate signgam
12155 (-1,1) value. */
12156
12157static tree
12158do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12159{
12160 tree result = NULL_TREE;
12161
12162 STRIP_NOPS (arg);
48e1416a 12163
e84da7c1 12164 /* To proceed, MPFR must exactly represent the target floating point
12165 format, which only happens when the target base equals two. Also
12166 verify ARG is a constant and that ARG_SG is an int pointer. */
12167 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12168 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12169 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12170 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12171 {
12172 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12173
12174 /* In addition to NaN and Inf, the argument cannot be zero or a
12175 negative integer. */
776a7bab 12176 if (real_isfinite (ra)
e84da7c1 12177 && ra->cl != rvc_zero
9af5ce0c 12178 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 12179 {
e2eb2b7f 12180 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12181 const int prec = fmt->p;
12182 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e84da7c1 12183 int inexact, sg;
12184 mpfr_t m;
12185 tree result_lg;
12186
12187 mpfr_init2 (m, prec);
12188 mpfr_from_real (m, ra, GMP_RNDN);
12189 mpfr_clear_flags ();
e2eb2b7f 12190 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 12191 result_lg = do_mpfr_ckconv (m, type, inexact);
12192 mpfr_clear (m);
12193 if (result_lg)
12194 {
12195 tree result_sg;
12196
12197 /* Dereference the arg_sg pointer argument. */
12198 arg_sg = build_fold_indirect_ref (arg_sg);
12199 /* Assign the signgam value into *arg_sg. */
12200 result_sg = fold_build2 (MODIFY_EXPR,
12201 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 12202 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 12203 TREE_SIDE_EFFECTS (result_sg) = 1;
12204 /* Combine the signgam assignment with the lgamma result. */
12205 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12206 result_sg, result_lg));
12207 }
12208 }
12209 }
12210
12211 return result;
12212}
75a70cf9 12213
239d491a 12214/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12215 function FUNC on it and return the resulting value as a tree with
12216 type TYPE. The mpfr precision is set to the precision of TYPE. We
12217 assume that function FUNC returns zero if the result could be
12218 calculated exactly within the requested precision. */
12219
12220static tree
12221do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12222{
12223 tree result = NULL_TREE;
48e1416a 12224
239d491a 12225 STRIP_NOPS (arg);
12226
12227 /* To proceed, MPFR must exactly represent the target floating point
12228 format, which only happens when the target base equals two. */
12229 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12230 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12231 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12232 {
12233 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12234 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12235
12236 if (real_isfinite (re) && real_isfinite (im))
12237 {
12238 const struct real_format *const fmt =
12239 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12240 const int prec = fmt->p;
12241 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
44d89feb 12242 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
239d491a 12243 int inexact;
12244 mpc_t m;
48e1416a 12245
239d491a 12246 mpc_init2 (m, prec);
9af5ce0c 12247 mpfr_from_real (mpc_realref (m), re, rnd);
12248 mpfr_from_real (mpc_imagref (m), im, rnd);
239d491a 12249 mpfr_clear_flags ();
44d89feb 12250 inexact = func (m, m, crnd);
652d9409 12251 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
239d491a 12252 mpc_clear (m);
12253 }
12254 }
12255
12256 return result;
12257}
c699fab8 12258
12259/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12260 mpc function FUNC on it and return the resulting value as a tree
12261 with type TYPE. The mpfr precision is set to the precision of
12262 TYPE. We assume that function FUNC returns zero if the result
652d9409 12263 could be calculated exactly within the requested precision. If
12264 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12265 in the arguments and/or results. */
c699fab8 12266
63e89698 12267tree
652d9409 12268do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 12269 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12270{
12271 tree result = NULL_TREE;
48e1416a 12272
c699fab8 12273 STRIP_NOPS (arg0);
12274 STRIP_NOPS (arg1);
12275
12276 /* To proceed, MPFR must exactly represent the target floating point
12277 format, which only happens when the target base equals two. */
12278 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12279 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12280 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12281 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12282 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12283 {
12284 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12285 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12286 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12287 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12288
652d9409 12289 if (do_nonfinite
12290 || (real_isfinite (re0) && real_isfinite (im0)
12291 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 12292 {
12293 const struct real_format *const fmt =
12294 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12295 const int prec = fmt->p;
12296 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12297 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12298 int inexact;
12299 mpc_t m0, m1;
48e1416a 12300
c699fab8 12301 mpc_init2 (m0, prec);
12302 mpc_init2 (m1, prec);
9af5ce0c 12303 mpfr_from_real (mpc_realref (m0), re0, rnd);
12304 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12305 mpfr_from_real (mpc_realref (m1), re1, rnd);
12306 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 12307 mpfr_clear_flags ();
12308 inexact = func (m0, m0, m1, crnd);
652d9409 12309 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 12310 mpc_clear (m0);
12311 mpc_clear (m1);
12312 }
12313 }
12314
12315 return result;
12316}
239d491a 12317
75a70cf9 12318/* A wrapper function for builtin folding that prevents warnings for
12319 "statement without effect" and the like, caused by removing the
12320 call node earlier than the warning is generated. */
12321
12322tree
1a91d914 12323fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 12324{
12325 tree ret = NULL_TREE;
12326 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 12327 location_t loc = gimple_location (stmt);
75a70cf9 12328 if (fndecl
12329 && TREE_CODE (fndecl) == FUNCTION_DECL
12330 && DECL_BUILT_IN (fndecl)
12331 && !gimple_call_va_arg_pack_p (stmt))
12332 {
12333 int nargs = gimple_call_num_args (stmt);
9845fb99 12334 tree *args = (nargs > 0
12335 ? gimple_call_arg_ptr (stmt, 0)
12336 : &error_mark_node);
75a70cf9 12337
198622c0 12338 if (avoid_folding_inline_builtin (fndecl))
12339 return NULL_TREE;
75a70cf9 12340 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12341 {
9845fb99 12342 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 12343 }
12344 else
12345 {
9d884767 12346 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 12347 if (ret)
12348 {
12349 /* Propagate location information from original call to
12350 expansion of builtin. Otherwise things like
12351 maybe_emit_chk_warning, that operate on the expansion
12352 of a builtin, will use the wrong location information. */
12353 if (gimple_has_location (stmt))
12354 {
12355 tree realret = ret;
12356 if (TREE_CODE (ret) == NOP_EXPR)
12357 realret = TREE_OPERAND (ret, 0);
12358 if (CAN_HAVE_LOCATION_P (realret)
12359 && !EXPR_HAS_LOCATION (realret))
389dd41b 12360 SET_EXPR_LOCATION (realret, loc);
75a70cf9 12361 return realret;
12362 }
12363 return ret;
12364 }
12365 }
12366 }
12367 return NULL_TREE;
12368}
7bfefa9d 12369
b9a16870 12370/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 12371 and set ASMSPEC as its user assembler name. DECL must be a
12372 function decl that declares a builtin. */
12373
12374void
12375set_builtin_user_assembler_name (tree decl, const char *asmspec)
12376{
12377 tree builtin;
12378 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12379 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12380 && asmspec != 0);
12381
b9a16870 12382 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 12383 set_user_assembler_name (builtin, asmspec);
7bfefa9d 12384 switch (DECL_FUNCTION_CODE (decl))
12385 {
12386 case BUILT_IN_MEMCPY:
12387 init_block_move_fn (asmspec);
12388 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12389 break;
12390 case BUILT_IN_MEMSET:
12391 init_block_clear_fn (asmspec);
12392 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12393 break;
12394 case BUILT_IN_MEMMOVE:
12395 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12396 break;
12397 case BUILT_IN_MEMCMP:
12398 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12399 break;
12400 case BUILT_IN_ABORT:
12401 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12402 break;
5a80a58b 12403 case BUILT_IN_FFS:
12404 if (INT_TYPE_SIZE < BITS_PER_WORD)
12405 {
12406 set_user_assembler_libfunc ("ffs", asmspec);
12407 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12408 MODE_INT, 0), "ffs");
12409 }
12410 break;
7bfefa9d 12411 default:
12412 break;
12413 }
12414}
a6b74a67 12415
12416/* Return true if DECL is a builtin that expands to a constant or similarly
12417 simple code. */
12418bool
12419is_simple_builtin (tree decl)
12420{
12421 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12422 switch (DECL_FUNCTION_CODE (decl))
12423 {
12424 /* Builtins that expand to constants. */
12425 case BUILT_IN_CONSTANT_P:
12426 case BUILT_IN_EXPECT:
12427 case BUILT_IN_OBJECT_SIZE:
12428 case BUILT_IN_UNREACHABLE:
12429 /* Simple register moves or loads from stack. */
fca0886c 12430 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 12431 case BUILT_IN_RETURN_ADDRESS:
12432 case BUILT_IN_EXTRACT_RETURN_ADDR:
12433 case BUILT_IN_FROB_RETURN_ADDR:
12434 case BUILT_IN_RETURN:
12435 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12436 case BUILT_IN_FRAME_ADDRESS:
12437 case BUILT_IN_VA_END:
12438 case BUILT_IN_STACK_SAVE:
12439 case BUILT_IN_STACK_RESTORE:
12440 /* Exception state returns or moves registers around. */
12441 case BUILT_IN_EH_FILTER:
12442 case BUILT_IN_EH_POINTER:
12443 case BUILT_IN_EH_COPY_VALUES:
12444 return true;
12445
12446 default:
12447 return false;
12448 }
12449
12450 return false;
12451}
12452
12453/* Return true if DECL is a builtin that is not expensive, i.e., they are
12454 most probably expanded inline into reasonably simple code. This is a
12455 superset of is_simple_builtin. */
12456bool
12457is_inexpensive_builtin (tree decl)
12458{
12459 if (!decl)
12460 return false;
12461 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12462 return true;
12463 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12464 switch (DECL_FUNCTION_CODE (decl))
12465 {
12466 case BUILT_IN_ABS:
12467 case BUILT_IN_ALLOCA:
581bf1c2 12468 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 12469 case BUILT_IN_BSWAP16:
a6b74a67 12470 case BUILT_IN_BSWAP32:
12471 case BUILT_IN_BSWAP64:
12472 case BUILT_IN_CLZ:
12473 case BUILT_IN_CLZIMAX:
12474 case BUILT_IN_CLZL:
12475 case BUILT_IN_CLZLL:
12476 case BUILT_IN_CTZ:
12477 case BUILT_IN_CTZIMAX:
12478 case BUILT_IN_CTZL:
12479 case BUILT_IN_CTZLL:
12480 case BUILT_IN_FFS:
12481 case BUILT_IN_FFSIMAX:
12482 case BUILT_IN_FFSL:
12483 case BUILT_IN_FFSLL:
12484 case BUILT_IN_IMAXABS:
12485 case BUILT_IN_FINITE:
12486 case BUILT_IN_FINITEF:
12487 case BUILT_IN_FINITEL:
12488 case BUILT_IN_FINITED32:
12489 case BUILT_IN_FINITED64:
12490 case BUILT_IN_FINITED128:
12491 case BUILT_IN_FPCLASSIFY:
12492 case BUILT_IN_ISFINITE:
12493 case BUILT_IN_ISINF_SIGN:
12494 case BUILT_IN_ISINF:
12495 case BUILT_IN_ISINFF:
12496 case BUILT_IN_ISINFL:
12497 case BUILT_IN_ISINFD32:
12498 case BUILT_IN_ISINFD64:
12499 case BUILT_IN_ISINFD128:
12500 case BUILT_IN_ISNAN:
12501 case BUILT_IN_ISNANF:
12502 case BUILT_IN_ISNANL:
12503 case BUILT_IN_ISNAND32:
12504 case BUILT_IN_ISNAND64:
12505 case BUILT_IN_ISNAND128:
12506 case BUILT_IN_ISNORMAL:
12507 case BUILT_IN_ISGREATER:
12508 case BUILT_IN_ISGREATEREQUAL:
12509 case BUILT_IN_ISLESS:
12510 case BUILT_IN_ISLESSEQUAL:
12511 case BUILT_IN_ISLESSGREATER:
12512 case BUILT_IN_ISUNORDERED:
12513 case BUILT_IN_VA_ARG_PACK:
12514 case BUILT_IN_VA_ARG_PACK_LEN:
12515 case BUILT_IN_VA_COPY:
12516 case BUILT_IN_TRAP:
12517 case BUILT_IN_SAVEREGS:
12518 case BUILT_IN_POPCOUNTL:
12519 case BUILT_IN_POPCOUNTLL:
12520 case BUILT_IN_POPCOUNTIMAX:
12521 case BUILT_IN_POPCOUNT:
12522 case BUILT_IN_PARITYL:
12523 case BUILT_IN_PARITYLL:
12524 case BUILT_IN_PARITYIMAX:
12525 case BUILT_IN_PARITY:
12526 case BUILT_IN_LABS:
12527 case BUILT_IN_LLABS:
12528 case BUILT_IN_PREFETCH:
ca4c3545 12529 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 12530 return true;
12531
12532 default:
12533 return is_simple_builtin (decl);
12534 }
12535
12536 return false;
12537}