[thirdparty/gcc.git] / gcc / builtins.c
53800dbe 1/* Expand builtin functions.
d353bf18 2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
20#include "config.h"
21#include "system.h"
805e22b2 22#include "coretypes.h"
9ef16211 23#include "backend.h"
d040a5b0 24#include "predict.h"
9ef16211 25#include "tree.h"
26#include "gimple.h"
53800dbe 27#include "rtl.h"
b20a8bb4 28#include "alias.h"
b20a8bb4 29#include "fold-const.h"
9ed99284 30#include "stringpool.h"
31#include "stor-layout.h"
32#include "calls.h"
33#include "varasm.h"
34#include "tree-object-size.h"
dae0b5cb 35#include "realmpfr.h"
94ea8568 36#include "cfgrtl.h"
bc61cadb 37#include "internal-fn.h"
53800dbe 38#include "flags.h"
39#include "regs.h"
53800dbe 40#include "except.h"
53800dbe 41#include "insn-config.h"
d53441c8 42#include "expmed.h"
43#include "dojump.h"
44#include "explow.h"
45#include "emit-rtl.h"
46#include "stmt.h"
53800dbe 47#include "expr.h"
34517c64 48#include "insn-codes.h"
d8fc4d0b 49#include "optabs.h"
50#include "libfuncs.h"
53800dbe 51#include "recog.h"
52#include "output.h"
53#include "typeclass.h"
1dd6c958 54#include "tm_p.h"
fc2a2dcb 55#include "target.h"
63c62881 56#include "langhooks.h"
073c1fd5 57#include "tree-ssanames.h"
58#include "tree-dfa.h"
162719b3 59#include "value-prof.h"
852f689e 60#include "diagnostic-core.h"
3b9c3a16 61#include "builtins.h"
f9acf11a 62#include "asan.h"
d037099f 63#include "cilk.h"
058a1b7a 64#include "cgraph.h"
65#include "tree-chkp.h"
66#include "rtl-chkp.h"
ca4c3545 67#include "gomp-constants.h"
53800dbe 68
5383fb56 69
239d491a 70static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
726e2588 71
3b9c3a16 72struct target_builtins default_target_builtins;
73#if SWITCHABLE_TARGET
74struct target_builtins *this_target_builtins = &default_target_builtins;
75#endif
76
ab7943b9 77/* Define the names of the builtin function types and codes. */
96423453 78const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
9cfddb70 81#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 82const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 83{
84#include "builtins.def"
85};
86#undef DEF_BUILTIN
ab7943b9 87
cffdfb3d 88/* Set up an array of builtin_info_type; make sure each element's decl is
df94cd3b 89 initialized to NULL_TREE. */
cffdfb3d 90builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 91
0b049e15 92/* Non-zero if __builtin_constant_p should be folded right away. */
93bool force_folding_builtin_constant_p;
94
3754d046 95static rtx c_readstr (const char *, machine_mode);
aecda0d6 96static int target_char_cast (tree, char *);
d8ae1baa 97static rtx get_memory_rtx (tree, tree);
aecda0d6 98static int apply_args_size (void);
99static int apply_result_size (void);
aecda0d6 100static rtx result_vector (int, rtx);
aecda0d6 101static void expand_builtin_prefetch (tree);
102static rtx expand_builtin_apply_args (void);
103static rtx expand_builtin_apply_args_1 (void);
104static rtx expand_builtin_apply (rtx, rtx, rtx);
105static void expand_builtin_return (rtx);
106static enum type_class type_to_class (tree);
107static rtx expand_builtin_classify_type (tree);
108static void expand_errno_check (tree, rtx);
109static rtx expand_builtin_mathfn (tree, rtx, rtx);
110static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
6b43bae4 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 114static rtx expand_builtin_sincos (tree);
f97eea22 115static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 118static rtx expand_builtin_next_arg (void);
aecda0d6 119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
3754d046 122static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
a65c4d64 123static rtx expand_builtin_strcmp (tree, rtx);
3754d046 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
a65c4d64 126static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 127static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
128static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
3754d046 129static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
f21337ef 130static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
48e1416a 131static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
f21337ef 132 machine_mode, int, tree);
a65c4d64 133static rtx expand_builtin_strcpy (tree, rtx);
134static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 135static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
a65c4d64 136static rtx expand_builtin_strncpy (tree, rtx);
3754d046 137static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
138static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 139static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 140static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 141static rtx expand_builtin_bzero (tree);
3754d046 142static rtx expand_builtin_strlen (tree, rtx, machine_mode);
5be42b39 143static rtx expand_builtin_alloca (tree, bool);
3754d046 144static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 145static rtx expand_builtin_frame_address (tree, tree);
389dd41b 146static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 147static rtx expand_builtin_expect (tree, rtx);
148static tree fold_builtin_constant_p (tree);
149static tree fold_builtin_classify_type (tree);
c7cbde74 150static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 151static tree fold_builtin_inf (location_t, tree, int);
aecda0d6 152static tree fold_builtin_nan (tree, tree, int);
389dd41b 153static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
b7bf20db 154static bool validate_arg (const_tree, enum tree_code code);
277f8dd2 155static bool integer_valued_real_p (tree);
389dd41b 156static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
aecda0d6 157static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 158static rtx expand_builtin_signbit (tree, rtx);
389dd41b 159static tree fold_builtin_sqrt (location_t, tree, tree);
160static tree fold_builtin_cbrt (location_t, tree, tree);
161static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
162static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
163static tree fold_builtin_cos (location_t, tree, tree, tree);
164static tree fold_builtin_cosh (location_t, tree, tree, tree);
bffb7645 165static tree fold_builtin_tan (tree, tree);
389dd41b 166static tree fold_builtin_trunc (location_t, tree, tree);
167static tree fold_builtin_floor (location_t, tree, tree);
168static tree fold_builtin_ceil (location_t, tree, tree);
169static tree fold_builtin_round (location_t, tree, tree);
170static tree fold_builtin_int_roundingfn (location_t, tree, tree);
10b9666f 171static tree fold_builtin_bitop (tree, tree);
389dd41b 172static tree fold_builtin_strchr (location_t, tree, tree, tree);
173static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
174static tree fold_builtin_memcmp (location_t, tree, tree, tree);
175static tree fold_builtin_strcmp (location_t, tree, tree);
176static tree fold_builtin_strncmp (location_t, tree, tree, tree);
177static tree fold_builtin_signbit (location_t, tree, tree);
178static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
179static tree fold_builtin_isascii (location_t, tree);
180static tree fold_builtin_toascii (location_t, tree);
181static tree fold_builtin_isdigit (location_t, tree);
182static tree fold_builtin_fabs (location_t, tree, tree);
183static tree fold_builtin_abs (location_t, tree, tree);
184static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 185 enum tree_code);
e80cc485 186static tree fold_builtin_0 (location_t, tree);
187static tree fold_builtin_1 (location_t, tree, tree);
188static tree fold_builtin_2 (location_t, tree, tree, tree);
189static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
190static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 191
192static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
193static tree fold_builtin_strstr (location_t, tree, tree, tree);
194static tree fold_builtin_strrchr (location_t, tree, tree, tree);
389dd41b 195static tree fold_builtin_strspn (location_t, tree, tree);
196static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 197
0a39fd54 198static rtx expand_builtin_object_size (tree);
3754d046 199static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 200 enum built_in_function);
201static void maybe_emit_chk_warning (tree, enum built_in_function);
202static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 203static void maybe_emit_free_warning (tree);
c2f47e15 204static tree fold_builtin_object_size (tree, tree);
99eabcc1 205
e788f202 206unsigned HOST_WIDE_INT target_newline;
b9ea678c 207unsigned HOST_WIDE_INT target_percent;
99eabcc1 208static unsigned HOST_WIDE_INT target_c;
209static unsigned HOST_WIDE_INT target_s;
aea88c77 210char target_percent_c[3];
b9ea678c 211char target_percent_s[3];
e788f202 212char target_percent_s_newline[4];
728bac60 213static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
214 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
f0c477f2 215static tree do_mpfr_arg2 (tree, tree, tree,
216 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
9917422b 217static tree do_mpfr_arg3 (tree, tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
d92f994c 219static tree do_mpfr_sincos (tree, tree, tree);
65dd1378 220static tree do_mpfr_bessel_n (tree, tree, tree,
221 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, bool);
e5407ca6 223static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 224static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 225static void expand_builtin_sync_synchronize (void);
0a39fd54 226
7bfefa9d 227/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or (with -fcilkplus) names one of the Cilk runtime entry points checked below. */
228
b29139ad 229static bool
1c47b3e8 230is_builtin_name (const char *name)
b6a5fc45 231{
b6a5fc45 232 if (strncmp (name, "__builtin_", 10) == 0)
233 return true;
234 if (strncmp (name, "__sync_", 7) == 0)
235 return true;
1cd6e20d 236 if (strncmp (name, "__atomic_", 9) == 0)
237 return true;
a89e6c15 238 if (flag_cilkplus
d037099f 239 && (!strcmp (name, "__cilkrts_detach")
240 || !strcmp (name, "__cilkrts_pop_frame")))
241 return true;
b6a5fc45 242 return false;
243}
4ee9c684 244
7bfefa9d 245
246/* Return true if DECL is a function symbol representing a built-in. */
247
248bool
249is_builtin_fn (tree decl)
250{
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252}
253
1c47b3e8 254/* Return true if NODE should be considered for inline expansion regardless
255 of the optimization level. This means whenever a function is invoked with
256 its "internal" name, which normally contains the prefix "__builtin". */
257
258static bool
259called_as_built_in (tree node)
260{
261 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
262 we want the name used to call the function, not the name it
263 will have. */
264 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
265 return is_builtin_name (name);
266}
267
ceea063b 268/* Compute values M and N such that M divides (address of EXP - N) and such
269 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
270 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
271 *ALIGNP and any bit-offset to *BITPOSP.
0d8f7716 272
273 Note that the address (and thus the alignment) computed here is based
274 on the address to which a symbol resolves, whereas DECL_ALIGN is based
275 on the address at which an object is actually located. These two
276 addresses are not always the same. For example, on ARM targets,
277 the address &foo of a Thumb function foo() has the lowest bit set,
3482bf13 278 whereas foo() itself starts on an even address.
698537d1 279
3482bf13 280 If ADDR_P is true we are taking the address of the memory reference EXP
281 and thus cannot rely on the access taking place. */
282
283static bool
284get_object_alignment_2 (tree exp, unsigned int *alignp,
285 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
698537d1 286{
98ab9e8f 287 HOST_WIDE_INT bitsize, bitpos;
288 tree offset;
3754d046 289 machine_mode mode;
98ab9e8f 290 int unsignedp, volatilep;
c8a2b4ff 291 unsigned int align = BITS_PER_UNIT;
ceea063b 292 bool known_alignment = false;
698537d1 293
98ab9e8f 294 /* Get the innermost object and the constant (bitpos) and possibly
295 variable (offset) offset of the access. */
296 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
297 &mode, &unsignedp, &volatilep, true);
298
299 /* Extract alignment information from the innermost object and
300 possibly adjust bitpos and offset. */
3482bf13 301 if (TREE_CODE (exp) == FUNCTION_DECL)
0d8f7716 302 {
3482bf13 303 /* Function addresses can encode extra information besides their
304 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
305 allows the low bit to be used as a virtual bit, we know
306 that the address itself must be at least 2-byte aligned. */
307 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
308 align = 2 * BITS_PER_UNIT;
0d8f7716 309 }
3482bf13 310 else if (TREE_CODE (exp) == LABEL_DECL)
311 ;
312 else if (TREE_CODE (exp) == CONST_DECL)
98ab9e8f 313 {
3482bf13 314 /* The alignment of a CONST_DECL is determined by its initializer. */
315 exp = DECL_INITIAL (exp);
98ab9e8f 316 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 317 if (CONSTANT_CLASS_P (exp))
318 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 319
3482bf13 320 known_alignment = true;
98ab9e8f 321 }
3482bf13 322 else if (DECL_P (exp))
ceea063b 323 {
3482bf13 324 align = DECL_ALIGN (exp);
ceea063b 325 known_alignment = true;
ceea063b 326 }
3482bf13 327 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
ceea063b 328 {
ceea063b 329 align = TYPE_ALIGN (TREE_TYPE (exp));
330 }
3482bf13 331 else if (TREE_CODE (exp) == INDIRECT_REF
332 || TREE_CODE (exp) == MEM_REF
333 || TREE_CODE (exp) == TARGET_MEM_REF)
98ab9e8f 334 {
335 tree addr = TREE_OPERAND (exp, 0);
ceea063b 336 unsigned ptr_align;
337 unsigned HOST_WIDE_INT ptr_bitpos;
ab1e78e5 338 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
ceea063b 339
ab1e78e5 340 /* If the address is explicitly aligned, handle that. */
98ab9e8f 341 if (TREE_CODE (addr) == BIT_AND_EXPR
342 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
343 {
ab1e78e5 344 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
345 ptr_bitmask *= BITS_PER_UNIT;
346 align = ptr_bitmask & -ptr_bitmask;
98ab9e8f 347 addr = TREE_OPERAND (addr, 0);
348 }
ceea063b 349
3482bf13 350 known_alignment
351 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
3482bf13 352 align = MAX (ptr_align, align);
353
ab1e78e5 354 /* Re-apply explicit alignment to the bitpos. */
355 ptr_bitpos &= ptr_bitmask;
356
4083990a 357 /* The alignment of the pointer operand in a TARGET_MEM_REF
358 has to take the variable offset parts into account. */
3482bf13 359 if (TREE_CODE (exp) == TARGET_MEM_REF)
153c3b50 360 {
3482bf13 361 if (TMR_INDEX (exp))
362 {
363 unsigned HOST_WIDE_INT step = 1;
364 if (TMR_STEP (exp))
f9ae6f95 365 step = TREE_INT_CST_LOW (TMR_STEP (exp));
3482bf13 366 align = MIN (align, (step & -step) * BITS_PER_UNIT);
367 }
368 if (TMR_INDEX2 (exp))
369 align = BITS_PER_UNIT;
370 known_alignment = false;
153c3b50 371 }
ceea063b 372
3482bf13 373 /* When EXP is an actual memory reference then we can use
374 TYPE_ALIGN of a pointer indirection to derive alignment.
375 Do so only if get_pointer_alignment_1 did not reveal absolute
4083990a 376 alignment knowledge and if using that alignment would
377 improve the situation. */
378 if (!addr_p && !known_alignment
379 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
380 align = TYPE_ALIGN (TREE_TYPE (exp));
381 else
382 {
383 /* Else adjust bitpos accordingly. */
384 bitpos += ptr_bitpos;
385 if (TREE_CODE (exp) == MEM_REF
386 || TREE_CODE (exp) == TARGET_MEM_REF)
e913b5cd 387 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
4083990a 388 }
98ab9e8f 389 }
3482bf13 390 else if (TREE_CODE (exp) == STRING_CST)
153c3b50 391 {
3482bf13 392 /* STRING_CST are the only constant objects we allow to be not
393 wrapped inside a CONST_DECL. */
394 align = TYPE_ALIGN (TREE_TYPE (exp));
3482bf13 395 if (CONSTANT_CLASS_P (exp))
396 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
e532afed 397
3482bf13 398 known_alignment = true;
98ab9e8f 399 }
98ab9e8f 400
401 /* If there is a non-constant offset part extract the maximum
402 alignment that can prevail. */
c8a2b4ff 403 if (offset)
98ab9e8f 404 {
ad464c56 405 unsigned int trailing_zeros = tree_ctz (offset);
c8a2b4ff 406 if (trailing_zeros < HOST_BITS_PER_INT)
98ab9e8f 407 {
c8a2b4ff 408 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
409 if (inner)
410 align = MIN (align, inner);
98ab9e8f 411 }
98ab9e8f 412 }
413
3482bf13 414 *alignp = align;
415 *bitposp = bitpos & (*alignp - 1);
ceea063b 416 return known_alignment;
0c883ef3 417}
418
3482bf13 419/* For a memory reference expression EXP compute values M and N such that M
420 divides (&EXP - N) and such that N < M. If these numbers can be determined,
421 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
422 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
423
424bool
425get_object_alignment_1 (tree exp, unsigned int *alignp,
426 unsigned HOST_WIDE_INT *bitposp)
427{
428 return get_object_alignment_2 (exp, alignp, bitposp, false);
429}
430
957d0361 431/* Return the alignment in bits of EXP, an object. */
0c883ef3 432
433unsigned int
957d0361 434get_object_alignment (tree exp)
0c883ef3 435{
436 unsigned HOST_WIDE_INT bitpos = 0;
437 unsigned int align;
438
ceea063b 439 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 440
98ab9e8f 441 /* align and bitpos now specify known low bits of the pointer.
442 ptr & (align - 1) == bitpos. */
443
444 if (bitpos != 0)
445 align = (bitpos & -bitpos);
957d0361 446 return align;
698537d1 447}
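/* Editor's sketch (illustrative only, not part of the original source):
   a worked example of the alignment/bit-offset contract documented above,
   assuming a typical target where int is 32 bits and 4-byte aligned.  */
#if 0
   struct s { int a; char b; char c; };   /* c lives at byte offset 5 */
   static struct s obj;                   /* DECL_ALIGN (obj) == 32 bits */
   /* For the reference obj.c:
        get_object_alignment_1 (obj.c, &align, &bitpos)
      sets align = 32 and bitpos = 8, i.e. (&obj.c - 1 byte) is a multiple
      of 4 bytes.  get_object_alignment (obj.c) then returns
      bitpos & -bitpos = 8, the one-byte alignment actually guaranteed.  */
#endif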
448
ceea063b 449/* For a pointer valued expression EXP compute values M and N such that M
450 divides (EXP - N) and such that N < M. If these numbers can be determined,
3482bf13 451 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
452 the results are just a conservative approximation.
53800dbe 453
ceea063b 454 If EXP is not a pointer, false is returned too. */
53800dbe 455
ceea063b 456bool
457get_pointer_alignment_1 (tree exp, unsigned int *alignp,
458 unsigned HOST_WIDE_INT *bitposp)
53800dbe 459{
153c3b50 460 STRIP_NOPS (exp);
535e2026 461
153c3b50 462 if (TREE_CODE (exp) == ADDR_EXPR)
3482bf13 463 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
464 alignp, bitposp, true);
906a9403 465 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
466 {
467 unsigned int align;
468 unsigned HOST_WIDE_INT bitpos;
469 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
470 &align, &bitpos);
471 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
472 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
473 else
474 {
475 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
476 if (trailing_zeros < HOST_BITS_PER_INT)
477 {
478 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
479 if (inner)
480 align = MIN (align, inner);
481 }
482 }
483 *alignp = align;
484 *bitposp = bitpos & (align - 1);
485 return res;
486 }
153c3b50 487 else if (TREE_CODE (exp) == SSA_NAME
488 && POINTER_TYPE_P (TREE_TYPE (exp)))
53800dbe 489 {
ceea063b 490 unsigned int ptr_align, ptr_misalign;
153c3b50 491 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
ceea063b 492
493 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
494 {
495 *bitposp = ptr_misalign * BITS_PER_UNIT;
496 *alignp = ptr_align * BITS_PER_UNIT;
3482bf13 497 /* We cannot really tell whether this result is an approximation. */
ceea063b 498 return true;
499 }
500 else
69fbc3aa 501 {
502 *bitposp = 0;
ceea063b 503 *alignp = BITS_PER_UNIT;
504 return false;
69fbc3aa 505 }
53800dbe 506 }
0bb8b39a 507 else if (TREE_CODE (exp) == INTEGER_CST)
508 {
509 *alignp = BIGGEST_ALIGNMENT;
f9ae6f95 510 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
0bb8b39a 511 & (BIGGEST_ALIGNMENT - 1));
512 return true;
513 }
153c3b50 514
69fbc3aa 515 *bitposp = 0;
ceea063b 516 *alignp = BITS_PER_UNIT;
517 return false;
53800dbe 518}
519
69fbc3aa 520/* Return the alignment in bits of EXP, a pointer valued expression.
521 The alignment returned is, by default, the alignment of the thing that
522 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
523
524 Otherwise, look at the expression to see if we can do better, i.e., if the
525 expression is actually pointing at an object whose alignment is tighter. */
526
527unsigned int
528get_pointer_alignment (tree exp)
529{
530 unsigned HOST_WIDE_INT bitpos = 0;
531 unsigned int align;
ceea063b 532
533 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 534
535 /* align and bitpos now specify known low bits of the pointer.
536 ptr & (align - 1) == bitpos. */
537
538 if (bitpos != 0)
539 align = (bitpos & -bitpos);
540
541 return align;
542}
543
53800dbe 544/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
545 way, because it could contain a zero byte in the middle.
546 TREE_STRING_LENGTH is the size of the character array, not the string.
547
4172d65e 548 ONLY_VALUE should be nonzero if the result is not going to be emitted
c09841f6 549 into the instruction stream and zero if it is going to be expanded.
4172d65e 550 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
681fab1e 551 is returned, otherwise NULL, since
552 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
553 evaluate the side-effects.
554
6bda159e 555 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556 accesses. Note that this implies the result is not going to be emitted
557 into the instruction stream.
558
902de8ed 559 The value returned is of type `ssizetype'.
560
53800dbe 561 Unfortunately, string_constant can't access the values of const char
562 arrays with initializers, so neither can we do so here. */
563
4ee9c684 564tree
681fab1e 565c_strlen (tree src, int only_value)
53800dbe 566{
567 tree offset_node;
27d0c333 568 HOST_WIDE_INT offset;
569 int max;
44acf429 570 const char *ptr;
da136652 571 location_t loc;
53800dbe 572
681fab1e 573 STRIP_NOPS (src);
574 if (TREE_CODE (src) == COND_EXPR
575 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
576 {
577 tree len1, len2;
578
579 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
580 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
0862b7e9 581 if (tree_int_cst_equal (len1, len2))
681fab1e 582 return len1;
583 }
584
585 if (TREE_CODE (src) == COMPOUND_EXPR
586 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
587 return c_strlen (TREE_OPERAND (src, 1), only_value);
588
3df42822 589 loc = EXPR_LOC_OR_LOC (src, input_location);
da136652 590
53800dbe 591 src = string_constant (src, &offset_node);
592 if (src == 0)
c2f47e15 593 return NULL_TREE;
902de8ed 594
83d79705 595 max = TREE_STRING_LENGTH (src) - 1;
53800dbe 596 ptr = TREE_STRING_POINTER (src);
902de8ed 597
53800dbe 598 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
599 {
600 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
601 compute the offset to the following null if we don't know where to
602 start searching for it. */
603 int i;
902de8ed 604
53800dbe 605 for (i = 0; i < max; i++)
606 if (ptr[i] == 0)
c2f47e15 607 return NULL_TREE;
902de8ed 608
53800dbe 609 /* We don't know the starting offset, but we do know that the string
610 has no internal zero bytes. We can assume that the offset falls
611 within the bounds of the string; otherwise, the programmer deserves
612 what he gets. Subtract the offset from the length of the string,
902de8ed 613 and return that. This would perhaps not be valid if we were dealing
614 with named arrays in addition to literal string constants. */
615
da136652 616 return size_diffop_loc (loc, size_int (max), offset_node);
53800dbe 617 }
618
619 /* We have a known offset into the string. Start searching there for
27d0c333 620 a null character if we can represent it as a single HOST_WIDE_INT. */
dabc4084 621 if (offset_node == 0)
53800dbe 622 offset = 0;
35ec552a 623 else if (! tree_fits_shwi_p (offset_node))
dabc4084 624 offset = -1;
53800dbe 625 else
e913b5cd 626 offset = tree_to_shwi (offset_node);
902de8ed 627
1f63a7d6 628 /* If the offset is known to be out of bounds, warn, and call strlen at
629 runtime. */
2f1c4f17 630 if (offset < 0 || offset > max)
53800dbe 631 {
1f63a7d6 632 /* Suppress multiple warnings for propagated constant strings. */
2f1c4f17 633 if (only_value != 2
634 && !TREE_NO_WARNING (src))
1f63a7d6 635 {
da136652 636 warning_at (loc, 0, "offset outside bounds of constant string");
1f63a7d6 637 TREE_NO_WARNING (src) = 1;
638 }
c2f47e15 639 return NULL_TREE;
53800dbe 640 }
902de8ed 641
53800dbe 642 /* Use strlen to search for the first zero byte. Since any strings
643 constructed with build_string will have nulls appended, we win even
644 if we get handed something like (char[4])"abcd".
645
646 Since OFFSET is our starting index into the string, no further
647 calculation is needed. */
902de8ed 648 return ssize_int (strlen (ptr + offset));
53800dbe 649}
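/* Editor's sketch (illustrative only): the source-level effect of the
   strlen folders built on top of c_strlen.  <string.h> is assumed and the
   variable names are hypothetical.  */
#if 0
   size_t a = strlen ("foobar");       /* folded to 6 at compile time */
   size_t b = strlen ("foo\0bar");     /* folded to 3: stops at the inner NUL */
   size_t c = strlen ("foobar" + 2);   /* folded to 4: constant string with a
                                          known offset, as handled above */
#endif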
650
83d79705 651/* Return a char pointer for a C string if it is a string constant
652 or sum of string constant and integer constant. */
653
b9ea678c 654const char *
aecda0d6 655c_getstr (tree src)
83d79705 656{
657 tree offset_node;
83d79705 658
659 src = string_constant (src, &offset_node);
660 if (src == 0)
661 return 0;
662
8c85fcb7 663 if (offset_node == 0)
664 return TREE_STRING_POINTER (src);
e913b5cd 665 else if (!tree_fits_uhwi_p (offset_node)
8c85fcb7 666 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
83d79705 667 return 0;
83d79705 668
e913b5cd 669 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
83d79705 670}
671
e913b5cd 672/* Return a constant integer corresponding to target reading
8c85fcb7 673 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 674
6840589f 675static rtx
3754d046 676c_readstr (const char *str, machine_mode mode)
6840589f 677{
6840589f 678 HOST_WIDE_INT ch;
679 unsigned int i, j;
e913b5cd 680 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 681
682 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 683 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
684 / HOST_BITS_PER_WIDE_INT;
685
a12aa4cc 686 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 687 for (i = 0; i < len; i++)
688 tmp[i] = 0;
6840589f 689
6840589f 690 ch = 1;
691 for (i = 0; i < GET_MODE_SIZE (mode); i++)
692 {
693 j = i;
694 if (WORDS_BIG_ENDIAN)
695 j = GET_MODE_SIZE (mode) - i - 1;
696 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 697 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 698 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
699 j *= BITS_PER_UNIT;
7d3f6cc7 700
6840589f 701 if (ch)
702 ch = (unsigned char) str[i];
e913b5cd 703 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 704 }
ddb1be65 705
ab2c1de8 706 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 707 return immed_wide_int_const (c, mode);
6840589f 708}
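/* Editor's sketch (illustrative only): what c_readstr produces for a few
   inputs, assuming 8-bit units and a 32-bit SImode.  */
#if 0
   c_readstr ("abcd", SImode);  /* little-endian target: 0x64636261,
                                   big-endian target:    0x61626364 */
   c_readstr ("ab", SImode);    /* the terminating NUL and every byte after
                                   it read as zero: 0x00006261 (LE) */
#endif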
709
ecc318ff 710/* Cast a target constant CST to target CHAR and, if that value fits into
5206b159 711 the host char type, return zero and put that value into the variable pointed to by
ecc318ff 712 P. */
713
714static int
aecda0d6 715target_char_cast (tree cst, char *p)
ecc318ff 716{
717 unsigned HOST_WIDE_INT val, hostval;
718
c19686c5 719 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 720 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
721 return 1;
722
e913b5cd 723 /* Do not care if it fits or not right here. */
f9ae6f95 724 val = TREE_INT_CST_LOW (cst);
e913b5cd 725
ecc318ff 726 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
727 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
728
729 hostval = val;
730 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
731 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
732
733 if (val != hostval)
734 return 1;
735
736 *p = hostval;
737 return 0;
738}
739
4ee9c684 740/* Similar to save_expr, but assumes that arbitrary code is not executed
741 in between the multiple evaluations. In particular, we assume that a
742 non-addressable local variable will not be modified. */
743
744static tree
745builtin_save_expr (tree exp)
746{
f6c35aa4 747 if (TREE_CODE (exp) == SSA_NAME
748 || (TREE_ADDRESSABLE (exp) == 0
749 && (TREE_CODE (exp) == PARM_DECL
750 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
4ee9c684 751 return exp;
752
753 return save_expr (exp);
754}
755
53800dbe 756/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
757 times to get the address of either a higher stack frame, or a return
758 address located within it (depending on FNDECL_CODE). */
902de8ed 759
c626df3d 760static rtx
869d0ef0 761expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 762{
763 int i;
764
869d0ef0 765#ifdef INITIAL_FRAME_ADDRESS_RTX
766 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
767#else
e3e15c50 768 rtx tem;
769
1b74fde7 770 /* For a zero count with __builtin_return_address, we don't care what
771 frame address we return, because target-specific definitions will
772 override us. Therefore frame pointer elimination is OK, and using
773 the soft frame pointer is OK.
774
fa7637bd 775 For a nonzero count, or a zero count with __builtin_frame_address,
1b74fde7 776 we require a stable offset from the current frame pointer to the
777 previous one, so we must use the hard frame pointer, and
e3e15c50 778 we must disable frame pointer elimination. */
1b74fde7 779 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
e3e15c50 780 tem = frame_pointer_rtx;
a0c938f0 781 else
e3e15c50 782 {
783 tem = hard_frame_pointer_rtx;
784
785 /* Tell reload not to eliminate the frame pointer. */
18d50ae6 786 crtl->accesses_prior_frames = 1;
e3e15c50 787 }
869d0ef0 788#endif
789
53800dbe 790 /* Some machines need special handling before we can access
3a69c60c 791 arbitrary frames. For example, on the SPARC, we must first flush
53800dbe 792 all register windows to the stack. */
793#ifdef SETUP_FRAME_ADDRESSES
794 if (count > 0)
795 SETUP_FRAME_ADDRESSES ();
796#endif
797
3a69c60c 798 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 799 register. There is no way to access it off of the current frame
800 pointer, but it can be accessed off the previous frame pointer by
801 reading the value from the register window save area. */
a26d6c60 802 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 803 count--;
53800dbe 804
805 /* Scan back COUNT frames to the specified frame. */
806 for (i = 0; i < count; i++)
807 {
808 /* Assume the dynamic chain pointer is in the word that the
809 frame address points to, unless otherwise specified. */
810#ifdef DYNAMIC_CHAIN_ADDRESS
811 tem = DYNAMIC_CHAIN_ADDRESS (tem);
812#endif
813 tem = memory_address (Pmode, tem);
00060fc2 814 tem = gen_frame_mem (Pmode, tem);
83fc1478 815 tem = copy_to_reg (tem);
53800dbe 816 }
817
3a69c60c 818 /* For __builtin_frame_address, return what we've got. But, on
819 the SPARC for example, we may have to add a bias. */
53800dbe 820 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 821#ifdef FRAME_ADDR_RTX
822 return FRAME_ADDR_RTX (tem);
823#else
53800dbe 824 return tem;
3a69c60c 825#endif
53800dbe 826
3a69c60c 827 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 828#ifdef RETURN_ADDR_RTX
829 tem = RETURN_ADDR_RTX (count, tem);
830#else
831 tem = memory_address (Pmode,
29c05e22 832 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 833 tem = gen_frame_mem (Pmode, tem);
53800dbe 834#endif
835 return tem;
836}
837
f7c44134 838/* Alias set used for setjmp buffer. */
32c2fdea 839static alias_set_type setjmp_alias_set = -1;
f7c44134 840
6b7f6858 841/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 842 return to RECEIVER_LABEL. This is also called directly by the SJLJ
843 exception handling code. */
53800dbe 844
6b7f6858 845void
aecda0d6 846expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 847{
3754d046 848 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 849 rtx stack_save;
f7c44134 850 rtx mem;
53800dbe 851
f7c44134 852 if (setjmp_alias_set == -1)
853 setjmp_alias_set = new_alias_set ();
854
85d654dd 855 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 856
37ae8504 857 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 858
6b7f6858 859 /* We store the frame pointer and the address of receiver_label in
860 the buffer and use the rest of it for the stack save area, which
861 is machine-dependent. */
53800dbe 862
f7c44134 863 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 864 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 865 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 866
29c05e22 867 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
868 GET_MODE_SIZE (Pmode))),
ab6ab77e 869 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 870
871 emit_move_insn (validize_mem (mem),
6b7f6858 872 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 873
874 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 875 plus_constant (Pmode, buf_addr,
53800dbe 876 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 877 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 878 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 879
880 /* If there is further processing to do, do it. */
a3c81e61 881 if (targetm.have_builtin_setjmp_setup ())
882 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 883
29f09705 884 /* We have a nonlocal label. */
18d50ae6 885 cfun->has_nonlocal_label = 1;
6b7f6858 886}
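/* Editor's note (layout sketch derived from the stores above): the setjmp
   buffer is written as

       word 0        frame value (targetm.builtin_setjmp_frame_value)
       word 1        address of RECEIVER_LABEL
       words 2..4    machine-dependent stack save area (sa_mode)

   and the same offsets are read back by expand_builtin_longjmp below.  */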
53800dbe 887
2c8a1497 888/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 889 also called directly by the SJLJ exception handling code.
890 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
6b7f6858 891
892void
a3c81e61 893expand_builtin_setjmp_receiver (rtx receiver_label)
6b7f6858 894{
82c7907c 895 rtx chain;
896
4598ade9 897 /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 898 marked as used by this function. */
18b42941 899 emit_use (hard_frame_pointer_rtx);
53800dbe 900
901 /* Mark the static chain as clobbered here so life information
902 doesn't get messed up for it. */
82c7907c 903 chain = targetm.calls.static_chain (current_function_decl, true);
904 if (chain && REG_P (chain))
905 emit_clobber (chain);
53800dbe 906
907 /* Now put in the code to restore the frame pointer, and argument
491e04ef 908 pointer, if needed. */
a3c81e61 909 if (! targetm.have_nonlocal_goto ())
62dcb5c8 910 {
911 /* First adjust our frame pointer to its actual value. It was
912 previously set to the start of the virtual area corresponding to
913 the stacked variables when we branched here and now needs to be
914 adjusted to the actual hardware fp value.
915
916 Assignments to virtual registers are converted by
917 instantiate_virtual_regs into the corresponding assignment
918 to the underlying register (fp in this case) that makes
919 the original assignment true.
920 So the following insn will actually be decrementing fp by
921 STARTING_FRAME_OFFSET. */
922 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
923
924 /* Restoring the frame pointer also modifies the hard frame pointer.
925 Mark it used (so that the previous assignment remains live once
926 the frame pointer is eliminated) and clobbered (to represent the
927 implicit update from the assignment). */
928 emit_use (hard_frame_pointer_rtx);
929 emit_clobber (hard_frame_pointer_rtx);
930 }
53800dbe 931
5ae82d58 932#if !HARD_FRAME_POINTER_IS_ARG_POINTER
53800dbe 933 if (fixed_regs[ARG_POINTER_REGNUM])
934 {
935#ifdef ELIMINABLE_REGS
4598ade9 936 /* If the argument pointer can be eliminated in favor of the
937 frame pointer, we don't need to restore it. We assume here
938 that if such an elimination is present, it can always be used.
939 This is the case on all known machines; if we don't make this
940 assumption, we do unnecessary saving on many machines. */
53800dbe 941 size_t i;
e99c3a1d 942 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 943
3098b2d3 944 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 945 if (elim_regs[i].from == ARG_POINTER_REGNUM
946 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
947 break;
948
3098b2d3 949 if (i == ARRAY_SIZE (elim_regs))
53800dbe 950#endif
951 {
952 /* Now restore our arg pointer from the address at which it
05927e40 953 was saved in our stack frame. */
27a7a23a 954 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 955 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 956 }
957 }
958#endif
959
a3c81e61 960 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
961 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
962 else if (targetm.have_nonlocal_goto_receiver ())
963 emit_insn (targetm.gen_nonlocal_goto_receiver ());
53800dbe 964 else
a3c81e61 965 { /* Nothing */ }
57f6bb94 966
3072d30e 967 /* We must not allow the code we just generated to be reordered by
968 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 969 happen immediately, not later. */
3072d30e 970 emit_insn (gen_blockage ());
6b7f6858 971}
53800dbe 972
53800dbe 973/* __builtin_longjmp is passed a pointer to an array of five words (not
974 all will be used on all machines). It operates similarly to the C
975 library function of the same name, but is more efficient. Much of
2c8a1497 976 the code below is copied from the handling of non-local gotos. */
53800dbe 977
c626df3d 978static void
aecda0d6 979expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 980{
1e0c0b35 981 rtx fp, lab, stack;
982 rtx_insn *insn, *last;
3754d046 983 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 984
48e1416a 985 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 986 function */
987 if (SUPPORTS_STACK_ALIGNMENT)
988 crtl->need_drap = true;
989
f7c44134 990 if (setjmp_alias_set == -1)
991 setjmp_alias_set = new_alias_set ();
992
85d654dd 993 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 994
53800dbe 995 buf_addr = force_reg (Pmode, buf_addr);
996
82c7907c 997 /* We require that the user must pass a second argument of 1, because
998 that is what builtin_setjmp will return. */
64db345d 999 gcc_assert (value == const1_rtx);
53800dbe 1000
4712c7d6 1001 last = get_last_insn ();
a3c81e61 1002 if (targetm.have_builtin_longjmp ())
1003 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
53800dbe 1004 else
53800dbe 1005 {
1006 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1007 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1008 GET_MODE_SIZE (Pmode)));
1009
29c05e22 1010 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1011 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1012 set_mem_alias_set (fp, setjmp_alias_set);
1013 set_mem_alias_set (lab, setjmp_alias_set);
1014 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1015
1016 /* Pick up FP, label, and SP from the block and jump. This code is
1017 from expand_goto in stmt.c; see there for detailed comments. */
a3c81e61 1018 if (targetm.have_nonlocal_goto ())
53800dbe 1019 /* We have to pass a value to the nonlocal_goto pattern that will
1020 get copied into the static_chain pointer, but it does not matter
1021 what that value is, because builtin_setjmp does not use it. */
a3c81e61 1022 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1023 else
53800dbe 1024 {
1025 lab = copy_to_reg (lab);
1026
18b42941 1027 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1028 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1029
53800dbe 1030 emit_move_insn (hard_frame_pointer_rtx, fp);
e9c97615 1031 emit_stack_restore (SAVE_NONLOCAL, stack);
53800dbe 1032
18b42941 1033 emit_use (hard_frame_pointer_rtx);
1034 emit_use (stack_pointer_rtx);
53800dbe 1035 emit_indirect_jump (lab);
1036 }
1037 }
615166bb 1038
1039 /* Search backwards and mark the jump insn as a non-local goto.
1040 Note that this precludes the use of __builtin_longjmp to a
1041 __builtin_setjmp target in the same function. However, we've
1042 already cautioned the user that these functions are for
1043 internal exception handling use only. */
449c0509 1044 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1045 {
64db345d 1046 gcc_assert (insn != last);
7d3f6cc7 1047
6d7dc5b9 1048 if (JUMP_P (insn))
449c0509 1049 {
a1ddb869 1050 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1051 break;
1052 }
6d7dc5b9 1053 else if (CALL_P (insn))
9342ee68 1054 break;
449c0509 1055 }
53800dbe 1056}
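/* Editor's sketch (illustrative user-level code): the documented usage
   pattern for the two builtins expanded above.  DO_WORK and HANDLE_UNWIND
   are hypothetical helpers.  */
#if 0
   static void *jbuf[5];              /* five-word buffer, as required */

   if (__builtin_setjmp (jbuf) == 0)
     do_work ();                      /* normal path */
   else
     handle_unwind ();                /* reached after the longjmp below */

   /* In a function called from the one containing the setjmp (a longjmp
      back to a setjmp target in the same function is not supported, as
      noted in the comment above):  */
   __builtin_longjmp (jbuf, 1);       /* the second argument must be 1 */
#endif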
1057
0e80b01d 1058static inline bool
1059more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1060{
1061 return (iter->i < iter->n);
1062}
1063
1064/* This function validates the types of a function call argument list
1065 against a specified list of tree_codes. If the last specifier is a 0,
1066 that represents an ellipsis; otherwise the last specifier must be a
1067 VOID_TYPE. */
1068
1069static bool
1070validate_arglist (const_tree callexpr, ...)
1071{
1072 enum tree_code code;
1073 bool res = 0;
1074 va_list ap;
1075 const_call_expr_arg_iterator iter;
1076 const_tree arg;
1077
1078 va_start (ap, callexpr);
1079 init_const_call_expr_arg_iterator (callexpr, &iter);
1080
1081 do
1082 {
1083 code = (enum tree_code) va_arg (ap, int);
1084 switch (code)
1085 {
1086 case 0:
1087 /* This signifies an ellipsis; any further arguments are all ok. */
1088 res = true;
1089 goto end;
1090 case VOID_TYPE:
1091 /* This signifies an endlink, if no arguments remain, return
1092 true, otherwise return false. */
1093 res = !more_const_call_expr_args_p (&iter);
1094 goto end;
1095 default:
1096 /* If no parameters remain or the parameter's code does not
1097 match the specified code, return false. Otherwise continue
1098 checking any remaining arguments. */
1099 arg = next_const_call_expr_arg (&iter);
1100 if (!validate_arg (arg, code))
1101 goto end;
1102 break;
1103 }
1104 }
1105 while (1);
1106
1107 /* We need gotos here since we can only have one VA_CLOSE in a
1108 function. */
1109 end: ;
1110 va_end (ap);
1111
1112 return res;
1113}
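/* Editor's sketch (illustrative only): for a builtin with the hypothetical
   prototype void *f (void *, size_t), a check such as

     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)

   succeeds only when exactly two arguments with matching codes are present,
   whereas ending the list with 0 instead of VOID_TYPE accepts any trailing
   arguments (the ellipsis case handled above).  */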
1114
4ee9c684 1115/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1116 and the address of the save area. */
1117
1118static rtx
c2f47e15 1119expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1120{
1121 tree t_label, t_save_area;
1e0c0b35 1122 rtx r_label, r_save_area, r_fp, r_sp;
1123 rtx_insn *insn;
4ee9c684 1124
c2f47e15 1125 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1126 return NULL_RTX;
1127
c2f47e15 1128 t_label = CALL_EXPR_ARG (exp, 0);
1129 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1130
8ec3c5c2 1131 r_label = expand_normal (t_label);
3dce56cc 1132 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1133 r_save_area = expand_normal (t_save_area);
3dce56cc 1134 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1135 /* Copy the address of the save location to a register just in case it was
1136 based on the frame pointer. */
51adbc8a 1137 r_save_area = copy_to_reg (r_save_area);
4ee9c684 1138 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1139 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1140 plus_constant (Pmode, r_save_area,
1141 GET_MODE_SIZE (Pmode)));
4ee9c684 1142
18d50ae6 1143 crtl->has_nonlocal_goto = 1;
4ee9c684 1144
4ee9c684 1145 /* ??? We no longer need to pass the static chain value, afaik. */
a3c81e61 1146 if (targetm.have_nonlocal_goto ())
1147 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
4ee9c684 1148 else
4ee9c684 1149 {
1150 r_label = copy_to_reg (r_label);
1151
18b42941 1152 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1153 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1154
d1ff492e 1155 /* Restore frame pointer for containing function. */
4ee9c684 1156 emit_move_insn (hard_frame_pointer_rtx, r_fp);
e9c97615 1157 emit_stack_restore (SAVE_NONLOCAL, r_sp);
491e04ef 1158
4ee9c684 1159 /* USE of hard_frame_pointer_rtx added for consistency;
1160 not clear if really needed. */
18b42941 1161 emit_use (hard_frame_pointer_rtx);
1162 emit_use (stack_pointer_rtx);
ad0d0af8 1163
1164 /* If the architecture is using a GP register, we must
1165 conservatively assume that the target function makes use of it.
1166 The prologue of functions with nonlocal gotos must therefore
1167 initialize the GP register to the appropriate value, and we
1168 must then make sure that this value is live at the point
1169 of the jump. (Note that this doesn't necessarily apply
1170 to targets with a nonlocal_goto pattern; they are free
1171 to implement it in their own way. Note also that this is
1172 a no-op if the GP register is a global invariant.) */
1173 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1174 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
18b42941 1175 emit_use (pic_offset_table_rtx);
ad0d0af8 1176
4ee9c684 1177 emit_indirect_jump (r_label);
1178 }
491e04ef 1179
4ee9c684 1180 /* Search backwards to the jump insn and mark it as a
1181 non-local goto. */
1182 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1183 {
6d7dc5b9 1184 if (JUMP_P (insn))
4ee9c684 1185 {
a1ddb869 1186 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1187 break;
1188 }
6d7dc5b9 1189 else if (CALL_P (insn))
4ee9c684 1190 break;
1191 }
1192
1193 return const0_rtx;
1194}
1195
843d08a9 1196/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1197 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1198 It updates the stack pointer in that block to the current value. This is
1199 also called directly by the SJLJ exception handling code. */
843d08a9 1200
97354ae4 1201void
843d08a9 1202expand_builtin_update_setjmp_buf (rtx buf_addr)
1203{
3754d046 1204 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
d1ff492e 1205 rtx stack_save
843d08a9 1206 = gen_rtx_MEM (sa_mode,
1207 memory_address
1208 (sa_mode,
29c05e22 1209 plus_constant (Pmode, buf_addr,
1210 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1211
e9c97615 1212 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1213}
1214
5e3608d8 1215/* Expand a call to __builtin_prefetch. For a target that does not support
1216 data prefetch, evaluate the memory address argument in case it has side
1217 effects. */
1218
1219static void
c2f47e15 1220expand_builtin_prefetch (tree exp)
5e3608d8 1221{
1222 tree arg0, arg1, arg2;
c2f47e15 1223 int nargs;
5e3608d8 1224 rtx op0, op1, op2;
1225
c2f47e15 1226 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1227 return;
1228
c2f47e15 1229 arg0 = CALL_EXPR_ARG (exp, 0);
1230
26a5cadb 1231 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1232 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1233 locality). */
c2f47e15 1234 nargs = call_expr_nargs (exp);
1235 if (nargs > 1)
1236 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1237 else
c2f47e15 1238 arg1 = integer_zero_node;
1239 if (nargs > 2)
1240 arg2 = CALL_EXPR_ARG (exp, 2);
1241 else
2512209b 1242 arg2 = integer_three_node;
5e3608d8 1243
1244 /* Argument 0 is an address. */
1245 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1246
1247 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1248 if (TREE_CODE (arg1) != INTEGER_CST)
1249 {
07e3a3d2 1250 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1251 arg1 = integer_zero_node;
5e3608d8 1252 }
8ec3c5c2 1253 op1 = expand_normal (arg1);
5e3608d8 1254 /* Argument 1 must be either zero or one. */
1255 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1256 {
c3ceba8e 1257 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1258 " using zero");
5e3608d8 1259 op1 = const0_rtx;
1260 }
1261
1262 /* Argument 2 (locality) must be a compile-time constant int. */
1263 if (TREE_CODE (arg2) != INTEGER_CST)
1264 {
07e3a3d2 1265 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1266 arg2 = integer_zero_node;
1267 }
8ec3c5c2 1268 op2 = expand_normal (arg2);
5e3608d8 1269 /* Argument 2 must be 0, 1, 2, or 3. */
1270 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1271 {
c3ceba8e 1272 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1273 op2 = const0_rtx;
1274 }
1275
1d375a79 1276 if (targetm.have_prefetch ())
5e3608d8 1277 {
8786db1e 1278 struct expand_operand ops[3];
1279
1280 create_address_operand (&ops[0], op0);
1281 create_integer_operand (&ops[1], INTVAL (op1));
1282 create_integer_operand (&ops[2], INTVAL (op2));
1d375a79 1283 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
8786db1e 1284 return;
5e3608d8 1285 }
0a534ba7 1286
f0ce3b1f 1287 /* Don't do anything with direct references to volatile memory, but
1288 generate code to handle other side effects. */
e16ceb8e 1289 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1290 emit_insn (op0);
5e3608d8 1291}
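/* Editor's sketch (illustrative user-level code): calls handled by the
   expander above.  P, A and I are hypothetical.  */
#if 0
   __builtin_prefetch (p);          /* rw = 0 (read), locality = 3: the defaults */
   __builtin_prefetch (p, 1, 0);    /* prefetch for writing, no temporal locality */
   __builtin_prefetch (&a[i++]);    /* the address is still evaluated for its
                                       side effects even on targets without a
                                       prefetch instruction */
#endif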
1292
f7c44134 1293/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1294 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1295 the maximum length of the block of memory that might be accessed or
1296 NULL if unknown. */
f7c44134 1297
53800dbe 1298static rtx
d8ae1baa 1299get_memory_rtx (tree exp, tree len)
53800dbe 1300{
ad0a178f 1301 tree orig_exp = exp;
1302 rtx addr, mem;
ad0a178f 1303
1304 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1305 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1306 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1307 exp = TREE_OPERAND (exp, 0);
1308
1309 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1310 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1311
f7c44134 1312 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1313 First remove any nops. */
72dd6141 1314 while (CONVERT_EXPR_P (exp)
f7c44134 1315 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1316 exp = TREE_OPERAND (exp, 0);
1317
5dd3f78f 1318 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1319 (as builtin stringops may alias with anything). */
1320 exp = fold_build2 (MEM_REF,
1321 build_array_type (char_type_node,
1322 build_range_type (sizetype,
1323 size_one_node, len)),
1324 exp, build_int_cst (ptr_type_node, 0));
1325
1326 /* If the MEM_REF has no acceptable address, try to get the base object
1327 from the original address we got, and build an all-aliasing
1328 unknown-sized access to that one. */
1329 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1330 set_mem_attributes (mem, exp, 0);
1331 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1332 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1333 0))))
eec8e941 1334 {
5dd3f78f 1335 exp = build_fold_addr_expr (exp);
1336 exp = fold_build2 (MEM_REF,
1337 build_array_type (char_type_node,
1338 build_range_type (sizetype,
1339 size_zero_node,
1340 NULL)),
1341 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1342 set_mem_attributes (mem, exp, 0);
eec8e941 1343 }
5dd3f78f 1344 set_mem_alias_set (mem, 0);
53800dbe 1345 return mem;
1346}
1347\f
1348/* Built-in functions to perform an untyped call and return. */
1349
3b9c3a16 1350#define apply_args_mode \
1351 (this_target_builtins->x_apply_args_mode)
1352#define apply_result_mode \
1353 (this_target_builtins->x_apply_result_mode)
53800dbe 1354
53800dbe 1355/* Return the size required for the block returned by __builtin_apply_args,
1356 and initialize apply_args_mode. */
1357
1358static int
aecda0d6 1359apply_args_size (void)
53800dbe 1360{
1361 static int size = -1;
58e9ce8f 1362 int align;
1363 unsigned int regno;
3754d046 1364 machine_mode mode;
53800dbe 1365
1366 /* The values computed by this function never change. */
1367 if (size < 0)
1368 {
1369 /* The first value is the incoming arg-pointer. */
1370 size = GET_MODE_SIZE (Pmode);
1371
1372 /* The second value is the structure value address unless this is
1373 passed as an "invisible" first argument. */
6812c89e 1374 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1375 size += GET_MODE_SIZE (Pmode);
1376
1377 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1378 if (FUNCTION_ARG_REGNO_P (regno))
1379 {
4bac51c9 1380 mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1381
64db345d 1382 gcc_assert (mode != VOIDmode);
53800dbe 1383
1384 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1385 if (size % align != 0)
1386 size = CEIL (size, align) * align;
53800dbe 1387 size += GET_MODE_SIZE (mode);
1388 apply_args_mode[regno] = mode;
1389 }
1390 else
1391 {
1392 apply_args_mode[regno] = VOIDmode;
53800dbe 1393 }
1394 }
1395 return size;
1396}
1397
1398/* Return the size required for the block returned by __builtin_apply,
1399 and initialize apply_result_mode. */
1400
1401static int
aecda0d6 1402apply_result_size (void)
53800dbe 1403{
1404 static int size = -1;
1405 int align, regno;
3754d046 1406 machine_mode mode;
53800dbe 1407
1408 /* The values computed by this function never change. */
1409 if (size < 0)
1410 {
1411 size = 0;
1412
1413 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1414 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1415 {
4bac51c9 1416 mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1417
64db345d 1418 gcc_assert (mode != VOIDmode);
53800dbe 1419
1420 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1421 if (size % align != 0)
1422 size = CEIL (size, align) * align;
1423 size += GET_MODE_SIZE (mode);
1424 apply_result_mode[regno] = mode;
1425 }
1426 else
1427 apply_result_mode[regno] = VOIDmode;
1428
1429 /* Allow targets that use untyped_call and untyped_return to override
1430 the size so that machine-specific information can be stored here. */
1431#ifdef APPLY_RESULT_SIZE
1432 size = APPLY_RESULT_SIZE;
1433#endif
1434 }
1435 return size;
1436}
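/* Editor's sketch (illustrative user-level code): the untyped call/return
   builtins whose register-block layout is computed above.  FUNC is a
   hypothetical function pointer and 64 a caller-chosen stack-argument
   size in bytes.  */
#if 0
   void *args   = __builtin_apply_args ();           /* capture incoming regs */
   void *result = __builtin_apply (func, args, 64);  /* forward them to FUNC */
   __builtin_return (result);                        /* return FUNC's result */
#endif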
1437
53800dbe 1438/* Create a vector describing the result block RESULT. If SAVEP is true,
1439 the result block is used to save the values; otherwise it is used to
1440 restore the values. */
1441
1442static rtx
aecda0d6 1443result_vector (int savep, rtx result)
53800dbe 1444{
1445 int regno, size, align, nelts;
3754d046 1446 machine_mode mode;
53800dbe 1447 rtx reg, mem;
364c0c59 1448 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1449
53800dbe 1450 size = nelts = 0;
1451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1452 if ((mode = apply_result_mode[regno]) != VOIDmode)
1453 {
1454 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1455 if (size % align != 0)
1456 size = CEIL (size, align) * align;
1457 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1458 mem = adjust_address (result, mode, size);
53800dbe 1459 savevec[nelts++] = (savep
d1f9b275 1460 ? gen_rtx_SET (mem, reg)
1461 : gen_rtx_SET (reg, mem));
53800dbe 1462 size += GET_MODE_SIZE (mode);
1463 }
1464 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1465}
53800dbe 1466
1467/* Save the state required to perform an untyped call with the same
1468 arguments as were passed to the current function. */
1469
1470static rtx
aecda0d6 1471expand_builtin_apply_args_1 (void)
53800dbe 1472{
1c7e61a7 1473 rtx registers, tem;
53800dbe 1474 int size, align, regno;
3754d046 1475 machine_mode mode;
6812c89e 1476 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1477
1478 /* Create a block where the arg-pointer, structure value address,
1479 and argument registers can be saved. */
1480 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1481
1482 /* Walk past the arg-pointer and structure value address. */
1483 size = GET_MODE_SIZE (Pmode);
6812c89e 1484 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1485 size += GET_MODE_SIZE (Pmode);
1486
1487 /* Save each register used in calling a function to the block. */
1488 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1489 if ((mode = apply_args_mode[regno]) != VOIDmode)
1490 {
53800dbe 1491 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1492 if (size % align != 0)
1493 size = CEIL (size, align) * align;
1494
1495 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1496
e513d163 1497 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1498 size += GET_MODE_SIZE (mode);
1499 }
1500
1501 /* Save the arg pointer to the block. */
27a7a23a 1502 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1503	  /* We need the pointer as the caller actually passed the arguments to us,
9d4b544c 1504	     not as we might have pretended they were passed.  Make sure it's a valid
1505	     operand, as emit_move_insn isn't expected to handle a PLUS.  */
3764c94e 1506 if (STACK_GROWS_DOWNWARD)
1507 tem
1508 = force_operand (plus_constant (Pmode, tem,
1509 crtl->args.pretend_args_size),
1510 NULL_RTX);
1c7e61a7 1511 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1512
53800dbe 1513 size = GET_MODE_SIZE (Pmode);
1514
1515 /* Save the structure value address unless this is passed as an
1516 "invisible" first argument. */
45550790 1517 if (struct_incoming_value)
53800dbe 1518 {
e513d163 1519 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1520 copy_to_reg (struct_incoming_value));
53800dbe 1521 size += GET_MODE_SIZE (Pmode);
1522 }
1523
1524 /* Return the address of the block. */
1525 return copy_addr_to_reg (XEXP (registers, 0));
1526}
1527
1528/* __builtin_apply_args returns block of memory allocated on
1529 the stack into which is stored the arg pointer, structure
1530 value address, static chain, and all the registers that might
1531 possibly be used in performing a function call. The code is
1532 moved to the start of the function so the incoming values are
1533 saved. */
27d0c333 1534
53800dbe 1535static rtx
aecda0d6 1536expand_builtin_apply_args (void)
53800dbe 1537{
1538 /* Don't do __builtin_apply_args more than once in a function.
1539 Save the result of the first call and reuse it. */
1540 if (apply_args_value != 0)
1541 return apply_args_value;
1542 {
1543 /* When this function is called, it means that registers must be
1544 saved on entry to this function. So we migrate the
1545 call to the first insn of this function. */
1546 rtx temp;
53800dbe 1547
1548 start_sequence ();
1549 temp = expand_builtin_apply_args_1 ();
9ed997be 1550 rtx_insn *seq = get_insns ();
53800dbe 1551 end_sequence ();
1552
1553 apply_args_value = temp;
1554
31d3e01c 1555 /* Put the insns after the NOTE that starts the function.
1556 If this is inside a start_sequence, make the outer-level insn
53800dbe 1557 chain current, so the code is placed at the start of the
0ef1a651 1558 function. If internal_arg_pointer is a non-virtual pseudo,
1559 it needs to be placed after the function that initializes
1560 that pseudo. */
53800dbe 1561 push_topmost_sequence ();
0ef1a651 1562 if (REG_P (crtl->args.internal_arg_pointer)
1563 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1564 emit_insn_before (seq, parm_birth_insn);
1565 else
1566 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1567 pop_topmost_sequence ();
1568 return temp;
1569 }
1570}
1571
1572/* Perform an untyped call and save the state required to perform an
1573 untyped return of whatever value was returned by the given function. */
1574
1575static rtx
aecda0d6 1576expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1577{
1578 int size, align, regno;
3754d046 1579 machine_mode mode;
1e0c0b35 1580 rtx incoming_args, result, reg, dest, src;
1581 rtx_call_insn *call_insn;
53800dbe 1582 rtx old_stack_level = 0;
1583 rtx call_fusage = 0;
6812c89e 1584 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1585
85d654dd 1586 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1587
53800dbe 1588 /* Create a block where the return registers can be saved. */
1589 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1590
53800dbe 1591 /* Fetch the arg pointer from the ARGUMENTS block. */
1592 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1593 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1594 if (!STACK_GROWS_DOWNWARD)
1595 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1596 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1597
04a46d40 1598 /* Push a new argument block and copy the arguments. Do not allow
1599 the (potential) memcpy call below to interfere with our stack
1600 manipulations. */
53800dbe 1601 do_pending_stack_adjust ();
04a46d40 1602 NO_DEFER_POP;
53800dbe 1603
2358393e 1604	  /* Save the stack pointer, using the nonlocal save mechanism if available.  */
71512c05 1605 if (targetm.have_save_stack_nonlocal ())
e9c97615 1606 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1607 else
e9c97615 1608 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1609
59647703 1610 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1611 arguments to the outgoing arguments address. We can pass TRUE
1612 as the 4th argument because we just saved the stack pointer
1613 and will restore it right after the call. */
5be42b39 1614 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1615
1616 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1617 may have already set current_function_calls_alloca to true.
1618 current_function_calls_alloca won't be set if argsize is zero,
1619 so we have to guarantee need_drap is true here. */
1620 if (SUPPORTS_STACK_ALIGNMENT)
1621 crtl->need_drap = true;
1622
59647703 1623 dest = virtual_outgoing_args_rtx;
3764c94e 1624 if (!STACK_GROWS_DOWNWARD)
1625 {
1626 if (CONST_INT_P (argsize))
1627 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1628 else
1629 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1630 }
2a631e19 1631 dest = gen_rtx_MEM (BLKmode, dest);
1632 set_mem_align (dest, PARM_BOUNDARY);
1633 src = gen_rtx_MEM (BLKmode, incoming_args);
1634 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1635 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1636
1637 /* Refer to the argument block. */
1638 apply_args_size ();
1639 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1640 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1641
1642 /* Walk past the arg-pointer and structure value address. */
1643 size = GET_MODE_SIZE (Pmode);
45550790 1644 if (struct_value)
53800dbe 1645 size += GET_MODE_SIZE (Pmode);
1646
1647 /* Restore each of the registers previously saved. Make USE insns
1648 for each of these registers for use in making the call. */
1649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1650 if ((mode = apply_args_mode[regno]) != VOIDmode)
1651 {
1652 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1653 if (size % align != 0)
1654 size = CEIL (size, align) * align;
1655 reg = gen_rtx_REG (mode, regno);
e513d163 1656 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1657 use_reg (&call_fusage, reg);
1658 size += GET_MODE_SIZE (mode);
1659 }
1660
1661 /* Restore the structure value address unless this is passed as an
1662 "invisible" first argument. */
1663 size = GET_MODE_SIZE (Pmode);
45550790 1664 if (struct_value)
53800dbe 1665 {
1666 rtx value = gen_reg_rtx (Pmode);
e513d163 1667 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1668 emit_move_insn (struct_value, value);
8ad4c111 1669 if (REG_P (struct_value))
45550790 1670 use_reg (&call_fusage, struct_value);
53800dbe 1671 size += GET_MODE_SIZE (Pmode);
1672 }
1673
1674 /* All arguments and registers used for the call are set up by now! */
82c7907c 1675 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1676
1677 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1678 and we don't want to load it into a register as an optimization,
1679 because prepare_call_address already did it if it should be done. */
1680 if (GET_CODE (function) != SYMBOL_REF)
1681 function = memory_address (FUNCTION_MODE, function);
1682
1683 /* Generate the actual call instruction and save the return value. */
1d99ab0a 1684 if (targetm.have_untyped_call ())
1685 {
1686 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1687 emit_call_insn (targetm.gen_untyped_call (mem, result,
1688 result_vector (1, result)));
1689 }
53800dbe 1690 else
53800dbe 1691#ifdef HAVE_call_value
1692 if (HAVE_call_value)
1693 {
1694 rtx valreg = 0;
1695
1696 /* Locate the unique return register. It is not possible to
1697 express a call that sets more than one return register using
1698 call_value; use untyped_call for that. In fact, untyped_call
1699 only needs to save the return registers in the given block. */
1700 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1701 if ((mode = apply_result_mode[regno]) != VOIDmode)
1702 {
64db345d 1703 gcc_assert (!valreg); /* HAVE_untyped_call required. */
7d3f6cc7 1704
53800dbe 1705 valreg = gen_rtx_REG (mode, regno);
1706 }
1707
2ed6c343 1708 emit_call_insn (GEN_CALL_VALUE (valreg,
53800dbe 1709 gen_rtx_MEM (FUNCTION_MODE, function),
1710 const0_rtx, NULL_RTX, const0_rtx));
1711
e513d163 1712 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1713 }
1714 else
1715#endif
64db345d 1716 gcc_unreachable ();
53800dbe 1717
d5f9786f 1718 /* Find the CALL insn we just emitted, and attach the register usage
1719 information. */
1720 call_insn = last_call_insn ();
1721 add_function_usage_to (call_insn, call_fusage);
53800dbe 1722
1723 /* Restore the stack. */
71512c05 1724 if (targetm.have_save_stack_nonlocal ())
e9c97615 1725 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1726 else
e9c97615 1727 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1728 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1729
04a46d40 1730 OK_DEFER_POP;
1731
53800dbe 1732 /* Return the address of the result block. */
85d654dd 1733 result = copy_addr_to_reg (XEXP (result, 0));
1734 return convert_memory_address (ptr_mode, result);
53800dbe 1735}
1736
1737/* Perform an untyped return. */
1738
1739static void
aecda0d6 1740expand_builtin_return (rtx result)
53800dbe 1741{
1742 int size, align, regno;
3754d046 1743 machine_mode mode;
53800dbe 1744 rtx reg;
57c26b3a 1745 rtx_insn *call_fusage = 0;
53800dbe 1746
85d654dd 1747 result = convert_memory_address (Pmode, result);
726ec87c 1748
53800dbe 1749 apply_result_size ();
1750 result = gen_rtx_MEM (BLKmode, result);
1751
1d99ab0a 1752 if (targetm.have_untyped_return ())
53800dbe 1753 {
1d99ab0a 1754 rtx vector = result_vector (0, result);
1755 emit_jump_insn (targetm.gen_untyped_return (result, vector));
53800dbe 1756 emit_barrier ();
1757 return;
1758 }
53800dbe 1759
1760 /* Restore the return value and note that each value is used. */
1761 size = 0;
1762 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1763 if ((mode = apply_result_mode[regno]) != VOIDmode)
1764 {
1765 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1766 if (size % align != 0)
1767 size = CEIL (size, align) * align;
1768 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1769 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1770
1771 push_to_sequence (call_fusage);
18b42941 1772 emit_use (reg);
53800dbe 1773 call_fusage = get_insns ();
1774 end_sequence ();
1775 size += GET_MODE_SIZE (mode);
1776 }
1777
1778 /* Put the USE insns before the return. */
31d3e01c 1779 emit_insn (call_fusage);
53800dbe 1780
1781	  /* Return whatever values were restored by jumping directly to the end
1782 of the function. */
62380d2d 1783 expand_naked_return ();
53800dbe 1784}
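
/* A user-level sketch of how these three builtins fit together: a function
   forwards its own arguments to another function with a compatible signature
   and returns that function's result (target_function is a placeholder name;
   128 is an illustrative upper bound on the size of the argument block):

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) target_function, args, 128);
     __builtin_return (ret);  */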
1785
539a3a92 1786/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1787
539a3a92 1788static enum type_class
aecda0d6 1789type_to_class (tree type)
539a3a92 1790{
1791 switch (TREE_CODE (type))
1792 {
1793 case VOID_TYPE: return void_type_class;
1794 case INTEGER_TYPE: return integer_type_class;
539a3a92 1795 case ENUMERAL_TYPE: return enumeral_type_class;
1796 case BOOLEAN_TYPE: return boolean_type_class;
1797 case POINTER_TYPE: return pointer_type_class;
1798 case REFERENCE_TYPE: return reference_type_class;
1799 case OFFSET_TYPE: return offset_type_class;
1800 case REAL_TYPE: return real_type_class;
1801 case COMPLEX_TYPE: return complex_type_class;
1802 case FUNCTION_TYPE: return function_type_class;
1803 case METHOD_TYPE: return method_type_class;
1804 case RECORD_TYPE: return record_type_class;
1805 case UNION_TYPE:
1806 case QUAL_UNION_TYPE: return union_type_class;
1807 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1808 ? string_type_class : array_type_class);
539a3a92 1809 case LANG_TYPE: return lang_type_class;
1810 default: return no_type_class;
1811 }
1812}
bf8e3599 1813
c2f47e15 1814/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1815
53800dbe 1816static rtx
c2f47e15 1817expand_builtin_classify_type (tree exp)
53800dbe 1818{
c2f47e15 1819 if (call_expr_nargs (exp))
1820 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1821 return GEN_INT (no_type_class);
1822}
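
/* A user-level sketch of the classification performed by type_to_class:
   __builtin_classify_type (0) yields integer_type_class,
   __builtin_classify_type (0.0) yields real_type_class, and
   __builtin_classify_type ((void *) 0) yields pointer_type_class,
   each as an integer constant with the corresponding enum value.  */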
1823
07976da7 1824/* This helper macro, meant to be used in mathfn_built_in below,
1825 determines which among a set of three builtin math functions is
1826 appropriate for a given type mode. The `F' and `L' cases are
1827 automatically generated from the `double' case. */
1828#define CASE_MATHFN(BUILT_IN_MATHFN) \
1829 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1830 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1831 fcodel = BUILT_IN_MATHFN##L ; break;
cd2656b0 1832/* Similar to above, but appends _R after any F/L suffix. */
1833#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1834 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1835 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1836 fcodel = BUILT_IN_MATHFN##L_R ; break;
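
/* For example, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */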
07976da7 1837
b9a16870 1838/* Return the mathematical function equivalent to FN but operating directly on TYPE,
1839 if available. If IMPLICIT is true use the implicit builtin declaration,
1840 otherwise use the explicit declaration. If we can't do the conversion,
1841 return zero. */
c319d56a 1842
1843static tree
b9a16870 1844mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
0a68165a 1845{
b9a16870 1846 enum built_in_function fcode, fcodef, fcodel, fcode2;
07976da7 1847
1848 switch (fn)
1849 {
746114e8 1850 CASE_MATHFN (BUILT_IN_ACOS)
1851 CASE_MATHFN (BUILT_IN_ACOSH)
1852 CASE_MATHFN (BUILT_IN_ASIN)
1853 CASE_MATHFN (BUILT_IN_ASINH)
07976da7 1854 CASE_MATHFN (BUILT_IN_ATAN)
746114e8 1855 CASE_MATHFN (BUILT_IN_ATAN2)
1856 CASE_MATHFN (BUILT_IN_ATANH)
1857 CASE_MATHFN (BUILT_IN_CBRT)
07976da7 1858 CASE_MATHFN (BUILT_IN_CEIL)
d735c391 1859 CASE_MATHFN (BUILT_IN_CEXPI)
746114e8 1860 CASE_MATHFN (BUILT_IN_COPYSIGN)
07976da7 1861 CASE_MATHFN (BUILT_IN_COS)
746114e8 1862 CASE_MATHFN (BUILT_IN_COSH)
1863 CASE_MATHFN (BUILT_IN_DREM)
1864 CASE_MATHFN (BUILT_IN_ERF)
1865 CASE_MATHFN (BUILT_IN_ERFC)
07976da7 1866 CASE_MATHFN (BUILT_IN_EXP)
746114e8 1867 CASE_MATHFN (BUILT_IN_EXP10)
1868 CASE_MATHFN (BUILT_IN_EXP2)
1869 CASE_MATHFN (BUILT_IN_EXPM1)
1870 CASE_MATHFN (BUILT_IN_FABS)
1871 CASE_MATHFN (BUILT_IN_FDIM)
07976da7 1872 CASE_MATHFN (BUILT_IN_FLOOR)
746114e8 1873 CASE_MATHFN (BUILT_IN_FMA)
1874 CASE_MATHFN (BUILT_IN_FMAX)
1875 CASE_MATHFN (BUILT_IN_FMIN)
1876 CASE_MATHFN (BUILT_IN_FMOD)
1877 CASE_MATHFN (BUILT_IN_FREXP)
1878 CASE_MATHFN (BUILT_IN_GAMMA)
cd2656b0 1879 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
746114e8 1880 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1881 CASE_MATHFN (BUILT_IN_HYPOT)
1882 CASE_MATHFN (BUILT_IN_ILOGB)
80ff6494 1883 CASE_MATHFN (BUILT_IN_ICEIL)
1884 CASE_MATHFN (BUILT_IN_IFLOOR)
746114e8 1885 CASE_MATHFN (BUILT_IN_INF)
80ff6494 1886 CASE_MATHFN (BUILT_IN_IRINT)
1887 CASE_MATHFN (BUILT_IN_IROUND)
69b779ea 1888 CASE_MATHFN (BUILT_IN_ISINF)
746114e8 1889 CASE_MATHFN (BUILT_IN_J0)
1890 CASE_MATHFN (BUILT_IN_J1)
1891 CASE_MATHFN (BUILT_IN_JN)
ac148751 1892 CASE_MATHFN (BUILT_IN_LCEIL)
746114e8 1893 CASE_MATHFN (BUILT_IN_LDEXP)
ad52b9b7 1894 CASE_MATHFN (BUILT_IN_LFLOOR)
746114e8 1895 CASE_MATHFN (BUILT_IN_LGAMMA)
cd2656b0 1896 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
ac148751 1897 CASE_MATHFN (BUILT_IN_LLCEIL)
ad52b9b7 1898 CASE_MATHFN (BUILT_IN_LLFLOOR)
746114e8 1899 CASE_MATHFN (BUILT_IN_LLRINT)
1900 CASE_MATHFN (BUILT_IN_LLROUND)
07976da7 1901 CASE_MATHFN (BUILT_IN_LOG)
746114e8 1902 CASE_MATHFN (BUILT_IN_LOG10)
1903 CASE_MATHFN (BUILT_IN_LOG1P)
1904 CASE_MATHFN (BUILT_IN_LOG2)
1905 CASE_MATHFN (BUILT_IN_LOGB)
1906 CASE_MATHFN (BUILT_IN_LRINT)
1907 CASE_MATHFN (BUILT_IN_LROUND)
1908 CASE_MATHFN (BUILT_IN_MODF)
1909 CASE_MATHFN (BUILT_IN_NAN)
1910 CASE_MATHFN (BUILT_IN_NANS)
07976da7 1911 CASE_MATHFN (BUILT_IN_NEARBYINT)
746114e8 1912 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1913 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1914 CASE_MATHFN (BUILT_IN_POW)
757c219d 1915 CASE_MATHFN (BUILT_IN_POWI)
746114e8 1916 CASE_MATHFN (BUILT_IN_POW10)
1917 CASE_MATHFN (BUILT_IN_REMAINDER)
1918 CASE_MATHFN (BUILT_IN_REMQUO)
1919 CASE_MATHFN (BUILT_IN_RINT)
07976da7 1920 CASE_MATHFN (BUILT_IN_ROUND)
746114e8 1921 CASE_MATHFN (BUILT_IN_SCALB)
1922 CASE_MATHFN (BUILT_IN_SCALBLN)
1923 CASE_MATHFN (BUILT_IN_SCALBN)
c319d56a 1924 CASE_MATHFN (BUILT_IN_SIGNBIT)
746114e8 1925 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
07976da7 1926 CASE_MATHFN (BUILT_IN_SIN)
746114e8 1927 CASE_MATHFN (BUILT_IN_SINCOS)
1928 CASE_MATHFN (BUILT_IN_SINH)
07976da7 1929 CASE_MATHFN (BUILT_IN_SQRT)
1930 CASE_MATHFN (BUILT_IN_TAN)
746114e8 1931 CASE_MATHFN (BUILT_IN_TANH)
1932 CASE_MATHFN (BUILT_IN_TGAMMA)
07976da7 1933 CASE_MATHFN (BUILT_IN_TRUNC)
746114e8 1934 CASE_MATHFN (BUILT_IN_Y0)
1935 CASE_MATHFN (BUILT_IN_Y1)
1936 CASE_MATHFN (BUILT_IN_YN)
07976da7 1937
0a68165a 1938 default:
c2f47e15 1939 return NULL_TREE;
0a68165a 1940 }
07976da7 1941
96b9f485 1942 if (TYPE_MAIN_VARIANT (type) == double_type_node)
b9a16870 1943 fcode2 = fcode;
96b9f485 1944 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
b9a16870 1945 fcode2 = fcodef;
96b9f485 1946 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
b9a16870 1947 fcode2 = fcodel;
07976da7 1948 else
c2f47e15 1949 return NULL_TREE;
b9a16870 1950
1951 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1952 return NULL_TREE;
1953
1954 return builtin_decl_explicit (fcode2);
0a68165a 1955}
1956
c319d56a 1957/* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1958
1959tree
1960mathfn_built_in (tree type, enum built_in_function fn)
1961{
1962 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1963}
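
/* A minimal usage sketch (the variable name is illustrative): requesting the
   float flavor of a double builtin returns the corresponding declaration, e.g.

     tree sqrtf_decl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the BUILT_IN_SQRTF decl, or NULL_TREE if that builtin may not be
   used implicitly.  */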
1964
0fd605a5 1965/* If errno must be maintained, expand the RTL to check if the result,
1966 TARGET, of a built-in function call, EXP, is NaN, and if so set
1967 errno to EDOM. */
1968
1969static void
aecda0d6 1970expand_errno_check (tree exp, rtx target)
0fd605a5 1971{
1e0c0b35 1972 rtx_code_label *lab = gen_label_rtx ();
0fd605a5 1973
7f05340e 1974 /* Test the result; if it is NaN, set errno=EDOM because
1975 the argument was not in the domain. */
3fcf767f 1976 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
f9a00e9e 1977 NULL_RTX, NULL, lab,
79ab74cc 1978 /* The jump is very likely. */
1979 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
0fd605a5 1980
1981#ifdef TARGET_EDOM
7f05340e 1982 /* If this built-in doesn't throw an exception, set errno directly. */
c2f47e15 1983 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7f05340e 1984 {
0fd605a5 1985#ifdef GEN_ERRNO_RTX
7f05340e 1986 rtx errno_rtx = GEN_ERRNO_RTX;
0fd605a5 1987#else
7f05340e 1988 rtx errno_rtx
0fd605a5 1989 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1990#endif
d11aedc7 1991 emit_move_insn (errno_rtx,
1992 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
0fd605a5 1993 emit_label (lab);
7f05340e 1994 return;
0fd605a5 1995 }
7f05340e 1996#endif
1997
08491912 1998 /* Make sure the library call isn't expanded as a tail call. */
1999 CALL_EXPR_TAILCALL (exp) = 0;
2000
7f05340e 2001 /* We can't set errno=EDOM directly; let the library call do it.
2002 Pop the arguments right away in case the call gets deleted. */
2003 NO_DEFER_POP;
2004 expand_call (exp, target, 0);
2005 OK_DEFER_POP;
2006 emit_label (lab);
0fd605a5 2007}
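
/* The source-level behavior preserved here, as a sketch: with -fmath-errno,
   on a libm that reports domain errors via errno, after

     errno = 0;
     double r = sqrt (-1.0);

   the result is a NaN and errno has been set to EDOM, whether the call was
   expanded inline (using the check above) or left to the library.  */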
2008
6b43bae4 2009/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
c2f47e15 2010 Return NULL_RTX if a normal call should be emitted rather than expanding
2011 the function in-line. EXP is the expression that is a call to the builtin
53800dbe 2012 function; if convenient, the result should be placed in TARGET.
2013 SUBTARGET may be used as the target for computing one of EXP's operands. */
27d0c333 2014
53800dbe 2015static rtx
aecda0d6 2016expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
53800dbe 2017{
bf8e3599 2018 optab builtin_optab;
1e0c0b35 2019 rtx op0;
2020 rtx_insn *insns;
c6e6ecb1 2021 tree fndecl = get_callee_fndecl (exp);
3754d046 2022 machine_mode mode;
528ee710 2023 bool errno_set = false;
d6a0a4b0 2024 bool try_widening = false;
abfea505 2025 tree arg;
53800dbe 2026
c2f47e15 2027 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2028 return NULL_RTX;
53800dbe 2029
c2f47e15 2030 arg = CALL_EXPR_ARG (exp, 0);
53800dbe 2031
2032 switch (DECL_FUNCTION_CODE (fndecl))
2033 {
4f35b1fc 2034 CASE_FLT_FN (BUILT_IN_SQRT):
7f05340e 2035 errno_set = ! tree_expr_nonnegative_p (arg);
d6a0a4b0 2036 try_widening = true;
7f05340e 2037 builtin_optab = sqrt_optab;
2038 break;
4f35b1fc 2039 CASE_FLT_FN (BUILT_IN_EXP):
528ee710 2040 errno_set = true; builtin_optab = exp_optab; break;
4f35b1fc 2041 CASE_FLT_FN (BUILT_IN_EXP10):
2042 CASE_FLT_FN (BUILT_IN_POW10):
750ef9f5 2043 errno_set = true; builtin_optab = exp10_optab; break;
4f35b1fc 2044 CASE_FLT_FN (BUILT_IN_EXP2):
750ef9f5 2045 errno_set = true; builtin_optab = exp2_optab; break;
4f35b1fc 2046 CASE_FLT_FN (BUILT_IN_EXPM1):
a6b4eed2 2047 errno_set = true; builtin_optab = expm1_optab; break;
4f35b1fc 2048 CASE_FLT_FN (BUILT_IN_LOGB):
4efbc641 2049 errno_set = true; builtin_optab = logb_optab; break;
4f35b1fc 2050 CASE_FLT_FN (BUILT_IN_LOG):
528ee710 2051 errno_set = true; builtin_optab = log_optab; break;
4f35b1fc 2052 CASE_FLT_FN (BUILT_IN_LOG10):
d3cd9bde 2053 errno_set = true; builtin_optab = log10_optab; break;
4f35b1fc 2054 CASE_FLT_FN (BUILT_IN_LOG2):
d3cd9bde 2055 errno_set = true; builtin_optab = log2_optab; break;
4f35b1fc 2056 CASE_FLT_FN (BUILT_IN_LOG1P):
f474cd93 2057 errno_set = true; builtin_optab = log1p_optab; break;
4f35b1fc 2058 CASE_FLT_FN (BUILT_IN_ASIN):
8de2f465 2059 builtin_optab = asin_optab; break;
4f35b1fc 2060 CASE_FLT_FN (BUILT_IN_ACOS):
8de2f465 2061 builtin_optab = acos_optab; break;
4f35b1fc 2062 CASE_FLT_FN (BUILT_IN_TAN):
528ee710 2063 builtin_optab = tan_optab; break;
4f35b1fc 2064 CASE_FLT_FN (BUILT_IN_ATAN):
528ee710 2065 builtin_optab = atan_optab; break;
4f35b1fc 2066 CASE_FLT_FN (BUILT_IN_FLOOR):
528ee710 2067 builtin_optab = floor_optab; break;
4f35b1fc 2068 CASE_FLT_FN (BUILT_IN_CEIL):
528ee710 2069 builtin_optab = ceil_optab; break;
4f35b1fc 2070 CASE_FLT_FN (BUILT_IN_TRUNC):
a7cc195f 2071 builtin_optab = btrunc_optab; break;
4f35b1fc 2072 CASE_FLT_FN (BUILT_IN_ROUND):
528ee710 2073 builtin_optab = round_optab; break;
4f35b1fc 2074 CASE_FLT_FN (BUILT_IN_NEARBYINT):
0ddf4ad9 2075 builtin_optab = nearbyint_optab;
2076 if (flag_trapping_math)
2077 break;
2078 /* Else fallthrough and expand as rint. */
4f35b1fc 2079 CASE_FLT_FN (BUILT_IN_RINT):
aef94a0f 2080 builtin_optab = rint_optab; break;
b3154a1f 2081 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2082 builtin_optab = significand_optab; break;
42721db0 2083 default:
64db345d 2084 gcc_unreachable ();
53800dbe 2085 }
2086
7f05340e 2087 /* Make a suitable register to place result in. */
2088 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2089
7f05340e 2090 if (! flag_errno_math || ! HONOR_NANS (mode))
2091 errno_set = false;
2092
d6a0a4b0 2093 /* Before working hard, check whether the instruction is available, but try
2094 to widen the mode for specific operations. */
2095 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2096 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
f2aca212 2097 && (!errno_set || !optimize_insn_for_size_p ()))
68e6cb9d 2098 {
de2e453e 2099 rtx result = gen_reg_rtx (mode);
7f05340e 2100
bd421108 2101 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2102 need to expand the argument again. This way, we will not perform
2103	 side-effects more than once.  */
abfea505 2104 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7f05340e 2105
1db6d067 2106 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7f05340e 2107
bd421108 2108 start_sequence ();
53800dbe 2109
de2e453e 2110 /* Compute into RESULT.
2111 Set RESULT to wherever the result comes back. */
2112 result = expand_unop (mode, builtin_optab, op0, result, 0);
bd421108 2113
de2e453e 2114 if (result != 0)
bd421108 2115 {
2116 if (errno_set)
de2e453e 2117 expand_errno_check (exp, result);
bd421108 2118
2119 /* Output the entire sequence. */
2120 insns = get_insns ();
2121 end_sequence ();
2122 emit_insn (insns);
de2e453e 2123 return result;
bd421108 2124 }
2125
2126 /* If we were unable to expand via the builtin, stop the sequence
2127	 (without outputting the insns) and call the library function
2128 with the stabilized argument list. */
53800dbe 2129 end_sequence ();
53800dbe 2130 }
2131
1e5b92fa 2132 return expand_call (exp, target, target == const0_rtx);
0fd605a5 2133}
2134
2135/* Expand a call to the builtin binary math functions (pow and atan2).
c2f47e15 2136 Return NULL_RTX if a normal call should be emitted rather than expanding the
0fd605a5 2137 function in-line. EXP is the expression that is a call to the builtin
2138 function; if convenient, the result should be placed in TARGET.
2139 SUBTARGET may be used as the target for computing one of EXP's
2140 operands. */
2141
2142static rtx
aecda0d6 2143expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
0fd605a5 2144{
2145 optab builtin_optab;
1e0c0b35 2146 rtx op0, op1, result;
2147 rtx_insn *insns;
4737caf2 2148 int op1_type = REAL_TYPE;
c6e6ecb1 2149 tree fndecl = get_callee_fndecl (exp);
abfea505 2150 tree arg0, arg1;
3754d046 2151 machine_mode mode;
0fd605a5 2152 bool errno_set = true;
0fd605a5 2153
73a954a1 2154 switch (DECL_FUNCTION_CODE (fndecl))
2155 {
2156 CASE_FLT_FN (BUILT_IN_SCALBN):
2157 CASE_FLT_FN (BUILT_IN_SCALBLN):
2158 CASE_FLT_FN (BUILT_IN_LDEXP):
2159 op1_type = INTEGER_TYPE;
2160 default:
2161 break;
2162 }
4737caf2 2163
c2f47e15 2164 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2165 return NULL_RTX;
0fd605a5 2166
c2f47e15 2167 arg0 = CALL_EXPR_ARG (exp, 0);
2168 arg1 = CALL_EXPR_ARG (exp, 1);
0fd605a5 2169
0fd605a5 2170 switch (DECL_FUNCTION_CODE (fndecl))
2171 {
4f35b1fc 2172 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 2173 builtin_optab = pow_optab; break;
4f35b1fc 2174 CASE_FLT_FN (BUILT_IN_ATAN2):
0fd605a5 2175 builtin_optab = atan2_optab; break;
73a954a1 2176 CASE_FLT_FN (BUILT_IN_SCALB):
2177 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2178 return 0;
2179 builtin_optab = scalb_optab; break;
2180 CASE_FLT_FN (BUILT_IN_SCALBN):
2181 CASE_FLT_FN (BUILT_IN_SCALBLN):
2182 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2183 return 0;
2184 /* Fall through... */
4f35b1fc 2185 CASE_FLT_FN (BUILT_IN_LDEXP):
4737caf2 2186 builtin_optab = ldexp_optab; break;
4f35b1fc 2187 CASE_FLT_FN (BUILT_IN_FMOD):
80ed5c06 2188 builtin_optab = fmod_optab; break;
ef722005 2189 CASE_FLT_FN (BUILT_IN_REMAINDER):
4f35b1fc 2190 CASE_FLT_FN (BUILT_IN_DREM):
ef722005 2191 builtin_optab = remainder_optab; break;
0fd605a5 2192 default:
64db345d 2193 gcc_unreachable ();
0fd605a5 2194 }
2195
7f05340e 2196 /* Make a suitable register to place result in. */
2197 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2198
2199 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2200 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
c2f47e15 2201 return NULL_RTX;
fc4eef90 2202
de2e453e 2203 result = gen_reg_rtx (mode);
7f05340e 2204
2205 if (! flag_errno_math || ! HONOR_NANS (mode))
2206 errno_set = false;
2207
f2aca212 2208 if (errno_set && optimize_insn_for_size_p ())
2209 return 0;
2210
4ee9c684 2211 /* Always stabilize the argument list. */
abfea505 2212 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2213 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
7f05340e 2214
8ec3c5c2 2215 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2216 op1 = expand_normal (arg1);
7f05340e 2217
7f05340e 2218 start_sequence ();
2219
de2e453e 2220 /* Compute into RESULT.
2221 Set RESULT to wherever the result comes back. */
2222 result = expand_binop (mode, builtin_optab, op0, op1,
2223 result, 0, OPTAB_DIRECT);
53800dbe 2224
68e6cb9d 2225 /* If we were unable to expand via the builtin, stop the sequence
2226	 (without outputting the insns) and call the library function
2227 with the stabilized argument list. */
de2e453e 2228 if (result == 0)
0fd605a5 2229 {
2230 end_sequence ();
68e6cb9d 2231 return expand_call (exp, target, target == const0_rtx);
53800dbe 2232 }
2233
a4356fb9 2234 if (errno_set)
de2e453e 2235 expand_errno_check (exp, result);
0fd605a5 2236
53800dbe 2237 /* Output the entire sequence. */
2238 insns = get_insns ();
2239 end_sequence ();
31d3e01c 2240 emit_insn (insns);
bf8e3599 2241
de2e453e 2242 return result;
53800dbe 2243}
2244
7e0713b1 2245/* Expand a call to the builtin trinary math functions (fma).
2246 Return NULL_RTX if a normal call should be emitted rather than expanding the
2247 function in-line. EXP is the expression that is a call to the builtin
2248 function; if convenient, the result should be placed in TARGET.
2249 SUBTARGET may be used as the target for computing one of EXP's
2250 operands. */
2251
2252static rtx
2253expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2254{
2255 optab builtin_optab;
1e0c0b35 2256 rtx op0, op1, op2, result;
2257 rtx_insn *insns;
7e0713b1 2258 tree fndecl = get_callee_fndecl (exp);
2259 tree arg0, arg1, arg2;
3754d046 2260 machine_mode mode;
7e0713b1 2261
2262 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2263 return NULL_RTX;
2264
2265 arg0 = CALL_EXPR_ARG (exp, 0);
2266 arg1 = CALL_EXPR_ARG (exp, 1);
2267 arg2 = CALL_EXPR_ARG (exp, 2);
2268
2269 switch (DECL_FUNCTION_CODE (fndecl))
2270 {
2271 CASE_FLT_FN (BUILT_IN_FMA):
2272 builtin_optab = fma_optab; break;
2273 default:
2274 gcc_unreachable ();
2275 }
2276
2277 /* Make a suitable register to place result in. */
2278 mode = TYPE_MODE (TREE_TYPE (exp));
2279
2280 /* Before working hard, check whether the instruction is available. */
2281 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2282 return NULL_RTX;
2283
de2e453e 2284 result = gen_reg_rtx (mode);
7e0713b1 2285
2286 /* Always stabilize the argument list. */
2287 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2288 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2289 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2290
2291 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2292 op1 = expand_normal (arg1);
2293 op2 = expand_normal (arg2);
2294
2295 start_sequence ();
2296
de2e453e 2297 /* Compute into RESULT.
2298 Set RESULT to wherever the result comes back. */
2299 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2300 result, 0);
7e0713b1 2301
2302 /* If we were unable to expand via the builtin, stop the sequence
2303	 (without outputting the insns) and call the library function
2304 with the stabilized argument list. */
de2e453e 2305 if (result == 0)
7e0713b1 2306 {
2307 end_sequence ();
2308 return expand_call (exp, target, target == const0_rtx);
2309 }
2310
2311 /* Output the entire sequence. */
2312 insns = get_insns ();
2313 end_sequence ();
2314 emit_insn (insns);
2315
de2e453e 2316 return result;
7e0713b1 2317}
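
/* For reference: fma (x, y, z) computes x * y + z as if to infinite precision
   and rounds only once (the C99 fmaf/fma/fmal semantics), e.g.

     double d = fma (x, y, z);

   which is presumably why the only fallback here is a call to the library
   function rather than a separate multiply and add.  */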
2318
6b43bae4 2319/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2320 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2321 function in-line. EXP is the expression that is a call to the builtin
2322 function; if convenient, the result should be placed in TARGET.
2323 SUBTARGET may be used as the target for computing one of EXP's
2324 operands. */
2325
2326static rtx
2327expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2328{
2329 optab builtin_optab;
1e0c0b35 2330 rtx op0;
2331 rtx_insn *insns;
6b43bae4 2332 tree fndecl = get_callee_fndecl (exp);
3754d046 2333 machine_mode mode;
abfea505 2334 tree arg;
6b43bae4 2335
c2f47e15 2336 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2337 return NULL_RTX;
6b43bae4 2338
c2f47e15 2339 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2340
2341 switch (DECL_FUNCTION_CODE (fndecl))
2342 {
4f35b1fc 2343 CASE_FLT_FN (BUILT_IN_SIN):
2344 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2345 builtin_optab = sincos_optab; break;
2346 default:
64db345d 2347 gcc_unreachable ();
6b43bae4 2348 }
2349
2350 /* Make a suitable register to place result in. */
2351 mode = TYPE_MODE (TREE_TYPE (exp));
2352
6b43bae4 2353	  /* Check if the sincos insn is available, otherwise fall back
0bed3869 2354	     to the sin or cos insn.  */
d6bf3b14 2355 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2356 switch (DECL_FUNCTION_CODE (fndecl))
2357 {
4f35b1fc 2358 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2359 builtin_optab = sin_optab; break;
4f35b1fc 2360 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2361 builtin_optab = cos_optab; break;
2362 default:
64db345d 2363 gcc_unreachable ();
6b43bae4 2364 }
6b43bae4 2365
2366 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2367 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2368 {
de2e453e 2369 rtx result = gen_reg_rtx (mode);
6b43bae4 2370
2371 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2372 need to expand the argument again. This way, we will not perform
2373	 side-effects more than once.  */
abfea505 2374 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2375
1db6d067 2376 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2377
6b43bae4 2378 start_sequence ();
2379
de2e453e 2380 /* Compute into RESULT.
2381 Set RESULT to wherever the result comes back. */
6b43bae4 2382 if (builtin_optab == sincos_optab)
2383 {
de2e453e 2384 int ok;
7d3f6cc7 2385
6b43bae4 2386 switch (DECL_FUNCTION_CODE (fndecl))
2387 {
4f35b1fc 2388 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2389 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2390 break;
4f35b1fc 2391 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2392 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2393 break;
2394 default:
64db345d 2395 gcc_unreachable ();
6b43bae4 2396 }
de2e453e 2397 gcc_assert (ok);
6b43bae4 2398 }
2399 else
de2e453e 2400 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2401
de2e453e 2402 if (result != 0)
6b43bae4 2403 {
6b43bae4 2404 /* Output the entire sequence. */
2405 insns = get_insns ();
2406 end_sequence ();
2407 emit_insn (insns);
de2e453e 2408 return result;
6b43bae4 2409 }
2410
2411 /* If we were unable to expand via the builtin, stop the sequence
2412	 (without outputting the insns) and call the library function
2413 with the stabilized argument list. */
2414 end_sequence ();
2415 }
2416
de2e453e 2417 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2418}
2419
a65c4d64 2420/* Given an interclass math builtin decl FNDECL and its argument ARG
2421 return an RTL instruction code that implements the functionality.
2422 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2423
a65c4d64 2424static enum insn_code
2425interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2426{
a65c4d64 2427 bool errno_set = false;
6cdd383a 2428 optab builtin_optab = unknown_optab;
3754d046 2429 machine_mode mode;
a67a90e5 2430
2431 switch (DECL_FUNCTION_CODE (fndecl))
2432 {
2433 CASE_FLT_FN (BUILT_IN_ILOGB):
2434 errno_set = true; builtin_optab = ilogb_optab; break;
69b779ea 2435 CASE_FLT_FN (BUILT_IN_ISINF):
2436 builtin_optab = isinf_optab; break;
8a1a9cb7 2437 case BUILT_IN_ISNORMAL:
cde061c1 2438 case BUILT_IN_ISFINITE:
2439 CASE_FLT_FN (BUILT_IN_FINITE):
a65c4d64 2440 case BUILT_IN_FINITED32:
2441 case BUILT_IN_FINITED64:
2442 case BUILT_IN_FINITED128:
2443 case BUILT_IN_ISINFD32:
2444 case BUILT_IN_ISINFD64:
2445 case BUILT_IN_ISINFD128:
cde061c1 2446 /* These builtins have no optabs (yet). */
2447 break;
a67a90e5 2448 default:
2449 gcc_unreachable ();
2450 }
2451
2452 /* There's no easy way to detect the case we need to set EDOM. */
2453 if (flag_errno_math && errno_set)
a65c4d64 2454 return CODE_FOR_nothing;
a67a90e5 2455
2456 /* Optab mode depends on the mode of the input argument. */
2457 mode = TYPE_MODE (TREE_TYPE (arg));
2458
cde061c1 2459 if (builtin_optab)
d6bf3b14 2460 return optab_handler (builtin_optab, mode);
a65c4d64 2461 return CODE_FOR_nothing;
2462}
2463
2464/* Expand a call to one of the builtin math functions that operate on
2465	   a floating point argument and output an integer result (ilogb, isinf,
2466 isnan, etc).
2467 Return 0 if a normal call should be emitted rather than expanding the
2468 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2469 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2470
2471static rtx
f97eea22 2472expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2473{
2474 enum insn_code icode = CODE_FOR_nothing;
2475 rtx op0;
2476 tree fndecl = get_callee_fndecl (exp);
3754d046 2477 machine_mode mode;
a65c4d64 2478 tree arg;
2479
2480 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2481 return NULL_RTX;
2482
2483 arg = CALL_EXPR_ARG (exp, 0);
2484 icode = interclass_mathfn_icode (arg, fndecl);
2485 mode = TYPE_MODE (TREE_TYPE (arg));
2486
a67a90e5 2487 if (icode != CODE_FOR_nothing)
2488 {
8786db1e 2489 struct expand_operand ops[1];
1e0c0b35 2490 rtx_insn *last = get_last_insn ();
4e2a2fb4 2491 tree orig_arg = arg;
a67a90e5 2492
2493 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2494 need to expand the argument again. This way, we will not perform
2495	 side-effects more than once.  */
abfea505 2496 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2497
f97eea22 2498 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2499
2500 if (mode != GET_MODE (op0))
2501 op0 = convert_to_mode (mode, op0, 0);
2502
8786db1e 2503 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2504 if (maybe_legitimize_operands (icode, 0, 1, ops)
2505 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2506 return ops[0].value;
2507
4e2a2fb4 2508 delete_insns_since (last);
2509 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2510 }
2511
a65c4d64 2512 return NULL_RTX;
a67a90e5 2513}
2514
c3147c1a 2515/* Expand a call to the builtin sincos math function.
c2f47e15 2516 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2517 function in-line. EXP is the expression that is a call to the builtin
2518 function. */
2519
2520static rtx
2521expand_builtin_sincos (tree exp)
2522{
2523 rtx op0, op1, op2, target1, target2;
3754d046 2524 machine_mode mode;
c3147c1a 2525 tree arg, sinp, cosp;
2526 int result;
389dd41b 2527 location_t loc = EXPR_LOCATION (exp);
be5575b2 2528 tree alias_type, alias_off;
c3147c1a 2529
c2f47e15 2530 if (!validate_arglist (exp, REAL_TYPE,
2531 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2532 return NULL_RTX;
c3147c1a 2533
c2f47e15 2534 arg = CALL_EXPR_ARG (exp, 0);
2535 sinp = CALL_EXPR_ARG (exp, 1);
2536 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2537
2538 /* Make a suitable register to place result in. */
2539 mode = TYPE_MODE (TREE_TYPE (arg));
2540
2541 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2542 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2543 return NULL_RTX;
2544
2545 target1 = gen_reg_rtx (mode);
2546 target2 = gen_reg_rtx (mode);
2547
8ec3c5c2 2548 op0 = expand_normal (arg);
be5575b2 2549 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2550 alias_off = build_int_cst (alias_type, 0);
2551 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2552 sinp, alias_off));
2553 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 cosp, alias_off));
c3147c1a 2555
2556 /* Compute into target1 and target2.
2557 Set TARGET to wherever the result comes back. */
2558 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2559 gcc_assert (result);
2560
2561 /* Move target1 and target2 to the memory locations indicated
2562 by op1 and op2. */
2563 emit_move_insn (op1, target1);
2564 emit_move_insn (op2, target2);
2565
2566 return const0_rtx;
2567}
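
/* The library function being open-coded here is the GNU extension

     void sincos (double x, double *sinp, double *cosp);

   (and its float/long double variants); e.g. sincos (x, &s, &c) computes both
   values in one call, which the expansion above maps onto a single
   sincos_optab instruction when the target provides one.  */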
2568
d735c391 2569/* Expand a call to the internal cexpi builtin to the sincos math function.
2570 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2571 the result should be placed in TARGET. */
d735c391 2572
2573static rtx
f97eea22 2574expand_builtin_cexpi (tree exp, rtx target)
d735c391 2575{
2576 tree fndecl = get_callee_fndecl (exp);
d735c391 2577 tree arg, type;
3754d046 2578 machine_mode mode;
d735c391 2579 rtx op0, op1, op2;
389dd41b 2580 location_t loc = EXPR_LOCATION (exp);
d735c391 2581
c2f47e15 2582 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2583 return NULL_RTX;
d735c391 2584
c2f47e15 2585 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2586 type = TREE_TYPE (arg);
2587 mode = TYPE_MODE (TREE_TYPE (arg));
2588
2589 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2590	     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2591	     is only generated from sincos or cexp, or when either of them is available.  */
d6bf3b14 2592 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2593 {
2594 op1 = gen_reg_rtx (mode);
2595 op2 = gen_reg_rtx (mode);
2596
f97eea22 2597 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2598
2599 /* Compute into op1 and op2. */
2600 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2601 }
30f690e0 2602 else if (targetm.libc_has_function (function_sincos))
d735c391 2603 {
c2f47e15 2604 tree call, fn = NULL_TREE;
d735c391 2605 tree top1, top2;
2606 rtx op1a, op2a;
2607
2608 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2609 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2610 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2611 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2613 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2614 else
2615 gcc_unreachable ();
48e1416a 2616
0ab48139 2617 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2618 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2619 op1a = copy_addr_to_reg (XEXP (op1, 0));
2620 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2621 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2622 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2623
d735c391 2624 /* Make sure not to fold the sincos call again. */
2625 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2626 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2627 call, 3, arg, top1, top2));
d735c391 2628 }
18b8d8ae 2629 else
2630 {
0ecbc158 2631 tree call, fn = NULL_TREE, narg;
18b8d8ae 2632 tree ctype = build_complex_type (type);
2633
0ecbc158 2634 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2635 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2636 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2637 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2639 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2640 else
2641 gcc_unreachable ();
fc0dfa6e 2642
2643	      /* If we don't have a decl for cexp, create one.  This is the
2644	 friendliest fallback if the user calls __builtin_cexpi
2645	 on a target without full C99 function support.  */
2646 if (fn == NULL_TREE)
2647 {
2648 tree fntype;
2649 const char *name = NULL;
2650
2651 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2652 name = "cexpf";
2653 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2654 name = "cexp";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2656 name = "cexpl";
2657
2658 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2659 fn = build_fn_decl (name, fntype);
2660 }
2661
389dd41b 2662 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2663 build_real (type, dconst0), arg);
2664
2665 /* Make sure not to fold the cexp call again. */
2666 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2667 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2668 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2669 }
d735c391 2670
2671 /* Now build the proper return type. */
2672 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2673 make_tree (TREE_TYPE (arg), op2),
2674 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2675 target, VOIDmode, EXPAND_NORMAL);
d735c391 2676}
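
/* The identity implemented by all three strategies above, as a sketch:

     cexpi (x) == cexp (I * x) == cos (x) + I * sin (x)

   hence the final COMPLEX_EXPR takes the cos value (op2) as its real part and
   the sin value (op1) as its imaginary part.  */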
2677
a65c4d64 2678/* Conveniently construct a function call expression. FNDECL names the
2679 function to be called, N is the number of arguments, and the "..."
2680	   parameters are the argument expressions.  Unlike build_call_expr
2681 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2682
2683static tree
2684build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2685{
2686 va_list ap;
2687 tree fntype = TREE_TYPE (fndecl);
2688 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2689
2690 va_start (ap, n);
2691 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2692 va_end (ap);
2693 SET_EXPR_LOCATION (fn, loc);
2694 return fn;
2695}
a65c4d64 2696
7d3afc77 2697/* Expand a call to one of the builtin rounding functions gcc defines
2698 as an extension (lfloor and lceil). As these are gcc extensions we
2699 do not need to worry about setting errno to EDOM.
ad52b9b7 2700	   If expanding via the optab fails, lower the expression to (int)(floor(x)).
2701 EXP is the expression that is a call to the builtin function;
ff1b14e4 2702 if convenient, the result should be placed in TARGET. */
ad52b9b7 2703
2704static rtx
ff1b14e4 2705expand_builtin_int_roundingfn (tree exp, rtx target)
ad52b9b7 2706{
9c42dd28 2707 convert_optab builtin_optab;
1e0c0b35 2708 rtx op0, tmp;
2709 rtx_insn *insns;
ad52b9b7 2710 tree fndecl = get_callee_fndecl (exp);
ad52b9b7 2711 enum built_in_function fallback_fn;
2712 tree fallback_fndecl;
3754d046 2713 machine_mode mode;
4de0924f 2714 tree arg;
ad52b9b7 2715
c2f47e15 2716 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
ad52b9b7 2717 gcc_unreachable ();
2718
c2f47e15 2719 arg = CALL_EXPR_ARG (exp, 0);
ad52b9b7 2720
2721 switch (DECL_FUNCTION_CODE (fndecl))
2722 {
80ff6494 2723 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 2724 CASE_FLT_FN (BUILT_IN_LCEIL):
2725 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 2726 builtin_optab = lceil_optab;
2727 fallback_fn = BUILT_IN_CEIL;
2728 break;
2729
80ff6494 2730 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 2731 CASE_FLT_FN (BUILT_IN_LFLOOR):
2732 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 2733 builtin_optab = lfloor_optab;
2734 fallback_fn = BUILT_IN_FLOOR;
2735 break;
2736
2737 default:
2738 gcc_unreachable ();
2739 }
2740
2741 /* Make a suitable register to place result in. */
2742 mode = TYPE_MODE (TREE_TYPE (exp));
2743
9c42dd28 2744 target = gen_reg_rtx (mode);
ad52b9b7 2745
9c42dd28 2746 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2747 need to expand the argument again. This way, we will not perform
2748	 side-effects more than once.  */
abfea505 2749 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
ad52b9b7 2750
ff1b14e4 2751 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
ad52b9b7 2752
9c42dd28 2753 start_sequence ();
ad52b9b7 2754
9c42dd28 2755 /* Compute into TARGET. */
2756 if (expand_sfix_optab (target, op0, builtin_optab))
2757 {
2758 /* Output the entire sequence. */
2759 insns = get_insns ();
ad52b9b7 2760 end_sequence ();
9c42dd28 2761 emit_insn (insns);
2762 return target;
ad52b9b7 2763 }
2764
9c42dd28 2765 /* If we were unable to expand via the builtin, stop the sequence
2766 (without outputting the insns). */
2767 end_sequence ();
2768
ad52b9b7 2769 /* Fall back to floating point rounding optab. */
2770 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
fc0dfa6e 2771
2772 /* For non-C99 targets we may end up without a fallback fndecl here
2773 if the user called __builtin_lfloor directly. In this case emit
2774 a call to the floor/ceil variants nevertheless. This should result
2775	     in the best user experience for targets without full C99 support.  */
2776 if (fallback_fndecl == NULL_TREE)
2777 {
2778 tree fntype;
2779 const char *name = NULL;
2780
2781 switch (DECL_FUNCTION_CODE (fndecl))
2782 {
80ff6494 2783 case BUILT_IN_ICEIL:
fc0dfa6e 2784 case BUILT_IN_LCEIL:
2785 case BUILT_IN_LLCEIL:
2786 name = "ceil";
2787 break;
80ff6494 2788 case BUILT_IN_ICEILF:
fc0dfa6e 2789 case BUILT_IN_LCEILF:
2790 case BUILT_IN_LLCEILF:
2791 name = "ceilf";
2792 break;
80ff6494 2793 case BUILT_IN_ICEILL:
fc0dfa6e 2794 case BUILT_IN_LCEILL:
2795 case BUILT_IN_LLCEILL:
2796 name = "ceill";
2797 break;
80ff6494 2798 case BUILT_IN_IFLOOR:
fc0dfa6e 2799 case BUILT_IN_LFLOOR:
2800 case BUILT_IN_LLFLOOR:
2801 name = "floor";
2802 break;
80ff6494 2803 case BUILT_IN_IFLOORF:
fc0dfa6e 2804 case BUILT_IN_LFLOORF:
2805 case BUILT_IN_LLFLOORF:
2806 name = "floorf";
2807 break;
80ff6494 2808 case BUILT_IN_IFLOORL:
fc0dfa6e 2809 case BUILT_IN_LFLOORL:
2810 case BUILT_IN_LLFLOORL:
2811 name = "floorl";
2812 break;
2813 default:
2814 gcc_unreachable ();
2815 }
2816
2817 fntype = build_function_type_list (TREE_TYPE (arg),
2818 TREE_TYPE (arg), NULL_TREE);
2819 fallback_fndecl = build_fn_decl (name, fntype);
2820 }
2821
0568e9c1 2822 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
ad52b9b7 2823
d4c690af 2824 tmp = expand_normal (exp);
933eb13a 2825 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
ad52b9b7 2826
2827 /* Truncate the result of floating point optab to integer
2828 via expand_fix (). */
2829 target = gen_reg_rtx (mode);
2830 expand_fix (target, tmp, 0);
2831
2832 return target;
2833}
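
/* The user-level operation handled here, as a sketch:

     long l = __builtin_lfloor (x);

   is expanded through lfloor_optab when the target has a suitable
   instruction, and is otherwise lowered to the equivalent of

     long l = (long) floor (x);  */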
2834
7d3afc77 2835/* Expand a call to one of the builtin math functions doing integer
2836 conversion (lrint).
2837 Return 0 if a normal call should be emitted rather than expanding the
2838 function in-line. EXP is the expression that is a call to the builtin
ff1b14e4 2839 function; if convenient, the result should be placed in TARGET. */
7d3afc77 2840
2841static rtx
ff1b14e4 2842expand_builtin_int_roundingfn_2 (tree exp, rtx target)
7d3afc77 2843{
5f51ee59 2844 convert_optab builtin_optab;
1e0c0b35 2845 rtx op0;
2846 rtx_insn *insns;
7d3afc77 2847 tree fndecl = get_callee_fndecl (exp);
4de0924f 2848 tree arg;
3754d046 2849 machine_mode mode;
e951f9a4 2850 enum built_in_function fallback_fn = BUILT_IN_NONE;
7d3afc77 2851
c2f47e15 2852 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2853 gcc_unreachable ();
48e1416a 2854
c2f47e15 2855 arg = CALL_EXPR_ARG (exp, 0);
7d3afc77 2856
2857 switch (DECL_FUNCTION_CODE (fndecl))
2858 {
80ff6494 2859 CASE_FLT_FN (BUILT_IN_IRINT):
e951f9a4 2860 fallback_fn = BUILT_IN_LRINT;
2861 /* FALLTHRU */
7d3afc77 2862 CASE_FLT_FN (BUILT_IN_LRINT):
2863 CASE_FLT_FN (BUILT_IN_LLRINT):
e951f9a4 2864 builtin_optab = lrint_optab;
2865 break;
80ff6494 2866
2867 CASE_FLT_FN (BUILT_IN_IROUND):
e951f9a4 2868 fallback_fn = BUILT_IN_LROUND;
2869 /* FALLTHRU */
ef2f1a10 2870 CASE_FLT_FN (BUILT_IN_LROUND):
2871 CASE_FLT_FN (BUILT_IN_LLROUND):
e951f9a4 2872 builtin_optab = lround_optab;
2873 break;
80ff6494 2874
7d3afc77 2875 default:
2876 gcc_unreachable ();
2877 }
2878
e951f9a4 2879 /* There's no easy way to detect the case we need to set EDOM. */
2880 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2881 return NULL_RTX;
2882
7d3afc77 2883 /* Make a suitable register to place result in. */
2884 mode = TYPE_MODE (TREE_TYPE (exp));
2885
e951f9a4 2886	  /* Since there's no easy way to detect when we'd need to set EDOM,
	     only expand inline if errno need not be maintained.  */
2887 if (!flag_errno_math)
2888 {
de2e453e 2889 rtx result = gen_reg_rtx (mode);
7d3afc77 2890
e951f9a4 2891 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2892 need to expand the argument again. This way, we will not perform
2893	 side-effects more than once.  */
2894 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7d3afc77 2895
e951f9a4 2896 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
7d3afc77 2897
e951f9a4 2898 start_sequence ();
7d3afc77 2899
de2e453e 2900 if (expand_sfix_optab (result, op0, builtin_optab))
e951f9a4 2901 {
2902 /* Output the entire sequence. */
2903 insns = get_insns ();
2904 end_sequence ();
2905 emit_insn (insns);
de2e453e 2906 return result;
e951f9a4 2907 }
2908
2909 /* If we were unable to expand via the builtin, stop the sequence
2910	 (without outputting the insns) and call the library function
2911 with the stabilized argument list. */
7d3afc77 2912 end_sequence ();
2913 }
2914
e951f9a4 2915 if (fallback_fn != BUILT_IN_NONE)
2916 {
2917 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2918 targets, (int) round (x) should never be transformed into
2919 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2920 a call to lround in the hope that the target provides at least some
2921	 C99 functions.  This should result in the best user experience for
2922	 targets without full C99 support.  */
2923 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2924 fallback_fn, 0);
2925
2926 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2927 fallback_fndecl, 1, arg);
2928
2929 target = expand_call (exp, NULL_RTX, target == const0_rtx);
933eb13a 2930 target = maybe_emit_group_store (target, TREE_TYPE (exp));
e951f9a4 2931 return convert_to_mode (mode, target, 0);
2932 }
5f51ee59 2933
de2e453e 2934 return expand_call (exp, target, target == const0_rtx);
7d3afc77 2935}
2936
c2f47e15 2937/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2938 a normal call should be emitted rather than expanding the function
2939 in-line. EXP is the expression that is a call to the builtin
2940 function; if convenient, the result should be placed in TARGET. */
2941
2942static rtx
f97eea22 2943expand_builtin_powi (tree exp, rtx target)
757c219d 2944{
757c219d 2945 tree arg0, arg1;
2946 rtx op0, op1;
3754d046 2947 machine_mode mode;
2948 machine_mode mode2;
757c219d 2949
c2f47e15 2950 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2951 return NULL_RTX;
757c219d 2952
c2f47e15 2953 arg0 = CALL_EXPR_ARG (exp, 0);
2954 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2955 mode = TYPE_MODE (TREE_TYPE (exp));
2956
757c219d 2957 /* Emit a libcall to libgcc. */
2958
c2f47e15 2959 /* Mode of the 2nd argument must match that of an int. */
d0405f40 2960 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2961
757c219d 2962 if (target == NULL_RTX)
2963 target = gen_reg_rtx (mode);
2964
f97eea22 2965 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2966 if (GET_MODE (op0) != mode)
2967 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2968 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2969 if (GET_MODE (op1) != mode2)
2970 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2971
f36b9f69 2972 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2dd6f9ed 2973 target, LCT_CONST, mode, 2,
d0405f40 2974 op0, mode, op1, mode2);
757c219d 2975
2976 return target;
2977}
2978
48e1416a 2979/* Expand expression EXP, which is a call to the strlen builtin.  Return
c2f47e15 2980   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise,
aed0bd19 2981 try to get the result in TARGET, if convenient. */
f7c44134 2982
53800dbe 2983static rtx
c2f47e15 2984expand_builtin_strlen (tree exp, rtx target,
3754d046 2985 machine_mode target_mode)
53800dbe 2986{
c2f47e15 2987 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2988 return NULL_RTX;
53800dbe 2989 else
2990 {
8786db1e 2991 struct expand_operand ops[4];
911c0150 2992 rtx pat;
c2f47e15 2993 tree len;
2994 tree src = CALL_EXPR_ARG (exp, 0);
1e0c0b35 2995 rtx src_reg;
2996 rtx_insn *before_strlen;
3754d046 2997 machine_mode insn_mode = target_mode;
ef2c4a29 2998 enum insn_code icode = CODE_FOR_nothing;
153c3b50 2999 unsigned int align;
6248e345 3000
3001 /* If the length can be computed at compile-time, return it. */
681fab1e 3002 len = c_strlen (src, 0);
6248e345 3003 if (len)
80cd7a5e 3004 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
6248e345 3005
681fab1e 3006 /* If the length can be computed at compile-time and is constant
3007 integer, but there are side-effects in src, evaluate
3008 src for side-effects, then return len.
3009 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3010 can be optimized into: i++; x = 3; */
3011 len = c_strlen (src, 1);
3012 if (len && TREE_CODE (len) == INTEGER_CST)
3013 {
3014 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3015 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3016 }
3017
957d0361 3018 align = get_pointer_alignment (src) / BITS_PER_UNIT;
53800dbe 3019
53800dbe 3020 /* If SRC is not a pointer type, don't do this operation inline. */
3021 if (align == 0)
c2f47e15 3022 return NULL_RTX;
53800dbe 3023
911c0150 3024 /* Bail out if we can't compute strlen in the right mode. */
53800dbe 3025 while (insn_mode != VOIDmode)
3026 {
d6bf3b14 3027 icode = optab_handler (strlen_optab, insn_mode);
53800dbe 3028 if (icode != CODE_FOR_nothing)
c28ae87f 3029 break;
53800dbe 3030
3031 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3032 }
3033 if (insn_mode == VOIDmode)
c2f47e15 3034 return NULL_RTX;
53800dbe 3035
911c0150 3036 /* Make a place to hold the source address. We will not expand
3037 the actual source until we are sure that the expansion will
3038 not fail -- there are trees that cannot be expanded twice. */
3039 src_reg = gen_reg_rtx (Pmode);
53800dbe 3040
911c0150 3041 /* Mark the beginning of the strlen sequence so we can emit the
3042 source operand later. */
f0ce3b1f 3043 before_strlen = get_last_insn ();
53800dbe 3044
8786db1e 3045 create_output_operand (&ops[0], target, insn_mode);
3046 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3047 create_integer_operand (&ops[2], 0);
3048 create_integer_operand (&ops[3], align);
3049 if (!maybe_expand_insn (icode, 4, ops))
c2f47e15 3050 return NULL_RTX;
911c0150 3051
3052 /* Now that we are assured of success, expand the source. */
3053 start_sequence ();
499eee58 3054 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
911c0150 3055 if (pat != src_reg)
499eee58 3056 {
3057#ifdef POINTERS_EXTEND_UNSIGNED
3058 if (GET_MODE (pat) != Pmode)
3059 pat = convert_to_mode (Pmode, pat,
3060 POINTERS_EXTEND_UNSIGNED);
3061#endif
3062 emit_move_insn (src_reg, pat);
3063 }
31d3e01c 3064 pat = get_insns ();
911c0150 3065 end_sequence ();
bceb0d1f 3066
3067 if (before_strlen)
3068 emit_insn_after (pat, before_strlen);
3069 else
3070 emit_insn_before (pat, get_insns ());
53800dbe 3071
3072 /* Return the value in the proper mode for this function. */
8786db1e 3073 if (GET_MODE (ops[0].value) == target_mode)
3074 target = ops[0].value;
53800dbe 3075 else if (target != 0)
8786db1e 3076 convert_move (target, ops[0].value, 0);
53800dbe 3077 else
8786db1e 3078 target = convert_to_mode (target_mode, ops[0].value, 0);
911c0150 3079
3080 return target;
53800dbe 3081 }
3082}
3083
6840589f 3084/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3085 bytes from constant string DATA + OFFSET and return it as target
3086 constant. */
3087
3088static rtx
aecda0d6 3089builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3090 machine_mode mode)
6840589f 3091{
3092 const char *str = (const char *) data;
3093
64db345d 3094 gcc_assert (offset >= 0
3095 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3096 <= strlen (str) + 1));
6840589f 3097
3098 return c_readstr (str + offset, mode);
3099}
3100
36d63243 3101/* LEN specifies the length of the block for a memcpy/memset operation.
9db0f34d 3102 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
 3103   In some cases we can make a very likely guess on the max size, then we
3104 set it into PROBABLE_MAX_SIZE. */
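/* Illustrative example (an assumption for exposition, not taken from the
   sources): if LEN is an SSA name whose recorded value range is [16, 4096],
   MIN_SIZE becomes 16 and both MAX_SIZE and PROBABLE_MAX_SIZE become 4096,
   letting the block-move expanders pick a better strategy than assuming any
   length is possible.  */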
36d63243 3105
3106static void
3107determine_block_size (tree len, rtx len_rtx,
3108 unsigned HOST_WIDE_INT *min_size,
9db0f34d 3109 unsigned HOST_WIDE_INT *max_size,
3110 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 3111{
3112 if (CONST_INT_P (len_rtx))
3113 {
4e140a5c 3114 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 3115 return;
3116 }
3117 else
3118 {
9c1be15e 3119 wide_int min, max;
9db0f34d 3120 enum value_range_type range_type = VR_UNDEFINED;
3121
3122 /* Determine bounds from the type. */
3123 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3124 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3125 else
3126 *min_size = 0;
3127 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 3128 *probable_max_size = *max_size
3129 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 3130 else
3131 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3132
3133 if (TREE_CODE (len) == SSA_NAME)
3134 range_type = get_range_info (len, &min, &max);
3135 if (range_type == VR_RANGE)
36d63243 3136 {
fe5ad926 3137 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 3138 *min_size = min.to_uhwi ();
fe5ad926 3139 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 3140 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 3141 }
9db0f34d 3142 else if (range_type == VR_ANTI_RANGE)
36d63243 3143 {
4a474a5a 3144	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
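	  /* For instance (illustrative example): an anti-range ~[0, 3] says
	     the length is never 0, 1, 2 or 3, so the minimal size can be
	     raised to 4.  */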
fe5ad926 3145 if (min == 0)
9db0f34d 3146 {
9c1be15e 3147 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3148 *min_size = max.to_uhwi () + 1;
9db0f34d 3149 }
3150 /* Code like
3151
3152 int n;
3153 if (n < 100)
4a474a5a 3154 memcpy (a, b, n)
9db0f34d 3155
 3156	     produces an anti-range allowing negative values of N.  We can
 3157	     still use that information and guess that N is not negative.
3158 */
fe5ad926 3159 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3160 *probable_max_size = min.to_uhwi () - 1;
36d63243 3161 }
3162 }
3163 gcc_checking_assert (*max_size <=
3164 (unsigned HOST_WIDE_INT)
3165 GET_MODE_MASK (GET_MODE (len_rtx)));
3166}
3167
f21337ef 3168/* Helper function to do the actual work for expand_builtin_memcpy. */
3169
3170static rtx
3171expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3172{
3173 const char *src_str;
3174 unsigned int src_align = get_pointer_alignment (src);
3175 unsigned int dest_align = get_pointer_alignment (dest);
3176 rtx dest_mem, src_mem, dest_addr, len_rtx;
3177 HOST_WIDE_INT expected_size = -1;
3178 unsigned int expected_align = 0;
3179 unsigned HOST_WIDE_INT min_size;
3180 unsigned HOST_WIDE_INT max_size;
3181 unsigned HOST_WIDE_INT probable_max_size;
3182
3183 /* If DEST is not a pointer type, call the normal function. */
3184 if (dest_align == 0)
3185 return NULL_RTX;
3186
 3187  /* If SRC is not a pointer type, don't do this
3188 operation in-line. */
3189 if (src_align == 0)
3190 return NULL_RTX;
3191
3192 if (currently_expanding_gimple_stmt)
3193 stringop_block_profile (currently_expanding_gimple_stmt,
3194 &expected_align, &expected_size);
3195
3196 if (expected_align < dest_align)
3197 expected_align = dest_align;
3198 dest_mem = get_memory_rtx (dest, len);
3199 set_mem_align (dest_mem, dest_align);
3200 len_rtx = expand_normal (len);
3201 determine_block_size (len, len_rtx, &min_size, &max_size,
3202 &probable_max_size);
3203 src_str = c_getstr (src);
3204
3205 /* If SRC is a string constant and block move would be done
3206 by pieces, we can avoid loading the string from memory
 3207     and only store the computed constants.  */
3208 if (src_str
3209 && CONST_INT_P (len_rtx)
3210 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3211 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3212 CONST_CAST (char *, src_str),
3213 dest_align, false))
3214 {
3215 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3216 builtin_memcpy_read_str,
3217 CONST_CAST (char *, src_str),
3218 dest_align, false, 0);
3219 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3220 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3221 return dest_mem;
3222 }
3223
3224 src_mem = get_memory_rtx (src, len);
3225 set_mem_align (src_mem, src_align);
3226
3227 /* Copy word part most expediently. */
3228 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3229 CALL_EXPR_TAILCALL (exp)
3230 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3231 expected_align, expected_size,
3232 min_size, max_size, probable_max_size);
3233
3234 if (dest_addr == 0)
3235 {
3236 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3237 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3238 }
3239
3240 return dest_addr;
3241}
3242
c2f47e15 3243/* Expand a call EXP to the memcpy builtin.
 3244   Return NULL_RTX if we failed; the caller should emit a normal call;
3b824fa6 3245 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3246 mode MODE if that's convenient). */
c2f47e15 3247
53800dbe 3248static rtx
a65c4d64 3249expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3250{
c2f47e15 3251 if (!validate_arglist (exp,
3252 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3253 return NULL_RTX;
53800dbe 3254 else
3255 {
c2f47e15 3256 tree dest = CALL_EXPR_ARG (exp, 0);
3257 tree src = CALL_EXPR_ARG (exp, 1);
3258 tree len = CALL_EXPR_ARG (exp, 2);
f21337ef 3259 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3260 }
3261}
6840589f 3262
f21337ef 3263/* Expand an instrumented call EXP to the memcpy builtin.
 3264   Return NULL_RTX if we failed; the caller should emit a normal call;
3265 otherwise try to get the result in TARGET, if convenient (and in
3266 mode MODE if that's convenient). */
53800dbe 3267
f21337ef 3268static rtx
3269expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3270{
3271 if (!validate_arglist (exp,
3272 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3273 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3274 INTEGER_TYPE, VOID_TYPE))
3275 return NULL_RTX;
3276 else
3277 {
3278 tree dest = CALL_EXPR_ARG (exp, 0);
3279 tree src = CALL_EXPR_ARG (exp, 2);
3280 tree len = CALL_EXPR_ARG (exp, 4);
3281 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
53800dbe 3282
f21337ef 3283 /* Return src bounds with the result. */
3284 if (res)
e5716f7e 3285 {
17d388d8 3286 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3287 expand_normal (CALL_EXPR_ARG (exp, 1)));
3288 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3289 }
f21337ef 3290 return res;
53800dbe 3291 }
3292}
3293
c2f47e15 3294/* Expand a call EXP to the mempcpy builtin.
3295 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3296 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3297 mode MODE if that's convenient). If ENDP is 0 return the
3298 destination pointer, if ENDP is 1 return the end pointer ala
3299 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3300 stpcpy. */
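/* Illustrative example (not from the original sources): copying the four
   bytes of "abc" (including the NUL) into DST yields DST for ENDP == 0,
   DST + 4 for ENDP == 1 (mempcpy) and DST + 3 for ENDP == 2 (stpcpy, i.e.
   a pointer to the copied NUL terminator).  */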
647661c6 3301
3302static rtx
3754d046 3303expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3304{
c2f47e15 3305 if (!validate_arglist (exp,
3306 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3307 return NULL_RTX;
3308 else
3309 {
3310 tree dest = CALL_EXPR_ARG (exp, 0);
3311 tree src = CALL_EXPR_ARG (exp, 1);
3312 tree len = CALL_EXPR_ARG (exp, 2);
3313 return expand_builtin_mempcpy_args (dest, src, len,
f21337ef 3314 target, mode, /*endp=*/ 1,
3315 exp);
3316 }
3317}
3318
3319/* Expand an instrumented call EXP to the mempcpy builtin.
 3320   Return NULL_RTX if we failed; the caller should emit a normal call;
3321 otherwise try to get the result in TARGET, if convenient (and in
3322 mode MODE if that's convenient). */
3323
3324static rtx
3325expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3326{
3327 if (!validate_arglist (exp,
3328 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3329 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3330 INTEGER_TYPE, VOID_TYPE))
3331 return NULL_RTX;
3332 else
3333 {
3334 tree dest = CALL_EXPR_ARG (exp, 0);
3335 tree src = CALL_EXPR_ARG (exp, 2);
3336 tree len = CALL_EXPR_ARG (exp, 4);
3337 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3338 mode, 1, exp);
3339
3340 /* Return src bounds with the result. */
3341 if (res)
3342 {
17d388d8 3343 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3344 expand_normal (CALL_EXPR_ARG (exp, 1)));
3345 res = chkp_join_splitted_slot (res, bnd);
3346 }
3347 return res;
c2f47e15 3348 }
3349}
3350
3351/* Helper function to do the actual work for expand_builtin_mempcpy. The
3352 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3353 so that this can also be called without constructing an actual CALL_EXPR.
a65c4d64 3354 The other arguments and return value are the same as for
3355 expand_builtin_mempcpy. */
c2f47e15 3356
3357static rtx
a65c4d64 3358expand_builtin_mempcpy_args (tree dest, tree src, tree len,
f21337ef 3359 rtx target, machine_mode mode, int endp,
3360 tree orig_exp)
c2f47e15 3361{
f21337ef 3362 tree fndecl = get_callee_fndecl (orig_exp);
3363
c2f47e15 3364 /* If return value is ignored, transform mempcpy into memcpy. */
f21337ef 3365 if (target == const0_rtx
3366 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3367 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3368 {
3369 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3370 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3371 dest, src, len);
3372 return expand_expr (result, target, mode, EXPAND_NORMAL);
3373 }
3374 else if (target == const0_rtx
3375 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
9fe0e1b8 3376 {
b9a16870 3377 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
0568e9c1 3378 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3379 dest, src, len);
c8b17b2e 3380 return expand_expr (result, target, mode, EXPAND_NORMAL);
9fe0e1b8 3381 }
647661c6 3382 else
3383 {
9fe0e1b8 3384 const char *src_str;
957d0361 3385 unsigned int src_align = get_pointer_alignment (src);
3386 unsigned int dest_align = get_pointer_alignment (dest);
9fe0e1b8 3387 rtx dest_mem, src_mem, len_rtx;
a0c938f0 3388
7da1412b 3389 /* If either SRC or DEST is not a pointer type, don't do this
a0c938f0 3390 operation in-line. */
7da1412b 3391 if (dest_align == 0 || src_align == 0)
c2f47e15 3392 return NULL_RTX;
9fe0e1b8 3393
6217c238 3394 /* If LEN is not constant, call the normal function. */
e913b5cd 3395 if (! tree_fits_uhwi_p (len))
c2f47e15 3396 return NULL_RTX;
0862b7e9 3397
8ec3c5c2 3398 len_rtx = expand_normal (len);
9fe0e1b8 3399 src_str = c_getstr (src);
647661c6 3400
9fe0e1b8 3401 /* If SRC is a string constant and block move would be done
3402 by pieces, we can avoid loading the string from memory
 3403	 and only store the computed constants.  */
3404 if (src_str
971ba038 3405 && CONST_INT_P (len_rtx)
9fe0e1b8 3406 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3407 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
364c0c59 3408 CONST_CAST (char *, src_str),
3409 dest_align, false))
9fe0e1b8 3410 {
d8ae1baa 3411 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3412 set_mem_align (dest_mem, dest_align);
3413 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3414 builtin_memcpy_read_str,
364c0c59 3415 CONST_CAST (char *, src_str),
3416 dest_align, false, endp);
9fe0e1b8 3417 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3418 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3419 return dest_mem;
647661c6 3420 }
3421
971ba038 3422 if (CONST_INT_P (len_rtx)
9fe0e1b8 3423 && can_move_by_pieces (INTVAL (len_rtx),
3424 MIN (dest_align, src_align)))
3425 {
d8ae1baa 3426 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3427 set_mem_align (dest_mem, dest_align);
d8ae1baa 3428 src_mem = get_memory_rtx (src, len);
9fe0e1b8 3429 set_mem_align (src_mem, src_align);
3430 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3431 MIN (dest_align, src_align), endp);
3432 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3433 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3434 return dest_mem;
3435 }
3436
c2f47e15 3437 return NULL_RTX;
647661c6 3438 }
3439}
3440
727c62dd 3441#ifndef HAVE_movstr
3442# define HAVE_movstr 0
3443# define CODE_FOR_movstr CODE_FOR_nothing
3444#endif
3445
c2f47e15 3446/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3447 we failed, the caller should emit a normal call, otherwise try to
3448 get the result in TARGET, if convenient. If ENDP is 0 return the
3449 destination pointer, if ENDP is 1 return the end pointer ala
3450 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3451 stpcpy. */
3452
3453static rtx
3454expand_movstr (tree dest, tree src, rtx target, int endp)
3455{
8786db1e 3456 struct expand_operand ops[3];
727c62dd 3457 rtx dest_mem;
3458 rtx src_mem;
727c62dd 3459
3460 if (!HAVE_movstr)
c2f47e15 3461 return NULL_RTX;
727c62dd 3462
d8ae1baa 3463 dest_mem = get_memory_rtx (dest, NULL);
3464 src_mem = get_memory_rtx (src, NULL);
727c62dd 3465 if (!endp)
3466 {
3467 target = force_reg (Pmode, XEXP (dest_mem, 0));
3468 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3469 }
3470
8786db1e 3471 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3472 create_fixed_operand (&ops[1], dest_mem);
3473 create_fixed_operand (&ops[2], src_mem);
1e1d5623 3474 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3475 return NULL_RTX;
727c62dd 3476
8786db1e 3477 if (endp && target != const0_rtx)
c5aba89c 3478 {
8786db1e 3479 target = ops[0].value;
3480 /* movstr is supposed to set end to the address of the NUL
3481 terminator. If the caller requested a mempcpy-like return value,
3482 adjust it. */
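	  /* E.g. after copying "hi" the pattern leaves TARGET at the NUL;
	     a mempcpy-style caller (ENDP == 1) expects TARGET + 1, one past
	     it (illustrative example).  */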
3483 if (endp == 1)
3484 {
29c05e22 3485 rtx tem = plus_constant (GET_MODE (target),
3486 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3487 emit_move_insn (target, force_operand (tem, NULL_RTX));
3488 }
c5aba89c 3489 }
727c62dd 3490 return target;
3491}
3492
48e1416a 3493/* Expand expression EXP, which is a call to the strcpy builtin. Return
 3494   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise,
c2f47e15 3495 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3496 convenient). */
902de8ed 3497
53800dbe 3498static rtx
a65c4d64 3499expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3500{
c2f47e15 3501 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3502 {
3503 tree dest = CALL_EXPR_ARG (exp, 0);
3504 tree src = CALL_EXPR_ARG (exp, 1);
a65c4d64 3505 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3506 }
3507 return NULL_RTX;
3508}
3509
3510/* Helper function to do the actual work for expand_builtin_strcpy. The
3511 arguments to the builtin_strcpy call DEST and SRC are broken out
3512 so that this can also be called without constructing an actual CALL_EXPR.
3513 The other arguments and return value are the same as for
3514 expand_builtin_strcpy. */
3515
3516static rtx
a65c4d64 3517expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3518{
c2f47e15 3519 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3520}
3521
c2f47e15 3522/* Expand a call EXP to the stpcpy builtin.
 3523   Return NULL_RTX if we failed; the caller should emit a normal call;
3b824fa6 3524 otherwise try to get the result in TARGET, if convenient (and in
3525 mode MODE if that's convenient). */
3526
3527static rtx
3754d046 3528expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3529{
c2f47e15 3530 tree dst, src;
389dd41b 3531 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3532
3533 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3534 return NULL_RTX;
3535
3536 dst = CALL_EXPR_ARG (exp, 0);
3537 src = CALL_EXPR_ARG (exp, 1);
3538
727c62dd 3539 /* If return value is ignored, transform stpcpy into strcpy. */
b9a16870 3540 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3541 {
b9a16870 3542 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3543 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3544 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3545 }
3b824fa6 3546 else
3547 {
c2f47e15 3548 tree len, lenp1;
727c62dd 3549 rtx ret;
647661c6 3550
9fe0e1b8 3551 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3552 compile-time, not an expression containing a string. This is
3553 because the latter will potentially produce pessimized code
3554 when used to produce the return value. */
681fab1e 3555 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3556 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3557
389dd41b 3558 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3559 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
f21337ef 3560 target, mode, /*endp=*/2,
3561 exp);
727c62dd 3562
3563 if (ret)
3564 return ret;
3565
3566 if (TREE_CODE (len) == INTEGER_CST)
3567 {
8ec3c5c2 3568 rtx len_rtx = expand_normal (len);
727c62dd 3569
971ba038 3570 if (CONST_INT_P (len_rtx))
727c62dd 3571 {
a65c4d64 3572 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3573
3574 if (ret)
3575 {
3576 if (! target)
7ac87324 3577 {
3578 if (mode != VOIDmode)
3579 target = gen_reg_rtx (mode);
3580 else
3581 target = gen_reg_rtx (GET_MODE (ret));
3582 }
727c62dd 3583 if (GET_MODE (target) != GET_MODE (ret))
3584 ret = gen_lowpart (GET_MODE (target), ret);
3585
29c05e22 3586 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3587 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3588 gcc_assert (ret);
727c62dd 3589
3590 return target;
3591 }
3592 }
3593 }
3594
c2f47e15 3595 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3596 }
3597}
3598
6840589f 3599/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3600 bytes from constant string DATA + OFFSET and return it as target
3601 constant. */
3602
09879952 3603rtx
aecda0d6 3604builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3605 machine_mode mode)
6840589f 3606{
3607 const char *str = (const char *) data;
3608
3609 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3610 return const0_rtx;
3611
3612 return c_readstr (str + offset, mode);
3613}
3614
48e1416a 3615/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 3616   NULL_RTX if we failed; the caller should emit a normal call.  */
ed09096d 3617
3618static rtx
a65c4d64 3619expand_builtin_strncpy (tree exp, rtx target)
ed09096d 3620{
389dd41b 3621 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3622
3623 if (validate_arglist (exp,
3624 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 3625 {
c2f47e15 3626 tree dest = CALL_EXPR_ARG (exp, 0);
3627 tree src = CALL_EXPR_ARG (exp, 1);
3628 tree len = CALL_EXPR_ARG (exp, 2);
3629 tree slen = c_strlen (src, 1);
6840589f 3630
8ff6a5cd 3631 /* We must be passed a constant len and src parameter. */
e913b5cd 3632 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 3633 return NULL_RTX;
ed09096d 3634
389dd41b 3635 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 3636
3637 /* We're required to pad with trailing zeros if the requested
a0c938f0 3638 len is greater than strlen(s2)+1. In that case try to
6840589f 3639	 use store_by_pieces; if that fails, punt.  */
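      /* For example (illustrative only): strncpy (dst, "ab", 5) must store
	 'a', 'b' and three NUL bytes; when this fits the store_by_pieces
	 constraints, the whole pattern is emitted as constant stores.  */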
ed09096d 3640 if (tree_int_cst_lt (slen, len))
6840589f 3641 {
957d0361 3642 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 3643 const char *p = c_getstr (src);
6840589f 3644 rtx dest_mem;
3645
e913b5cd 3646 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3647 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 3648 builtin_strncpy_read_str,
364c0c59 3649 CONST_CAST (char *, p),
3650 dest_align, false))
c2f47e15 3651 return NULL_RTX;
6840589f 3652
d8ae1baa 3653 dest_mem = get_memory_rtx (dest, len);
e913b5cd 3654 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 3655 builtin_strncpy_read_str,
364c0c59 3656 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 3657 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 3658 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 3659 return dest_mem;
6840589f 3660 }
ed09096d 3661 }
c2f47e15 3662 return NULL_RTX;
ed09096d 3663}
3664
ecc318ff 3665/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3666 bytes from constant string DATA + OFFSET and return it as target
3667 constant. */
3668
f656b751 3669rtx
aecda0d6 3670builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3671 machine_mode mode)
ecc318ff 3672{
3673 const char *c = (const char *) data;
364c0c59 3674 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3675
3676 memset (p, *c, GET_MODE_SIZE (mode));
3677
3678 return c_readstr (p, mode);
3679}
3680
a7ec6974 3681/* Callback routine for store_by_pieces. Return the RTL of a register
3682 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3683 char value given in the RTL register data. For example, if mode is
3684 4 bytes wide, return the RTL for 0x01010101*data. */
3685
3686static rtx
aecda0d6 3687builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3688 machine_mode mode)
a7ec6974 3689{
3690 rtx target, coeff;
3691 size_t size;
3692 char *p;
3693
3694 size = GET_MODE_SIZE (mode);
f0ce3b1f 3695 if (size == 1)
3696 return (rtx) data;
a7ec6974 3697
364c0c59 3698 p = XALLOCAVEC (char, size);
a7ec6974 3699 memset (p, 1, size);
3700 coeff = c_readstr (p, mode);
3701
f0ce3b1f 3702 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3703 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3704 return force_reg (mode, target);
3705}
3706
48e1416a 3707/* Expand expression EXP, which is a call to the memset builtin. Return
3708 NULL_RTX if we failed the caller should emit a normal call, otherwise
c2f47e15 3709 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3710 convenient). */
902de8ed 3711
53800dbe 3712static rtx
3754d046 3713expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3714{
c2f47e15 3715 if (!validate_arglist (exp,
3716 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3717 return NULL_RTX;
53800dbe 3718 else
3719 {
c2f47e15 3720 tree dest = CALL_EXPR_ARG (exp, 0);
3721 tree val = CALL_EXPR_ARG (exp, 1);
3722 tree len = CALL_EXPR_ARG (exp, 2);
3723 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3724 }
3725}
53800dbe 3726
f21337ef 3727/* Expand expression EXP, which is an instrumented call to the memset builtin.
3728 Return NULL_RTX if we failed the caller should emit a normal call, otherwise
3729 try to get the result in TARGET, if convenient (and in mode MODE if that's
3730 convenient). */
3731
3732static rtx
3733expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3734{
3735 if (!validate_arglist (exp,
3736 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3737 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3738 return NULL_RTX;
3739 else
3740 {
3741 tree dest = CALL_EXPR_ARG (exp, 0);
3742 tree val = CALL_EXPR_ARG (exp, 2);
3743 tree len = CALL_EXPR_ARG (exp, 3);
3744 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3745
3746 /* Return src bounds with the result. */
3747 if (res)
3748 {
17d388d8 3749 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3750 expand_normal (CALL_EXPR_ARG (exp, 1)));
3751 res = chkp_join_splitted_slot (res, bnd);
3752 }
3753 return res;
3754 }
3755}
3756
c2f47e15 3757/* Helper function to do the actual work for expand_builtin_memset. The
3758 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3759 so that this can also be called without constructing an actual CALL_EXPR.
3760 The other arguments and return value are the same as for
3761 expand_builtin_memset. */
6b961939 3762
c2f47e15 3763static rtx
3764expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 3765 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 3766{
3767 tree fndecl, fn;
3768 enum built_in_function fcode;
3754d046 3769 machine_mode val_mode;
c2f47e15 3770 char c;
3771 unsigned int dest_align;
3772 rtx dest_mem, dest_addr, len_rtx;
3773 HOST_WIDE_INT expected_size = -1;
3774 unsigned int expected_align = 0;
36d63243 3775 unsigned HOST_WIDE_INT min_size;
3776 unsigned HOST_WIDE_INT max_size;
9db0f34d 3777 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 3778
957d0361 3779 dest_align = get_pointer_alignment (dest);
162719b3 3780
c2f47e15 3781 /* If DEST is not a pointer type, don't do this operation in-line. */
3782 if (dest_align == 0)
3783 return NULL_RTX;
6f428e8b 3784
8cee8dc0 3785 if (currently_expanding_gimple_stmt)
3786 stringop_block_profile (currently_expanding_gimple_stmt,
3787 &expected_align, &expected_size);
75a70cf9 3788
c2f47e15 3789 if (expected_align < dest_align)
3790 expected_align = dest_align;
6b961939 3791
c2f47e15 3792 /* If the LEN parameter is zero, return DEST. */
3793 if (integer_zerop (len))
3794 {
3795 /* Evaluate and ignore VAL in case it has side-effects. */
3796 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3797 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3798 }
7a3e5564 3799
c2f47e15 3800 /* Stabilize the arguments in case we fail. */
3801 dest = builtin_save_expr (dest);
3802 val = builtin_save_expr (val);
3803 len = builtin_save_expr (len);
a7ec6974 3804
c2f47e15 3805 len_rtx = expand_normal (len);
9db0f34d 3806 determine_block_size (len, len_rtx, &min_size, &max_size,
3807 &probable_max_size);
c2f47e15 3808 dest_mem = get_memory_rtx (dest, len);
03a5dda9 3809 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 3810
c2f47e15 3811 if (TREE_CODE (val) != INTEGER_CST)
3812 {
3813 rtx val_rtx;
a7ec6974 3814
c2f47e15 3815 val_rtx = expand_normal (val);
03a5dda9 3816 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 3817
c2f47e15 3818 /* Assume that we can memset by pieces if we can store
 3819	 the coefficients by pieces (in the required modes).
 3820	 We can't pass builtin_memset_gen_str as that emits RTL.  */
3821 c = 1;
e913b5cd 3822 if (tree_fits_uhwi_p (len)
3823 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3824 builtin_memset_read_str, &c, dest_align,
3825 true))
c2f47e15 3826 {
03a5dda9 3827 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 3828 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3829 builtin_memset_gen_str, val_rtx, dest_align,
3830 true, 0);
c2f47e15 3831 }
3832 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3833 dest_align, expected_align,
9db0f34d 3834 expected_size, min_size, max_size,
3835 probable_max_size))
6b961939 3836 goto do_libcall;
48e1416a 3837
c2f47e15 3838 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3839 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3840 return dest_mem;
3841 }
53800dbe 3842
c2f47e15 3843 if (target_char_cast (val, &c))
3844 goto do_libcall;
ecc318ff 3845
c2f47e15 3846 if (c)
3847 {
e913b5cd 3848 if (tree_fits_uhwi_p (len)
3849 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3850 builtin_memset_read_str, &c, dest_align,
3851 true))
e913b5cd 3852 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3853 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 3854 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3855 gen_int_mode (c, val_mode),
c2f47e15 3856 dest_align, expected_align,
9db0f34d 3857 expected_size, min_size, max_size,
3858 probable_max_size))
c2f47e15 3859 goto do_libcall;
48e1416a 3860
c2f47e15 3861 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3862 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3863 return dest_mem;
3864 }
ecc318ff 3865
c2f47e15 3866 set_mem_align (dest_mem, dest_align);
3867 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3868 CALL_EXPR_TAILCALL (orig_exp)
3869 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 3870 expected_align, expected_size,
9db0f34d 3871 min_size, max_size,
3872 probable_max_size);
53800dbe 3873
c2f47e15 3874 if (dest_addr == 0)
3875 {
3876 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3877 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3878 }
53800dbe 3879
c2f47e15 3880 return dest_addr;
6b961939 3881
c2f47e15 3882 do_libcall:
3883 fndecl = get_callee_fndecl (orig_exp);
3884 fcode = DECL_FUNCTION_CODE (fndecl);
f21337ef 3885 if (fcode == BUILT_IN_MEMSET
3886 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
0568e9c1 3887 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3888 dest, val, len);
c2f47e15 3889 else if (fcode == BUILT_IN_BZERO)
0568e9c1 3890 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3891 dest, len);
c2f47e15 3892 else
3893 gcc_unreachable ();
a65c4d64 3894 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3895 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 3896 return expand_call (fn, target, target == const0_rtx);
53800dbe 3897}
3898
48e1416a 3899/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 3900   NULL_RTX if we failed; the caller should emit a normal call.  */
27d0c333 3901
ffc83088 3902static rtx
0b25db21 3903expand_builtin_bzero (tree exp)
ffc83088 3904{
c2f47e15 3905 tree dest, size;
389dd41b 3906 location_t loc = EXPR_LOCATION (exp);
ffc83088 3907
c2f47e15 3908 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 3909 return NULL_RTX;
ffc83088 3910
c2f47e15 3911 dest = CALL_EXPR_ARG (exp, 0);
3912 size = CALL_EXPR_ARG (exp, 1);
bf8e3599 3913
7369e7ba 3914 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 3915 memset(ptr x, int 0, size_t y). This is done this way
 3916      so that if it isn't expanded inline, we fall back to
3917 calling bzero instead of memset. */
bf8e3599 3918
c2f47e15 3919 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 3920 fold_convert_loc (loc,
3921 size_type_node, size),
c2f47e15 3922 const0_rtx, VOIDmode, exp);
ffc83088 3923}
3924
7a3f89b5 3925/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 3926 Return NULL_RTX if we failed and the caller should emit a normal call,
3927 otherwise try to get the result in TARGET, if convenient (and in mode
3928 MODE, if that's convenient). */
27d0c333 3929
53800dbe 3930static rtx
a65c4d64 3931expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 3932 ATTRIBUTE_UNUSED machine_mode mode)
53800dbe 3933{
a65c4d64 3934 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 3935
c2f47e15 3936 if (!validate_arglist (exp,
3937 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3938 return NULL_RTX;
6f428e8b 3939
bd021c1c 3940 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3941 implementing memcmp because it will stop if it encounters two
3942 zero bytes. */
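/* For instance (illustrative example), memcmp ("a\0b", "a\0c", 3) must
   report a difference at the third byte, whereas a string compare would
   stop at the embedded NULs and wrongly report equality.  */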
3943#if defined HAVE_cmpmemsi
53800dbe 3944 {
0cd832f0 3945 rtx arg1_rtx, arg2_rtx, arg3_rtx;
53800dbe 3946 rtx result;
0cd832f0 3947 rtx insn;
c2f47e15 3948 tree arg1 = CALL_EXPR_ARG (exp, 0);
3949 tree arg2 = CALL_EXPR_ARG (exp, 1);
3950 tree len = CALL_EXPR_ARG (exp, 2);
53800dbe 3951
957d0361 3952 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3953 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3754d046 3954 machine_mode insn_mode;
b428c0a5 3955
b428c0a5 3956 if (HAVE_cmpmemsi)
3957 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3958 else
c2f47e15 3959 return NULL_RTX;
53800dbe 3960
3961 /* If we don't have POINTER_TYPE, call the function. */
3962 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 3963 return NULL_RTX;
53800dbe 3964
3965 /* Make a place to write the result of the instruction. */
3966 result = target;
3967 if (! (result != 0
8ad4c111 3968 && REG_P (result) && GET_MODE (result) == insn_mode
53800dbe 3969 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3970 result = gen_reg_rtx (insn_mode);
3971
d8ae1baa 3972 arg1_rtx = get_memory_rtx (arg1, len);
3973 arg2_rtx = get_memory_rtx (arg2, len);
389dd41b 3974 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
83f88f8e 3975
3976 /* Set MEM_SIZE as appropriate. */
971ba038 3977 if (CONST_INT_P (arg3_rtx))
83f88f8e 3978 {
5b2a69fa 3979 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3980 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
83f88f8e 3981 }
3982
b428c0a5 3983 if (HAVE_cmpmemsi)
3984 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3985 GEN_INT (MIN (arg1_align, arg2_align)));
0cd832f0 3986 else
64db345d 3987 gcc_unreachable ();
0cd832f0 3988
3989 if (insn)
3990 emit_insn (insn);
3991 else
2dd6f9ed 3992 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
0cd832f0 3993 TYPE_MODE (integer_type_node), 3,
3994 XEXP (arg1_rtx, 0), Pmode,
3995 XEXP (arg2_rtx, 0), Pmode,
3996 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
78a8ed03 3997 TYPE_UNSIGNED (sizetype)),
0cd832f0 3998 TYPE_MODE (sizetype));
53800dbe 3999
4000 /* Return the value in the proper mode for this function. */
4001 mode = TYPE_MODE (TREE_TYPE (exp));
4002 if (GET_MODE (result) == mode)
4003 return result;
4004 else if (target != 0)
4005 {
4006 convert_move (target, result, 0);
4007 return target;
4008 }
4009 else
4010 return convert_to_mode (mode, result, 0);
4011 }
bd021c1c 4012#endif /* HAVE_cmpmemsi. */
53800dbe 4013
c2f47e15 4014 return NULL_RTX;
6f428e8b 4015}
4016
c2f47e15 4017/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
53800dbe 4018   if we failed; the caller should emit a normal call.  Otherwise try to get
4019 the result in TARGET, if convenient. */
902de8ed 4020
53800dbe 4021static rtx
a65c4d64 4022expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 4023{
c2f47e15 4024 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4025 return NULL_RTX;
bf8e3599 4026
6ac5504b 4027#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
6b531606 4028 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4029 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
6ac5504b 4030 {
4031 rtx arg1_rtx, arg2_rtx;
4032 rtx result, insn = NULL_RTX;
4033 tree fndecl, fn;
c2f47e15 4034 tree arg1 = CALL_EXPR_ARG (exp, 0);
4035 tree arg2 = CALL_EXPR_ARG (exp, 1);
a0c938f0 4036
957d0361 4037 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4038 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6ac5504b 4039
4040 /* If we don't have POINTER_TYPE, call the function. */
4041 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4042 return NULL_RTX;
7a3f89b5 4043
6ac5504b 4044 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4045 arg1 = builtin_save_expr (arg1);
4046 arg2 = builtin_save_expr (arg2);
7a3f89b5 4047
d8ae1baa 4048 arg1_rtx = get_memory_rtx (arg1, NULL);
4049 arg2_rtx = get_memory_rtx (arg2, NULL);
53800dbe 4050
6ac5504b 4051#ifdef HAVE_cmpstrsi
4052 /* Try to call cmpstrsi. */
4053 if (HAVE_cmpstrsi)
4054 {
3754d046 4055 machine_mode insn_mode
6ac5504b 4056 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4057
4058 /* Make a place to write the result of the instruction. */
4059 result = target;
4060 if (! (result != 0
4061 && REG_P (result) && GET_MODE (result) == insn_mode
4062 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4063 result = gen_reg_rtx (insn_mode);
4064
4065 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4066 GEN_INT (MIN (arg1_align, arg2_align)));
4067 }
4068#endif
03fd9d2c 4069#ifdef HAVE_cmpstrnsi
6ac5504b 4070 /* Try to determine at least one length and call cmpstrnsi. */
a0c938f0 4071 if (!insn && HAVE_cmpstrnsi)
6ac5504b 4072 {
4073 tree len;
4074 rtx arg3_rtx;
4075
3754d046 4076 machine_mode insn_mode
6ac5504b 4077 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4078 tree len1 = c_strlen (arg1, 1);
4079 tree len2 = c_strlen (arg2, 1);
4080
4081 if (len1)
4082 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4083 if (len2)
4084 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4085
4086 /* If we don't have a constant length for the first, use the length
4087 of the second, if we know it. We don't require a constant for
4088 this case; some cost analysis could be done if both are available
4089 but neither is constant. For now, assume they're equally cheap,
4090 unless one has side effects. If both strings have constant lengths,
4091 use the smaller. */
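	  /* Illustrative example: in strcmp (s, "abc") only len2 = 4 is
	     known, so the comparison is emitted with length 4; the
	     literal's NUL bounds how far a conforming strcmp ever needs
	     to look.  */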
4092
4093 if (!len1)
4094 len = len2;
4095 else if (!len2)
4096 len = len1;
4097 else if (TREE_SIDE_EFFECTS (len1))
4098 len = len2;
4099 else if (TREE_SIDE_EFFECTS (len2))
4100 len = len1;
4101 else if (TREE_CODE (len1) != INTEGER_CST)
4102 len = len2;
4103 else if (TREE_CODE (len2) != INTEGER_CST)
4104 len = len1;
4105 else if (tree_int_cst_lt (len1, len2))
4106 len = len1;
4107 else
4108 len = len2;
4109
4110 /* If both arguments have side effects, we cannot optimize. */
4111 if (!len || TREE_SIDE_EFFECTS (len))
6b961939 4112 goto do_libcall;
53800dbe 4113
8ec3c5c2 4114 arg3_rtx = expand_normal (len);
902de8ed 4115
6ac5504b 4116 /* Make a place to write the result of the instruction. */
4117 result = target;
4118 if (! (result != 0
4119 && REG_P (result) && GET_MODE (result) == insn_mode
4120 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4121 result = gen_reg_rtx (insn_mode);
53800dbe 4122
6ac5504b 4123 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4124 GEN_INT (MIN (arg1_align, arg2_align)));
4125 }
4126#endif
3f8aefe2 4127
6ac5504b 4128 if (insn)
4129 {
3754d046 4130 machine_mode mode;
6ac5504b 4131 emit_insn (insn);
3f8aefe2 4132
6ac5504b 4133 /* Return the value in the proper mode for this function. */
4134 mode = TYPE_MODE (TREE_TYPE (exp));
4135 if (GET_MODE (result) == mode)
4136 return result;
4137 if (target == 0)
4138 return convert_to_mode (mode, result, 0);
4139 convert_move (target, result, 0);
4140 return target;
4141 }
902de8ed 4142
6ac5504b 4143 /* Expand the library call ourselves using a stabilized argument
4144 list to avoid re-evaluating the function's arguments twice. */
2694880e 4145#ifdef HAVE_cmpstrnsi
6b961939 4146 do_libcall:
2694880e 4147#endif
6ac5504b 4148 fndecl = get_callee_fndecl (exp);
0568e9c1 4149 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
a65c4d64 4150 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4151 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6ac5504b 4152 return expand_call (fn, target, target == const0_rtx);
4153 }
7a3f89b5 4154#endif
c2f47e15 4155 return NULL_RTX;
83d79705 4156}
53800dbe 4157
48e1416a 4158/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 4159   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
ed09096d 4160 the result in TARGET, if convenient. */
27d0c333 4161
ed09096d 4162static rtx
a65c4d64 4163expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4164 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4165{
a65c4d64 4166 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 4167
c2f47e15 4168 if (!validate_arglist (exp,
4169 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4170 return NULL_RTX;
ed09096d 4171
6e34e617 4172 /* If c_strlen can determine an expression for one of the string
6ac5504b 4173 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4174 using length MIN(strlen(string)+1, arg3). */
6ac5504b 4175#ifdef HAVE_cmpstrnsi
4176 if (HAVE_cmpstrnsi)
7a3f89b5 4177 {
4178 tree len, len1, len2;
4179 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4180 rtx result, insn;
0b25db21 4181 tree fndecl, fn;
c2f47e15 4182 tree arg1 = CALL_EXPR_ARG (exp, 0);
4183 tree arg2 = CALL_EXPR_ARG (exp, 1);
4184 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 4185
957d0361 4186 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4187 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3754d046 4188 machine_mode insn_mode
6ac5504b 4189 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
bf8e3599 4190
681fab1e 4191 len1 = c_strlen (arg1, 1);
4192 len2 = c_strlen (arg2, 1);
7a3f89b5 4193
4194 if (len1)
389dd41b 4195 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 4196 if (len2)
389dd41b 4197 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 4198
4199 /* If we don't have a constant length for the first, use the length
4200 of the second, if we know it. We don't require a constant for
4201 this case; some cost analysis could be done if both are available
4202 but neither is constant. For now, assume they're equally cheap,
4203 unless one has side effects. If both strings have constant lengths,
4204 use the smaller. */
4205
4206 if (!len1)
4207 len = len2;
4208 else if (!len2)
4209 len = len1;
4210 else if (TREE_SIDE_EFFECTS (len1))
4211 len = len2;
4212 else if (TREE_SIDE_EFFECTS (len2))
4213 len = len1;
4214 else if (TREE_CODE (len1) != INTEGER_CST)
4215 len = len2;
4216 else if (TREE_CODE (len2) != INTEGER_CST)
4217 len = len1;
4218 else if (tree_int_cst_lt (len1, len2))
4219 len = len1;
4220 else
4221 len = len2;
6e34e617 4222
7a3f89b5 4223 /* If both arguments have side effects, we cannot optimize. */
4224 if (!len || TREE_SIDE_EFFECTS (len))
c2f47e15 4225 return NULL_RTX;
bf8e3599 4226
7a3f89b5 4227 /* The actual new length parameter is MIN(len,arg3). */
389dd41b 4228 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4229 fold_convert_loc (loc, TREE_TYPE (len), arg3));
7a3f89b5 4230
4231 /* If we don't have POINTER_TYPE, call the function. */
4232 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4233 return NULL_RTX;
7a3f89b5 4234
4235 /* Make a place to write the result of the instruction. */
4236 result = target;
4237 if (! (result != 0
8ad4c111 4238 && REG_P (result) && GET_MODE (result) == insn_mode
7a3f89b5 4239 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4240 result = gen_reg_rtx (insn_mode);
4241
a65c4d64 4242 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4243 arg1 = builtin_save_expr (arg1);
4244 arg2 = builtin_save_expr (arg2);
4245 len = builtin_save_expr (len);
27d0c333 4246
a65c4d64 4247 arg1_rtx = get_memory_rtx (arg1, len);
4248 arg2_rtx = get_memory_rtx (arg2, len);
4249 arg3_rtx = expand_normal (len);
4250 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4251 GEN_INT (MIN (arg1_align, arg2_align)));
4252 if (insn)
4253 {
4254 emit_insn (insn);
49f0327b 4255
a65c4d64 4256 /* Return the value in the proper mode for this function. */
4257 mode = TYPE_MODE (TREE_TYPE (exp));
4258 if (GET_MODE (result) == mode)
4259 return result;
4260 if (target == 0)
4261 return convert_to_mode (mode, result, 0);
4262 convert_move (target, result, 0);
4263 return target;
4264 }
27d0c333 4265
a65c4d64 4266 /* Expand the library call ourselves using a stabilized argument
4267 list to avoid re-evaluating the function's arguments twice. */
4268 fndecl = get_callee_fndecl (exp);
0568e9c1 4269 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4270 arg1, arg2, len);
a65c4d64 4271 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4272 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4273 return expand_call (fn, target, target == const0_rtx);
4274 }
4275#endif
c2f47e15 4276 return NULL_RTX;
49f0327b 4277}
4278
a66c9326 4279/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4280 if that's convenient. */
902de8ed 4281
a66c9326 4282rtx
aecda0d6 4283expand_builtin_saveregs (void)
53800dbe 4284{
1e0c0b35 4285 rtx val;
4286 rtx_insn *seq;
53800dbe 4287
4288 /* Don't do __builtin_saveregs more than once in a function.
4289 Save the result of the first call and reuse it. */
4290 if (saveregs_value != 0)
4291 return saveregs_value;
53800dbe 4292
a66c9326 4293 /* When this function is called, it means that registers must be
4294 saved on entry to this function. So we migrate the call to the
4295 first insn of this function. */
4296
4297 start_sequence ();
53800dbe 4298
a66c9326 4299 /* Do whatever the machine needs done in this case. */
45550790 4300 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4301
a66c9326 4302 seq = get_insns ();
4303 end_sequence ();
53800dbe 4304
a66c9326 4305 saveregs_value = val;
53800dbe 4306
31d3e01c 4307 /* Put the insns after the NOTE that starts the function. If this
4308 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4309 the code is placed at the start of the function. */
4310 push_topmost_sequence ();
0ec80471 4311 emit_insn_after (seq, entry_of_function ());
a66c9326 4312 pop_topmost_sequence ();
4313
4314 return val;
53800dbe 4315}
4316
79012a9d 4317/* Expand a call to __builtin_next_arg. */
27d0c333 4318
53800dbe 4319static rtx
79012a9d 4320expand_builtin_next_arg (void)
53800dbe 4321{
79012a9d 4322 /* Checking arguments is already done in fold_builtin_next_arg
4323 that must be called before this function. */
940ddc5c 4324 return expand_binop (ptr_mode, add_optab,
abe32cce 4325 crtl->args.internal_arg_pointer,
4326 crtl->args.arg_offset_rtx,
53800dbe 4327 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4328}
4329
a66c9326 4330/* Make it easier for the backends by protecting the valist argument
4331 from multiple evaluations. */
4332
4333static tree
389dd41b 4334stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4335{
5f57a8b1 4336 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4337
182cf5a9 4338 /* The current way of determining the type of valist is completely
4339 bogus. We should have the information on the va builtin instead. */
4340 if (!vatype)
4341 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4342
4343 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4344 {
2d47cc32 4345 if (TREE_SIDE_EFFECTS (valist))
4346 valist = save_expr (valist);
11a61dea 4347
2d47cc32 4348 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4349 vatype, but it's possible we've actually been given an array
4350 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4351 So fix it. */
4352 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4353 {
5f57a8b1 4354 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4355 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4356 }
a66c9326 4357 }
11a61dea 4358 else
a66c9326 4359 {
182cf5a9 4360 tree pt = build_pointer_type (vatype);
11a61dea 4361
2d47cc32 4362 if (! needs_lvalue)
4363 {
11a61dea 4364 if (! TREE_SIDE_EFFECTS (valist))
4365 return valist;
bf8e3599 4366
389dd41b 4367 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4368 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4369 }
2d47cc32 4370
11a61dea 4371 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4372 valist = save_expr (valist);
182cf5a9 4373 valist = fold_build2_loc (loc, MEM_REF,
4374 vatype, valist, build_int_cst (pt, 0));
a66c9326 4375 }
4376
4377 return valist;
4378}
4379
2e15d750 4380/* The "standard" definition of va_list is void*. */
4381
4382tree
4383std_build_builtin_va_list (void)
4384{
4385 return ptr_type_node;
4386}
4387
5f57a8b1 4388/* The "standard" abi va_list is va_list_type_node. */
4389
4390tree
4391std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4392{
4393 return va_list_type_node;
4394}
4395
4396/* The "standard" type of va_list is va_list_type_node. */
4397
4398tree
4399std_canonical_va_list_type (tree type)
4400{
4401 tree wtype, htype;
4402
4403 if (INDIRECT_REF_P (type))
4404 type = TREE_TYPE (type);
9af5ce0c 4405 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
5f57a8b1 4406 type = TREE_TYPE (type);
5f57a8b1 4407 wtype = va_list_type_node;
4408 htype = type;
7b36f9ab 4409 /* Treat structure va_list types. */
4410 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4411 htype = TREE_TYPE (htype);
4412 else if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4413 {
4414 /* If va_list is an array type, the argument may have decayed
4415 to a pointer type, e.g. by being passed to another function.
4416 In that case, unwrap both types so that we can compare the
4417 underlying records. */
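      /* Illustrative note (not target-specific documentation): on ABIs
	 where va_list is a one-element array of a record, a va_list
	 parameter of another function arrives as a pointer to that record,
	 and both forms must be recognized here.  */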
4418 if (TREE_CODE (htype) == ARRAY_TYPE
4419 || POINTER_TYPE_P (htype))
4420 {
4421 wtype = TREE_TYPE (wtype);
4422 htype = TREE_TYPE (htype);
4423 }
4424 }
4425 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4426 return va_list_type_node;
4427
4428 return NULL_TREE;
4429}
4430
a66c9326 4431/* The "standard" implementation of va_start: just assign `nextarg' to
4432 the variable. */
27d0c333 4433
a66c9326 4434void
aecda0d6 4435std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4436{
f03c17bc 4437 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4438 convert_move (va_r, nextarg, 0);
058a1b7a 4439
4440 /* We do not have any valid bounds for the pointer, so
4441 just store zero bounds for it. */
4442 if (chkp_function_instrumented_p (current_function_decl))
4443 chkp_expand_bounds_reset_for_mem (valist,
4444 make_tree (TREE_TYPE (valist),
4445 nextarg));
a66c9326 4446}
4447
c2f47e15 4448/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4449
a66c9326 4450static rtx
c2f47e15 4451expand_builtin_va_start (tree exp)
a66c9326 4452{
4453 rtx nextarg;
c2f47e15 4454 tree valist;
389dd41b 4455 location_t loc = EXPR_LOCATION (exp);
a66c9326 4456
c2f47e15 4457 if (call_expr_nargs (exp) < 2)
cb166087 4458 {
389dd41b 4459 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4460 return const0_rtx;
4461 }
a66c9326 4462
c2f47e15 4463 if (fold_builtin_next_arg (exp, true))
79012a9d 4464 return const0_rtx;
7c2f0500 4465
79012a9d 4466 nextarg = expand_builtin_next_arg ();
389dd41b 4467 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4468
8a58ed0a 4469 if (targetm.expand_builtin_va_start)
4470 targetm.expand_builtin_va_start (valist, nextarg);
4471 else
4472 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4473
4474 return const0_rtx;
4475}
4476
c2f47e15 4477/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4478
a66c9326 4479static rtx
c2f47e15 4480expand_builtin_va_end (tree exp)
a66c9326 4481{
c2f47e15 4482 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4483
8a15c04a 4484 /* Evaluate for side effects, if needed. I hate macros that don't
4485 do that. */
4486 if (TREE_SIDE_EFFECTS (valist))
4487 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4488
4489 return const0_rtx;
4490}
4491
c2f47e15 4492/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4493 builtin rather than just as an assignment in stdarg.h because of the
4494 nastiness of array-type va_list types. */
f7c44134 4495
a66c9326 4496static rtx
c2f47e15 4497expand_builtin_va_copy (tree exp)
a66c9326 4498{
4499 tree dst, src, t;
389dd41b 4500 location_t loc = EXPR_LOCATION (exp);
a66c9326 4501
c2f47e15 4502 dst = CALL_EXPR_ARG (exp, 0);
4503 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4504
389dd41b 4505 dst = stabilize_va_list_loc (loc, dst, 1);
4506 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4507
5f57a8b1 4508 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4509
4510 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4511 {
5f57a8b1 4512 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4513 TREE_SIDE_EFFECTS (t) = 1;
4514 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4515 }
4516 else
4517 {
11a61dea 4518 rtx dstb, srcb, size;
4519
4520 /* Evaluate to pointers. */
4521 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4522 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4523 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4524 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4525
85d654dd 4526 dstb = convert_memory_address (Pmode, dstb);
4527 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4528
11a61dea 4529 /* "Dereference" to BLKmode memories. */
4530 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4531 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4532 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4533 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4534 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4535 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4536
4537 /* Copy. */
0378dbdc 4538 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4539 }
4540
4541 return const0_rtx;
4542}
4543
53800dbe 4544/* Expand a call to one of the builtin functions __builtin_frame_address or
4545 __builtin_return_address. */
27d0c333 4546
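/* Illustrative examples (not part of the original source):

       void *ra = __builtin_return_address (0);
       void *fp = __builtin_frame_address (1);

   A count of 0 refers to the current frame.  A nonzero count walks up the
   stack and, in the expander below, triggers the -Wframe-address warning,
   because there is no guarantee that the requested frame exists or can be
   reached safely.  */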
53800dbe 4547static rtx
c2f47e15 4548expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4549{
53800dbe 4550 /* The argument must be a nonnegative integer constant.
4551 It counts the number of frames to scan up the stack.
5b252e95 4552 The value is either the frame pointer value or the return
4553 address saved in that frame. */
c2f47e15 4554 if (call_expr_nargs (exp) == 0)
53800dbe 4555 /* Warning about missing arg was already issued. */
4556 return const0_rtx;
e913b5cd 4557 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4558 {
5b252e95 4559 error ("invalid argument to %qD", fndecl);
53800dbe 4560 return const0_rtx;
4561 }
4562 else
4563 {
5b252e95 4564 /* Number of frames to scan up the stack. */
4565 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4566
4567 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 4568
4569 /* Some ports cannot access arbitrary stack frames. */
4570 if (tem == NULL)
4571 {
5b252e95 4572 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 4573 return const0_rtx;
4574 }
4575
5b252e95 4576 if (count)
4577 {
4578 /* Warn since no effort is made to ensure that any frame
4579 beyond the current one exists or can be safely reached. */
4580 warning (OPT_Wframe_address, "calling %qD with "
4581 "a nonzero argument is unsafe", fndecl);
4582 }
4583
53800dbe 4584 /* For __builtin_frame_address, return what we've got. */
4585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4586 return tem;
4587
8ad4c111 4588 if (!REG_P (tem)
53800dbe 4589 && ! CONSTANT_P (tem))
99182918 4590 tem = copy_addr_to_reg (tem);
53800dbe 4591 return tem;
4592 }
4593}
4594
990495a7 4595/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5be42b39 4596 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4597 is the same as for allocate_dynamic_stack_space. */
15c6cf6b 4598
53800dbe 4599static rtx
5be42b39 4600expand_builtin_alloca (tree exp, bool cannot_accumulate)
53800dbe 4601{
4602 rtx op0;
15c6cf6b 4603 rtx result;
581bf1c2 4604 bool valid_arglist;
4605 unsigned int align;
4606 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4607 == BUILT_IN_ALLOCA_WITH_ALIGN);
53800dbe 4608
581bf1c2 4609 valid_arglist
4610 = (alloca_with_align
4611 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4612 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4613
4614 if (!valid_arglist)
c2f47e15 4615 return NULL_RTX;
53800dbe 4616
4617 /* Compute the argument. */
c2f47e15 4618 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4619
581bf1c2 4620 /* Compute the alignment. */
4621 align = (alloca_with_align
f9ae6f95 4622 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4623 : BIGGEST_ALIGNMENT);
4624
53800dbe 4625 /* Allocate the desired space. */
581bf1c2 4626 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
85d654dd 4627 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4628
4629 return result;
53800dbe 4630}
4631
74bdbe96 4632/* Expand a call to bswap builtin in EXP.
4633 Return NULL_RTX if a normal call should be emitted rather than expanding the
4634 function in-line. If convenient, the result should be placed in TARGET.
4635 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4636
4637static rtx
3754d046 4638expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4639 rtx subtarget)
42791117 4640{
42791117 4641 tree arg;
4642 rtx op0;
4643
c2f47e15 4644 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4645 return NULL_RTX;
42791117 4646
c2f47e15 4647 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4648 op0 = expand_expr (arg,
4649 subtarget && GET_MODE (subtarget) == target_mode
4650 ? subtarget : NULL_RTX,
4651 target_mode, EXPAND_NORMAL);
4652 if (GET_MODE (op0) != target_mode)
4653 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4654
74bdbe96 4655 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4656
4657 gcc_assert (target);
4658
74bdbe96 4659 return convert_to_mode (target_mode, target, 1);
42791117 4660}
4661
c2f47e15 4662/* Expand a call to a unary builtin in EXP.
4663 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4664 function in-line. If convenient, the result should be placed in TARGET.
4665 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4666
53800dbe 4667static rtx
3754d046 4668expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4669 rtx subtarget, optab op_optab)
53800dbe 4670{
4671 rtx op0;
c2f47e15 4672
4673 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4674 return NULL_RTX;
53800dbe 4675
4676 /* Compute the argument. */
f97eea22 4677 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4678 (subtarget
4679 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4680 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4681 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4682 /* Compute op, into TARGET if possible.
53800dbe 4683 Set TARGET to wherever the result comes back. */
c2f47e15 4684 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4685 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4686 gcc_assert (target);
7d3f6cc7 4687
efb070c8 4688 return convert_to_mode (target_mode, target, 0);
53800dbe 4689}
89cfe6e5 4690
48e1416a 4691/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4692 as the builtin_expect semantics should already have been handled by
 4693 the tree branch prediction pass. */
89cfe6e5 4694
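/* Illustrative example (not part of the original source):

       if (__builtin_expect (ptr != NULL, 1))
         use (ptr);

   By the time we get here the probability hint has already been consumed by
   the tree-level branch predictor, so the expansion simply evaluates and
   returns the first argument.  */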
4695static rtx
c2f47e15 4696expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4697{
1e4adcfc 4698 tree arg;
89cfe6e5 4699
c2f47e15 4700 if (call_expr_nargs (exp) < 2)
89cfe6e5 4701 return const0_rtx;
c2f47e15 4702 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4703
c2f47e15 4704 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4705 /* When guessing was done, the hints should be already stripped away. */
07311427 4706 gcc_assert (!flag_guess_branch_prob
852f689e 4707 || optimize == 0 || seen_error ());
89cfe6e5 4708 return target;
4709}
689df48e 4710
fca0886c 4711/* Expand a call to __builtin_assume_aligned. We just return our first
4712 argument as the builtin_assume_aligned semantic should've been already
4713 executed by CCP. */
4714
4715static rtx
4716expand_builtin_assume_aligned (tree exp, rtx target)
4717{
4718 if (call_expr_nargs (exp) < 2)
4719 return const0_rtx;
4720 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4721 EXPAND_NORMAL);
4722 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4723 && (call_expr_nargs (exp) < 3
4724 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4725 return target;
4726}
4727
c22de3f0 4728void
aecda0d6 4729expand_builtin_trap (void)
a0ef1725 4730{
4db8dd0c 4731 if (targetm.have_trap ())
f73960eb 4732 {
4db8dd0c 4733 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 4734 /* For trap insns when not accumulating outgoing args force
4735 REG_ARGS_SIZE note to prevent crossjumping of calls with
4736 different args sizes. */
4737 if (!ACCUMULATE_OUTGOING_ARGS)
4738 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4739 }
a0ef1725 4740 else
a0ef1725 4741 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4742 emit_barrier ();
4743}
78a74442 4744
d2b48f0c 4745/* Expand a call to __builtin_unreachable. We do nothing except emit
4746 a barrier saying that control flow will not pass here.
4747
4748 It is the responsibility of the program being compiled to ensure
 4749 that control flow never reaches __builtin_unreachable. */
4750static void
4751expand_builtin_unreachable (void)
4752{
4753 emit_barrier ();
4754}
4755
c2f47e15 4756/* Expand EXP, a call to fabs, fabsf or fabsl.
4757 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 4758 the function inline. If convenient, the result should be placed
4759 in TARGET. SUBTARGET may be used as the target for computing
4760 the operand. */
4761
4762static rtx
c2f47e15 4763expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 4764{
3754d046 4765 machine_mode mode;
78a74442 4766 tree arg;
4767 rtx op0;
4768
c2f47e15 4769 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4770 return NULL_RTX;
78a74442 4771
c2f47e15 4772 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 4773 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 4774 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 4775 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 4776 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4777}
4778
c2f47e15 4779/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 4780 Return NULL if a normal call should be emitted rather than expanding the
4781 function inline. If convenient, the result should be placed in TARGET.
4782 SUBTARGET may be used as the target for computing the operand. */
4783
4784static rtx
c2f47e15 4785expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 4786{
4787 rtx op0, op1;
4788 tree arg;
4789
c2f47e15 4790 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4791 return NULL_RTX;
270436f3 4792
c2f47e15 4793 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 4794 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 4795
c2f47e15 4796 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 4797 op1 = expand_normal (arg);
270436f3 4798
4799 return expand_copysign (op0, op1, target);
4800}
4801
ac8fb6db 4802/* Expand a call to __builtin___clear_cache. */
4803
4804static rtx
32e17df0 4805expand_builtin___clear_cache (tree exp)
ac8fb6db 4806{
32e17df0 4807 if (!targetm.code_for_clear_cache)
4808 {
ac8fb6db 4809#ifdef CLEAR_INSN_CACHE
32e17df0 4810 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4811 does something. Just do the default expansion to a call to
4812 __clear_cache(). */
4813 return NULL_RTX;
ac8fb6db 4814#else
32e17df0 4815 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4816 does nothing. There is no need to call it. Do nothing. */
4817 return const0_rtx;
ac8fb6db 4818#endif /* CLEAR_INSN_CACHE */
32e17df0 4819 }
4820
ac8fb6db 4821 /* We have a "clear_cache" insn, and it will handle everything. */
4822 tree begin, end;
4823 rtx begin_rtx, end_rtx;
ac8fb6db 4824
4825 /* We must not expand to a library call. If we did, any
4826 fallback library function in libgcc that might contain a call to
4827 __builtin___clear_cache() would recurse infinitely. */
4828 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4829 {
4830 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4831 return const0_rtx;
4832 }
4833
32e17df0 4834 if (targetm.have_clear_cache ())
ac8fb6db 4835 {
8786db1e 4836 struct expand_operand ops[2];
ac8fb6db 4837
4838 begin = CALL_EXPR_ARG (exp, 0);
4839 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4840
4841 end = CALL_EXPR_ARG (exp, 1);
4842 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4843
8786db1e 4844 create_address_operand (&ops[0], begin_rtx);
4845 create_address_operand (&ops[1], end_rtx);
32e17df0 4846 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 4847 return const0_rtx;
ac8fb6db 4848 }
4849 return const0_rtx;
ac8fb6db 4850}
4851
4ee9c684 4852/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4853
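/* Illustrative arithmetic (not part of the original source): with an
   alignment of A bytes (a power of two), the rounding below computes

       rounded = (tramp + A - 1) & -A

   e.g. tramp == 0x1003 and A == 16 gives (0x1003 + 15) & ~15 == 0x1010.  */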
4854static rtx
4855round_trampoline_addr (rtx tramp)
4856{
4857 rtx temp, addend, mask;
4858
4859 /* If we don't need too much alignment, we'll have been guaranteed
4860 proper alignment by get_trampoline_type. */
4861 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4862 return tramp;
4863
4864 /* Round address up to desired boundary. */
4865 temp = gen_reg_rtx (Pmode);
0359f9f5 4866 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4867 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 4868
4869 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4870 temp, 0, OPTAB_LIB_WIDEN);
4871 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4872 temp, 0, OPTAB_LIB_WIDEN);
4873
4874 return tramp;
4875}
4876
4877static rtx
c307f106 4878expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 4879{
4880 tree t_tramp, t_func, t_chain;
82c7907c 4881 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 4882
c2f47e15 4883 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 4884 POINTER_TYPE, VOID_TYPE))
4885 return NULL_RTX;
4886
c2f47e15 4887 t_tramp = CALL_EXPR_ARG (exp, 0);
4888 t_func = CALL_EXPR_ARG (exp, 1);
4889 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 4890
8ec3c5c2 4891 r_tramp = expand_normal (t_tramp);
82c7907c 4892 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4893 MEM_NOTRAP_P (m_tramp) = 1;
4894
c307f106 4895 /* If ONSTACK, the TRAMP argument should be the address of a field
4896 within the local function's FRAME decl. Either way, let's see if
4897 we can fill in the MEM_ATTRs for this memory. */
82c7907c 4898 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 4899 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 4900
c307f106 4901 /* Creator of a heap trampoline is responsible for making sure the
4902 address is aligned to at least STACK_BOUNDARY. Normally malloc
4903 will ensure this anyhow. */
82c7907c 4904 tmp = round_trampoline_addr (r_tramp);
4905 if (tmp != r_tramp)
4906 {
4907 m_tramp = change_address (m_tramp, BLKmode, tmp);
4908 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 4909 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 4910 }
4911
4912 /* The FUNC argument should be the address of the nested function.
4913 Extract the actual function decl to pass to the hook. */
4914 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4915 t_func = TREE_OPERAND (t_func, 0);
4916 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4917
8ec3c5c2 4918 r_chain = expand_normal (t_chain);
4ee9c684 4919
4920 /* Generate insns to initialize the trampoline. */
82c7907c 4921 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 4922
c307f106 4923 if (onstack)
4924 {
4925 trampolines_created = 1;
8bc8a8f4 4926
c307f106 4927 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4928 "trampoline generated for nested function %qD", t_func);
4929 }
8bc8a8f4 4930
4ee9c684 4931 return const0_rtx;
4932}
4933
4934static rtx
c2f47e15 4935expand_builtin_adjust_trampoline (tree exp)
4ee9c684 4936{
4937 rtx tramp;
4938
c2f47e15 4939 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 4940 return NULL_RTX;
4941
c2f47e15 4942 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 4943 tramp = round_trampoline_addr (tramp);
82c7907c 4944 if (targetm.calls.trampoline_adjust_address)
4945 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 4946
4947 return tramp;
4948}
4949
93f564d6 4950/* Expand the call EXP to the built-in signbit, signbitf or signbitl
4951 function. The function first checks whether the back end provides
4952 an insn to implement signbit for the respective mode. If not, it
4953 checks whether the floating point format of the value is such that
10902624 4954 the sign bit can be extracted. If that is not the case, error out.
4955 EXP is the expression that is a call to the builtin function; if
4956 convenient, the result should be placed in TARGET. */
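/* Illustrative sketch (not part of the original source): for IEEE single
   precision the sign is bit 31, so when no signbit pattern exists the value
   is reinterpreted as a 32-bit integer and masked with 0x80000000; any
   nonzero result counts as "sign set", since signbit need not return
   exactly 1.  Wider modes first select the word containing the sign bit
   and, if the bit lies outside the result mode's lowpart, shift it down
   before masking.  */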
27f261ef 4957static rtx
4958expand_builtin_signbit (tree exp, rtx target)
4959{
4960 const struct real_format *fmt;
3754d046 4961 machine_mode fmode, imode, rmode;
c2f47e15 4962 tree arg;
ca4f1f5b 4963 int word, bitpos;
27eda240 4964 enum insn_code icode;
27f261ef 4965 rtx temp;
389dd41b 4966 location_t loc = EXPR_LOCATION (exp);
27f261ef 4967
c2f47e15 4968 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4969 return NULL_RTX;
27f261ef 4970
c2f47e15 4971 arg = CALL_EXPR_ARG (exp, 0);
27f261ef 4972 fmode = TYPE_MODE (TREE_TYPE (arg));
4973 rmode = TYPE_MODE (TREE_TYPE (exp));
4974 fmt = REAL_MODE_FORMAT (fmode);
4975
93f564d6 4976 arg = builtin_save_expr (arg);
4977
4978 /* Expand the argument yielding a RTX expression. */
4979 temp = expand_normal (arg);
4980
4981 /* Check if the back end provides an insn that handles signbit for the
4982 argument's mode. */
d6bf3b14 4983 icode = optab_handler (signbit_optab, fmode);
27eda240 4984 if (icode != CODE_FOR_nothing)
93f564d6 4985 {
1e0c0b35 4986 rtx_insn *last = get_last_insn ();
93f564d6 4987 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 4988 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4989 return target;
4990 delete_insns_since (last);
93f564d6 4991 }
4992
27f261ef 4993 /* For floating point formats without a sign bit, implement signbit
4994 as "ARG < 0.0". */
8d564692 4995 bitpos = fmt->signbit_ro;
ca4f1f5b 4996 if (bitpos < 0)
27f261ef 4997 {
4998 /* But we can't do this if the format supports signed zero. */
10902624 4999 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
27f261ef 5000
389dd41b 5001 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5002 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5003 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5004 }
5005
ca4f1f5b 5006 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5007 {
ca4f1f5b 5008 imode = int_mode_for_mode (fmode);
10902624 5009 gcc_assert (imode != BLKmode);
ca4f1f5b 5010 temp = gen_lowpart (imode, temp);
24fd4260 5011 }
5012 else
5013 {
ca4f1f5b 5014 imode = word_mode;
5015 /* Handle targets with different FP word orders. */
5016 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5017 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5018 else
a0c938f0 5019 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5020 temp = operand_subword_force (temp, word, fmode);
5021 bitpos = bitpos % BITS_PER_WORD;
5022 }
5023
44b0f1d0 5024 /* Force the intermediate word_mode (or narrower) result into a
5025 register. This avoids attempting to create paradoxical SUBREGs
5026 of floating point modes below. */
5027 temp = force_reg (imode, temp);
5028
ca4f1f5b 5029 /* If the bitpos is within the "result mode" lowpart, the operation
 5030 can be implemented with a single bitwise AND. Otherwise, we need
5031 a right shift and an AND. */
5032
5033 if (bitpos < GET_MODE_BITSIZE (rmode))
5034 {
796b6678 5035 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5036
4a46f016 5037 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5038 temp = gen_lowpart (rmode, temp);
24fd4260 5039 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5040 immed_wide_int_const (mask, rmode),
ca4f1f5b 5041 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5042 }
ca4f1f5b 5043 else
5044 {
5045 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5046 significant bit, then truncate the result to the desired mode
ca4f1f5b 5047 and mask just this bit. */
f5ff0b21 5048 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5049 temp = gen_lowpart (rmode, temp);
5050 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5051 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5052 }
5053
27f261ef 5054 return temp;
5055}
73673831 5056
5057/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5058 call. EXP is the call. FN is the
73673831 5059 identifier of the actual function. IGNORE is nonzero if the
5060 value is to be ignored. */
5061
5062static rtx
c2f47e15 5063expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5064{
5065 tree id, decl;
5066 tree call;
5067
5068 /* If we are not profiling, just call the function. */
5069 if (!profile_arc_flag)
5070 return NULL_RTX;
5071
5072 /* Otherwise call the wrapper. This should be equivalent for the rest of
 5073 the compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5074 code necessary for keeping the profiling sane. */
73673831 5075
5076 switch (DECL_FUNCTION_CODE (fn))
5077 {
5078 case BUILT_IN_FORK:
5079 id = get_identifier ("__gcov_fork");
5080 break;
5081
5082 case BUILT_IN_EXECL:
5083 id = get_identifier ("__gcov_execl");
5084 break;
5085
5086 case BUILT_IN_EXECV:
5087 id = get_identifier ("__gcov_execv");
5088 break;
5089
5090 case BUILT_IN_EXECLP:
5091 id = get_identifier ("__gcov_execlp");
5092 break;
5093
5094 case BUILT_IN_EXECLE:
5095 id = get_identifier ("__gcov_execle");
5096 break;
5097
5098 case BUILT_IN_EXECVP:
5099 id = get_identifier ("__gcov_execvp");
5100 break;
5101
5102 case BUILT_IN_EXECVE:
5103 id = get_identifier ("__gcov_execve");
5104 break;
5105
5106 default:
64db345d 5107 gcc_unreachable ();
73673831 5108 }
5109
e60a6f7b 5110 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5111 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5112 DECL_EXTERNAL (decl) = 1;
5113 TREE_PUBLIC (decl) = 1;
5114 DECL_ARTIFICIAL (decl) = 1;
5115 TREE_NOTHROW (decl) = 1;
e82d310b 5116 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5117 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5118 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5119 return expand_call (call, target, ignore);
c2f47e15 5120 }
48e1416a 5121
b6a5fc45 5122
5123\f
3e272de8 5124/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5125 the pointer in these functions is void*, the tree optimizers may remove
5126 casts. The mode computed in expand_builtin isn't reliable either, due
5127 to __sync_bool_compare_and_swap.
5128
5129 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5130 group of builtins. This gives us log2 of the mode size. */
5131
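/* Worked example (not part of the original source): for
   __sync_fetch_and_add_4 the fcode difference from the _1 variant is 2,
   so the size is BITS_PER_UNIT << 2 == 32 bits and this returns the
   32-bit integer mode (SImode on typical targets).  */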
3754d046 5132static inline machine_mode
3e272de8 5133get_builtin_sync_mode (int fcode_diff)
5134{
ad3a13b5 5135 /* The size is not negotiable, so ask not to get BLKmode in return
5136 if the target indicates that a smaller size would be better. */
5137 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 5138}
5139
041e0215 5140/* Expand the memory expression LOC and return the appropriate memory operand
5141 for the builtin_sync operations. */
5142
5143static rtx
3754d046 5144get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5145{
5146 rtx addr, mem;
5147
7f4d56ad 5148 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5149 addr = convert_memory_address (Pmode, addr);
041e0215 5150
5151 /* Note that we explicitly do not want any alias information for this
5152 memory, so that we kill all other live memories. Otherwise we don't
5153 satisfy the full barrier semantics of the intrinsic. */
5154 mem = validize_mem (gen_rtx_MEM (mode, addr));
5155
153c3b50 5156 /* The alignment needs to be at least that of the mode. */
5157 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5158 get_pointer_alignment (loc)));
c94cfd1c 5159 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5160 MEM_VOLATILE_P (mem) = 1;
5161
5162 return mem;
5163}
5164
1cd6e20d 5165/* Make sure an argument is in the right mode.
5166 EXP is the tree argument.
5167 MODE is the mode it should be in. */
5168
5169static rtx
3754d046 5170expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5171{
5172 rtx val;
3754d046 5173 machine_mode old_mode;
1cd6e20d 5174
5175 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5176 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5177 of CONST_INTs, where we know the old_mode only from the call argument. */
5178
5179 old_mode = GET_MODE (val);
5180 if (old_mode == VOIDmode)
5181 old_mode = TYPE_MODE (TREE_TYPE (exp));
5182 val = convert_modes (mode, old_mode, val, 1);
5183 return val;
5184}
5185
5186
b6a5fc45 5187/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5188 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5189 that corresponds to the arithmetic or logical operation from the name;
5190 an exception here is that NOT actually means NAND. TARGET is an optional
5191 place for us to store the results; AFTER is true if this is the
1cd6e20d 5192 fetch_and_xxx form. */
b6a5fc45 5193
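/* Illustrative examples (not part of the original source):

       old = __sync_fetch_and_add (&x, n);    AFTER == false, returns old x
       new = __sync_add_and_fetch (&x, n);    AFTER == true, returns new x

   and, per the GCC 4.4 semantics warned about below,
   __sync_fetch_and_nand (&x, n) updates x to ~(x & n).  */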
5194static rtx
3754d046 5195expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5196 enum rtx_code code, bool after,
1cd6e20d 5197 rtx target)
b6a5fc45 5198{
041e0215 5199 rtx val, mem;
e60a6f7b 5200 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5201
cf73e559 5202 if (code == NOT && warn_sync_nand)
5203 {
5204 tree fndecl = get_callee_fndecl (exp);
5205 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5206
5207 static bool warned_f_a_n, warned_n_a_f;
5208
5209 switch (fcode)
5210 {
2797f13a 5211 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5212 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5213 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5214 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5215 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5216 if (warned_f_a_n)
5217 break;
5218
b9a16870 5219 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5220 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5221 warned_f_a_n = true;
5222 break;
5223
2797f13a 5224 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5225 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5226 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5227 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5228 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5229 if (warned_n_a_f)
5230 break;
5231
b9a16870 5232 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5233 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5234 warned_n_a_f = true;
5235 break;
5236
5237 default:
5238 gcc_unreachable ();
5239 }
5240 }
5241
b6a5fc45 5242 /* Expand the operands. */
c2f47e15 5243 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5244 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5245
a372f7ca 5246 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 5247 after);
b6a5fc45 5248}
5249
5250/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5251 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5252 true if this is the boolean form. TARGET is a place for us to store the
5253 results; this is NOT optional if IS_BOOL is true. */
5254
5255static rtx
3754d046 5256expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5257 bool is_bool, rtx target)
b6a5fc45 5258{
041e0215 5259 rtx old_val, new_val, mem;
ba885f6a 5260 rtx *pbool, *poval;
b6a5fc45 5261
5262 /* Expand the operands. */
c2f47e15 5263 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5264 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5265 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5266
ba885f6a 5267 pbool = poval = NULL;
5268 if (target != const0_rtx)
5269 {
5270 if (is_bool)
5271 pbool = &target;
5272 else
5273 poval = &target;
5274 }
5275 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 5276 false, MEMMODEL_SYNC_SEQ_CST,
5277 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 5278 return NULL_RTX;
c2f47e15 5279
1cd6e20d 5280 return target;
b6a5fc45 5281}
5282
5283/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5284 general form is actually an atomic exchange, and some targets only
5285 support a reduced form with the second argument being a constant 1.
48e1416a 5286 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5287 the results. */
b6a5fc45 5288
5289static rtx
3754d046 5290expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5291 rtx target)
b6a5fc45 5292{
041e0215 5293 rtx val, mem;
b6a5fc45 5294
5295 /* Expand the operands. */
c2f47e15 5296 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5297 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5298
7821cde1 5299 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5300}
5301
5302/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5303
5304static void
3754d046 5305expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5306{
5307 rtx mem;
5308
5309 /* Expand the operands. */
5310 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5311
a372f7ca 5312 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5313}
5314
5315/* Given an integer representing an ``enum memmodel'', verify its
5316 correctness and return the memory model enum. */
5317
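/* Illustrative example (not part of the original source): in
   __atomic_load_n (p, __ATOMIC_ACQUIRE) the second argument is the integer
   checked here.  A non-constant model argument is conservatively treated as
   __ATOMIC_SEQ_CST, and (per PR 59448, handled below) __ATOMIC_CONSUME is
   promoted to __ATOMIC_ACQUIRE.  */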
5318static enum memmodel
5319get_memmodel (tree exp)
5320{
5321 rtx op;
7f738025 5322 unsigned HOST_WIDE_INT val;
1cd6e20d 5323
5324 /* If the parameter is not a constant, it's a run time value so we'll just
5325 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5326 if (TREE_CODE (exp) != INTEGER_CST)
5327 return MEMMODEL_SEQ_CST;
5328
5329 op = expand_normal (exp);
7f738025 5330
5331 val = INTVAL (op);
5332 if (targetm.memmodel_check)
5333 val = targetm.memmodel_check (val);
5334 else if (val & ~MEMMODEL_MASK)
5335 {
5336 warning (OPT_Winvalid_memory_model,
5337 "Unknown architecture specifier in memory model to builtin.");
5338 return MEMMODEL_SEQ_CST;
5339 }
5340
a372f7ca 5341 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5342 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5343 {
5344 warning (OPT_Winvalid_memory_model,
5345 "invalid memory model argument to builtin");
5346 return MEMMODEL_SEQ_CST;
5347 }
7f738025 5348
3070f133 5349 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5350 be conservative and promote consume to acquire. */
5351 if (val == MEMMODEL_CONSUME)
5352 val = MEMMODEL_ACQUIRE;
5353
7f738025 5354 return (enum memmodel) val;
1cd6e20d 5355}
5356
5357/* Expand the __atomic_exchange intrinsic:
5358 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5359 EXP is the CALL_EXPR.
5360 TARGET is an optional place for us to store the results. */
5361
5362static rtx
3754d046 5363expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5364{
5365 rtx val, mem;
5366 enum memmodel model;
5367
5368 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5369
5370 if (!flag_inline_atomics)
5371 return NULL_RTX;
5372
5373 /* Expand the operands. */
5374 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5375 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5376
7821cde1 5377 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5378}
5379
5380/* Expand the __atomic_compare_exchange intrinsic:
5381 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5382 TYPE desired, BOOL weak,
5383 enum memmodel success,
5384 enum memmodel failure)
5385 EXP is the CALL_EXPR.
5386 TARGET is an optional place for us to store the results. */
5387
5388static rtx
3754d046 5389expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5390 rtx target)
5391{
1e0c0b35 5392 rtx expect, desired, mem, oldval;
5393 rtx_code_label *label;
1cd6e20d 5394 enum memmodel success, failure;
5395 tree weak;
5396 bool is_weak;
5397
5398 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5399 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5400
086f4e33 5401 if (failure > success)
5402 {
5403 warning (OPT_Winvalid_memory_model,
5404 "failure memory model cannot be stronger than success memory "
5405 "model for %<__atomic_compare_exchange%>");
5406 success = MEMMODEL_SEQ_CST;
5407 }
5408
a372f7ca 5409 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5410 {
086f4e33 5411 warning (OPT_Winvalid_memory_model,
5412 "invalid failure memory model for "
5413 "%<__atomic_compare_exchange%>");
5414 failure = MEMMODEL_SEQ_CST;
5415 success = MEMMODEL_SEQ_CST;
1cd6e20d 5416 }
5417
086f4e33 5418
1cd6e20d 5419 if (!flag_inline_atomics)
5420 return NULL_RTX;
5421
5422 /* Expand the operands. */
5423 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5424
5425 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5426 expect = convert_memory_address (Pmode, expect);
c401b131 5427 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5428 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5429
5430 weak = CALL_EXPR_ARG (exp, 3);
5431 is_weak = false;
e913b5cd 5432 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5433 is_weak = true;
5434
d86e3752 5435 if (target == const0_rtx)
5436 target = NULL;
d86e3752 5437
3c29a9ea 5438 /* Lest the rtl backend create a race condition with an improper store
5439 to memory, always create a new pseudo for OLDVAL. */
5440 oldval = NULL;
5441
5442 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5443 is_weak, success, failure))
1cd6e20d 5444 return NULL_RTX;
5445
d86e3752 5446 /* Conditionally store back to EXPECT, lest we create a race condition
5447 with an improper store to memory. */
5448 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5449 the normal case where EXPECT is totally private, i.e. a register. At
5450 which point the store can be unconditional. */
5451 label = gen_label_rtx ();
62589f76 5452 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5453 GET_MODE (target), 1, label);
d86e3752 5454 emit_move_insn (expect, oldval);
5455 emit_label (label);
c401b131 5456
1cd6e20d 5457 return target;
5458}
5459
5460/* Expand the __atomic_load intrinsic:
5461 TYPE __atomic_load (TYPE *object, enum memmodel)
5462 EXP is the CALL_EXPR.
5463 TARGET is an optional place for us to store the results. */
5464
5465static rtx
3754d046 5466expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5467{
5468 rtx mem;
5469 enum memmodel model;
5470
5471 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5472 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5473 {
086f4e33 5474 warning (OPT_Winvalid_memory_model,
5475 "invalid memory model for %<__atomic_load%>");
5476 model = MEMMODEL_SEQ_CST;
1cd6e20d 5477 }
5478
5479 if (!flag_inline_atomics)
5480 return NULL_RTX;
5481
5482 /* Expand the operand. */
5483 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5484
5485 return expand_atomic_load (target, mem, model);
5486}
5487
5488
5489/* Expand the __atomic_store intrinsic:
5490 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
 5491 EXP is the CALL_EXPR.
 5492 The stored value is not returned, so there is no TARGET. */
5493
5494static rtx
3754d046 5495expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5496{
5497 rtx mem, val;
5498 enum memmodel model;
5499
5500 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 5501 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5502 || is_mm_release (model)))
1cd6e20d 5503 {
086f4e33 5504 warning (OPT_Winvalid_memory_model,
5505 "invalid memory model for %<__atomic_store%>");
5506 model = MEMMODEL_SEQ_CST;
1cd6e20d 5507 }
5508
5509 if (!flag_inline_atomics)
5510 return NULL_RTX;
5511
5512 /* Expand the operands. */
5513 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5514 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5515
8808bf16 5516 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5517}
5518
5519/* Expand the __atomic_fetch_XXX intrinsic:
5520 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5521 EXP is the CALL_EXPR.
5522 TARGET is an optional place for us to store the results.
5523 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5524 FETCH_AFTER is true if returning the result of the operation.
5525 FETCH_AFTER is false if returning the value before the operation.
5526 IGNORE is true if the result is not used.
5527 EXT_CALL is the correct builtin for an external call if this cannot be
5528 resolved to an instruction sequence. */
5529
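/* Illustrative example (not part of the original source): if
   __atomic_add_fetch (&x, n, model) cannot be expanded inline, it falls
   back to a library call to the EXT_CALL builtin (typically
   __atomic_fetch_add), which returns the value before the addition; the
   trailing correction below then computes ret + n.  For the NAND forms the
   correction is ~(ret & n).  */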
5530static rtx
3754d046 5531expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 5532 enum rtx_code code, bool fetch_after,
5533 bool ignore, enum built_in_function ext_call)
5534{
5535 rtx val, mem, ret;
5536 enum memmodel model;
5537 tree fndecl;
5538 tree addr;
5539
5540 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5541
5542 /* Expand the operands. */
5543 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5544 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5545
5546 /* Only try generating instructions if inlining is turned on. */
5547 if (flag_inline_atomics)
5548 {
5549 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5550 if (ret)
5551 return ret;
5552 }
5553
5554 /* Return if a different routine isn't needed for the library call. */
5555 if (ext_call == BUILT_IN_NONE)
5556 return NULL_RTX;
5557
5558 /* Change the call to the specified function. */
5559 fndecl = get_callee_fndecl (exp);
5560 addr = CALL_EXPR_FN (exp);
5561 STRIP_NOPS (addr);
5562
5563 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 5564 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 5565
5566 /* Expand the call here so we can emit trailing code. */
5567 ret = expand_call (exp, target, ignore);
5568
5569 /* Replace the original function just in case it matters. */
5570 TREE_OPERAND (addr, 0) = fndecl;
5571
5572 /* Then issue the arithmetic correction to return the right result. */
5573 if (!ignore)
c449f851 5574 {
5575 if (code == NOT)
5576 {
5577 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5578 OPTAB_LIB_WIDEN);
5579 ret = expand_simple_unop (mode, NOT, ret, target, true);
5580 }
5581 else
5582 ret = expand_simple_binop (mode, code, ret, val, target, true,
5583 OPTAB_LIB_WIDEN);
5584 }
1cd6e20d 5585 return ret;
5586}
5587
10b744a3 5588/* Expand an atomic clear operation.
 5589 void __atomic_clear (BOOL *obj, enum memmodel)
5590 EXP is the call expression. */
5591
5592static rtx
5593expand_builtin_atomic_clear (tree exp)
5594{
3754d046 5595 machine_mode mode;
10b744a3 5596 rtx mem, ret;
5597 enum memmodel model;
5598
5599 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5600 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5601 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5602
a372f7ca 5603 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 5604 {
086f4e33 5605 warning (OPT_Winvalid_memory_model,
5606 "invalid memory model for %<__atomic_store%>");
5607 model = MEMMODEL_SEQ_CST;
10b744a3 5608 }
5609
5610 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5611 Failing that, a store is issued by __atomic_store. The only way this can
5612 fail is if the bool type is larger than a word size. Unlikely, but
5613 handle it anyway for completeness. Assume a single threaded model since
5614 there is no atomic support in this case, and no barriers are required. */
5615 ret = expand_atomic_store (mem, const0_rtx, model, true);
5616 if (!ret)
5617 emit_move_insn (mem, const0_rtx);
5618 return const0_rtx;
5619}
5620
5621/* Expand an atomic test_and_set operation.
 5622 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5623 EXP is the call expression. */
5624
5625static rtx
7821cde1 5626expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 5627{
7821cde1 5628 rtx mem;
10b744a3 5629 enum memmodel model;
3754d046 5630 machine_mode mode;
10b744a3 5631
5632 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5633 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5634 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5635
7821cde1 5636 return expand_atomic_test_and_set (target, mem, model);
10b744a3 5637}
5638
5639
1cd6e20d 5640/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5641 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5642
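/* Illustrative example (not part of the original source):

       __atomic_always_lock_free (sizeof (int), 0)

   A null object pointer means "assume the typical alignment for an object
   of that size", i.e. the alignment of the corresponding integer mode; the
   call folds to true only when the target has a compare-and-swap pattern
   for that mode, as checked below.  */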
5643static tree
5644fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5645{
5646 int size;
3754d046 5647 machine_mode mode;
1cd6e20d 5648 unsigned int mode_align, type_align;
5649
5650 if (TREE_CODE (arg0) != INTEGER_CST)
5651 return NULL_TREE;
b6a5fc45 5652
1cd6e20d 5653 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5654 mode = mode_for_size (size, MODE_INT, 0);
5655 mode_align = GET_MODE_ALIGNMENT (mode);
5656
5657 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5658 type_align = mode_align;
5659 else
5660 {
5661 tree ttype = TREE_TYPE (arg1);
5662
5663 /* This function is usually invoked and folded immediately by the front
5664 end before anything else has a chance to look at it. The pointer
5665 parameter at this point is usually cast to a void *, so check for that
5666 and look past the cast. */
d09ef31a 5667 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
1cd6e20d 5668 && VOID_TYPE_P (TREE_TYPE (ttype)))
5669 arg1 = TREE_OPERAND (arg1, 0);
5670
5671 ttype = TREE_TYPE (arg1);
5672 gcc_assert (POINTER_TYPE_P (ttype));
5673
5674 /* Get the underlying type of the object. */
5675 ttype = TREE_TYPE (ttype);
5676 type_align = TYPE_ALIGN (ttype);
5677 }
5678
47ae02b7 5679 /* If the object has smaller alignment, the lock free routines cannot
1cd6e20d 5680 be used. */
5681 if (type_align < mode_align)
06308d2a 5682 return boolean_false_node;
1cd6e20d 5683
5684 /* Check if a compare_and_swap pattern exists for the mode which represents
5685 the required size. The pattern is not allowed to fail, so the existence
5686 of the pattern indicates support is present. */
29139cdc 5687 if (can_compare_and_swap_p (mode, true))
06308d2a 5688 return boolean_true_node;
1cd6e20d 5689 else
06308d2a 5690 return boolean_false_node;
1cd6e20d 5691}
5692
5693/* Return true if the parameters to call EXP represent an object which will
5694 always generate lock free instructions. The first argument represents the
5695 size of the object, and the second parameter is a pointer to the object
5696 itself. If NULL is passed for the object, then the result is based on
5697 typical alignment for an object of the specified size. Otherwise return
5698 false. */
5699
5700static rtx
5701expand_builtin_atomic_always_lock_free (tree exp)
5702{
5703 tree size;
5704 tree arg0 = CALL_EXPR_ARG (exp, 0);
5705 tree arg1 = CALL_EXPR_ARG (exp, 1);
5706
5707 if (TREE_CODE (arg0) != INTEGER_CST)
5708 {
5709 error ("non-constant argument 1 to __atomic_always_lock_free");
5710 return const0_rtx;
5711 }
5712
5713 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 5714 if (size == boolean_true_node)
1cd6e20d 5715 return const1_rtx;
5716 return const0_rtx;
5717}
5718
5719/* Return a one or zero if it can be determined that object ARG1 of size ARG
5720 is lock free on this architecture. */
5721
5722static tree
5723fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5724{
5725 if (!flag_inline_atomics)
5726 return NULL_TREE;
5727
5728 /* If it isn't always lock free, don't generate a result. */
06308d2a 5729 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5730 return boolean_true_node;
1cd6e20d 5731
5732 return NULL_TREE;
5733}
5734
5735/* Return true if the parameters to call EXP represent an object which will
5736 always generate lock free instructions. The first argument represents the
5737 size of the object, and the second parameter is a pointer to the object
5738 itself. If NULL is passed for the object, then the result is based on
5739 typical alignment for an object of the specified size. Otherwise return
 5740 NULL. */
5741
5742static rtx
5743expand_builtin_atomic_is_lock_free (tree exp)
5744{
5745 tree size;
5746 tree arg0 = CALL_EXPR_ARG (exp, 0);
5747 tree arg1 = CALL_EXPR_ARG (exp, 1);
5748
5749 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5750 {
5751 error ("non-integer argument 1 to __atomic_is_lock_free");
5752 return NULL_RTX;
5753 }
5754
5755 if (!flag_inline_atomics)
5756 return NULL_RTX;
5757
5758 /* If the value is known at compile time, return the RTX for it. */
5759 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 5760 if (size == boolean_true_node)
1cd6e20d 5761 return const1_rtx;
5762
5763 return NULL_RTX;
5764}
5765
1cd6e20d 5766/* Expand the __atomic_thread_fence intrinsic:
5767 void __atomic_thread_fence (enum memmodel)
5768 EXP is the CALL_EXPR. */
5769
5770static void
5771expand_builtin_atomic_thread_fence (tree exp)
5772{
fe54c06b 5773 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5774 expand_mem_thread_fence (model);
1cd6e20d 5775}
5776
5777/* Expand the __atomic_signal_fence intrinsic:
5778 void __atomic_signal_fence (enum memmodel)
5779 EXP is the CALL_EXPR. */
5780
5781static void
5782expand_builtin_atomic_signal_fence (tree exp)
5783{
fe54c06b 5784 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5785 expand_mem_signal_fence (model);
b6a5fc45 5786}
5787
5788/* Expand the __sync_synchronize intrinsic. */
5789
5790static void
2797f13a 5791expand_builtin_sync_synchronize (void)
b6a5fc45 5792{
a372f7ca 5793 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 5794}
5795
badaa04c 5796static rtx
5797expand_builtin_thread_pointer (tree exp, rtx target)
5798{
5799 enum insn_code icode;
5800 if (!validate_arglist (exp, VOID_TYPE))
5801 return const0_rtx;
5802 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5803 if (icode != CODE_FOR_nothing)
5804 {
5805 struct expand_operand op;
3ed779c3 5806 /* If the target is not suitable, create a new one. */
5807 if (target == NULL_RTX
5808 || !REG_P (target)
5809 || GET_MODE (target) != Pmode)
badaa04c 5810 target = gen_reg_rtx (Pmode);
5811 create_output_operand (&op, target, Pmode);
5812 expand_insn (icode, 1, &op);
5813 return target;
5814 }
5815 error ("__builtin_thread_pointer is not supported on this target");
5816 return const0_rtx;
5817}
5818
5819static void
5820expand_builtin_set_thread_pointer (tree exp)
5821{
5822 enum insn_code icode;
5823 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5824 return;
5825 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5826 if (icode != CODE_FOR_nothing)
5827 {
5828 struct expand_operand op;
5829 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5830 Pmode, EXPAND_NORMAL);
6f343c10 5831 create_input_operand (&op, val, Pmode);
badaa04c 5832 expand_insn (icode, 1, &op);
5833 return;
5834 }
5835 error ("__builtin_set_thread_pointer is not supported on this target");
5836}
5837
53800dbe 5838\f
0e80b01d 5839/* Emit code to restore the current value of stack. */
5840
5841static void
5842expand_stack_restore (tree var)
5843{
1e0c0b35 5844 rtx_insn *prev;
5845 rtx sa = expand_normal (var);
0e80b01d 5846
5847 sa = convert_memory_address (Pmode, sa);
5848
5849 prev = get_last_insn ();
5850 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 5851
5852 record_new_stack_level ();
5853
0e80b01d 5854 fixup_args_size_notes (prev, get_last_insn (), 0);
5855}
5856
0e80b01d 5857/* Emit code to save the current value of stack. */
5858
5859static rtx
5860expand_stack_save (void)
5861{
5862 rtx ret = NULL_RTX;
5863
0e80b01d 5864 emit_stack_save (SAVE_BLOCK, &ret);
5865 return ret;
5866}
5867
ca4c3545 5868
5869/* Expand OpenACC acc_on_device.
5870
5871 This has to happen late (that is, not in early folding; expand_builtin_*,
5872 rather than fold_builtin_*), as we have to act differently for host and
5873 acceleration device (ACCEL_COMPILER conditional). */
5874
5875static rtx
f212338e 5876expand_builtin_acc_on_device (tree exp, rtx target)
ca4c3545 5877{
5878 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5879 return NULL_RTX;
5880
5881 tree arg = CALL_EXPR_ARG (exp, 0);
5882
5883 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5884 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5885 rtx v = expand_normal (arg), v1, v2;
f212338e 5886#ifdef ACCEL_COMPILER
ca4c3545 5887 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5888 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
f212338e 5889#else
5890 v1 = GEN_INT (GOMP_DEVICE_NONE);
5891 v2 = GEN_INT (GOMP_DEVICE_HOST);
5892#endif
ca4c3545 5893 machine_mode target_mode = TYPE_MODE (integer_type_node);
15b4214c 5894 if (!target || !register_operand (target, target_mode))
ca4c3545 5895 target = gen_reg_rtx (target_mode);
5896 emit_move_insn (target, const1_rtx);
5897 rtx_code_label *done_label = gen_label_rtx ();
5898 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5899 NULL, done_label, PROB_EVEN);
ca4c3545 5900 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5901 NULL, done_label, PROB_EVEN);
ca4c3545 5902 emit_move_insn (target, const0_rtx);
5903 emit_label (done_label);
5904
5905 return target;
5906}
5907
5908
53800dbe 5909/* Expand an expression EXP that calls a built-in function,
5910 with result going to TARGET if that's convenient
5911 (and in mode MODE if that's convenient).
5912 SUBTARGET may be used as the target for computing one of EXP's operands.
5913 IGNORE is nonzero if the value is to be ignored. */
5914
5915rtx
3754d046 5916expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 5917 int ignore)
53800dbe 5918{
c6e6ecb1 5919 tree fndecl = get_callee_fndecl (exp);
53800dbe 5920 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 5921 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 5922 int flags;
53800dbe 5923
4e2f4ed5 5924 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5925 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5926
f9acf11a 5927 /* When ASan is enabled, we don't want to expand some memory/string
5928 builtins and rely on libsanitizer's hooks. This allows us to avoid
5929 redundant checks and be sure, that possible overflow will be detected
5930 by ASan. */
5931
5932 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5933 return expand_call (exp, target, ignore);
5934
53800dbe 5935 /* When not optimizing, generate calls to library functions for a certain
5936 set of builtins. */
cd9ff771 5937 if (!optimize
b6a5fc45 5938 && !called_as_built_in (fndecl)
73037a1e 5939 && fcode != BUILT_IN_FORK
5940 && fcode != BUILT_IN_EXECL
5941 && fcode != BUILT_IN_EXECV
5942 && fcode != BUILT_IN_EXECLP
5943 && fcode != BUILT_IN_EXECLE
5944 && fcode != BUILT_IN_EXECVP
5945 && fcode != BUILT_IN_EXECVE
2c281b15 5946 && fcode != BUILT_IN_ALLOCA
581bf1c2 5947 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 5948 && fcode != BUILT_IN_FREE
5949 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5950 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5951 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5952 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5953 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5954 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5955 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5956 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5957 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5958 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5959 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5960 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 5961 return expand_call (exp, target, ignore);
53800dbe 5962
8d6d7930 5963 /* The built-in function expanders test for target == const0_rtx
5964 to determine whether the function's result will be ignored. */
5965 if (ignore)
5966 target = const0_rtx;
5967
5968 /* If the result of a pure or const built-in function is ignored, and
5969 none of its arguments are volatile, we can avoid expanding the
5970 built-in call and just evaluate the arguments for side-effects. */
5971 if (target == const0_rtx
67fa4078 5972 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5973 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 5974 {
5975 bool volatilep = false;
5976 tree arg;
c2f47e15 5977 call_expr_arg_iterator iter;
8d6d7930 5978
c2f47e15 5979 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5980 if (TREE_THIS_VOLATILE (arg))
8d6d7930 5981 {
5982 volatilep = true;
5983 break;
5984 }
5985
5986 if (! volatilep)
5987 {
c2f47e15 5988 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5989 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 5990 return const0_rtx;
5991 }
5992 }
5993
f21337ef 5994 /* expand_builtin_with_bounds is supposed to be used for
5995 instrumented builtin calls. */
058a1b7a 5996 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5997
53800dbe 5998 switch (fcode)
5999 {
4f35b1fc 6000 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 6001 case BUILT_IN_FABSD32:
6002 case BUILT_IN_FABSD64:
6003 case BUILT_IN_FABSD128:
c2f47e15 6004 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6005 if (target)
a0c938f0 6006 return target;
78a74442 6007 break;
6008
4f35b1fc 6009 CASE_FLT_FN (BUILT_IN_COPYSIGN):
c2f47e15 6010 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6011 if (target)
6012 return target;
6013 break;
6014
7d3f6cc7 6015 /* Just do a normal library call if we were unable to fold
6016 the values. */
4f35b1fc 6017 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6018 break;
53800dbe 6019
4f35b1fc 6020 CASE_FLT_FN (BUILT_IN_EXP):
6021 CASE_FLT_FN (BUILT_IN_EXP10):
6022 CASE_FLT_FN (BUILT_IN_POW10):
6023 CASE_FLT_FN (BUILT_IN_EXP2):
6024 CASE_FLT_FN (BUILT_IN_EXPM1):
6025 CASE_FLT_FN (BUILT_IN_LOGB):
4f35b1fc 6026 CASE_FLT_FN (BUILT_IN_LOG):
6027 CASE_FLT_FN (BUILT_IN_LOG10):
6028 CASE_FLT_FN (BUILT_IN_LOG2):
6029 CASE_FLT_FN (BUILT_IN_LOG1P):
6030 CASE_FLT_FN (BUILT_IN_TAN):
6031 CASE_FLT_FN (BUILT_IN_ASIN):
6032 CASE_FLT_FN (BUILT_IN_ACOS):
6033 CASE_FLT_FN (BUILT_IN_ATAN):
b3154a1f 6034 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
7f3be425 6035 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6036 because of possible accuracy problems. */
6037 if (! flag_unsafe_math_optimizations)
53800dbe 6038 break;
4f35b1fc 6039 CASE_FLT_FN (BUILT_IN_SQRT):
6040 CASE_FLT_FN (BUILT_IN_FLOOR):
6041 CASE_FLT_FN (BUILT_IN_CEIL):
6042 CASE_FLT_FN (BUILT_IN_TRUNC):
6043 CASE_FLT_FN (BUILT_IN_ROUND):
6044 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6045 CASE_FLT_FN (BUILT_IN_RINT):
53800dbe 6046 target = expand_builtin_mathfn (exp, target, subtarget);
6047 if (target)
6048 return target;
6049 break;
6050
7e0713b1 6051 CASE_FLT_FN (BUILT_IN_FMA):
6052 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6053 if (target)
6054 return target;
6055 break;
6056
a67a90e5 6057 CASE_FLT_FN (BUILT_IN_ILOGB):
6058 if (! flag_unsafe_math_optimizations)
6059 break;
69b779ea 6060 CASE_FLT_FN (BUILT_IN_ISINF):
cde061c1 6061 CASE_FLT_FN (BUILT_IN_FINITE):
6062 case BUILT_IN_ISFINITE:
8a1a9cb7 6063 case BUILT_IN_ISNORMAL:
f97eea22 6064 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6065 if (target)
6066 return target;
6067 break;
6068
80ff6494 6069 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6070 CASE_FLT_FN (BUILT_IN_LCEIL):
6071 CASE_FLT_FN (BUILT_IN_LLCEIL):
6072 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6073 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6074 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6075 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6076 if (target)
6077 return target;
6078 break;
6079
80ff6494 6080 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6081 CASE_FLT_FN (BUILT_IN_LRINT):
6082 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6083 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6084 CASE_FLT_FN (BUILT_IN_LROUND):
6085 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6086 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6087 if (target)
6088 return target;
6089 break;
6090
4f35b1fc 6091 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6092 target = expand_builtin_powi (exp, target);
757c219d 6093 if (target)
6094 return target;
6095 break;
6096
4f35b1fc 6097 CASE_FLT_FN (BUILT_IN_ATAN2):
6098 CASE_FLT_FN (BUILT_IN_LDEXP):
73a954a1 6099 CASE_FLT_FN (BUILT_IN_SCALB):
6100 CASE_FLT_FN (BUILT_IN_SCALBN):
6101 CASE_FLT_FN (BUILT_IN_SCALBLN):
0fd605a5 6102 if (! flag_unsafe_math_optimizations)
6103 break;
ef722005 6104
6105 CASE_FLT_FN (BUILT_IN_FMOD):
6106 CASE_FLT_FN (BUILT_IN_REMAINDER):
6107 CASE_FLT_FN (BUILT_IN_DREM):
0810ff17 6108 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 6109 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6110 if (target)
6111 return target;
6112 break;
6113
d735c391 6114 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6115 target = expand_builtin_cexpi (exp, target);
d735c391 6116 gcc_assert (target);
6117 return target;
6118
4f35b1fc 6119 CASE_FLT_FN (BUILT_IN_SIN):
6120 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6121 if (! flag_unsafe_math_optimizations)
6122 break;
6123 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6124 if (target)
6125 return target;
6126 break;
6127
c3147c1a 6128 CASE_FLT_FN (BUILT_IN_SINCOS):
6129 if (! flag_unsafe_math_optimizations)
6130 break;
6131 target = expand_builtin_sincos (exp);
6132 if (target)
6133 return target;
6134 break;
6135
53800dbe 6136 case BUILT_IN_APPLY_ARGS:
6137 return expand_builtin_apply_args ();
6138
6139 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6140 FUNCTION with a copy of the parameters described by
6141 ARGUMENTS, and ARGSIZE. It returns a block of memory
6142 allocated on the stack into which is stored all the registers
6143 that might possibly be used for returning the result of a
6144 function. ARGUMENTS is the value returned by
6145 __builtin_apply_args. ARGSIZE is the number of bytes of
6146 arguments that must be copied. ??? How should this value be
6147 computed? We'll also need a safe worst case value for varargs
6148 functions. */
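    /* A minimal caller-side usage sketch (not part of this expander);
       the function pointer WRAPPED_FN and the 64-byte ARGSIZE are
       assumptions made only for the example:

	 void *args = __builtin_apply_args ();
	 void *res = __builtin_apply ((void (*) ()) wrapped_fn, args, 64);
	 __builtin_return (res);  */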
6149 case BUILT_IN_APPLY:
c2f47e15 6150 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6151 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6152 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6153 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6154 return const0_rtx;
6155 else
6156 {
53800dbe 6157 rtx ops[3];
6158
c2f47e15 6159 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6160 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6161 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6162
6163 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6164 }
6165
6166 /* __builtin_return (RESULT) causes the function to return the
6167 value described by RESULT. RESULT is address of the block of
6168 memory returned by __builtin_apply. */
6169 case BUILT_IN_RETURN:
c2f47e15 6170 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6171 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6172 return const0_rtx;
6173
6174 case BUILT_IN_SAVEREGS:
a66c9326 6175 return expand_builtin_saveregs ();
53800dbe 6176
48dc2227 6177 case BUILT_IN_VA_ARG_PACK:
6178 /* All valid uses of __builtin_va_arg_pack () are removed during
6179 inlining. */
b8c23db3 6180 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6181 return const0_rtx;
6182
4e1d7ea4 6183 case BUILT_IN_VA_ARG_PACK_LEN:
6184 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6185 inlining. */
b8c23db3 6186 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6187 return const0_rtx;
6188
53800dbe 6189 /* Return the address of the first anonymous stack arg. */
6190 case BUILT_IN_NEXT_ARG:
c2f47e15 6191 if (fold_builtin_next_arg (exp, false))
a0c938f0 6192 return const0_rtx;
79012a9d 6193 return expand_builtin_next_arg ();
53800dbe 6194
ac8fb6db 6195 case BUILT_IN_CLEAR_CACHE:
6196 target = expand_builtin___clear_cache (exp);
6197 if (target)
6198 return target;
6199 break;
6200
53800dbe 6201 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6202 return expand_builtin_classify_type (exp);
53800dbe 6203
6204 case BUILT_IN_CONSTANT_P:
4ee9c684 6205 return const0_rtx;
53800dbe 6206
6207 case BUILT_IN_FRAME_ADDRESS:
6208 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6209 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6210
6211 /* Returns the address of the area where the structure is returned.
6212 0 otherwise. */
6213 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6214 if (call_expr_nargs (exp) != 0
9342ee68 6215 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6216 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6217 return const0_rtx;
53800dbe 6218 else
9342ee68 6219 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6220
6221 case BUILT_IN_ALLOCA:
581bf1c2 6222 case BUILT_IN_ALLOCA_WITH_ALIGN:
990495a7 6223 /* If the allocation stems from the declaration of a variable-sized
6224 object, it cannot accumulate. */
a882d754 6225 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
53800dbe 6226 if (target)
6227 return target;
6228 break;
6229
4ee9c684 6230 case BUILT_IN_STACK_SAVE:
6231 return expand_stack_save ();
6232
6233 case BUILT_IN_STACK_RESTORE:
c2f47e15 6234 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6235 return const0_rtx;
6236
74bdbe96 6237 case BUILT_IN_BSWAP16:
42791117 6238 case BUILT_IN_BSWAP32:
6239 case BUILT_IN_BSWAP64:
74bdbe96 6240 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6241 if (target)
6242 return target;
6243 break;
6244
4f35b1fc 6245 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6246 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6247 subtarget, ffs_optab);
6a08d0ab 6248 if (target)
6249 return target;
6250 break;
6251
4f35b1fc 6252 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6253 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6254 subtarget, clz_optab);
6a08d0ab 6255 if (target)
6256 return target;
6257 break;
6258
4f35b1fc 6259 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6260 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6261 subtarget, ctz_optab);
6a08d0ab 6262 if (target)
6263 return target;
6264 break;
6265
d8492bd3 6266 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6267 target = expand_builtin_unop (target_mode, exp, target,
6268 subtarget, clrsb_optab);
6269 if (target)
6270 return target;
6271 break;
6272
4f35b1fc 6273 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6274 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6275 subtarget, popcount_optab);
6a08d0ab 6276 if (target)
6277 return target;
6278 break;
6279
4f35b1fc 6280 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6281 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6282 subtarget, parity_optab);
53800dbe 6283 if (target)
6284 return target;
6285 break;
6286
6287 case BUILT_IN_STRLEN:
c2f47e15 6288 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6289 if (target)
6290 return target;
6291 break;
6292
6293 case BUILT_IN_STRCPY:
a65c4d64 6294 target = expand_builtin_strcpy (exp, target);
53800dbe 6295 if (target)
6296 return target;
6297 break;
bf8e3599 6298
ed09096d 6299 case BUILT_IN_STRNCPY:
a65c4d64 6300 target = expand_builtin_strncpy (exp, target);
ed09096d 6301 if (target)
6302 return target;
6303 break;
bf8e3599 6304
3b824fa6 6305 case BUILT_IN_STPCPY:
dc369150 6306 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6307 if (target)
6308 return target;
6309 break;
6310
53800dbe 6311 case BUILT_IN_MEMCPY:
a65c4d64 6312 target = expand_builtin_memcpy (exp, target);
3b824fa6 6313 if (target)
6314 return target;
6315 break;
6316
6317 case BUILT_IN_MEMPCPY:
c2f47e15 6318 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6319 if (target)
6320 return target;
6321 break;
6322
6323 case BUILT_IN_MEMSET:
c2f47e15 6324 target = expand_builtin_memset (exp, target, mode);
53800dbe 6325 if (target)
6326 return target;
6327 break;
6328
ffc83088 6329 case BUILT_IN_BZERO:
0b25db21 6330 target = expand_builtin_bzero (exp);
ffc83088 6331 if (target)
6332 return target;
6333 break;
6334
53800dbe 6335 case BUILT_IN_STRCMP:
a65c4d64 6336 target = expand_builtin_strcmp (exp, target);
53800dbe 6337 if (target)
6338 return target;
6339 break;
6340
ed09096d 6341 case BUILT_IN_STRNCMP:
6342 target = expand_builtin_strncmp (exp, target, mode);
6343 if (target)
6344 return target;
6345 break;
6346
071f1696 6347 case BUILT_IN_BCMP:
53800dbe 6348 case BUILT_IN_MEMCMP:
c2f47e15 6349 target = expand_builtin_memcmp (exp, target, mode);
53800dbe 6350 if (target)
6351 return target;
6352 break;
53800dbe 6353
6354 case BUILT_IN_SETJMP:
2c8a1497 6355 /* This should have been lowered to the builtins below. */
6356 gcc_unreachable ();
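      /* (A sketch of that lowering, under the usual gimplifier scheme:
	 __builtin_setjmp (buf) is rewritten into a call to
	 __builtin_setjmp_setup (buf, &<receiver_label>) on the direct
	 path plus a __builtin_setjmp_receiver (&<receiver_label>) where
	 control re-enters after a longjmp; those two forms are what the
	 cases below expand.)  */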
6357
6358 case BUILT_IN_SETJMP_SETUP:
6359 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6360 and the receiver label. */
c2f47e15 6361 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6362 {
c2f47e15 6363 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6364 VOIDmode, EXPAND_NORMAL);
c2f47e15 6365 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6366 rtx_insn *label_r = label_rtx (label);
2c8a1497 6367
6368 /* This is copied from the handling of non-local gotos. */
6369 expand_builtin_setjmp_setup (buf_addr, label_r);
6370 nonlocal_goto_handler_labels
a4de1c23 6371 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6372 nonlocal_goto_handler_labels);
6373 /* ??? Do not let expand_label treat us as such since we would
6374 not want to be both on the list of non-local labels and on
6375 the list of forced labels. */
6376 FORCED_LABEL (label) = 0;
6377 return const0_rtx;
6378 }
6379 break;
6380
2c8a1497 6381 case BUILT_IN_SETJMP_RECEIVER:
6382 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6383 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6384 {
c2f47e15 6385 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6386 rtx_insn *label_r = label_rtx (label);
2c8a1497 6387
6388 expand_builtin_setjmp_receiver (label_r);
6389 return const0_rtx;
6390 }
6b7f6858 6391 break;
53800dbe 6392
6393 /* __builtin_longjmp is passed a pointer to an array of five words.
6394 It's similar to the C library longjmp function but works with
6395 __builtin_setjmp above. */
6396 case BUILT_IN_LONGJMP:
c2f47e15 6397 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6398 {
c2f47e15 6399 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6400 VOIDmode, EXPAND_NORMAL);
c2f47e15 6401 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6402
6403 if (value != const1_rtx)
6404 {
1e5fcbe2 6405 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6406 return const0_rtx;
6407 }
6408
6409 expand_builtin_longjmp (buf_addr, value);
6410 return const0_rtx;
6411 }
2c8a1497 6412 break;
53800dbe 6413
4ee9c684 6414 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6415 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6416 if (target)
6417 return target;
6418 break;
6419
843d08a9 6420 /* This updates the setjmp buffer that is its argument with the value
6421 of the current stack pointer. */
6422 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6423 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6424 {
6425 rtx buf_addr
c2f47e15 6426 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6427
6428 expand_builtin_update_setjmp_buf (buf_addr);
6429 return const0_rtx;
6430 }
6431 break;
6432
53800dbe 6433 case BUILT_IN_TRAP:
a0ef1725 6434 expand_builtin_trap ();
53800dbe 6435 return const0_rtx;
6436
d2b48f0c 6437 case BUILT_IN_UNREACHABLE:
6438 expand_builtin_unreachable ();
6439 return const0_rtx;
6440
4f35b1fc 6441 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6442 case BUILT_IN_SIGNBITD32:
6443 case BUILT_IN_SIGNBITD64:
6444 case BUILT_IN_SIGNBITD128:
27f261ef 6445 target = expand_builtin_signbit (exp, target);
6446 if (target)
6447 return target;
6448 break;
6449
53800dbe 6450 /* Various hooks for the DWARF 2 __throw routine. */
6451 case BUILT_IN_UNWIND_INIT:
6452 expand_builtin_unwind_init ();
6453 return const0_rtx;
6454 case BUILT_IN_DWARF_CFA:
6455 return virtual_cfa_rtx;
6456#ifdef DWARF2_UNWIND_INFO
f8f023a5 6457 case BUILT_IN_DWARF_SP_COLUMN:
6458 return expand_builtin_dwarf_sp_column ();
695e919b 6459 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6460 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6461 return const0_rtx;
53800dbe 6462#endif
6463 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6464 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6465 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6466 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6467 case BUILT_IN_EH_RETURN:
c2f47e15 6468 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6469 CALL_EXPR_ARG (exp, 1));
53800dbe 6470 return const0_rtx;
df4b504c 6471 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6472 return expand_builtin_eh_return_data_regno (exp);
26093bf4 6473 case BUILT_IN_EXTEND_POINTER:
c2f47e15 6474 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 6475 case BUILT_IN_EH_POINTER:
6476 return expand_builtin_eh_pointer (exp);
6477 case BUILT_IN_EH_FILTER:
6478 return expand_builtin_eh_filter (exp);
6479 case BUILT_IN_EH_COPY_VALUES:
6480 return expand_builtin_eh_copy_values (exp);
26093bf4 6481
7ccc713a 6482 case BUILT_IN_VA_START:
c2f47e15 6483 return expand_builtin_va_start (exp);
a66c9326 6484 case BUILT_IN_VA_END:
c2f47e15 6485 return expand_builtin_va_end (exp);
a66c9326 6486 case BUILT_IN_VA_COPY:
c2f47e15 6487 return expand_builtin_va_copy (exp);
89cfe6e5 6488 case BUILT_IN_EXPECT:
c2f47e15 6489 return expand_builtin_expect (exp, target);
fca0886c 6490 case BUILT_IN_ASSUME_ALIGNED:
6491 return expand_builtin_assume_aligned (exp, target);
5e3608d8 6492 case BUILT_IN_PREFETCH:
c2f47e15 6493 expand_builtin_prefetch (exp);
5e3608d8 6494 return const0_rtx;
6495
4ee9c684 6496 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 6497 return expand_builtin_init_trampoline (exp, true);
6498 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6499 return expand_builtin_init_trampoline (exp, false);
4ee9c684 6500 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 6501 return expand_builtin_adjust_trampoline (exp);
4ee9c684 6502
73673831 6503 case BUILT_IN_FORK:
6504 case BUILT_IN_EXECL:
6505 case BUILT_IN_EXECV:
6506 case BUILT_IN_EXECLP:
6507 case BUILT_IN_EXECLE:
6508 case BUILT_IN_EXECVP:
6509 case BUILT_IN_EXECVE:
c2f47e15 6510 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 6511 if (target)
6512 return target;
6513 break;
53800dbe 6514
2797f13a 6515 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6516 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6517 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6518 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6519 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6520 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 6521 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 6522 if (target)
6523 return target;
6524 break;
6525
2797f13a 6526 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6527 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6528 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6529 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6530 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6531 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 6532 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 6533 if (target)
6534 return target;
6535 break;
6536
2797f13a 6537 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6538 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6539 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6540 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6541 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6542 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 6543 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 6544 if (target)
6545 return target;
6546 break;
6547
2797f13a 6548 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6549 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6550 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6551 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6552 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6553 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 6554 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 6555 if (target)
6556 return target;
6557 break;
6558
2797f13a 6559 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6560 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6561 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6562 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6563 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6564 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 6565 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 6566 if (target)
6567 return target;
6568 break;
6569
2797f13a 6570 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6571 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6572 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6573 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6574 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6575 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 6576 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 6577 if (target)
6578 return target;
6579 break;
6580
2797f13a 6581 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6582 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6583 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6584 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6585 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6586 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 6587 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 6588 if (target)
6589 return target;
6590 break;
6591
2797f13a 6592 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6593 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6594 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6595 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6596 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6597 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 6598 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 6599 if (target)
6600 return target;
6601 break;
6602
2797f13a 6603 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6604 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6605 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6606 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6607 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6608 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 6609 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 6610 if (target)
6611 return target;
6612 break;
6613
2797f13a 6614 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6615 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6616 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6617 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6618 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 6620 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 6621 if (target)
6622 return target;
6623 break;
6624
2797f13a 6625 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6626 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6627 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6628 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6629 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6630 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 6631 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 6632 if (target)
6633 return target;
6634 break;
6635
2797f13a 6636 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6637 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6638 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6639 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6640 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6641 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 6642 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 6643 if (target)
6644 return target;
6645 break;
6646
2797f13a 6647 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6648 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6649 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6650 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6651 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 6652 if (mode == VOIDmode)
6653 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 6654 if (!target || !register_operand (target, mode))
6655 target = gen_reg_rtx (mode);
3e272de8 6656
2797f13a 6657 mode = get_builtin_sync_mode
6658 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 6659 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 6660 if (target)
6661 return target;
6662 break;
6663
2797f13a 6664 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6665 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6666 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6667 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6668 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6669 mode = get_builtin_sync_mode
6670 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 6671 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 6672 if (target)
6673 return target;
6674 break;
6675
2797f13a 6676 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6677 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6678 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6679 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6680 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6681 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6682 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 6683 if (target)
6684 return target;
6685 break;
6686
2797f13a 6687 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6688 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6689 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6690 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6691 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6692 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6693 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 6694 return const0_rtx;
6695
2797f13a 6696 case BUILT_IN_SYNC_SYNCHRONIZE:
6697 expand_builtin_sync_synchronize ();
b6a5fc45 6698 return const0_rtx;
6699
1cd6e20d 6700 case BUILT_IN_ATOMIC_EXCHANGE_1:
6701 case BUILT_IN_ATOMIC_EXCHANGE_2:
6702 case BUILT_IN_ATOMIC_EXCHANGE_4:
6703 case BUILT_IN_ATOMIC_EXCHANGE_8:
6704 case BUILT_IN_ATOMIC_EXCHANGE_16:
6705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6706 target = expand_builtin_atomic_exchange (mode, exp, target);
6707 if (target)
6708 return target;
6709 break;
6710
6711 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6712 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6713 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6714 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6715 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 6716 {
6717 unsigned int nargs, z;
f1f41a6c 6718 vec<tree, va_gc> *vec;
2c201ad1 6719
6720 mode =
6721 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6722 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6723 if (target)
6724 return target;
6725
6726 /* If this is turned into an external library call, the weak parameter
6727 must be dropped to match the expected parameter list. */
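	/* A sketch, assuming the usual six-operand form of the builtin:
	   a call
	     __atomic_compare_exchange_n (p, &e, d, weak, s_ord, f_ord)
	   that could not be expanded inline above is rebuilt here without
	   the WEAK operand before being emitted as a library call.  */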
6728 nargs = call_expr_nargs (exp);
f1f41a6c 6729 vec_alloc (vec, nargs - 1);
2c201ad1 6730 for (z = 0; z < 3; z++)
f1f41a6c 6731 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6732 /* Skip the boolean weak parameter. */
6733 for (z = 4; z < 6; z++)
f1f41a6c 6734 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6735 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6736 break;
6737 }
1cd6e20d 6738
6739 case BUILT_IN_ATOMIC_LOAD_1:
6740 case BUILT_IN_ATOMIC_LOAD_2:
6741 case BUILT_IN_ATOMIC_LOAD_4:
6742 case BUILT_IN_ATOMIC_LOAD_8:
6743 case BUILT_IN_ATOMIC_LOAD_16:
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6745 target = expand_builtin_atomic_load (mode, exp, target);
6746 if (target)
6747 return target;
6748 break;
6749
6750 case BUILT_IN_ATOMIC_STORE_1:
6751 case BUILT_IN_ATOMIC_STORE_2:
6752 case BUILT_IN_ATOMIC_STORE_4:
6753 case BUILT_IN_ATOMIC_STORE_8:
6754 case BUILT_IN_ATOMIC_STORE_16:
6755 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6756 target = expand_builtin_atomic_store (mode, exp);
6757 if (target)
6758 return const0_rtx;
6759 break;
6760
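    /* For each __atomic_<op>_fetch builtin below, the matching
       __atomic_fetch_<op> built-in function code is computed and handed
       to expand_builtin_atomic_fetch_op as its LIB argument, so the
       expander can fall back to a library call of that form when no
       suitable atomic pattern exists (a brief orientation note).  */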
6761 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6762 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6763 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6764 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6765 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6766 {
6767 enum built_in_function lib;
6768 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6769 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6770 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6771 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6772 ignore, lib);
6773 if (target)
6774 return target;
6775 break;
6776 }
6777 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6778 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6779 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6780 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6781 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6782 {
6783 enum built_in_function lib;
6784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6785 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6786 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6787 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6788 ignore, lib);
6789 if (target)
6790 return target;
6791 break;
6792 }
6793 case BUILT_IN_ATOMIC_AND_FETCH_1:
6794 case BUILT_IN_ATOMIC_AND_FETCH_2:
6795 case BUILT_IN_ATOMIC_AND_FETCH_4:
6796 case BUILT_IN_ATOMIC_AND_FETCH_8:
6797 case BUILT_IN_ATOMIC_AND_FETCH_16:
6798 {
6799 enum built_in_function lib;
6800 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6801 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6802 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6804 ignore, lib);
6805 if (target)
6806 return target;
6807 break;
6808 }
6809 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6810 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6811 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6812 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6813 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6814 {
6815 enum built_in_function lib;
6816 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6817 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6818 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6819 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6820 ignore, lib);
6821 if (target)
6822 return target;
6823 break;
6824 }
6825 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6826 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6827 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6828 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6829 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6830 {
6831 enum built_in_function lib;
6832 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6833 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6834 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6835 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6836 ignore, lib);
6837 if (target)
6838 return target;
6839 break;
6840 }
6841 case BUILT_IN_ATOMIC_OR_FETCH_1:
6842 case BUILT_IN_ATOMIC_OR_FETCH_2:
6843 case BUILT_IN_ATOMIC_OR_FETCH_4:
6844 case BUILT_IN_ATOMIC_OR_FETCH_8:
6845 case BUILT_IN_ATOMIC_OR_FETCH_16:
6846 {
6847 enum built_in_function lib;
6848 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6849 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6850 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6851 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6852 ignore, lib);
6853 if (target)
6854 return target;
6855 break;
6856 }
6857 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6858 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6859 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6860 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6861 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6862 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6863 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6864 ignore, BUILT_IN_NONE);
6865 if (target)
6866 return target;
6867 break;
6868
6869 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6870 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6871 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6872 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6873 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6874 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6875 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6876 ignore, BUILT_IN_NONE);
6877 if (target)
6878 return target;
6879 break;
6880
6881 case BUILT_IN_ATOMIC_FETCH_AND_1:
6882 case BUILT_IN_ATOMIC_FETCH_AND_2:
6883 case BUILT_IN_ATOMIC_FETCH_AND_4:
6884 case BUILT_IN_ATOMIC_FETCH_AND_8:
6885 case BUILT_IN_ATOMIC_FETCH_AND_16:
6886 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6887 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6888 ignore, BUILT_IN_NONE);
6889 if (target)
6890 return target;
6891 break;
6892
6893 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6894 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6895 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6896 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6897 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6898 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6899 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6900 ignore, BUILT_IN_NONE);
6901 if (target)
6902 return target;
6903 break;
6904
6905 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6906 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6907 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6908 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6909 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6910 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6911 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6912 ignore, BUILT_IN_NONE);
6913 if (target)
6914 return target;
6915 break;
6916
6917 case BUILT_IN_ATOMIC_FETCH_OR_1:
6918 case BUILT_IN_ATOMIC_FETCH_OR_2:
6919 case BUILT_IN_ATOMIC_FETCH_OR_4:
6920 case BUILT_IN_ATOMIC_FETCH_OR_8:
6921 case BUILT_IN_ATOMIC_FETCH_OR_16:
6922 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6923 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6924 ignore, BUILT_IN_NONE);
6925 if (target)
6926 return target;
6927 break;
10b744a3 6928
6929 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 6930 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 6931
6932 case BUILT_IN_ATOMIC_CLEAR:
6933 return expand_builtin_atomic_clear (exp);
1cd6e20d 6934
6935 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6936 return expand_builtin_atomic_always_lock_free (exp);
6937
6938 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6939 target = expand_builtin_atomic_is_lock_free (exp);
6940 if (target)
6941 return target;
6942 break;
6943
6944 case BUILT_IN_ATOMIC_THREAD_FENCE:
6945 expand_builtin_atomic_thread_fence (exp);
6946 return const0_rtx;
6947
6948 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6949 expand_builtin_atomic_signal_fence (exp);
6950 return const0_rtx;
6951
0a39fd54 6952 case BUILT_IN_OBJECT_SIZE:
6953 return expand_builtin_object_size (exp);
6954
6955 case BUILT_IN_MEMCPY_CHK:
6956 case BUILT_IN_MEMPCPY_CHK:
6957 case BUILT_IN_MEMMOVE_CHK:
6958 case BUILT_IN_MEMSET_CHK:
6959 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6960 if (target)
6961 return target;
6962 break;
6963
6964 case BUILT_IN_STRCPY_CHK:
6965 case BUILT_IN_STPCPY_CHK:
6966 case BUILT_IN_STRNCPY_CHK:
1063acde 6967 case BUILT_IN_STPNCPY_CHK:
0a39fd54 6968 case BUILT_IN_STRCAT_CHK:
b356dfef 6969 case BUILT_IN_STRNCAT_CHK:
0a39fd54 6970 case BUILT_IN_SNPRINTF_CHK:
6971 case BUILT_IN_VSNPRINTF_CHK:
6972 maybe_emit_chk_warning (exp, fcode);
6973 break;
6974
6975 case BUILT_IN_SPRINTF_CHK:
6976 case BUILT_IN_VSPRINTF_CHK:
6977 maybe_emit_sprintf_chk_warning (exp, fcode);
6978 break;
6979
2c281b15 6980 case BUILT_IN_FREE:
f74ea1c2 6981 if (warn_free_nonheap_object)
6982 maybe_emit_free_warning (exp);
2c281b15 6983 break;
6984
badaa04c 6985 case BUILT_IN_THREAD_POINTER:
6986 return expand_builtin_thread_pointer (exp, target);
6987
6988 case BUILT_IN_SET_THREAD_POINTER:
6989 expand_builtin_set_thread_pointer (exp);
6990 return const0_rtx;
6991
d037099f 6992 case BUILT_IN_CILK_DETACH:
6993 expand_builtin_cilk_detach (exp);
6994 return const0_rtx;
6995
6996 case BUILT_IN_CILK_POP_FRAME:
6997 expand_builtin_cilk_pop_frame (exp);
6998 return const0_rtx;
6999
058a1b7a 7000 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7001 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7002 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7003 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7004 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7005 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7006 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7007 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7008 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7009 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7010 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7011 /* We allow user CHKP builtins if Pointer Bounds
7012 Checker is off. */
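      /* Concretely (mirroring the code below): the bounds
	 set/narrow/init/null/copy builtins then simply return their
	 pointer argument, BUILT_IN_CHKP_GET_PTR_LBOUND folds to 0,
	 BUILT_IN_CHKP_GET_PTR_UBOUND to -1 (an "infinite" upper bound),
	 and the remaining checks expand to nothing.  */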
7013 if (!chkp_function_instrumented_p (current_function_decl))
7014 {
7015 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7016 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7017 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7018 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7019 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7020 return expand_normal (CALL_EXPR_ARG (exp, 0));
7021 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7022 return expand_normal (size_zero_node);
7023 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7024 return expand_normal (size_int (-1));
7025 else
7026 return const0_rtx;
7027 }
7028 /* FALLTHROUGH */
7029
7030 case BUILT_IN_CHKP_BNDMK:
7031 case BUILT_IN_CHKP_BNDSTX:
7032 case BUILT_IN_CHKP_BNDCL:
7033 case BUILT_IN_CHKP_BNDCU:
7034 case BUILT_IN_CHKP_BNDLDX:
7035 case BUILT_IN_CHKP_BNDRET:
7036 case BUILT_IN_CHKP_INTERSECT:
7037 case BUILT_IN_CHKP_NARROW:
7038 case BUILT_IN_CHKP_EXTRACT_LOWER:
7039 case BUILT_IN_CHKP_EXTRACT_UPPER:
7040 /* Software implementation of Pointer Bounds Checker is NYI.
7041 Target support is required. */
 7042	 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7043 break;
7044
ca4c3545 7045 case BUILT_IN_ACC_ON_DEVICE:
7046 target = expand_builtin_acc_on_device (exp, target);
7047 if (target)
7048 return target;
7049 break;
7050
92482ee0 7051 default: /* just do library call, if unknown builtin */
146c1b4f 7052 break;
53800dbe 7053 }
7054
7055 /* The switch statement above can drop through to cause the function
7056 to be called normally. */
7057 return expand_call (exp, target, ignore);
7058}
650e4c94 7059
f21337ef 7060/* Similar to expand_builtin but is used for instrumented calls. */
7061
7062rtx
7063expand_builtin_with_bounds (tree exp, rtx target,
7064 rtx subtarget ATTRIBUTE_UNUSED,
7065 machine_mode mode, int ignore)
7066{
7067 tree fndecl = get_callee_fndecl (exp);
7068 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7069
7070 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7071
7072 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7073 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7074
7075 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7076 && fcode < END_CHKP_BUILTINS);
7077
7078 switch (fcode)
7079 {
7080 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7081 target = expand_builtin_memcpy_with_bounds (exp, target);
7082 if (target)
7083 return target;
7084 break;
7085
7086 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7087 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7088 if (target)
7089 return target;
7090 break;
7091
7092 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7093 target = expand_builtin_memset_with_bounds (exp, target, mode);
7094 if (target)
7095 return target;
7096 break;
7097
7098 default:
7099 break;
7100 }
7101
7102 /* The switch statement above can drop through to cause the function
7103 to be called normally. */
7104 return expand_call (exp, target, ignore);
7105 }
7106
805e22b2 7107/* Determine whether a tree node represents a call to a built-in
52203a9d 7108 function. If the tree T is a call to a built-in function with
7109 the right number of arguments of the appropriate types, return
7110 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7111 Otherwise the return value is END_BUILTINS. */
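/* For example, a call tree for sqrt (x) with a double argument yields
   BUILT_IN_SQRT, while a sqrt call whose argument list does not match
   the builtin's prototype (say, a pointer argument, or too few
   arguments) yields END_BUILTINS.  */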
aecda0d6 7112
805e22b2 7113enum built_in_function
b7bf20db 7114builtin_mathfn_code (const_tree t)
805e22b2 7115{
b7bf20db 7116 const_tree fndecl, arg, parmlist;
7117 const_tree argtype, parmtype;
7118 const_call_expr_arg_iterator iter;
805e22b2 7119
7120 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 7121 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 7122 return END_BUILTINS;
7123
c6e6ecb1 7124 fndecl = get_callee_fndecl (t);
7125 if (fndecl == NULL_TREE
52203a9d 7126 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7127 || ! DECL_BUILT_IN (fndecl)
7128 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7129 return END_BUILTINS;
7130
52203a9d 7131 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7132 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7133 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7134 {
52203a9d 7135 /* If a function doesn't take a variable number of arguments,
7136 the last element in the list will have type `void'. */
7137 parmtype = TREE_VALUE (parmlist);
7138 if (VOID_TYPE_P (parmtype))
7139 {
b7bf20db 7140 if (more_const_call_expr_args_p (&iter))
52203a9d 7141 return END_BUILTINS;
7142 return DECL_FUNCTION_CODE (fndecl);
7143 }
7144
b7bf20db 7145 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7146 return END_BUILTINS;
48e1416a 7147
b7bf20db 7148 arg = next_const_call_expr_arg (&iter);
c2f47e15 7149 argtype = TREE_TYPE (arg);
52203a9d 7150
7151 if (SCALAR_FLOAT_TYPE_P (parmtype))
7152 {
7153 if (! SCALAR_FLOAT_TYPE_P (argtype))
7154 return END_BUILTINS;
7155 }
7156 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7157 {
7158 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7159 return END_BUILTINS;
7160 }
7161 else if (POINTER_TYPE_P (parmtype))
7162 {
7163 if (! POINTER_TYPE_P (argtype))
7164 return END_BUILTINS;
7165 }
7166 else if (INTEGRAL_TYPE_P (parmtype))
7167 {
7168 if (! INTEGRAL_TYPE_P (argtype))
7169 return END_BUILTINS;
7170 }
7171 else
e9f80ff5 7172 return END_BUILTINS;
e9f80ff5 7173 }
7174
52203a9d 7175 /* Variable-length argument list. */
805e22b2 7176 return DECL_FUNCTION_CODE (fndecl);
7177}
7178
c2f47e15 7179/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7180 evaluate to a constant. */
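/* A rough sketch of the possible outcomes, in source-level terms:

     __builtin_constant_p (3)      -> 1
     __builtin_constant_p ("abc")  -> 1  (address of a string literal)
     __builtin_constant_p (i++)    -> 0  (side effects)
     __builtin_constant_p (i)      -> NULL_TREE, i.e. deferred so that
				      later optimization may still prove
				      the argument constant.  */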
650e4c94 7181
7182static tree
c2f47e15 7183fold_builtin_constant_p (tree arg)
650e4c94 7184{
650e4c94 7185 /* We return 1 for a numeric type that's known to be a constant
7186 value at compile-time or for an aggregate type that's a
7187 literal constant. */
c2f47e15 7188 STRIP_NOPS (arg);
650e4c94 7189
7190 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7191 if (CONSTANT_CLASS_P (arg)
7192 || (TREE_CODE (arg) == CONSTRUCTOR
7193 && TREE_CONSTANT (arg)))
650e4c94 7194 return integer_one_node;
c2f47e15 7195 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7196 {
c2f47e15 7197 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7198 if (TREE_CODE (op) == STRING_CST
7199 || (TREE_CODE (op) == ARRAY_REF
7200 && integer_zerop (TREE_OPERAND (op, 1))
7201 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7202 return integer_one_node;
7203 }
650e4c94 7204
1fb4300c 7205 /* If this expression has side effects, show we don't know it to be a
7206 constant. Likewise if it's a pointer or aggregate type since in
 7207	   those cases we only want literals, since those are only optimized
f97c71a1 7208 when generating RTL, not later.
7209 And finally, if we are compiling an initializer, not code, we
7210 need to return a definite result now; there's not going to be any
7211 more optimization done. */
c2f47e15 7212 if (TREE_SIDE_EFFECTS (arg)
7213 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7214 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7215 || cfun == 0
0b049e15 7216 || folding_initializer
7217 || force_folding_builtin_constant_p)
650e4c94 7218 return integer_zero_node;
7219
c2f47e15 7220 return NULL_TREE;
650e4c94 7221}
7222
76f5a783 7223/* Create a call to builtin_expect with PRED, EXPECTED and (when non-NULL)
 7224   PREDICTOR as its arguments and return it as a truthvalue.  */
4ee9c684 7225
7226static tree
c83059be 7227build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7228 tree predictor)
4ee9c684 7229{
76f5a783 7230 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7231
b9a16870 7232 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7233 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7234 ret_type = TREE_TYPE (TREE_TYPE (fn));
7235 pred_type = TREE_VALUE (arg_types);
7236 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7237
389dd41b 7238 pred = fold_convert_loc (loc, pred_type, pred);
7239 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7240 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7241 predictor);
76f5a783 7242
7243 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7244 build_int_cst (ret_type, 0));
7245}
7246
 7247/* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2 (an
 7248   optional predictor).  Return NULL_TREE if no simplification is possible. */
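/* One transformation handled below, sketched in source-level terms:
   __builtin_expect (a && b, 1) is distributed over the short-circuit
   operator as roughly

     (__builtin_expect ((long) (a), 1) != 0)
     && (__builtin_expect ((long) (b), 1) != 0)

   so that each operand carries its own hint; the cast follows the
   declared first parameter type of __builtin_expect.  */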
7249
c83059be 7250tree
7251fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
76f5a783 7252{
083bada9 7253 tree inner, fndecl, inner_arg0;
76f5a783 7254 enum tree_code code;
7255
083bada9 7256 /* Distribute the expected value over short-circuiting operators.
7257 See through the cast from truthvalue_type_node to long. */
7258 inner_arg0 = arg0;
d09ef31a 7259 while (CONVERT_EXPR_P (inner_arg0)
083bada9 7260 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7261 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7262 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7263
76f5a783 7264 /* If this is a builtin_expect within a builtin_expect keep the
7265 inner one. See through a comparison against a constant. It
 7266   might have been added to create a truthvalue.  */
083bada9 7267 inner = inner_arg0;
7268
76f5a783 7269 if (COMPARISON_CLASS_P (inner)
7270 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7271 inner = TREE_OPERAND (inner, 0);
7272
7273 if (TREE_CODE (inner) == CALL_EXPR
7274 && (fndecl = get_callee_fndecl (inner))
7275 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7276 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7277 return arg0;
7278
083bada9 7279 inner = inner_arg0;
76f5a783 7280 code = TREE_CODE (inner);
7281 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7282 {
7283 tree op0 = TREE_OPERAND (inner, 0);
7284 tree op1 = TREE_OPERAND (inner, 1);
7285
c83059be 7286 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7287 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
76f5a783 7288 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7289
389dd41b 7290 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 7291 }
7292
7293 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 7294 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 7295 return NULL_TREE;
4ee9c684 7296
76f5a783 7297 /* If we expect that a comparison against the argument will fold to
7298 a constant return the constant. In practice, this means a true
7299 constant or the address of a non-weak symbol. */
083bada9 7300 inner = inner_arg0;
4ee9c684 7301 STRIP_NOPS (inner);
7302 if (TREE_CODE (inner) == ADDR_EXPR)
7303 {
7304 do
7305 {
7306 inner = TREE_OPERAND (inner, 0);
7307 }
7308 while (TREE_CODE (inner) == COMPONENT_REF
7309 || TREE_CODE (inner) == ARRAY_REF);
062b4460 7310 if ((TREE_CODE (inner) == VAR_DECL
7311 || TREE_CODE (inner) == FUNCTION_DECL)
7312 && DECL_WEAK (inner))
c2f47e15 7313 return NULL_TREE;
4ee9c684 7314 }
7315
76f5a783 7316 /* Otherwise, ARG0 already has the proper type for the return value. */
7317 return arg0;
4ee9c684 7318}
7319
c2f47e15 7320/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7321
539a3a92 7322static tree
c2f47e15 7323fold_builtin_classify_type (tree arg)
539a3a92 7324{
c2f47e15 7325 if (arg == 0)
7002a1c8 7326 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7327
7002a1c8 7328 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7329}
7330
c2f47e15 7331/* Fold a call to __builtin_strlen with argument ARG. */
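/* For instance, __builtin_strlen ("hello") folds via c_strlen to the
   constant 5 converted to TYPE; a non-constant argument is left for the
   library call or later passes.  */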
e6e27594 7332
7333static tree
c7cbde74 7334fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7335{
c2f47e15 7336 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7337 return NULL_TREE;
7338 else
7339 {
c2f47e15 7340 tree len = c_strlen (arg, 0);
e6e27594 7341
7342 if (len)
c7cbde74 7343 return fold_convert_loc (loc, type, len);
e6e27594 7344
7345 return NULL_TREE;
7346 }
7347}
7348
92c43e3c 7349/* Fold a call to __builtin_inf or __builtin_huge_val. */
7350
7351static tree
389dd41b 7352fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7353{
aa870c1b 7354 REAL_VALUE_TYPE real;
7355
40f4dbd5 7356 /* __builtin_inff is intended to be usable to define INFINITY on all
7357 targets. If an infinity is not available, INFINITY expands "to a
7358 positive constant of type float that overflows at translation
7359 time", footnote "In this case, using INFINITY will violate the
7360 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7361 Thus we pedwarn to ensure this constraint violation is
7362 diagnosed. */
92c43e3c 7363 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7364 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7365
aa870c1b 7366 real_inf (&real);
7367 return build_real (type, real);
92c43e3c 7368}
7369
c2f47e15 7370/* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
b0db7939 7371
7372static tree
c2f47e15 7373fold_builtin_nan (tree arg, tree type, int quiet)
b0db7939 7374{
7375 REAL_VALUE_TYPE real;
7376 const char *str;
7377
c2f47e15 7378 if (!validate_arg (arg, POINTER_TYPE))
7379 return NULL_TREE;
7380 str = c_getstr (arg);
b0db7939 7381 if (!str)
c2f47e15 7382 return NULL_TREE;
b0db7939 7383
7384 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
c2f47e15 7385 return NULL_TREE;
b0db7939 7386
7387 return build_real (type, real);
7388}
7389
277f8dd2 7390/* Return true if the floating point expression T has an integer value.
7391 We also allow +Inf, -Inf and NaN to be considered integer values. */
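/* E.g. the predicate holds for (double) i with integral I, for
   fabs (floor (x)) and for the constant 2.0, but not for an arbitrary
   REAL_CST such as 0.5 or for an unconstrained floating-point
   variable.  */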
7392
7393static bool
7394integer_valued_real_p (tree t)
7395{
7396 switch (TREE_CODE (t))
7397 {
7398 case FLOAT_EXPR:
7399 return true;
7400
7401 case ABS_EXPR:
7402 case SAVE_EXPR:
277f8dd2 7403 return integer_valued_real_p (TREE_OPERAND (t, 0));
7404
7405 case COMPOUND_EXPR:
41076ef6 7406 case MODIFY_EXPR:
277f8dd2 7407 case BIND_EXPR:
75a70cf9 7408 return integer_valued_real_p (TREE_OPERAND (t, 1));
277f8dd2 7409
7410 case PLUS_EXPR:
7411 case MINUS_EXPR:
7412 case MULT_EXPR:
7413 case MIN_EXPR:
7414 case MAX_EXPR:
7415 return integer_valued_real_p (TREE_OPERAND (t, 0))
7416 && integer_valued_real_p (TREE_OPERAND (t, 1));
7417
7418 case COND_EXPR:
7419 return integer_valued_real_p (TREE_OPERAND (t, 1))
7420 && integer_valued_real_p (TREE_OPERAND (t, 2));
7421
7422 case REAL_CST:
0570334c 7423 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
277f8dd2 7424
d09ef31a 7425 CASE_CONVERT:
277f8dd2 7426 {
7427 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7428 if (TREE_CODE (type) == INTEGER_TYPE)
7429 return true;
7430 if (TREE_CODE (type) == REAL_TYPE)
7431 return integer_valued_real_p (TREE_OPERAND (t, 0));
7432 break;
7433 }
7434
7435 case CALL_EXPR:
7436 switch (builtin_mathfn_code (t))
7437 {
4f35b1fc 7438 CASE_FLT_FN (BUILT_IN_CEIL):
7439 CASE_FLT_FN (BUILT_IN_FLOOR):
7440 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7441 CASE_FLT_FN (BUILT_IN_RINT):
7442 CASE_FLT_FN (BUILT_IN_ROUND):
7443 CASE_FLT_FN (BUILT_IN_TRUNC):
277f8dd2 7444 return true;
7445
d4a43a03 7446 CASE_FLT_FN (BUILT_IN_FMIN):
7447 CASE_FLT_FN (BUILT_IN_FMAX):
c2f47e15 7448 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7449 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
d4a43a03 7450
277f8dd2 7451 default:
7452 break;
7453 }
7454 break;
7455
7456 default:
7457 break;
7458 }
7459 return false;
7460}
7461
c2f47e15 7462/* FNDECL is assumed to be a builtin where truncation can be propagated
6528f4f4 7463   across (for instance floor((double)f) == (double)floorf (f)).
c2f47e15 7464 Do the transformation for a call with argument ARG. */
277f8dd2 7465
6528f4f4 7466static tree
389dd41b 7467fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6528f4f4 7468{
6528f4f4 7469 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
277f8dd2 7470
c2f47e15 7471 if (!validate_arg (arg, REAL_TYPE))
7472 return NULL_TREE;
6528f4f4 7473
277f8dd2 7474 /* Integer rounding functions are idempotent. */
7475 if (fcode == builtin_mathfn_code (arg))
7476 return arg;
7477
7478 /* If argument is already integer valued, and we don't need to worry
7479 about setting errno, there's no need to perform rounding. */
7480 if (! flag_errno_math && integer_valued_real_p (arg))
7481 return arg;
7482
7483 if (optimize)
6528f4f4 7484 {
277f8dd2 7485 tree arg0 = strip_float_extensions (arg);
2426241c 7486 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6528f4f4 7487 tree newtype = TREE_TYPE (arg0);
7488 tree decl;
7489
7490 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7491 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7492 return fold_convert_loc (loc, ftype,
7493 build_call_expr_loc (loc, decl, 1,
7494 fold_convert_loc (loc,
7495 newtype,
7496 arg0)));
6528f4f4 7497 }
c2f47e15 7498 return NULL_TREE;
6528f4f4 7499}
7500
c2f47e15 7501/* FNDECL is assumed to be builtin which can narrow the FP type of
7502 the argument, for instance lround((double)f) -> lroundf (f).
7503 Do the transformation for a call with argument ARG. */
9ed65c7f 7504
7505static tree
389dd41b 7506fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
9ed65c7f 7507{
9ed65c7f 7508 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9ed65c7f 7509
c2f47e15 7510 if (!validate_arg (arg, REAL_TYPE))
7511 return NULL_TREE;
9ed65c7f 7512
7513 /* If argument is already integer valued, and we don't need to worry
7514 about setting errno, there's no need to perform rounding. */
7515 if (! flag_errno_math && integer_valued_real_p (arg))
389dd41b 7516 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7517 TREE_TYPE (TREE_TYPE (fndecl)), arg);
9ed65c7f 7518
7519 if (optimize)
7520 {
7521 tree ftype = TREE_TYPE (arg);
7522 tree arg0 = strip_float_extensions (arg);
7523 tree newtype = TREE_TYPE (arg0);
7524 tree decl;
7525
7526 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7527 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7528 return build_call_expr_loc (loc, decl, 1,
7529 fold_convert_loc (loc, newtype, arg0));
9ed65c7f 7530 }
73a0da56 7531
80ff6494 7532 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7533 sizeof (int) == sizeof (long). */
7534 if (TYPE_PRECISION (integer_type_node)
7535 == TYPE_PRECISION (long_integer_type_node))
7536 {
7537 tree newfn = NULL_TREE;
7538 switch (fcode)
7539 {
7540 CASE_FLT_FN (BUILT_IN_ICEIL):
7541 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7542 break;
7543
7544 CASE_FLT_FN (BUILT_IN_IFLOOR):
7545 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7546 break;
7547
7548 CASE_FLT_FN (BUILT_IN_IROUND):
7549 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7550 break;
7551
7552 CASE_FLT_FN (BUILT_IN_IRINT):
7553 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7554 break;
7555
7556 default:
7557 break;
7558 }
7559
7560 if (newfn)
7561 {
7562 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7563 return fold_convert_loc (loc,
7564 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7565 }
7566 }
7567
73a0da56 7568 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7569 sizeof (long long) == sizeof (long). */
7570 if (TYPE_PRECISION (long_long_integer_type_node)
7571 == TYPE_PRECISION (long_integer_type_node))
7572 {
7573 tree newfn = NULL_TREE;
7574 switch (fcode)
7575 {
7576 CASE_FLT_FN (BUILT_IN_LLCEIL):
7577 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7578 break;
7579
7580 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7581 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7582 break;
7583
7584 CASE_FLT_FN (BUILT_IN_LLROUND):
7585 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7586 break;
7587
7588 CASE_FLT_FN (BUILT_IN_LLRINT):
7589 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7590 break;
7591
7592 default:
7593 break;
7594 }
7595
7596 if (newfn)
7597 {
389dd41b 7598 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7599 return fold_convert_loc (loc,
7600 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
73a0da56 7601 }
7602 }
7603
c2f47e15 7604 return NULL_TREE;
9ed65c7f 7605}
7606
c2f47e15 7607/* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7608 return type. Return NULL_TREE if no simplification can be made. */
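/* A few of the folds performed below, in source-level terms:

     cabs (x + 0i)   -> fabs (x)
     cabs (-z)       -> cabs (z)
     cabs (conj (z)) -> cabs (z)

   with -funsafe-math-optimizations additionally

     cabs (x + x*1i) -> fabs (x) * sqrt (2)

   and, when also optimizing for speed, an inline expansion into
   sqrt (r*r + i*i) on the saved real and imaginary parts.  A constant
   argument is evaluated outright through mpfr_hypot.  */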
c63f4ad3 7609
7610static tree
389dd41b 7611fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
c63f4ad3 7612{
c2f47e15 7613 tree res;
c63f4ad3 7614
b0ce8887 7615 if (!validate_arg (arg, COMPLEX_TYPE)
c63f4ad3 7616 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7617 return NULL_TREE;
7618
b4725390 7619 /* Calculate the result when the argument is a constant. */
7620 if (TREE_CODE (arg) == COMPLEX_CST
7621 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7622 type, mpfr_hypot)))
7623 return res;
48e1416a 7624
1af0d139 7625 if (TREE_CODE (arg) == COMPLEX_EXPR)
7626 {
7627 tree real = TREE_OPERAND (arg, 0);
7628 tree imag = TREE_OPERAND (arg, 1);
48e1416a 7629
1af0d139 7630 /* If either part is zero, cabs is fabs of the other. */
7631 if (real_zerop (real))
389dd41b 7632 return fold_build1_loc (loc, ABS_EXPR, type, imag);
1af0d139 7633 if (real_zerop (imag))
389dd41b 7634 return fold_build1_loc (loc, ABS_EXPR, type, real);
1af0d139 7635
7636 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7637 if (flag_unsafe_math_optimizations
7638 && operand_equal_p (real, imag, OEP_PURE_SAME))
7639 {
2e7ca27b 7640 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 7641 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
1af0d139 7642 STRIP_NOPS (real);
389dd41b 7643 return fold_build2_loc (loc, MULT_EXPR, type,
7644 fold_build1_loc (loc, ABS_EXPR, type, real),
2e7ca27b 7645 build_real (type, sqrt2_trunc));
1af0d139 7646 }
7647 }
c63f4ad3 7648
749891b2 7649 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7650 if (TREE_CODE (arg) == NEGATE_EXPR
7651 || TREE_CODE (arg) == CONJ_EXPR)
389dd41b 7652 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
749891b2 7653
7d3f6cc7 7654 /* Don't do this when optimizing for size. */
7655 if (flag_unsafe_math_optimizations
0bfd8d5c 7656 && optimize && optimize_function_for_speed_p (cfun))
c63f4ad3 7657 {
0da0dbfa 7658 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
c63f4ad3 7659
7660 if (sqrtfn != NULL_TREE)
7661 {
c2f47e15 7662 tree rpart, ipart, result;
c63f4ad3 7663
4ee9c684 7664 arg = builtin_save_expr (arg);
29a6518e 7665
389dd41b 7666 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7667 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
c63f4ad3 7668
4ee9c684 7669 rpart = builtin_save_expr (rpart);
7670 ipart = builtin_save_expr (ipart);
c63f4ad3 7671
389dd41b 7672 result = fold_build2_loc (loc, PLUS_EXPR, type,
7673 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7674 rpart, rpart),
389dd41b 7675 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7676 ipart, ipart));
c63f4ad3 7677
389dd41b 7678 return build_call_expr_loc (loc, sqrtfn, 1, result);
c63f4ad3 7679 }
7680 }
7681
7682 return NULL_TREE;
7683}
7684
c2373fdb 7685/* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7686 complex tree type of the result. If NEG is true, the imaginary
7687 zero is negative. */
7688
7689static tree
7690build_complex_cproj (tree type, bool neg)
7691{
7692 REAL_VALUE_TYPE rinf, rzero = dconst0;
7693
7694 real_inf (&rinf);
7695 rzero.sign = neg;
7696 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7697 build_real (TREE_TYPE (type), rzero));
7698}
7699
7700/* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7701 return type. Return NULL_TREE if no simplification can be made. */
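/* For example, a finite constant argument such as 1 + 2i is returned
   unchanged, while an argument with an infinite part, e.g.
   x + __builtin_inf () * 1i, folds to inf + 0i with the sign of the
   zero taken from the imaginary part (side effects in the finite
   operand are preserved).  */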
7702
7703static tree
7704fold_builtin_cproj (location_t loc, tree arg, tree type)
7705{
7706 if (!validate_arg (arg, COMPLEX_TYPE)
7707 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7708 return NULL_TREE;
7709
7710 /* If there are no infinities, return arg. */
fe994837 7711 if (! HONOR_INFINITIES (type))
c2373fdb 7712 return non_lvalue_loc (loc, arg);
7713
7714 /* Calculate the result when the argument is a constant. */
7715 if (TREE_CODE (arg) == COMPLEX_CST)
7716 {
7717 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7718 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7719
7720 if (real_isinf (real) || real_isinf (imag))
7721 return build_complex_cproj (type, imag->sign);
7722 else
7723 return arg;
7724 }
b4c7e601 7725 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7726 {
7727 tree real = TREE_OPERAND (arg, 0);
7728 tree imag = TREE_OPERAND (arg, 1);
7729
7730 STRIP_NOPS (real);
7731 STRIP_NOPS (imag);
7732
7733 /* If the real part is inf and the imag part is known to be
7734 nonnegative, return (inf + 0i). Remember side-effects are
7735 possible in the imag part. */
7736 if (TREE_CODE (real) == REAL_CST
7737 && real_isinf (TREE_REAL_CST_PTR (real))
7738 && tree_expr_nonnegative_p (imag))
7739 return omit_one_operand_loc (loc, type,
7740 build_complex_cproj (type, false),
7741 arg);
7742
7743 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7744 Remember side-effects are possible in the real part. */
7745 if (TREE_CODE (imag) == REAL_CST
7746 && real_isinf (TREE_REAL_CST_PTR (imag)))
7747 return
7748 omit_one_operand_loc (loc, type,
7749 build_complex_cproj (type, TREE_REAL_CST_PTR
7750 (imag)->sign), arg);
7751 }
c2373fdb 7752
7753 return NULL_TREE;
7754}
7755
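/* Added illustration -- not part of the original builtins.c.  A sketch of
   the C99 cproj semantics that build_complex_cproj and fold_builtin_cproj
   implement, assuming C11's CMPLX macro from <complex.h>; the helper name
   is hypothetical.  */
#include <math.h>
#include <complex.h>

static double _Complex
example_cproj (double _Complex z)
{
  if (isinf (creal (z)) || isinf (cimag (z)))
    /* Project onto the point at infinity: +Inf with a zero imaginary
       part whose sign is taken from the imaginary argument.  */
    return CMPLX (INFINITY, copysign (0.0, cimag (z)));
  return z;
}
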
c2f47e15 7756/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7757 Return NULL_TREE if no simplification can be made. */
e6e27594 7758
7759static tree
389dd41b 7760fold_builtin_sqrt (location_t loc, tree arg, tree type)
e6e27594 7761{
7762
7763 enum built_in_function fcode;
b4e8ab0c 7764 tree res;
c2f47e15 7765
7766 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7767 return NULL_TREE;
7768
b4e8ab0c 7769 /* Calculate the result when the argument is a constant. */
7770 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7771 return res;
48e1416a 7772
e6e27594 7773 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7774 fcode = builtin_mathfn_code (arg);
7775 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7776 {
c2f47e15 7777 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
389dd41b 7778 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7779 CALL_EXPR_ARG (arg, 0),
49d00087 7780 build_real (type, dconsthalf));
389dd41b 7781 return build_call_expr_loc (loc, expfn, 1, arg);
e6e27594 7782 }
7783
7784 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7785 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7786 {
7787 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7788
7789 if (powfn)
7790 {
c2f47e15 7791 tree arg0 = CALL_EXPR_ARG (arg, 0);
e6e27594 7792 tree tree_root;
7793 /* The inner root was either sqrt or cbrt. */
57510da6 7794 /* This was a conditional expression but it triggered a bug
18381619 7795 in Sun C 5.5. */
ce6cd837 7796 REAL_VALUE_TYPE dconstroot;
7797 if (BUILTIN_SQRT_P (fcode))
7798 dconstroot = dconsthalf;
7799 else
7800 dconstroot = dconst_third ();
e6e27594 7801
7802 /* Adjust for the outer root. */
7803 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7804 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7805 tree_root = build_real (type, dconstroot);
389dd41b 7806 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
e6e27594 7807 }
7808 }
7809
bc33117f 7810 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
e6e27594 7811 if (flag_unsafe_math_optimizations
7812 && (fcode == BUILT_IN_POW
7813 || fcode == BUILT_IN_POWF
7814 || fcode == BUILT_IN_POWL))
7815 {
c2f47e15 7816 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7817 tree arg0 = CALL_EXPR_ARG (arg, 0);
7818 tree arg1 = CALL_EXPR_ARG (arg, 1);
bc33117f 7819 tree narg1;
7820 if (!tree_expr_nonnegative_p (arg0))
7821 arg0 = build1 (ABS_EXPR, type, arg0);
389dd41b 7822 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 7823 build_real (type, dconsthalf));
389dd41b 7824 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
e6e27594 7825 }
7826
7827 return NULL_TREE;
7828}
7829
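/* Added illustration -- not part of the original builtins.c.  The
   flag_unsafe_math_optimizations sqrt rewrites above, spelled out as plain
   C (hypothetical example_* helpers, <math.h> assumed).  */
#include <math.h>

static double
example_sqrt_of_exp (double x)           /* sqrt (exp (x)) -> exp (x * 0.5)  */
{
  return exp (x * 0.5);
}

static double
example_sqrt_of_cbrt (double x)          /* sqrt (cbrt (x)) -> pow (x, 1/6)  */
{
  return pow (x, 1.0 / 6.0);
}

static double
example_sqrt_of_pow (double x, double y) /* sqrt (pow (x, y)) -> pow (|x|, y*0.5)  */
{
  return pow (fabs (x), y * 0.5);
}
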
c2f47e15 7830/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7831 Return NULL_TREE if no simplification can be made. */
7832
e6e27594 7833static tree
389dd41b 7834fold_builtin_cbrt (location_t loc, tree arg, tree type)
e6e27594 7835{
e6e27594 7836 const enum built_in_function fcode = builtin_mathfn_code (arg);
29f4cd78 7837 tree res;
e6e27594 7838
c2f47e15 7839 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7840 return NULL_TREE;
7841
29f4cd78 7842 /* Calculate the result when the argument is a constant. */
7843 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7844 return res;
e6e27594 7845
cdfeb715 7846 if (flag_unsafe_math_optimizations)
e6e27594 7847 {
cdfeb715 7848 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7849 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 7850 {
c2f47e15 7851 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7852 const REAL_VALUE_TYPE third_trunc =
7910b2fb 7853 real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7854 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7855 CALL_EXPR_ARG (arg, 0),
49d00087 7856 build_real (type, third_trunc));
389dd41b 7857 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 7858 }
e6e27594 7859
cdfeb715 7860 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7861 if (BUILTIN_SQRT_P (fcode))
a0c938f0 7862 {
cdfeb715 7863 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
e6e27594 7864
cdfeb715 7865 if (powfn)
7866 {
c2f47e15 7867 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7868 tree tree_root;
7910b2fb 7869 REAL_VALUE_TYPE dconstroot = dconst_third ();
cdfeb715 7870
7871 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7872 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7873 tree_root = build_real (type, dconstroot);
389dd41b 7874 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7875 }
e6e27594 7876 }
7877
cdfeb715 7878 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7879 if (BUILTIN_CBRT_P (fcode))
a0c938f0 7880 {
c2f47e15 7881 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7882 if (tree_expr_nonnegative_p (arg0))
7883 {
7884 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7885
7886 if (powfn)
a0c938f0 7887 {
cdfeb715 7888 tree tree_root;
7889 REAL_VALUE_TYPE dconstroot;
a0c938f0 7890
3fa759a9 7891 real_arithmetic (&dconstroot, MULT_EXPR,
7910b2fb 7892 dconst_third_ptr (), dconst_third_ptr ());
cdfeb715 7893 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7894 tree_root = build_real (type, dconstroot);
389dd41b 7895 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7896 }
7897 }
7898 }
a0c938f0 7899
cdfeb715 7900 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
48e1416a 7901 if (fcode == BUILT_IN_POW
c2f47e15 7902 || fcode == BUILT_IN_POWF
cdfeb715 7903 || fcode == BUILT_IN_POWL)
a0c938f0 7904 {
c2f47e15 7905 tree arg00 = CALL_EXPR_ARG (arg, 0);
7906 tree arg01 = CALL_EXPR_ARG (arg, 1);
cdfeb715 7907 if (tree_expr_nonnegative_p (arg00))
7908 {
c2f47e15 7909 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7910 const REAL_VALUE_TYPE dconstroot
7910b2fb 7911 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7912 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
49d00087 7913 build_real (type, dconstroot));
389dd41b 7914 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
cdfeb715 7915 }
7916 }
e6e27594 7917 }
7918 return NULL_TREE;
7919}
7920
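/* Added illustration -- not part of the original builtins.c.  Plain-C
   counterparts of the unsafe-math cbrt rewrites above (hypothetical
   example_* helpers, <math.h> assumed; the x >= 0 preconditions mirror the
   tree_expr_nonnegative_p checks).  */
#include <math.h>

static double
example_cbrt_of_exp (double x)           /* cbrt (exp (x)) -> exp (x / 3)           */
{
  return exp (x / 3.0);
}

static double
example_cbrt_of_cbrt (double x)          /* x >= 0: cbrt (cbrt (x)) -> pow (x, 1/9) */
{
  return pow (x, 1.0 / 9.0);
}

static double
example_cbrt_of_pow (double x, double y) /* x >= 0: cbrt (pow (x, y)) -> pow (x, y/3) */
{
  return pow (x, y / 3.0);
}
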
c2f47e15 7921/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7922 TYPE is the type of the return value. Return NULL_TREE if no
7923 simplification can be made. */
7924
e6e27594 7925static tree
389dd41b 7926fold_builtin_cos (location_t loc,
7927 tree arg, tree type, tree fndecl)
e6e27594 7928{
e6ab33d8 7929 tree res, narg;
e6e27594 7930
c2f47e15 7931 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7932 return NULL_TREE;
7933
bffb7645 7934 /* Calculate the result when the argument is a constant. */
728bac60 7935 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
bffb7645 7936 return res;
48e1416a 7937
e6e27594 7938 /* Optimize cos(-x) into cos (x). */
e6ab33d8 7939 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7940 return build_call_expr_loc (loc, fndecl, 1, narg);
e6e27594 7941
7942 return NULL_TREE;
7943}
7944
c2f47e15 7945/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7946 Return NULL_TREE if no simplification can be made. */
7947
cacdc1af 7948static tree
389dd41b 7949fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
cacdc1af 7950{
c2f47e15 7951 if (validate_arg (arg, REAL_TYPE))
cacdc1af 7952 {
cacdc1af 7953 tree res, narg;
7954
7955 /* Calculate the result when the argument is a constant. */
7956 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7957 return res;
48e1416a 7958
cacdc1af 7959 /* Optimize cosh(-x) into cosh (x). */
7960 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7961 return build_call_expr_loc (loc, fndecl, 1, narg);
cacdc1af 7962 }
48e1416a 7963
cacdc1af 7964 return NULL_TREE;
7965}
7966
239d491a 7967/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7968 argument ARG. TYPE is the type of the return value. Return
7969 NULL_TREE if no simplification can be made. */
7970
7971static tree
965d0f29 7972fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7973 bool hyper)
239d491a 7974{
7975 if (validate_arg (arg, COMPLEX_TYPE)
7976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7977 {
7978 tree tmp;
7979
239d491a 7980 /* Calculate the result when the argument is a constant. */
7981 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7982 return tmp;
48e1416a 7983
239d491a 7984 /* Optimize fn(-x) into fn(x). */
7985 if ((tmp = fold_strip_sign_ops (arg)))
389dd41b 7986 return build_call_expr_loc (loc, fndecl, 1, tmp);
239d491a 7987 }
7988
7989 return NULL_TREE;
7990}
7991
c2f47e15 7992/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7993 Return NULL_TREE if no simplification can be made. */
7994
e6e27594 7995static tree
c2f47e15 7996fold_builtin_tan (tree arg, tree type)
e6e27594 7997{
7998 enum built_in_function fcode;
29f4cd78 7999 tree res;
e6e27594 8000
c2f47e15 8001 if (!validate_arg (arg, REAL_TYPE))
e6e27594 8002 return NULL_TREE;
8003
bffb7645 8004 /* Calculate the result when the argument is a constant. */
728bac60 8005 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
bffb7645 8006 return res;
48e1416a 8007
e6e27594 8008 /* Optimize tan(atan(x)) = x. */
8009 fcode = builtin_mathfn_code (arg);
8010 if (flag_unsafe_math_optimizations
8011 && (fcode == BUILT_IN_ATAN
8012 || fcode == BUILT_IN_ATANF
8013 || fcode == BUILT_IN_ATANL))
c2f47e15 8014 return CALL_EXPR_ARG (arg, 0);
e6e27594 8015
8016 return NULL_TREE;
8017}
8018
d735c391 8019/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8020 NULL_TREE if no simplification can be made. */
8021
8022static tree
389dd41b 8023fold_builtin_sincos (location_t loc,
8024 tree arg0, tree arg1, tree arg2)
d735c391 8025{
c2f47e15 8026 tree type;
d735c391 8027 tree res, fn, call;
8028
c2f47e15 8029 if (!validate_arg (arg0, REAL_TYPE)
8030 || !validate_arg (arg1, POINTER_TYPE)
8031 || !validate_arg (arg2, POINTER_TYPE))
d735c391 8032 return NULL_TREE;
8033
d735c391 8034 type = TREE_TYPE (arg0);
d735c391 8035
8036 /* Calculate the result when the argument is a constant. */
8037 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8038 return res;
8039
8040 /* Canonicalize sincos to cexpi. */
30f690e0 8041 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8042 return NULL_TREE;
d735c391 8043 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8044 if (!fn)
8045 return NULL_TREE;
8046
389dd41b 8047 call = build_call_expr_loc (loc, fn, 1, arg0);
d735c391 8048 call = builtin_save_expr (call);
8049
a75b1c71 8050 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 8051 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8052 build_fold_indirect_ref_loc (loc, arg1),
d735c391 8053 build1 (IMAGPART_EXPR, type, call)),
8054 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8055 build_fold_indirect_ref_loc (loc, arg2),
d735c391 8056 build1 (REALPART_EXPR, type, call)));
8057}
8058
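/* Added illustration -- not part of the original builtins.c.  What the
   sincos -> cexpi canonicalization above computes, approximated here with
   the standard cexp, since cexpi is a GCC-internal builtin; the helper
   name is hypothetical and <complex.h> is assumed.  */
#include <complex.h>

static void
example_sincos (double x, double *sinp, double *cosp)
{
  /* cexp (I*x) == cos (x) + I*sin (x), so the imaginary part feeds *sinp
     and the real part feeds *cosp, exactly as the MODIFY_EXPRs above do.  */
  double _Complex t = cexp (I * x);
  *sinp = cimag (t);
  *cosp = creal (t);
}
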
c5bb2c4b 8059/* Fold function call to builtin cexp, cexpf, or cexpl. Return
8060 NULL_TREE if no simplification can be made. */
8061
8062static tree
389dd41b 8063fold_builtin_cexp (location_t loc, tree arg0, tree type)
c5bb2c4b 8064{
c2f47e15 8065 tree rtype;
c5bb2c4b 8066 tree realp, imagp, ifn;
239d491a 8067 tree res;
c5bb2c4b 8068
239d491a 8069 if (!validate_arg (arg0, COMPLEX_TYPE)
b0ce8887 8070 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
c5bb2c4b 8071 return NULL_TREE;
8072
239d491a 8073 /* Calculate the result when the argument is a constant. */
8074 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8075 return res;
48e1416a 8076
c5bb2c4b 8077 rtype = TREE_TYPE (TREE_TYPE (arg0));
8078
 8079 /* In case we can figure out the real part of arg0 and it is constant zero,
8080 fold to cexpi. */
30f690e0 8081 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8082 return NULL_TREE;
c5bb2c4b 8083 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8084 if (!ifn)
8085 return NULL_TREE;
8086
389dd41b 8087 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
c5bb2c4b 8088 && real_zerop (realp))
8089 {
389dd41b 8090 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8091 return build_call_expr_loc (loc, ifn, 1, narg);
c5bb2c4b 8092 }
8093
 8094 /* In case we can easily decompose the real and imaginary parts, split cexp
8095 to exp (r) * cexpi (i). */
8096 if (flag_unsafe_math_optimizations
8097 && realp)
8098 {
8099 tree rfn, rcall, icall;
8100
8101 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8102 if (!rfn)
8103 return NULL_TREE;
8104
389dd41b 8105 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
c5bb2c4b 8106 if (!imagp)
8107 return NULL_TREE;
8108
389dd41b 8109 icall = build_call_expr_loc (loc, ifn, 1, imagp);
c5bb2c4b 8110 icall = builtin_save_expr (icall);
389dd41b 8111 rcall = build_call_expr_loc (loc, rfn, 1, realp);
c5bb2c4b 8112 rcall = builtin_save_expr (rcall);
389dd41b 8113 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8114 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8115 rcall,
389dd41b 8116 fold_build1_loc (loc, REALPART_EXPR,
8117 rtype, icall)),
8118 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8119 rcall,
389dd41b 8120 fold_build1_loc (loc, IMAGPART_EXPR,
8121 rtype, icall)));
c5bb2c4b 8122 }
8123
8124 return NULL_TREE;
8125}
8126
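/* Added illustration -- not part of the original builtins.c.  The
   decomposition used above under -funsafe-math-optimizations:
   cexp (a + b*i) == exp (a) * cexpi (b).  Hypothetical helper; <math.h>
   and <complex.h> assumed, with the library cos/sin standing in for the
   internal cexpi.  */
#include <math.h>
#include <complex.h>

static double _Complex
example_cexp_split (double a, double b)
{
  double r = exp (a);
  return r * cos (b) + r * sin (b) * I;
}
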
c2f47e15 8127/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8128 Return NULL_TREE if no simplification can be made. */
277f8dd2 8129
8130static tree
389dd41b 8131fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
277f8dd2 8132{
c2f47e15 8133 if (!validate_arg (arg, REAL_TYPE))
8134 return NULL_TREE;
277f8dd2 8135
8136 /* Optimize trunc of constant value. */
f96bd2bf 8137 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8138 {
8139 REAL_VALUE_TYPE r, x;
2426241c 8140 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8141
8142 x = TREE_REAL_CST (arg);
8143 real_trunc (&r, TYPE_MODE (type), &x);
8144 return build_real (type, r);
8145 }
8146
389dd41b 8147 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8148}
8149
c2f47e15 8150/* Fold function call to builtin floor, floorf or floorl with argument ARG.
8151 Return NULL_TREE if no simplification can be made. */
277f8dd2 8152
8153static tree
389dd41b 8154fold_builtin_floor (location_t loc, tree fndecl, tree arg)
277f8dd2 8155{
c2f47e15 8156 if (!validate_arg (arg, REAL_TYPE))
8157 return NULL_TREE;
277f8dd2 8158
8159 /* Optimize floor of constant value. */
f96bd2bf 8160 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8161 {
8162 REAL_VALUE_TYPE x;
8163
8164 x = TREE_REAL_CST (arg);
8165 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8166 {
2426241c 8167 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8168 REAL_VALUE_TYPE r;
8169
8170 real_floor (&r, TYPE_MODE (type), &x);
8171 return build_real (type, r);
8172 }
8173 }
8174
acc2b92e 8175 /* Fold floor (x) where x is nonnegative to trunc (x). */
8176 if (tree_expr_nonnegative_p (arg))
30fe8286 8177 {
8178 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8179 if (truncfn)
389dd41b 8180 return build_call_expr_loc (loc, truncfn, 1, arg);
30fe8286 8181 }
acc2b92e 8182
389dd41b 8183 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8184}
8185
c2f47e15 8186/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8187 Return NULL_TREE if no simplification can be made. */
277f8dd2 8188
8189static tree
389dd41b 8190fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
277f8dd2 8191{
c2f47e15 8192 if (!validate_arg (arg, REAL_TYPE))
8193 return NULL_TREE;
277f8dd2 8194
8195 /* Optimize ceil of constant value. */
f96bd2bf 8196 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8197 {
8198 REAL_VALUE_TYPE x;
8199
8200 x = TREE_REAL_CST (arg);
8201 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8202 {
2426241c 8203 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8204 REAL_VALUE_TYPE r;
8205
8206 real_ceil (&r, TYPE_MODE (type), &x);
8207 return build_real (type, r);
8208 }
8209 }
8210
389dd41b 8211 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8212}
8213
c2f47e15 8214/* Fold function call to builtin round, roundf or roundl with argument ARG.
8215 Return NULL_TREE if no simplification can be made. */
89ab3887 8216
8217static tree
389dd41b 8218fold_builtin_round (location_t loc, tree fndecl, tree arg)
89ab3887 8219{
c2f47e15 8220 if (!validate_arg (arg, REAL_TYPE))
8221 return NULL_TREE;
89ab3887 8222
34f17811 8223 /* Optimize round of constant value. */
f96bd2bf 8224 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
89ab3887 8225 {
8226 REAL_VALUE_TYPE x;
8227
8228 x = TREE_REAL_CST (arg);
8229 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8230 {
2426241c 8231 tree type = TREE_TYPE (TREE_TYPE (fndecl));
89ab3887 8232 REAL_VALUE_TYPE r;
8233
8234 real_round (&r, TYPE_MODE (type), &x);
8235 return build_real (type, r);
8236 }
8237 }
8238
389dd41b 8239 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
89ab3887 8240}
8241
34f17811 8242/* Fold function call to builtin lround, lroundf or lroundl (or the
c2f47e15 8243 corresponding long long versions) and other rounding functions. ARG
8244 is the argument to the call. Return NULL_TREE if no simplification
8245 can be made. */
34f17811 8246
8247static tree
389dd41b 8248fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
34f17811 8249{
c2f47e15 8250 if (!validate_arg (arg, REAL_TYPE))
8251 return NULL_TREE;
34f17811 8252
8253 /* Optimize lround of constant value. */
f96bd2bf 8254 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
34f17811 8255 {
8256 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8257
776a7bab 8258 if (real_isfinite (&x))
34f17811 8259 {
2426241c 8260 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
ca9b061d 8261 tree ftype = TREE_TYPE (arg);
34f17811 8262 REAL_VALUE_TYPE r;
e913b5cd 8263 bool fail = false;
34f17811 8264
ad52b9b7 8265 switch (DECL_FUNCTION_CODE (fndecl))
8266 {
80ff6494 8267 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 8268 CASE_FLT_FN (BUILT_IN_LFLOOR):
8269 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 8270 real_floor (&r, TYPE_MODE (ftype), &x);
8271 break;
8272
80ff6494 8273 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 8274 CASE_FLT_FN (BUILT_IN_LCEIL):
8275 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 8276 real_ceil (&r, TYPE_MODE (ftype), &x);
8277 break;
8278
80ff6494 8279 CASE_FLT_FN (BUILT_IN_IROUND):
4f35b1fc 8280 CASE_FLT_FN (BUILT_IN_LROUND):
8281 CASE_FLT_FN (BUILT_IN_LLROUND):
ad52b9b7 8282 real_round (&r, TYPE_MODE (ftype), &x);
8283 break;
8284
8285 default:
8286 gcc_unreachable ();
8287 }
8288
ab2c1de8 8289 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
e913b5cd 8290 if (!fail)
8291 return wide_int_to_tree (itype, val);
34f17811 8292 }
8293 }
8294
acc2b92e 8295 switch (DECL_FUNCTION_CODE (fndecl))
8296 {
8297 CASE_FLT_FN (BUILT_IN_LFLOOR):
8298 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8299 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8300 if (tree_expr_nonnegative_p (arg))
389dd41b 8301 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8302 TREE_TYPE (TREE_TYPE (fndecl)), arg);
acc2b92e 8303 break;
8304 default:;
8305 }
8306
389dd41b 8307 return fold_fixed_mathfn (loc, fndecl, arg);
34f17811 8308}
8309
70fb4c07 8310/* Fold function call to builtin ffs, clz, ctz, popcount and parity
c2f47e15 8311 and their long and long long variants (e.g. ffsl and ffsll). ARG is
8312 the argument to the call. Return NULL_TREE if no simplification can
8313 be made. */
70fb4c07 8314
8315static tree
c2f47e15 8316fold_builtin_bitop (tree fndecl, tree arg)
70fb4c07 8317{
c2f47e15 8318 if (!validate_arg (arg, INTEGER_TYPE))
70fb4c07 8319 return NULL_TREE;
8320
8321 /* Optimize for constant argument. */
f96bd2bf 8322 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
70fb4c07 8323 {
1cee90ad 8324 tree type = TREE_TYPE (arg);
796b6678 8325 int result;
70fb4c07 8326
8327 switch (DECL_FUNCTION_CODE (fndecl))
8328 {
4f35b1fc 8329 CASE_INT_FN (BUILT_IN_FFS):
ab2c1de8 8330 result = wi::ffs (arg);
70fb4c07 8331 break;
8332
4f35b1fc 8333 CASE_INT_FN (BUILT_IN_CLZ):
1cee90ad 8334 if (wi::ne_p (arg, 0))
8335 result = wi::clz (arg);
8336 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8337 result = TYPE_PRECISION (type);
70fb4c07 8338 break;
8339
4f35b1fc 8340 CASE_INT_FN (BUILT_IN_CTZ):
1cee90ad 8341 if (wi::ne_p (arg, 0))
8342 result = wi::ctz (arg);
8343 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8344 result = TYPE_PRECISION (type);
70fb4c07 8345 break;
8346
6aaa1f9e 8347 CASE_INT_FN (BUILT_IN_CLRSB):
ab2c1de8 8348 result = wi::clrsb (arg);
6aaa1f9e 8349 break;
8350
4f35b1fc 8351 CASE_INT_FN (BUILT_IN_POPCOUNT):
ab2c1de8 8352 result = wi::popcount (arg);
70fb4c07 8353 break;
8354
4f35b1fc 8355 CASE_INT_FN (BUILT_IN_PARITY):
ab2c1de8 8356 result = wi::parity (arg);
70fb4c07 8357 break;
8358
8359 default:
64db345d 8360 gcc_unreachable ();
70fb4c07 8361 }
8362
796b6678 8363 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
70fb4c07 8364 }
8365
8366 return NULL_TREE;
8367}
8368
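/* Added illustration -- not part of the original builtins.c.  A few of the
   constant folds fold_builtin_bitop performs, written as checked constants.
   The values assume a 32-bit int; the zero cases of clz/ctz are only folded
   when CLZ/CTZ_DEFINED_VALUE_AT_ZERO says they are defined.  */
static const int example_ffs_8       = 4;  /* __builtin_ffs (8)          */
static const int example_clz_1       = 31; /* __builtin_clz (1u)         */
static const int example_ctz_8       = 3;  /* __builtin_ctz (8u)         */
static const int example_popcount_ff = 8;  /* __builtin_popcount (0xffu) */
static const int example_parity_7    = 1;  /* __builtin_parity (7u)      */
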
74bdbe96 8369/* Fold a function call to builtin bswap (the 16, 32 and 64 bit
42791117 8370 variants). Return NULL_TREE if no simplification can be made. */
8371static tree
c2f47e15 8372fold_builtin_bswap (tree fndecl, tree arg)
42791117 8373{
c2f47e15 8374 if (! validate_arg (arg, INTEGER_TYPE))
8375 return NULL_TREE;
42791117 8376
8377 /* Optimize constant value. */
f96bd2bf 8378 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
42791117 8379 {
74bdbe96 8380 tree type = TREE_TYPE (TREE_TYPE (fndecl));
42791117 8381
42791117 8382 switch (DECL_FUNCTION_CODE (fndecl))
8383 {
74bdbe96 8384 case BUILT_IN_BSWAP16:
42791117 8385 case BUILT_IN_BSWAP32:
8386 case BUILT_IN_BSWAP64:
8387 {
e913b5cd 8388 signop sgn = TYPE_SIGN (type);
ddb1be65 8389 tree result =
796b6678 8390 wide_int_to_tree (type,
8391 wide_int::from (arg, TYPE_PRECISION (type),
8392 sgn).bswap ());
e913b5cd 8393 return result;
42791117 8394 }
42791117 8395 default:
8396 gcc_unreachable ();
8397 }
42791117 8398 }
8399
8400 return NULL_TREE;
8401}
c2f47e15 8402
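/* Added illustration -- not part of the original builtins.c.  The values
   the bswap constant folding above produces, for reference.  */
static const unsigned short example_bswap16 = 0x3412;      /* __builtin_bswap16 (0x1234)      */
static const unsigned int example_bswap32   = 0x78563412u; /* __builtin_bswap32 (0x12345678u) */
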
f0c477f2 8403/* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8404 NULL_TREE if no simplification can be made. */
8405
8406static tree
389dd41b 8407fold_builtin_hypot (location_t loc, tree fndecl,
8408 tree arg0, tree arg1, tree type)
f0c477f2 8409{
e6ab33d8 8410 tree res, narg0, narg1;
f0c477f2 8411
c2f47e15 8412 if (!validate_arg (arg0, REAL_TYPE)
8413 || !validate_arg (arg1, REAL_TYPE))
f0c477f2 8414 return NULL_TREE;
8415
8416 /* Calculate the result when the argument is a constant. */
8417 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8418 return res;
48e1416a 8419
6c95f21c 8420 /* If either argument to hypot has a negate or abs, strip that off.
8421 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
e6ab33d8 8422 narg0 = fold_strip_sign_ops (arg0);
8423 narg1 = fold_strip_sign_ops (arg1);
8424 if (narg0 || narg1)
8425 {
48e1416a 8426 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
c2f47e15 8427 narg1 ? narg1 : arg1);
6c95f21c 8428 }
48e1416a 8429
f0c477f2 8430 /* If either argument is zero, hypot is fabs of the other. */
8431 if (real_zerop (arg0))
389dd41b 8432 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
f0c477f2 8433 else if (real_zerop (arg1))
389dd41b 8434 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
48e1416a 8435
6c95f21c 8436 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8437 if (flag_unsafe_math_optimizations
8438 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
f0c477f2 8439 {
2e7ca27b 8440 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 8441 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
389dd41b 8442 return fold_build2_loc (loc, MULT_EXPR, type,
8443 fold_build1_loc (loc, ABS_EXPR, type, arg0),
2e7ca27b 8444 build_real (type, sqrt2_trunc));
f0c477f2 8445 }
8446
f0c477f2 8447 return NULL_TREE;
8448}
8449
8450
e6e27594 8451/* Fold a builtin function call to pow, powf, or powl. Return
8452 NULL_TREE if no simplification can be made. */
8453static tree
389dd41b 8454fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
e6e27594 8455{
f0c477f2 8456 tree res;
e6e27594 8457
c2f47e15 8458 if (!validate_arg (arg0, REAL_TYPE)
8459 || !validate_arg (arg1, REAL_TYPE))
e6e27594 8460 return NULL_TREE;
8461
f0c477f2 8462 /* Calculate the result when the argument is a constant. */
8463 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8464 return res;
8465
e6e27594 8466 /* Optimize pow(1.0,y) = 1.0. */
8467 if (real_onep (arg0))
389dd41b 8468 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
e6e27594 8469
8470 if (TREE_CODE (arg1) == REAL_CST
f96bd2bf 8471 && !TREE_OVERFLOW (arg1))
e6e27594 8472 {
198d9bbe 8473 REAL_VALUE_TYPE cint;
e6e27594 8474 REAL_VALUE_TYPE c;
198d9bbe 8475 HOST_WIDE_INT n;
8476
e6e27594 8477 c = TREE_REAL_CST (arg1);
8478
8479 /* Optimize pow(x,0.0) = 1.0. */
8480 if (REAL_VALUES_EQUAL (c, dconst0))
389dd41b 8481 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
e6e27594 8482 arg0);
8483
8484 /* Optimize pow(x,1.0) = x. */
8485 if (REAL_VALUES_EQUAL (c, dconst1))
8486 return arg0;
8487
8488 /* Optimize pow(x,-1.0) = 1.0/x. */
8489 if (REAL_VALUES_EQUAL (c, dconstm1))
389dd41b 8490 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8491 build_real (type, dconst1), arg0);
e6e27594 8492
8493 /* Optimize pow(x,0.5) = sqrt(x). */
8494 if (flag_unsafe_math_optimizations
8495 && REAL_VALUES_EQUAL (c, dconsthalf))
8496 {
8497 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8498
8499 if (sqrtfn != NULL_TREE)
389dd41b 8500 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
e6e27594 8501 }
8502
feb5b3eb 8503 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8504 if (flag_unsafe_math_optimizations)
8505 {
8506 const REAL_VALUE_TYPE dconstroot
7910b2fb 8507 = real_value_truncate (TYPE_MODE (type), dconst_third ());
feb5b3eb 8508
8509 if (REAL_VALUES_EQUAL (c, dconstroot))
8510 {
8511 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8512 if (cbrtfn != NULL_TREE)
389dd41b 8513 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
feb5b3eb 8514 }
8515 }
8516
198d9bbe 8517 /* Check for an integer exponent. */
8518 n = real_to_integer (&c);
e913b5cd 8519 real_from_integer (&cint, VOIDmode, n, SIGNED);
198d9bbe 8520 if (real_identical (&c, &cint))
e6e27594 8521 {
a2b30b48 8522 /* Attempt to evaluate pow at compile-time, unless this should
8523 raise an exception. */
198d9bbe 8524 if (TREE_CODE (arg0) == REAL_CST
a2b30b48 8525 && !TREE_OVERFLOW (arg0)
8526 && (n > 0
8527 || (!flag_trapping_math && !flag_errno_math)
8528 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
e6e27594 8529 {
8530 REAL_VALUE_TYPE x;
8531 bool inexact;
8532
8533 x = TREE_REAL_CST (arg0);
8534 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8535 if (flag_unsafe_math_optimizations || !inexact)
8536 return build_real (type, x);
8537 }
198d9bbe 8538
8539 /* Strip sign ops from even integer powers. */
8540 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8541 {
8542 tree narg0 = fold_strip_sign_ops (arg0);
8543 if (narg0)
389dd41b 8544 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
198d9bbe 8545 }
e6e27594 8546 }
8547 }
8548
cdfeb715 8549 if (flag_unsafe_math_optimizations)
e6e27594 8550 {
cdfeb715 8551 const enum built_in_function fcode = builtin_mathfn_code (arg0);
e6e27594 8552
cdfeb715 8553 /* Optimize pow(expN(x),y) = expN(x*y). */
8554 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 8555 {
c2f47e15 8556 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8557 tree arg = CALL_EXPR_ARG (arg0, 0);
389dd41b 8558 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8559 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 8560 }
e6e27594 8561
cdfeb715 8562 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8563 if (BUILTIN_SQRT_P (fcode))
a0c938f0 8564 {
c2f47e15 8565 tree narg0 = CALL_EXPR_ARG (arg0, 0);
389dd41b 8566 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8567 build_real (type, dconsthalf));
389dd41b 8568 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
cdfeb715 8569 }
8570
8571 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8572 if (BUILTIN_CBRT_P (fcode))
a0c938f0 8573 {
c2f47e15 8574 tree arg = CALL_EXPR_ARG (arg0, 0);
cdfeb715 8575 if (tree_expr_nonnegative_p (arg))
8576 {
8577 const REAL_VALUE_TYPE dconstroot
7910b2fb 8578 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 8579 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8580 build_real (type, dconstroot));
389dd41b 8581 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
cdfeb715 8582 }
8583 }
a0c938f0 8584
49e436b5 8585 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
c2f47e15 8586 if (fcode == BUILT_IN_POW
8587 || fcode == BUILT_IN_POWF
8588 || fcode == BUILT_IN_POWL)
a0c938f0 8589 {
c2f47e15 8590 tree arg00 = CALL_EXPR_ARG (arg0, 0);
49e436b5 8591 if (tree_expr_nonnegative_p (arg00))
8592 {
8593 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8594 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8595 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8596 }
cdfeb715 8597 }
e6e27594 8598 }
cdfeb715 8599
e6e27594 8600 return NULL_TREE;
8601}
8602
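/* Added illustration -- not part of the original builtins.c.  Plain-C
   counterparts of a few of the pow rewrites above (hypothetical example_*
   helpers, <math.h> assumed; the unsafe-math and nonnegativity
   preconditions are noted in the comments).  */
#include <math.h>

static double
example_pow_half (double x)                /* unsafe math: pow (x, 0.5) -> sqrt (x) */
{
  return sqrt (x);
}

static double
example_pow_of_exp (double x, double y)    /* pow (exp (x), y) -> exp (x * y)       */
{
  return exp (x * y);
}

static double
example_pow_of_pow (double x, double y, double z) /* x >= 0: pow (pow (x, y), z) -> pow (x, y*z) */
{
  return pow (x, y * z);
}
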
c2f47e15 8603/* Fold a builtin function call to powi, powif, or powil with argument ARG.
8604 Return NULL_TREE if no simplification can be made. */
b4d0c20c 8605static tree
389dd41b 8606fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
c2f47e15 8607 tree arg0, tree arg1, tree type)
b4d0c20c 8608{
c2f47e15 8609 if (!validate_arg (arg0, REAL_TYPE)
8610 || !validate_arg (arg1, INTEGER_TYPE))
b4d0c20c 8611 return NULL_TREE;
8612
8613 /* Optimize pow(1.0,y) = 1.0. */
8614 if (real_onep (arg0))
389dd41b 8615 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
b4d0c20c 8616
e913b5cd 8617 if (tree_fits_shwi_p (arg1))
b4d0c20c 8618 {
e913b5cd 8619 HOST_WIDE_INT c = tree_to_shwi (arg1);
b4d0c20c 8620
8621 /* Evaluate powi at compile-time. */
8622 if (TREE_CODE (arg0) == REAL_CST
f96bd2bf 8623 && !TREE_OVERFLOW (arg0))
b4d0c20c 8624 {
8625 REAL_VALUE_TYPE x;
8626 x = TREE_REAL_CST (arg0);
8627 real_powi (&x, TYPE_MODE (type), &x, c);
8628 return build_real (type, x);
8629 }
8630
8631 /* Optimize pow(x,0) = 1.0. */
8632 if (c == 0)
389dd41b 8633 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
b4d0c20c 8634 arg0);
8635
8636 /* Optimize pow(x,1) = x. */
8637 if (c == 1)
8638 return arg0;
8639
8640 /* Optimize pow(x,-1) = 1.0/x. */
8641 if (c == -1)
389dd41b 8642 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8643 build_real (type, dconst1), arg0);
b4d0c20c 8644 }
8645
8646 return NULL_TREE;
8647}
8648
8918c507 8649/* A subroutine of fold_builtin to fold the various exponent
c2f47e15 8650 functions. Return NULL_TREE if no simplification can be made.
debf9994 8651 FUNC is the corresponding MPFR exponent function. */
8918c507 8652
8653static tree
389dd41b 8654fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
debf9994 8655 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8918c507 8656{
c2f47e15 8657 if (validate_arg (arg, REAL_TYPE))
8918c507 8658 {
8918c507 8659 tree type = TREE_TYPE (TREE_TYPE (fndecl));
29f4cd78 8660 tree res;
48e1416a 8661
debf9994 8662 /* Calculate the result when the argument is a constant. */
728bac60 8663 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
debf9994 8664 return res;
8918c507 8665
8666 /* Optimize expN(logN(x)) = x. */
8667 if (flag_unsafe_math_optimizations)
a0c938f0 8668 {
8918c507 8669 const enum built_in_function fcode = builtin_mathfn_code (arg);
8670
debf9994 8671 if ((func == mpfr_exp
8918c507 8672 && (fcode == BUILT_IN_LOG
8673 || fcode == BUILT_IN_LOGF
8674 || fcode == BUILT_IN_LOGL))
debf9994 8675 || (func == mpfr_exp2
8918c507 8676 && (fcode == BUILT_IN_LOG2
8677 || fcode == BUILT_IN_LOG2F
8678 || fcode == BUILT_IN_LOG2L))
debf9994 8679 || (func == mpfr_exp10
8918c507 8680 && (fcode == BUILT_IN_LOG10
8681 || fcode == BUILT_IN_LOG10F
8682 || fcode == BUILT_IN_LOG10L)))
389dd41b 8683 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8918c507 8684 }
8685 }
8686
c2f47e15 8687 return NULL_TREE;
8918c507 8688}
8689
7959b13b 8690/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8691 arguments to the call, and TYPE is its return type.
8692 Return NULL_TREE if no simplification can be made. */
8693
8694static tree
389dd41b 8695fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7959b13b 8696{
8697 if (!validate_arg (arg1, POINTER_TYPE)
8698 || !validate_arg (arg2, INTEGER_TYPE)
8699 || !validate_arg (len, INTEGER_TYPE))
8700 return NULL_TREE;
8701 else
8702 {
8703 const char *p1;
8704
8705 if (TREE_CODE (arg2) != INTEGER_CST
e913b5cd 8706 || !tree_fits_uhwi_p (len))
7959b13b 8707 return NULL_TREE;
8708
8709 p1 = c_getstr (arg1);
8710 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8711 {
8712 char c;
8713 const char *r;
8714 tree tem;
8715
8716 if (target_char_cast (arg2, &c))
8717 return NULL_TREE;
8718
e913b5cd 8719 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7959b13b 8720
8721 if (r == NULL)
8722 return build_int_cst (TREE_TYPE (arg1), 0);
8723
2cc66f2a 8724 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
389dd41b 8725 return fold_convert_loc (loc, type, tem);
7959b13b 8726 }
8727 return NULL_TREE;
8728 }
8729}
8730
c2f47e15 8731/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8732 Return NULL_TREE if no simplification can be made. */
9c8a1629 8733
8734static tree
389dd41b 8735fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8736{
c4fef134 8737 const char *p1, *p2;
9c8a1629 8738
c2f47e15 8739 if (!validate_arg (arg1, POINTER_TYPE)
8740 || !validate_arg (arg2, POINTER_TYPE)
8741 || !validate_arg (len, INTEGER_TYPE))
8742 return NULL_TREE;
9c8a1629 8743
8744 /* If the LEN parameter is zero, return zero. */
8745 if (integer_zerop (len))
389dd41b 8746 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8747 arg1, arg2);
9c8a1629 8748
8749 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8750 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8751 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 8752
8753 p1 = c_getstr (arg1);
8754 p2 = c_getstr (arg2);
8755
8756 /* If all arguments are constant, and the value of len is not greater
8757 than the lengths of arg1 and arg2, evaluate at compile-time. */
e913b5cd 8758 if (tree_fits_uhwi_p (len) && p1 && p2
c4fef134 8759 && compare_tree_int (len, strlen (p1) + 1) <= 0
8760 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8761 {
e913b5cd 8762 const int r = memcmp (p1, p2, tree_to_uhwi (len));
c4fef134 8763
8764 if (r > 0)
8765 return integer_one_node;
8766 else if (r < 0)
8767 return integer_minus_one_node;
8768 else
8769 return integer_zero_node;
8770 }
8771
8772 /* If len parameter is one, return an expression corresponding to
 8773 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
e913b5cd 8774 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8775 {
8776 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8777 tree cst_uchar_ptr_node
8778 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8779
389dd41b 8780 tree ind1
8781 = fold_convert_loc (loc, integer_type_node,
8782 build1 (INDIRECT_REF, cst_uchar_node,
8783 fold_convert_loc (loc,
8784 cst_uchar_ptr_node,
c4fef134 8785 arg1)));
389dd41b 8786 tree ind2
8787 = fold_convert_loc (loc, integer_type_node,
8788 build1 (INDIRECT_REF, cst_uchar_node,
8789 fold_convert_loc (loc,
8790 cst_uchar_ptr_node,
c4fef134 8791 arg2)));
389dd41b 8792 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 8793 }
9c8a1629 8794
c2f47e15 8795 return NULL_TREE;
9c8a1629 8796}
8797
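/* Added illustration -- not part of the original builtins.c.  The len == 1
   rewrite above, as plain C (hypothetical helper name).  */
static int
example_memcmp_1 (const void *a, const void *b)
{
  /* memcmp (a, b, 1) -> *(const unsigned char *) a - *(const unsigned char *) b  */
  return *(const unsigned char *) a - *(const unsigned char *) b;
}
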
c2f47e15 8798/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8799 Return NULL_TREE if no simplification can be made. */
9c8a1629 8800
8801static tree
389dd41b 8802fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9c8a1629 8803{
9c8a1629 8804 const char *p1, *p2;
8805
c2f47e15 8806 if (!validate_arg (arg1, POINTER_TYPE)
8807 || !validate_arg (arg2, POINTER_TYPE))
8808 return NULL_TREE;
9c8a1629 8809
8810 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8811 if (operand_equal_p (arg1, arg2, 0))
c4fef134 8812 return integer_zero_node;
9c8a1629 8813
8814 p1 = c_getstr (arg1);
8815 p2 = c_getstr (arg2);
8816
8817 if (p1 && p2)
8818 {
9c8a1629 8819 const int i = strcmp (p1, p2);
8820 if (i < 0)
c4fef134 8821 return integer_minus_one_node;
9c8a1629 8822 else if (i > 0)
c4fef134 8823 return integer_one_node;
9c8a1629 8824 else
c4fef134 8825 return integer_zero_node;
8826 }
8827
8828 /* If the second arg is "", return *(const unsigned char*)arg1. */
8829 if (p2 && *p2 == '\0')
8830 {
8831 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8832 tree cst_uchar_ptr_node
8833 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8834
389dd41b 8835 return fold_convert_loc (loc, integer_type_node,
8836 build1 (INDIRECT_REF, cst_uchar_node,
8837 fold_convert_loc (loc,
8838 cst_uchar_ptr_node,
8839 arg1)));
c4fef134 8840 }
8841
8842 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8843 if (p1 && *p1 == '\0')
8844 {
8845 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8846 tree cst_uchar_ptr_node
8847 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8848
389dd41b 8849 tree temp
8850 = fold_convert_loc (loc, integer_type_node,
8851 build1 (INDIRECT_REF, cst_uchar_node,
8852 fold_convert_loc (loc,
8853 cst_uchar_ptr_node,
c4fef134 8854 arg2)));
389dd41b 8855 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9c8a1629 8856 }
8857
c2f47e15 8858 return NULL_TREE;
9c8a1629 8859}
8860
c2f47e15 8861/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8862 Return NULL_TREE if no simplification can be made. */
9c8a1629 8863
8864static tree
389dd41b 8865fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8866{
9c8a1629 8867 const char *p1, *p2;
8868
c2f47e15 8869 if (!validate_arg (arg1, POINTER_TYPE)
8870 || !validate_arg (arg2, POINTER_TYPE)
8871 || !validate_arg (len, INTEGER_TYPE))
8872 return NULL_TREE;
9c8a1629 8873
8874 /* If the LEN parameter is zero, return zero. */
8875 if (integer_zerop (len))
389dd41b 8876 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8877 arg1, arg2);
9c8a1629 8878
8879 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8880 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8881 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9c8a1629 8882
8883 p1 = c_getstr (arg1);
8884 p2 = c_getstr (arg2);
8885
e913b5cd 8886 if (tree_fits_uhwi_p (len) && p1 && p2)
9c8a1629 8887 {
e913b5cd 8888 const int i = strncmp (p1, p2, tree_to_uhwi (len));
c4fef134 8889 if (i > 0)
8890 return integer_one_node;
8891 else if (i < 0)
8892 return integer_minus_one_node;
9c8a1629 8893 else
c4fef134 8894 return integer_zero_node;
8895 }
8896
8897 /* If the second arg is "", and the length is greater than zero,
8898 return *(const unsigned char*)arg1. */
8899 if (p2 && *p2 == '\0'
8900 && TREE_CODE (len) == INTEGER_CST
8901 && tree_int_cst_sgn (len) == 1)
8902 {
8903 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8904 tree cst_uchar_ptr_node
8905 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8906
389dd41b 8907 return fold_convert_loc (loc, integer_type_node,
8908 build1 (INDIRECT_REF, cst_uchar_node,
8909 fold_convert_loc (loc,
8910 cst_uchar_ptr_node,
8911 arg1)));
c4fef134 8912 }
8913
8914 /* If the first arg is "", and the length is greater than zero,
8915 return -*(const unsigned char*)arg2. */
8916 if (p1 && *p1 == '\0'
8917 && TREE_CODE (len) == INTEGER_CST
8918 && tree_int_cst_sgn (len) == 1)
8919 {
8920 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8921 tree cst_uchar_ptr_node
8922 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8923
389dd41b 8924 tree temp = fold_convert_loc (loc, integer_type_node,
8925 build1 (INDIRECT_REF, cst_uchar_node,
8926 fold_convert_loc (loc,
8927 cst_uchar_ptr_node,
8928 arg2)));
8929 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
c4fef134 8930 }
8931
8932 /* If len parameter is one, return an expression corresponding to
 8933 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
e913b5cd 8934 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8935 {
8936 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8937 tree cst_uchar_ptr_node
8938 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8939
389dd41b 8940 tree ind1 = fold_convert_loc (loc, integer_type_node,
8941 build1 (INDIRECT_REF, cst_uchar_node,
8942 fold_convert_loc (loc,
8943 cst_uchar_ptr_node,
8944 arg1)));
8945 tree ind2 = fold_convert_loc (loc, integer_type_node,
8946 build1 (INDIRECT_REF, cst_uchar_node,
8947 fold_convert_loc (loc,
8948 cst_uchar_ptr_node,
8949 arg2)));
8950 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9c8a1629 8951 }
8952
c2f47e15 8953 return NULL_TREE;
9c8a1629 8954}
8955
c2f47e15 8956/* Fold function call to builtin signbit, signbitf or signbitl with argument
8957 ARG. Return NULL_TREE if no simplification can be made. */
27f261ef 8958
8959static tree
389dd41b 8960fold_builtin_signbit (location_t loc, tree arg, tree type)
27f261ef 8961{
c2f47e15 8962 if (!validate_arg (arg, REAL_TYPE))
27f261ef 8963 return NULL_TREE;
8964
27f261ef 8965 /* If ARG is a compile-time constant, determine the result. */
8966 if (TREE_CODE (arg) == REAL_CST
f96bd2bf 8967 && !TREE_OVERFLOW (arg))
27f261ef 8968 {
8969 REAL_VALUE_TYPE c;
8970
8971 c = TREE_REAL_CST (arg);
385f3f36 8972 return (REAL_VALUE_NEGATIVE (c)
8973 ? build_one_cst (type)
8974 : build_zero_cst (type));
27f261ef 8975 }
8976
8977 /* If ARG is non-negative, the result is always zero. */
8978 if (tree_expr_nonnegative_p (arg))
389dd41b 8979 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
27f261ef 8980
8981 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
fe994837 8982 if (!HONOR_SIGNED_ZEROS (arg))
de67cbb8 8983 return fold_convert (type,
8984 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8985 build_real (TREE_TYPE (arg), dconst0)));
27f261ef 8986
8987 return NULL_TREE;
8988}
8989
c2f47e15 8990/* Fold function call to builtin copysign, copysignf or copysignl with
8991 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8992 be made. */
467214fd 8993
8994static tree
389dd41b 8995fold_builtin_copysign (location_t loc, tree fndecl,
8996 tree arg1, tree arg2, tree type)
467214fd 8997{
c2f47e15 8998 tree tem;
467214fd 8999
c2f47e15 9000 if (!validate_arg (arg1, REAL_TYPE)
9001 || !validate_arg (arg2, REAL_TYPE))
467214fd 9002 return NULL_TREE;
9003
467214fd 9004 /* copysign(X,X) is X. */
9005 if (operand_equal_p (arg1, arg2, 0))
389dd41b 9006 return fold_convert_loc (loc, type, arg1);
467214fd 9007
9008 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9009 if (TREE_CODE (arg1) == REAL_CST
9010 && TREE_CODE (arg2) == REAL_CST
f96bd2bf 9011 && !TREE_OVERFLOW (arg1)
9012 && !TREE_OVERFLOW (arg2))
467214fd 9013 {
9014 REAL_VALUE_TYPE c1, c2;
9015
9016 c1 = TREE_REAL_CST (arg1);
9017 c2 = TREE_REAL_CST (arg2);
749680e2 9018 /* c1.sign := c2.sign. */
467214fd 9019 real_copysign (&c1, &c2);
9020 return build_real (type, c1);
467214fd 9021 }
9022
9023 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9024 Remember to evaluate Y for side-effects. */
9025 if (tree_expr_nonnegative_p (arg2))
389dd41b 9026 return omit_one_operand_loc (loc, type,
9027 fold_build1_loc (loc, ABS_EXPR, type, arg1),
467214fd 9028 arg2);
9029
198d9bbe 9030 /* Strip sign changing operations for the first argument. */
9031 tem = fold_strip_sign_ops (arg1);
9032 if (tem)
389dd41b 9033 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
198d9bbe 9034
467214fd 9035 return NULL_TREE;
9036}
9037
c2f47e15 9038/* Fold a call to builtin isascii with argument ARG. */
d49367d4 9039
9040static tree
389dd41b 9041fold_builtin_isascii (location_t loc, tree arg)
d49367d4 9042{
c2f47e15 9043 if (!validate_arg (arg, INTEGER_TYPE))
9044 return NULL_TREE;
d49367d4 9045 else
9046 {
9047 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 9048 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9049 build_int_cst (integer_type_node,
c90b5d40 9050 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 9051 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 9052 arg, integer_zero_node);
d49367d4 9053 }
9054}
9055
c2f47e15 9056/* Fold a call to builtin toascii with argument ARG. */
d49367d4 9057
9058static tree
389dd41b 9059fold_builtin_toascii (location_t loc, tree arg)
d49367d4 9060{
c2f47e15 9061 if (!validate_arg (arg, INTEGER_TYPE))
9062 return NULL_TREE;
48e1416a 9063
c2f47e15 9064 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 9065 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9066 build_int_cst (integer_type_node, 0x7f));
d49367d4 9067}
9068
c2f47e15 9069/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 9070
9071static tree
389dd41b 9072fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 9073{
c2f47e15 9074 if (!validate_arg (arg, INTEGER_TYPE))
9075 return NULL_TREE;
df1cf42e 9076 else
9077 {
9078 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 9079 /* According to the C standard, isdigit is unaffected by locale.
9080 However, it definitely is affected by the target character set. */
624d37a6 9081 unsigned HOST_WIDE_INT target_digit0
9082 = lang_hooks.to_target_charset ('0');
9083
9084 if (target_digit0 == 0)
9085 return NULL_TREE;
9086
389dd41b 9087 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 9088 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9089 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 9090 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 9091 build_int_cst (unsigned_type_node, 9));
df1cf42e 9092 }
9093}
27f261ef 9094
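/* Added illustration -- not part of the original builtins.c.  The
   locale-independent isdigit rewrite above, as plain C; it relies on the
   digits being contiguous in the target character set, which is why
   fold_builtin_isdigit gives up when it cannot determine the target's '0'.  */
static int
example_isdigit (int c)
{
  return (unsigned) c - '0' <= 9u;
}
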
c2f47e15 9095/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 9096
9097static tree
389dd41b 9098fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 9099{
c2f47e15 9100 if (!validate_arg (arg, REAL_TYPE))
9101 return NULL_TREE;
d1aade50 9102
389dd41b 9103 arg = fold_convert_loc (loc, type, arg);
d1aade50 9104 if (TREE_CODE (arg) == REAL_CST)
9105 return fold_abs_const (arg, type);
389dd41b 9106 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9107}
9108
c2f47e15 9109/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 9110
9111static tree
389dd41b 9112fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 9113{
c2f47e15 9114 if (!validate_arg (arg, INTEGER_TYPE))
9115 return NULL_TREE;
d1aade50 9116
389dd41b 9117 arg = fold_convert_loc (loc, type, arg);
d1aade50 9118 if (TREE_CODE (arg) == INTEGER_CST)
9119 return fold_abs_const (arg, type);
389dd41b 9120 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9121}
9122
b9be572e 9123/* Fold a fma operation with arguments ARG[012]. */
9124
9125tree
9126fold_fma (location_t loc ATTRIBUTE_UNUSED,
9127 tree type, tree arg0, tree arg1, tree arg2)
9128{
9129 if (TREE_CODE (arg0) == REAL_CST
9130 && TREE_CODE (arg1) == REAL_CST
9131 && TREE_CODE (arg2) == REAL_CST)
9132 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9133
9134 return NULL_TREE;
9135}
9136
9137/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9138
9139static tree
9140fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9141{
9142 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9143 && validate_arg (arg1, REAL_TYPE)
9144 && validate_arg (arg2, REAL_TYPE))
b9be572e 9145 {
9146 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9147 if (tem)
9148 return tem;
9149
9150 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9151 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9152 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9153 }
9154 return NULL_TREE;
9155}
9156
d4a43a03 9157/* Fold a call to builtin fmin or fmax. */
9158
9159static tree
389dd41b 9160fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9161 tree type, bool max)
d4a43a03 9162{
c2f47e15 9163 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
d4a43a03 9164 {
d4a43a03 9165 /* Calculate the result when the argument is a constant. */
9166 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9167
9168 if (res)
9169 return res;
9170
61fe3555 9171 /* If either argument is NaN, return the other one. Avoid the
9172 transformation if we get (and honor) a signalling NaN. Using
9173 omit_one_operand() ensures we create a non-lvalue. */
9174 if (TREE_CODE (arg0) == REAL_CST
9175 && real_isnan (&TREE_REAL_CST (arg0))
fe994837 9176 && (! HONOR_SNANS (arg0)
61fe3555 9177 || ! TREE_REAL_CST (arg0).signalling))
389dd41b 9178 return omit_one_operand_loc (loc, type, arg1, arg0);
61fe3555 9179 if (TREE_CODE (arg1) == REAL_CST
9180 && real_isnan (&TREE_REAL_CST (arg1))
fe994837 9181 && (! HONOR_SNANS (arg1)
61fe3555 9182 || ! TREE_REAL_CST (arg1).signalling))
389dd41b 9183 return omit_one_operand_loc (loc, type, arg0, arg1);
61fe3555 9184
d4a43a03 9185 /* Transform fmin/fmax(x,x) -> x. */
9186 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
389dd41b 9187 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9188
d4a43a03 9189 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9190 functions to return the numeric arg if the other one is NaN.
9191 These tree codes don't honor that, so only transform if
9192 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9193 handled, so we don't have to worry about it either. */
9194 if (flag_finite_math_only)
389dd41b 9195 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9196 fold_convert_loc (loc, type, arg0),
9197 fold_convert_loc (loc, type, arg1));
d4a43a03 9198 }
9199 return NULL_TREE;
9200}
9201
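/* Added illustration -- not part of the original builtins.c.  The C99 rule
   the folder leans on: when exactly one argument is a quiet NaN, fmin/fmax
   return the other argument (hypothetical helper, <math.h> assumed).  */
#include <math.h>

static double
example_fmax_quiet_nan (double x)
{
  return fmax (NAN, x);   /* == x for any non-NaN x */
}
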
abe4dcf6 9202/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9203
9204static tree
389dd41b 9205fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 9206{
239d491a 9207 if (validate_arg (arg, COMPLEX_TYPE)
9208 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 9209 {
9210 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 9211
abe4dcf6 9212 if (atan2_fn)
9213 {
c2f47e15 9214 tree new_arg = builtin_save_expr (arg);
389dd41b 9215 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9216 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9217 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 9218 }
9219 }
48e1416a 9220
abe4dcf6 9221 return NULL_TREE;
9222}
9223
cb2b9385 9224/* Fold a call to builtin logb/ilogb. */
9225
9226static tree
389dd41b 9227fold_builtin_logb (location_t loc, tree arg, tree rettype)
cb2b9385 9228{
9229 if (! validate_arg (arg, REAL_TYPE))
9230 return NULL_TREE;
48e1416a 9231
cb2b9385 9232 STRIP_NOPS (arg);
48e1416a 9233
cb2b9385 9234 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9235 {
9236 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9237
cb2b9385 9238 switch (value->cl)
9239 {
9240 case rvc_nan:
9241 case rvc_inf:
9242 /* If arg is Inf or NaN and we're logb, return it. */
9243 if (TREE_CODE (rettype) == REAL_TYPE)
7695fea9 9244 {
9245 /* For logb(-Inf) we have to return +Inf. */
9246 if (real_isinf (value) && real_isneg (value))
9247 {
9248 REAL_VALUE_TYPE tem;
9249 real_inf (&tem);
9250 return build_real (rettype, tem);
9251 }
9252 return fold_convert_loc (loc, rettype, arg);
9253 }
cb2b9385 9254 /* Fall through... */
9255 case rvc_zero:
 9256 /* Zero may set errno and/or raise an exception for logb; also,
9257 for ilogb we don't know FP_ILOGB0. */
9258 return NULL_TREE;
9259 case rvc_normal:
9260 /* For normal numbers, proceed iff radix == 2. In GCC,
9261 normalized significands are in the range [0.5, 1.0). We
9262 want the exponent as if they were [1.0, 2.0) so get the
9263 exponent and subtract 1. */
9264 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
389dd41b 9265 return fold_convert_loc (loc, rettype,
7002a1c8 9266 build_int_cst (integer_type_node,
389dd41b 9267 REAL_EXP (value)-1));
cb2b9385 9268 break;
9269 }
9270 }
48e1416a 9271
cb2b9385 9272 return NULL_TREE;
9273}
9274
9275/* Fold a call to builtin significand, if radix == 2. */
9276
9277static tree
389dd41b 9278fold_builtin_significand (location_t loc, tree arg, tree rettype)
cb2b9385 9279{
9280 if (! validate_arg (arg, REAL_TYPE))
9281 return NULL_TREE;
48e1416a 9282
cb2b9385 9283 STRIP_NOPS (arg);
48e1416a 9284
cb2b9385 9285 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9286 {
9287 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9288
cb2b9385 9289 switch (value->cl)
9290 {
9291 case rvc_zero:
9292 case rvc_nan:
9293 case rvc_inf:
9294 /* If arg is +-0, +-Inf or +-NaN, then return it. */
389dd41b 9295 return fold_convert_loc (loc, rettype, arg);
cb2b9385 9296 case rvc_normal:
9297 /* For normal numbers, proceed iff radix == 2. */
9298 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9299 {
9300 REAL_VALUE_TYPE result = *value;
9301 /* In GCC, normalized significands are in the range [0.5,
9302 1.0). We want them to be [1.0, 2.0) so set the
9303 exponent to 1. */
9304 SET_REAL_EXP (&result, 1);
9305 return build_real (rettype, result);
9306 }
9307 break;
9308 }
9309 }
48e1416a 9310
cb2b9385 9311 return NULL_TREE;
9312}
9313
3838b9ae 9314/* Fold a call to builtin frexp, we can assume the base is 2. */
9315
9316static tree
389dd41b 9317fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 9318{
9319 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9320 return NULL_TREE;
48e1416a 9321
3838b9ae 9322 STRIP_NOPS (arg0);
48e1416a 9323
3838b9ae 9324 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9325 return NULL_TREE;
48e1416a 9326
389dd41b 9327 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 9328
9329 /* Proceed if a valid pointer type was passed in. */
9330 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9331 {
9332 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9333 tree frac, exp;
48e1416a 9334
3838b9ae 9335 switch (value->cl)
9336 {
9337 case rvc_zero:
9338 /* For +-0, return (*exp = 0, +-0). */
9339 exp = integer_zero_node;
9340 frac = arg0;
9341 break;
9342 case rvc_nan:
9343 case rvc_inf:
9344 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 9345 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 9346 case rvc_normal:
9347 {
9348 /* Since the frexp function always expects base 2, and in
9349 GCC normalized significands are already in the range
9350 [0.5, 1.0), we have exactly what frexp wants. */
9351 REAL_VALUE_TYPE frac_rvt = *value;
9352 SET_REAL_EXP (&frac_rvt, 0);
9353 frac = build_real (rettype, frac_rvt);
7002a1c8 9354 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 9355 }
9356 break;
9357 default:
9358 gcc_unreachable ();
9359 }
48e1416a 9360
3838b9ae 9361      /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
389dd41b 9362 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 9363 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9364 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 9365 }
9366
9367 return NULL_TREE;
9368}
9369
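/* Illustration only (not part of builtins.c): the observable effect of
   the frexp fold above on a constant argument.  Standalone sketch; the
   names are ours.  */
#include <math.h>
#include <stdio.h>

int main (void)
{
  int e;
  /* 8.0 is stored as 0.5 * 2**4, already frexp's normal form, so the
     call folds to the equivalent of (*&e = 4, 0.5).  */
  double frac = frexp (8.0, &e);
  printf ("%g %d\n", frac, e);   /* prints "0.5 4" */
  return 0;
}
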
7587301b 9370/* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9371 then we can assume the base is two. If it's false, then we have to
9372 check the mode of the TYPE parameter in certain cases. */
9373
9374static tree
389dd41b 9375fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9376 tree type, bool ldexp)
7587301b 9377{
9378 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9379 {
9380 STRIP_NOPS (arg0);
9381 STRIP_NOPS (arg1);
9382
9383 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9384 if (real_zerop (arg0) || integer_zerop (arg1)
9385 || (TREE_CODE (arg0) == REAL_CST
776a7bab 9386 && !real_isfinite (&TREE_REAL_CST (arg0))))
389dd41b 9387 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9388
7587301b 9389 /* If both arguments are constant, then try to evaluate it. */
9390 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9391 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
e913b5cd 9392 && tree_fits_shwi_p (arg1))
7587301b 9393 {
9394 /* Bound the maximum adjustment to twice the range of the
9395 mode's valid exponents. Use abs to ensure the range is
9396 positive as a sanity check. */
48e1416a 9397 const long max_exp_adj = 2 *
7587301b 9398 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9399 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9400
9401 /* Get the user-requested adjustment. */
e913b5cd 9402 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
48e1416a 9403
7587301b 9404 /* The requested adjustment must be inside this range. This
 9405	     is a preliminary cap to avoid things like overflow; we
9406 may still fail to compute the result for other reasons. */
9407 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9408 {
9409 REAL_VALUE_TYPE initial_result;
48e1416a 9410
7587301b 9411 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9412
9413 /* Ensure we didn't overflow. */
9414 if (! real_isinf (&initial_result))
9415 {
9416 const REAL_VALUE_TYPE trunc_result
9417 = real_value_truncate (TYPE_MODE (type), initial_result);
48e1416a 9418
7587301b 9419 /* Only proceed if the target mode can hold the
9420 resulting value. */
9421 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9422 return build_real (type, trunc_result);
9423 }
9424 }
9425 }
9426 }
9427
9428 return NULL_TREE;
9429}
9430
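/* Illustration only (not part of builtins.c): constant-folding results
   the ldexp/scalbn code above must match.  Standalone sketch assuming
   radix-2 doubles; the names are ours.  */
#include <math.h>
#include <stdio.h>

int main (void)
{
  /* Both calls fold to 12.0 == 1.5 * 2**3.  For scalbn the fold only
     fires when the type's radix is 2, as checked above.  */
  printf ("%g %g\n", ldexp (1.5, 3), scalbn (1.5, 3));   /* prints "12 12" */
  return 0;
}
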
ebf8b4f5 9431/* Fold a call to builtin modf. */
9432
9433static tree
389dd41b 9434fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 9435{
9436 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9437 return NULL_TREE;
48e1416a 9438
ebf8b4f5 9439 STRIP_NOPS (arg0);
48e1416a 9440
ebf8b4f5 9441 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9442 return NULL_TREE;
48e1416a 9443
389dd41b 9444 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 9445
9446 /* Proceed if a valid pointer type was passed in. */
9447 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9448 {
9449 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9450 REAL_VALUE_TYPE trunc, frac;
9451
9452 switch (value->cl)
9453 {
9454 case rvc_nan:
9455 case rvc_zero:
9456 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9457 trunc = frac = *value;
9458 break;
9459 case rvc_inf:
9460 /* For +-Inf, return (*arg1 = arg0, +-0). */
9461 frac = dconst0;
9462 frac.sign = value->sign;
9463 trunc = *value;
9464 break;
9465 case rvc_normal:
9466 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9467 real_trunc (&trunc, VOIDmode, value);
9468 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9469 /* If the original number was negative and already
9470 integral, then the fractional part is -0.0. */
9471 if (value->sign && frac.cl == rvc_zero)
9472 frac.sign = value->sign;
9473 break;
9474 }
48e1416a 9475
ebf8b4f5 9476 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 9477 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 9478 build_real (rettype, trunc));
9479 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9480 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 9481 build_real (rettype, frac));
9482 }
48e1416a 9483
ebf8b4f5 9484 return NULL_TREE;
9485}
9486
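/* Illustration only (not part of builtins.c): the values the modf fold
   above produces, including the signed-zero case called out in the
   comment.  Standalone sketch; the names are ours.  */
#include <math.h>
#include <stdio.h>

int main (void)
{
  double ip;
  double frac = modf (2.75, &ip);   /* folds to (*&ip = 2.0, 0.75) */
  printf ("%g %g\n", ip, frac);     /* prints "2 0.75" */
  frac = modf (-2.0, &ip);          /* already integral: frac is -0.0 */
  printf ("%g %g\n", ip, frac);     /* prints "-2 -0" */
  return 0;
}
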
a65c4d64 9487/* Given a location LOC, an interclass builtin function decl FNDECL
 9488   and its single argument ARG, return a folded expression computing
9489 the same, or NULL_TREE if we either couldn't or didn't want to fold
 9490   (the latter happens if there's an RTL instruction available).  */
9491
9492static tree
9493fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9494{
3754d046 9495 machine_mode mode;
a65c4d64 9496
9497 if (!validate_arg (arg, REAL_TYPE))
9498 return NULL_TREE;
9499
9500 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9501 return NULL_TREE;
9502
9503 mode = TYPE_MODE (TREE_TYPE (arg));
9504
9505 /* If there is no optab, try generic code. */
9506 switch (DECL_FUNCTION_CODE (fndecl))
9507 {
9508 tree result;
9509
9510 CASE_FLT_FN (BUILT_IN_ISINF):
9511 {
9512 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
b9a16870 9513 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
a65c4d64 9514 tree const type = TREE_TYPE (arg);
9515 REAL_VALUE_TYPE r;
9516 char buf[128];
9517
9518 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9519 real_from_string (&r, buf);
9520 result = build_call_expr (isgr_fn, 2,
9521 fold_build1_loc (loc, ABS_EXPR, type, arg),
9522 build_real (type, r));
9523 return result;
9524 }
9525 CASE_FLT_FN (BUILT_IN_FINITE):
9526 case BUILT_IN_ISFINITE:
9527 {
9528 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
b9a16870 9529 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
a65c4d64 9530 tree const type = TREE_TYPE (arg);
9531 REAL_VALUE_TYPE r;
9532 char buf[128];
9533
9534 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9535 real_from_string (&r, buf);
9536 result = build_call_expr (isle_fn, 2,
9537 fold_build1_loc (loc, ABS_EXPR, type, arg),
9538 build_real (type, r));
9539 /*result = fold_build2_loc (loc, UNGT_EXPR,
9540 TREE_TYPE (TREE_TYPE (fndecl)),
9541 fold_build1_loc (loc, ABS_EXPR, type, arg),
9542 build_real (type, r));
9543 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9544 TREE_TYPE (TREE_TYPE (fndecl)),
9545 result);*/
9546 return result;
9547 }
9548 case BUILT_IN_ISNORMAL:
9549 {
9550 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9551 islessequal(fabs(x),DBL_MAX). */
b9a16870 9552 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9553 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
a65c4d64 9554 tree const type = TREE_TYPE (arg);
9555 REAL_VALUE_TYPE rmax, rmin;
9556 char buf[128];
9557
9558 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9559 real_from_string (&rmax, buf);
9560 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9561 real_from_string (&rmin, buf);
9562 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9563 result = build_call_expr (isle_fn, 2, arg,
9564 build_real (type, rmax));
9565 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9566 build_call_expr (isge_fn, 2, arg,
9567 build_real (type, rmin)));
9568 return result;
9569 }
9570 default:
9571 break;
9572 }
9573
9574 return NULL_TREE;
9575}
9576
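/* Illustration only (not part of builtins.c): the source-level shape of
   the interclass expansions above, written for double with <float.h>
   limits standing in for the get_max_float()/"0x1p..." strings.  Sketch
   only; the example_* names are ours.  */
#include <float.h>
#include <math.h>

int example_isfinite_expansion (double x)
{
  /* isfinite(x) -> islessequal(fabs(x), DBL_MAX): false for NaN (the
     comparison is quiet and unordered) and for +-Inf.  */
  return islessequal (fabs (x), DBL_MAX);
}

int example_isnormal_expansion (double x)
{
  /* isnormal(x) -> isgreaterequal(fabs(x), DBL_MIN)
		    & islessequal(fabs(x), DBL_MAX).  */
  return isgreaterequal (fabs (x), DBL_MIN) & islessequal (fabs (x), DBL_MAX);
}
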
726069ba 9577/* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
c2f47e15 9578 ARG is the argument for the call. */
726069ba 9579
9580static tree
389dd41b 9581fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 9582{
726069ba 9583 tree type = TREE_TYPE (TREE_TYPE (fndecl));
726069ba 9584 REAL_VALUE_TYPE r;
9585
c2f47e15 9586 if (!validate_arg (arg, REAL_TYPE))
d43cee80 9587 return NULL_TREE;
726069ba 9588
726069ba 9589 switch (builtin_index)
9590 {
9591 case BUILT_IN_ISINF:
fe994837 9592 if (!HONOR_INFINITIES (arg))
389dd41b 9593 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9594
9595 if (TREE_CODE (arg) == REAL_CST)
9596 {
9597 r = TREE_REAL_CST (arg);
9598 if (real_isinf (&r))
9599 return real_compare (GT_EXPR, &r, &dconst0)
9600 ? integer_one_node : integer_minus_one_node;
9601 else
9602 return integer_zero_node;
9603 }
9604
9605 return NULL_TREE;
9606
c319d56a 9607 case BUILT_IN_ISINF_SIGN:
9608 {
9609 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9610 /* In a boolean context, GCC will fold the inner COND_EXPR to
9611 1. So e.g. "if (isinf_sign(x))" would be folded to just
9612 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9613 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
b9a16870 9614 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 9615 tree tmp = NULL_TREE;
9616
9617 arg = builtin_save_expr (arg);
9618
9619 if (signbit_fn && isinf_fn)
9620 {
389dd41b 9621 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9622 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 9623
389dd41b 9624 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9625 signbit_call, integer_zero_node);
389dd41b 9626 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9627 isinf_call, integer_zero_node);
48e1416a 9628
389dd41b 9629 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 9630 integer_minus_one_node, integer_one_node);
389dd41b 9631 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9632 isinf_call, tmp,
c319d56a 9633 integer_zero_node);
9634 }
9635
9636 return tmp;
9637 }
9638
cde061c1 9639 case BUILT_IN_ISFINITE:
93633022 9640 if (!HONOR_NANS (arg)
fe994837 9641 && !HONOR_INFINITIES (arg))
389dd41b 9642 return omit_one_operand_loc (loc, type, integer_one_node, arg);
726069ba 9643
9644 if (TREE_CODE (arg) == REAL_CST)
9645 {
9646 r = TREE_REAL_CST (arg);
776a7bab 9647 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
726069ba 9648 }
9649
9650 return NULL_TREE;
9651
9652 case BUILT_IN_ISNAN:
93633022 9653 if (!HONOR_NANS (arg))
389dd41b 9654 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9655
9656 if (TREE_CODE (arg) == REAL_CST)
9657 {
9658 r = TREE_REAL_CST (arg);
9659 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9660 }
9661
9662 arg = builtin_save_expr (arg);
389dd41b 9663 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
726069ba 9664
9665 default:
64db345d 9666 gcc_unreachable ();
726069ba 9667 }
9668}
9669
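/* Illustration only (not part of builtins.c): observable results of the
   isinf_sign fold described above.  Standalone sketch; HUGE_VAL is +Inf
   on IEEE targets.  */
#include <math.h>
#include <stdio.h>

int main (void)
{
  /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
  printf ("%d %d %d\n",
	  __builtin_isinf_sign (-HUGE_VAL),   /* -1 */
	  __builtin_isinf_sign (HUGE_VAL),    /*  1 */
	  __builtin_isinf_sign (42.0));       /*  0 */
  return 0;
}
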
19fbe3a4 9670/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9671 This builtin will generate code to return the appropriate floating
9672 point classification depending on the value of the floating point
9673 number passed in. The possible return values must be supplied as
921b27c0 9674 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
19fbe3a4 9675   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9676 one floating point argument which is "type generic". */
9677
9678static tree
9d884767 9679fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
19fbe3a4 9680{
921b27c0 9681 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9682 arg, type, res, tmp;
3754d046 9683 machine_mode mode;
19fbe3a4 9684 REAL_VALUE_TYPE r;
9685 char buf[128];
48e1416a 9686
19fbe3a4 9687 /* Verify the required arguments in the original call. */
9d884767 9688 if (nargs != 6
9689 || !validate_arg (args[0], INTEGER_TYPE)
9690 || !validate_arg (args[1], INTEGER_TYPE)
9691 || !validate_arg (args[2], INTEGER_TYPE)
9692 || !validate_arg (args[3], INTEGER_TYPE)
9693 || !validate_arg (args[4], INTEGER_TYPE)
9694 || !validate_arg (args[5], REAL_TYPE))
19fbe3a4 9695 return NULL_TREE;
48e1416a 9696
9d884767 9697 fp_nan = args[0];
9698 fp_infinite = args[1];
9699 fp_normal = args[2];
9700 fp_subnormal = args[3];
9701 fp_zero = args[4];
9702 arg = args[5];
19fbe3a4 9703 type = TREE_TYPE (arg);
9704 mode = TYPE_MODE (type);
389dd41b 9705 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
19fbe3a4 9706
48e1416a 9707 /* fpclassify(x) ->
19fbe3a4 9708 isnan(x) ? FP_NAN :
921b27c0 9709 (fabs(x) == Inf ? FP_INFINITE :
19fbe3a4 9710 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9711 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
48e1416a 9712
389dd41b 9713 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9714 build_real (type, dconst0));
389dd41b 9715 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9716 tmp, fp_zero, fp_subnormal);
19fbe3a4 9717
9718 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9719 real_from_string (&r, buf);
389dd41b 9720 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9721 arg, build_real (type, r));
9722 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
48e1416a 9723
19fbe3a4 9724 if (HONOR_INFINITIES (mode))
9725 {
9726 real_inf (&r);
389dd41b 9727 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9728 build_real (type, r));
389dd41b 9729 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9730 fp_infinite, res);
19fbe3a4 9731 }
9732
9733 if (HONOR_NANS (mode))
9734 {
389dd41b 9735 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9736 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
19fbe3a4 9737 }
48e1416a 9738
19fbe3a4 9739 return res;
9740}
9741
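/* Illustration only (not part of builtins.c): how __builtin_fpclassify
   is invoked (essentially what glibc's fpclassify macro expands to with
   a recent GCC) and what the folded COND_EXPR chain above returns.
   Sketch; the wrapper name is ours.  */
#include <math.h>

int example_fpclassify (double x)
{
  /* Argument order matches the folder: FP_NAN, FP_INFINITE, FP_NORMAL,
     FP_SUBNORMAL, FP_ZERO, then the value.  E.g. 0.0 yields FP_ZERO and
     1.0 yields FP_NORMAL.  */
  return __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			       FP_SUBNORMAL, FP_ZERO, x);
}
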
9bc9f15f 9742/* Fold a call to an unordered comparison function such as
d5019fe8 9743 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 9744 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 9745 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9746 the opposite of the desired result. UNORDERED_CODE is used
9747 for modes that can hold NaNs and ORDERED_CODE is used for
9748 the rest. */
9bc9f15f 9749
9750static tree
389dd41b 9751fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 9752 enum tree_code unordered_code,
9753 enum tree_code ordered_code)
9754{
859f903a 9755 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 9756 enum tree_code code;
6978db0d 9757 tree type0, type1;
9758 enum tree_code code0, code1;
9759 tree cmp_type = NULL_TREE;
9bc9f15f 9760
6978db0d 9761 type0 = TREE_TYPE (arg0);
9762 type1 = TREE_TYPE (arg1);
a0c938f0 9763
6978db0d 9764 code0 = TREE_CODE (type0);
9765 code1 = TREE_CODE (type1);
a0c938f0 9766
6978db0d 9767 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9768 /* Choose the wider of two real types. */
9769 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9770 ? type0 : type1;
9771 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9772 cmp_type = type0;
9773 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9774 cmp_type = type1;
a0c938f0 9775
389dd41b 9776 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9777 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 9778
9779 if (unordered_code == UNORDERED_EXPR)
9780 {
93633022 9781 if (!HONOR_NANS (arg0))
389dd41b 9782 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9783 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 9784 }
9bc9f15f 9785
93633022 9786 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 9787 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9788 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 9789}
9790
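/* Illustration only (not part of builtins.c): the quiet-comparison
   semantics the fold above preserves.  isgreater(x, y) becomes
   !(x UNLE y): false, without raising FE_INVALID, when either operand
   is NaN.  Standalone sketch; the names are ours.  */
#include <math.h>
#include <stdio.h>

int main (void)
{
  double nan = NAN;
  printf ("%d %d\n",
	  isgreater (2.0, 1.0),    /* 1 */
	  isgreater (nan, 1.0));   /* 0, and no invalid-operation trap */
  return 0;
}
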
0c93c8a9 9791/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
 9792   arithmetic if it can never overflow, or into internal functions that
 9793   return both the result of the arithmetic and an overflow flag in
 9794   a complex integer result, or some other check for overflow.  */
9795
9796static tree
9797fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9798 tree arg0, tree arg1, tree arg2)
9799{
9800 enum internal_fn ifn = IFN_LAST;
9801 tree type = TREE_TYPE (TREE_TYPE (arg2));
9802 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9803 switch (fcode)
9804 {
9805 case BUILT_IN_ADD_OVERFLOW:
9806 case BUILT_IN_SADD_OVERFLOW:
9807 case BUILT_IN_SADDL_OVERFLOW:
9808 case BUILT_IN_SADDLL_OVERFLOW:
9809 case BUILT_IN_UADD_OVERFLOW:
9810 case BUILT_IN_UADDL_OVERFLOW:
9811 case BUILT_IN_UADDLL_OVERFLOW:
9812 ifn = IFN_ADD_OVERFLOW;
9813 break;
9814 case BUILT_IN_SUB_OVERFLOW:
9815 case BUILT_IN_SSUB_OVERFLOW:
9816 case BUILT_IN_SSUBL_OVERFLOW:
9817 case BUILT_IN_SSUBLL_OVERFLOW:
9818 case BUILT_IN_USUB_OVERFLOW:
9819 case BUILT_IN_USUBL_OVERFLOW:
9820 case BUILT_IN_USUBLL_OVERFLOW:
9821 ifn = IFN_SUB_OVERFLOW;
9822 break;
9823 case BUILT_IN_MUL_OVERFLOW:
9824 case BUILT_IN_SMUL_OVERFLOW:
9825 case BUILT_IN_SMULL_OVERFLOW:
9826 case BUILT_IN_SMULLL_OVERFLOW:
9827 case BUILT_IN_UMUL_OVERFLOW:
9828 case BUILT_IN_UMULL_OVERFLOW:
9829 case BUILT_IN_UMULLL_OVERFLOW:
9830 ifn = IFN_MUL_OVERFLOW;
9831 break;
9832 default:
9833 gcc_unreachable ();
9834 }
9835 tree ctype = build_complex_type (type);
9836 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9837 2, arg0, arg1);
9838 tree tgt = save_expr (call);
9839 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9840 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9841 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9842 tree store
9843 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9844 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9845}
9846
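/* Illustration only (not part of builtins.c): user-level behaviour of
   the overflow builtins folded above.  The call becomes an
   IFN_ADD_OVERFLOW whose complex result carries the wrapped value
   (real part) and the overflow flag (imaginary part).  Standalone
   sketch assuming 32-bit unsigned int; the names are ours.  */
#include <stdio.h>

int main (void)
{
  unsigned int r;
  int ovf = __builtin_add_overflow (4000000000u, 400000000u, &r);
  printf ("%u %d\n", r, ovf);   /* "105032704 1" with 32-bit unsigned int */
  return 0;
}
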
c2f47e15 9847/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 9848 This function returns NULL_TREE if no simplification was possible. */
650e4c94 9849
4ee9c684 9850static tree
e80cc485 9851fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 9852{
e9f80ff5 9853 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 9854 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 9855 switch (fcode)
650e4c94 9856 {
c2f47e15 9857 CASE_FLT_FN (BUILT_IN_INF):
9858 case BUILT_IN_INFD32:
9859 case BUILT_IN_INFD64:
9860 case BUILT_IN_INFD128:
389dd41b 9861 return fold_builtin_inf (loc, type, true);
7c2f0500 9862
c2f47e15 9863 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
389dd41b 9864 return fold_builtin_inf (loc, type, false);
7c2f0500 9865
c2f47e15 9866 case BUILT_IN_CLASSIFY_TYPE:
9867 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 9868
c2f47e15 9869 default:
9870 break;
9871 }
9872 return NULL_TREE;
9873}
7c2f0500 9874
c2f47e15 9875/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 9876 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 9877
c2f47e15 9878static tree
e80cc485 9879fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 9880{
9881 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9882 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9883 switch (fcode)
9884 {
650e4c94 9885 case BUILT_IN_CONSTANT_P:
7c2f0500 9886 {
c2f47e15 9887 tree val = fold_builtin_constant_p (arg0);
7c2f0500 9888
7c2f0500 9889 /* Gimplification will pull the CALL_EXPR for the builtin out of
9890 an if condition. When not optimizing, we'll not CSE it back.
9891 To avoid link error types of regressions, return false now. */
9892 if (!val && !optimize)
9893 val = integer_zero_node;
9894
9895 return val;
9896 }
650e4c94 9897
539a3a92 9898 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 9899 return fold_builtin_classify_type (arg0);
539a3a92 9900
650e4c94 9901 case BUILT_IN_STRLEN:
c7cbde74 9902 return fold_builtin_strlen (loc, type, arg0);
650e4c94 9903
4f35b1fc 9904 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 9905 case BUILT_IN_FABSD32:
9906 case BUILT_IN_FABSD64:
9907 case BUILT_IN_FABSD128:
389dd41b 9908 return fold_builtin_fabs (loc, arg0, type);
d1aade50 9909
9910 case BUILT_IN_ABS:
9911 case BUILT_IN_LABS:
9912 case BUILT_IN_LLABS:
9913 case BUILT_IN_IMAXABS:
389dd41b 9914 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 9915
4f35b1fc 9916 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 9917 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9918 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9919 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 9920 break;
36d3581d 9921
4f35b1fc 9922 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 9923 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 9925 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 9926 break;
36d3581d 9927
4f35b1fc 9928 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 9929 if (validate_arg (arg0, COMPLEX_TYPE)
9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9931 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 9932 break;
36d3581d 9933
503733d5 9934 CASE_FLT_FN (BUILT_IN_CCOS):
9af5ce0c 9935 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
48e1416a 9936
503733d5 9937 CASE_FLT_FN (BUILT_IN_CCOSH):
9af5ce0c 9938 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
48e1416a 9939
c2373fdb 9940 CASE_FLT_FN (BUILT_IN_CPROJ):
9af5ce0c 9941 return fold_builtin_cproj (loc, arg0, type);
c2373fdb 9942
239d491a 9943 CASE_FLT_FN (BUILT_IN_CSIN):
9944 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9945 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9946 return do_mpc_arg1 (arg0, type, mpc_sin);
c2f47e15 9947 break;
48e1416a 9948
239d491a 9949 CASE_FLT_FN (BUILT_IN_CSINH):
9950 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9951 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9952 return do_mpc_arg1 (arg0, type, mpc_sinh);
9953 break;
48e1416a 9954
239d491a 9955 CASE_FLT_FN (BUILT_IN_CTAN):
9956 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9957 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9958 return do_mpc_arg1 (arg0, type, mpc_tan);
9959 break;
48e1416a 9960
239d491a 9961 CASE_FLT_FN (BUILT_IN_CTANH):
9962 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9963 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9964 return do_mpc_arg1 (arg0, type, mpc_tanh);
9965 break;
48e1416a 9966
239d491a 9967 CASE_FLT_FN (BUILT_IN_CLOG):
9968 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9969 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9970 return do_mpc_arg1 (arg0, type, mpc_log);
9971 break;
48e1416a 9972
239d491a 9973 CASE_FLT_FN (BUILT_IN_CSQRT):
9974 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9975 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9976 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9977 break;
48e1416a 9978
0e7e6e7f 9979 CASE_FLT_FN (BUILT_IN_CASIN):
9980 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9981 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9982 return do_mpc_arg1 (arg0, type, mpc_asin);
9983 break;
48e1416a 9984
0e7e6e7f 9985 CASE_FLT_FN (BUILT_IN_CACOS):
9986 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9987 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9988 return do_mpc_arg1 (arg0, type, mpc_acos);
9989 break;
48e1416a 9990
0e7e6e7f 9991 CASE_FLT_FN (BUILT_IN_CATAN):
9992 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9993 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9994 return do_mpc_arg1 (arg0, type, mpc_atan);
9995 break;
48e1416a 9996
0e7e6e7f 9997 CASE_FLT_FN (BUILT_IN_CASINH):
9998 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10000 return do_mpc_arg1 (arg0, type, mpc_asinh);
10001 break;
48e1416a 10002
0e7e6e7f 10003 CASE_FLT_FN (BUILT_IN_CACOSH):
10004 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10006 return do_mpc_arg1 (arg0, type, mpc_acosh);
10007 break;
48e1416a 10008
0e7e6e7f 10009 CASE_FLT_FN (BUILT_IN_CATANH):
10010 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10012 return do_mpc_arg1 (arg0, type, mpc_atanh);
10013 break;
48e1416a 10014
4f35b1fc 10015 CASE_FLT_FN (BUILT_IN_CABS):
389dd41b 10016 return fold_builtin_cabs (loc, arg0, type, fndecl);
c63f4ad3 10017
abe4dcf6 10018 CASE_FLT_FN (BUILT_IN_CARG):
389dd41b 10019 return fold_builtin_carg (loc, arg0, type);
abe4dcf6 10020
4f35b1fc 10021 CASE_FLT_FN (BUILT_IN_SQRT):
389dd41b 10022 return fold_builtin_sqrt (loc, arg0, type);
805e22b2 10023
4f35b1fc 10024 CASE_FLT_FN (BUILT_IN_CBRT):
389dd41b 10025 return fold_builtin_cbrt (loc, arg0, type);
3bc5c41b 10026
728bac60 10027 CASE_FLT_FN (BUILT_IN_ASIN):
c2f47e15 10028 if (validate_arg (arg0, REAL_TYPE))
10029 return do_mpfr_arg1 (arg0, type, mpfr_asin,
728bac60 10030 &dconstm1, &dconst1, true);
10031 break;
10032
10033 CASE_FLT_FN (BUILT_IN_ACOS):
c2f47e15 10034 if (validate_arg (arg0, REAL_TYPE))
10035 return do_mpfr_arg1 (arg0, type, mpfr_acos,
728bac60 10036 &dconstm1, &dconst1, true);
10037 break;
10038
10039 CASE_FLT_FN (BUILT_IN_ATAN):
c2f47e15 10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
728bac60 10042 break;
10043
10044 CASE_FLT_FN (BUILT_IN_ASINH):
c2f47e15 10045 if (validate_arg (arg0, REAL_TYPE))
10046 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
728bac60 10047 break;
10048
10049 CASE_FLT_FN (BUILT_IN_ACOSH):
c2f47e15 10050 if (validate_arg (arg0, REAL_TYPE))
10051 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
728bac60 10052 &dconst1, NULL, true);
10053 break;
10054
10055 CASE_FLT_FN (BUILT_IN_ATANH):
c2f47e15 10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
728bac60 10058 &dconstm1, &dconst1, false);
10059 break;
10060
4f35b1fc 10061 CASE_FLT_FN (BUILT_IN_SIN):
c2f47e15 10062 if (validate_arg (arg0, REAL_TYPE))
10063 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
728bac60 10064 break;
77e89269 10065
4f35b1fc 10066 CASE_FLT_FN (BUILT_IN_COS):
389dd41b 10067 return fold_builtin_cos (loc, arg0, type, fndecl);
77e89269 10068
728bac60 10069 CASE_FLT_FN (BUILT_IN_TAN):
c2f47e15 10070 return fold_builtin_tan (arg0, type);
d735c391 10071
c5bb2c4b 10072 CASE_FLT_FN (BUILT_IN_CEXP):
389dd41b 10073 return fold_builtin_cexp (loc, arg0, type);
c5bb2c4b 10074
d735c391 10075 CASE_FLT_FN (BUILT_IN_CEXPI):
c2f47e15 10076 if (validate_arg (arg0, REAL_TYPE))
10077 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10078 break;
d92f994c 10079
728bac60 10080 CASE_FLT_FN (BUILT_IN_SINH):
c2f47e15 10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
728bac60 10083 break;
10084
10085 CASE_FLT_FN (BUILT_IN_COSH):
389dd41b 10086 return fold_builtin_cosh (loc, arg0, type, fndecl);
728bac60 10087
10088 CASE_FLT_FN (BUILT_IN_TANH):
c2f47e15 10089 if (validate_arg (arg0, REAL_TYPE))
10090 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
728bac60 10091 break;
10092
29f4cd78 10093 CASE_FLT_FN (BUILT_IN_ERF):
c2f47e15 10094 if (validate_arg (arg0, REAL_TYPE))
10095 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
29f4cd78 10096 break;
10097
10098 CASE_FLT_FN (BUILT_IN_ERFC):
c2f47e15 10099 if (validate_arg (arg0, REAL_TYPE))
10100 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
29f4cd78 10101 break;
10102
32dba52b 10103 CASE_FLT_FN (BUILT_IN_TGAMMA):
c2f47e15 10104 if (validate_arg (arg0, REAL_TYPE))
10105 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
32dba52b 10106 break;
48e1416a 10107
4f35b1fc 10108 CASE_FLT_FN (BUILT_IN_EXP):
389dd41b 10109 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
467214fd 10110
4f35b1fc 10111 CASE_FLT_FN (BUILT_IN_EXP2):
389dd41b 10112 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
467214fd 10113
4f35b1fc 10114 CASE_FLT_FN (BUILT_IN_EXP10):
10115 CASE_FLT_FN (BUILT_IN_POW10):
389dd41b 10116 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
467214fd 10117
29f4cd78 10118 CASE_FLT_FN (BUILT_IN_EXPM1):
c2f47e15 10119 if (validate_arg (arg0, REAL_TYPE))
10120 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
f8dad9b4 10121 break;
48e1416a 10122
4f35b1fc 10123 CASE_FLT_FN (BUILT_IN_LOG):
f8dad9b4 10124 if (validate_arg (arg0, REAL_TYPE))
10125 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10126 break;
467214fd 10127
4f35b1fc 10128 CASE_FLT_FN (BUILT_IN_LOG2):
f8dad9b4 10129 if (validate_arg (arg0, REAL_TYPE))
10130 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10131 break;
467214fd 10132
4f35b1fc 10133 CASE_FLT_FN (BUILT_IN_LOG10):
f8dad9b4 10134 if (validate_arg (arg0, REAL_TYPE))
10135 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10136 break;
29f4cd78 10137
10138 CASE_FLT_FN (BUILT_IN_LOG1P):
c2f47e15 10139 if (validate_arg (arg0, REAL_TYPE))
10140 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
29f4cd78 10141 &dconstm1, NULL, false);
10142 break;
805e22b2 10143
65dd1378 10144 CASE_FLT_FN (BUILT_IN_J0):
10145 if (validate_arg (arg0, REAL_TYPE))
10146 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10147 NULL, NULL, 0);
10148 break;
10149
10150 CASE_FLT_FN (BUILT_IN_J1):
10151 if (validate_arg (arg0, REAL_TYPE))
10152 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10153 NULL, NULL, 0);
10154 break;
6ff9eeff 10155
10156 CASE_FLT_FN (BUILT_IN_Y0):
10157 if (validate_arg (arg0, REAL_TYPE))
10158 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10159 &dconst0, NULL, false);
10160 break;
10161
10162 CASE_FLT_FN (BUILT_IN_Y1):
10163 if (validate_arg (arg0, REAL_TYPE))
10164 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10165 &dconst0, NULL, false);
10166 break;
65dd1378 10167
4f35b1fc 10168 CASE_FLT_FN (BUILT_IN_NAN):
c4503c0a 10169 case BUILT_IN_NAND32:
10170 case BUILT_IN_NAND64:
10171 case BUILT_IN_NAND128:
c2f47e15 10172 return fold_builtin_nan (arg0, type, true);
b0db7939 10173
4f35b1fc 10174 CASE_FLT_FN (BUILT_IN_NANS):
c2f47e15 10175 return fold_builtin_nan (arg0, type, false);
b0db7939 10176
4f35b1fc 10177 CASE_FLT_FN (BUILT_IN_FLOOR):
389dd41b 10178 return fold_builtin_floor (loc, fndecl, arg0);
277f8dd2 10179
4f35b1fc 10180 CASE_FLT_FN (BUILT_IN_CEIL):
389dd41b 10181 return fold_builtin_ceil (loc, fndecl, arg0);
277f8dd2 10182
4f35b1fc 10183 CASE_FLT_FN (BUILT_IN_TRUNC):
389dd41b 10184 return fold_builtin_trunc (loc, fndecl, arg0);
277f8dd2 10185
4f35b1fc 10186 CASE_FLT_FN (BUILT_IN_ROUND):
389dd41b 10187 return fold_builtin_round (loc, fndecl, arg0);
89ab3887 10188
4f35b1fc 10189 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10190 CASE_FLT_FN (BUILT_IN_RINT):
389dd41b 10191 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
6528f4f4 10192
80ff6494 10193 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 10194 CASE_FLT_FN (BUILT_IN_LCEIL):
10195 CASE_FLT_FN (BUILT_IN_LLCEIL):
10196 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 10197 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 10198 CASE_FLT_FN (BUILT_IN_LLFLOOR):
80ff6494 10199 CASE_FLT_FN (BUILT_IN_IROUND):
a0c938f0 10200 CASE_FLT_FN (BUILT_IN_LROUND):
4f35b1fc 10201 CASE_FLT_FN (BUILT_IN_LLROUND):
389dd41b 10202 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
34f17811 10203
80ff6494 10204 CASE_FLT_FN (BUILT_IN_IRINT):
4f35b1fc 10205 CASE_FLT_FN (BUILT_IN_LRINT):
10206 CASE_FLT_FN (BUILT_IN_LLRINT):
389dd41b 10207 return fold_fixed_mathfn (loc, fndecl, arg0);
9ed65c7f 10208
74bdbe96 10209 case BUILT_IN_BSWAP16:
42791117 10210 case BUILT_IN_BSWAP32:
10211 case BUILT_IN_BSWAP64:
c2f47e15 10212 return fold_builtin_bswap (fndecl, arg0);
42791117 10213
4f35b1fc 10214 CASE_INT_FN (BUILT_IN_FFS):
10215 CASE_INT_FN (BUILT_IN_CLZ):
10216 CASE_INT_FN (BUILT_IN_CTZ):
6aaa1f9e 10217 CASE_INT_FN (BUILT_IN_CLRSB):
4f35b1fc 10218 CASE_INT_FN (BUILT_IN_POPCOUNT):
10219 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 10220 return fold_builtin_bitop (fndecl, arg0);
9c8a1629 10221
4f35b1fc 10222 CASE_FLT_FN (BUILT_IN_SIGNBIT):
389dd41b 10223 return fold_builtin_signbit (loc, arg0, type);
27f261ef 10224
cb2b9385 10225 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
389dd41b 10226 return fold_builtin_significand (loc, arg0, type);
cb2b9385 10227
10228 CASE_FLT_FN (BUILT_IN_ILOGB):
10229 CASE_FLT_FN (BUILT_IN_LOGB):
389dd41b 10230 return fold_builtin_logb (loc, arg0, type);
cb2b9385 10231
d49367d4 10232 case BUILT_IN_ISASCII:
389dd41b 10233 return fold_builtin_isascii (loc, arg0);
d49367d4 10234
10235 case BUILT_IN_TOASCII:
389dd41b 10236 return fold_builtin_toascii (loc, arg0);
d49367d4 10237
df1cf42e 10238 case BUILT_IN_ISDIGIT:
389dd41b 10239 return fold_builtin_isdigit (loc, arg0);
467214fd 10240
4f35b1fc 10241 CASE_FLT_FN (BUILT_IN_FINITE):
c4503c0a 10242 case BUILT_IN_FINITED32:
10243 case BUILT_IN_FINITED64:
10244 case BUILT_IN_FINITED128:
cde061c1 10245 case BUILT_IN_ISFINITE:
a65c4d64 10246 {
10247 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10248 if (ret)
10249 return ret;
10250 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10251 }
726069ba 10252
4f35b1fc 10253 CASE_FLT_FN (BUILT_IN_ISINF):
c4503c0a 10254 case BUILT_IN_ISINFD32:
10255 case BUILT_IN_ISINFD64:
10256 case BUILT_IN_ISINFD128:
a65c4d64 10257 {
10258 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10259 if (ret)
10260 return ret;
10261 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10262 }
10263
10264 case BUILT_IN_ISNORMAL:
10265 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
726069ba 10266
c319d56a 10267 case BUILT_IN_ISINF_SIGN:
389dd41b 10268 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
c319d56a 10269
4f35b1fc 10270 CASE_FLT_FN (BUILT_IN_ISNAN):
c4503c0a 10271 case BUILT_IN_ISNAND32:
10272 case BUILT_IN_ISNAND64:
10273 case BUILT_IN_ISNAND128:
389dd41b 10274 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
c2f47e15 10275
663870fc 10276 case BUILT_IN_FREE:
10277 if (integer_zerop (arg0))
10278 return build_empty_stmt (loc);
10279 break;
10280
c2f47e15 10281 default:
10282 break;
10283 }
10284
10285 return NULL_TREE;
10286
10287}
10288
10289/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
e80cc485 10290 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 10291
10292static tree
e80cc485 10293fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 10294{
10295 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10296 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10297
10298 switch (fcode)
10299 {
65dd1378 10300 CASE_FLT_FN (BUILT_IN_JN):
10301 if (validate_arg (arg0, INTEGER_TYPE)
10302 && validate_arg (arg1, REAL_TYPE))
10303 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10304 break;
6ff9eeff 10305
10306 CASE_FLT_FN (BUILT_IN_YN):
10307 if (validate_arg (arg0, INTEGER_TYPE)
10308 && validate_arg (arg1, REAL_TYPE))
10309 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10310 &dconst0, false);
10311 break;
e5407ca6 10312
10313 CASE_FLT_FN (BUILT_IN_DREM):
10314 CASE_FLT_FN (BUILT_IN_REMAINDER):
10315 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10316 && validate_arg (arg1, REAL_TYPE))
e5407ca6 10317 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10318 break;
e84da7c1 10319
10320 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10321 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10322 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10323 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 10324 return do_mpfr_lgamma_r (arg0, arg1, type);
10325 break;
c2f47e15 10326
10327 CASE_FLT_FN (BUILT_IN_ATAN2):
10328 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10329 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10330 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10331 break;
10332
10333 CASE_FLT_FN (BUILT_IN_FDIM):
10334 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10335 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10336 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10337 break;
10338
10339 CASE_FLT_FN (BUILT_IN_HYPOT):
389dd41b 10340 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
c2f47e15 10341
c699fab8 10342 CASE_FLT_FN (BUILT_IN_CPOW):
10343 if (validate_arg (arg0, COMPLEX_TYPE)
10344 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10345 && validate_arg (arg1, COMPLEX_TYPE)
48e1416a 10346 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
652d9409 10347 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
c699fab8 10348 break;
c699fab8 10349
7587301b 10350 CASE_FLT_FN (BUILT_IN_LDEXP):
389dd41b 10351 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
7587301b 10352 CASE_FLT_FN (BUILT_IN_SCALBN):
10353 CASE_FLT_FN (BUILT_IN_SCALBLN):
389dd41b 10354 return fold_builtin_load_exponent (loc, arg0, arg1,
10355 type, /*ldexp=*/false);
7587301b 10356
3838b9ae 10357 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 10358 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 10359
ebf8b4f5 10360 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 10361 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 10362
c2f47e15 10363 case BUILT_IN_STRSTR:
389dd41b 10364 return fold_builtin_strstr (loc, arg0, arg1, type);
c2f47e15 10365
c2f47e15 10366 case BUILT_IN_STRSPN:
389dd41b 10367 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 10368
10369 case BUILT_IN_STRCSPN:
389dd41b 10370 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 10371
10372 case BUILT_IN_STRCHR:
10373 case BUILT_IN_INDEX:
389dd41b 10374 return fold_builtin_strchr (loc, arg0, arg1, type);
c2f47e15 10375
10376 case BUILT_IN_STRRCHR:
10377 case BUILT_IN_RINDEX:
389dd41b 10378 return fold_builtin_strrchr (loc, arg0, arg1, type);
c2f47e15 10379
c2f47e15 10380 case BUILT_IN_STRCMP:
389dd41b 10381 return fold_builtin_strcmp (loc, arg0, arg1);
c2f47e15 10382
10383 case BUILT_IN_STRPBRK:
389dd41b 10384 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 10385
10386 case BUILT_IN_EXPECT:
c83059be 10387 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 10388
10389 CASE_FLT_FN (BUILT_IN_POW):
389dd41b 10390 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
c2f47e15 10391
10392 CASE_FLT_FN (BUILT_IN_POWI):
389dd41b 10393 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
c2f47e15 10394
10395 CASE_FLT_FN (BUILT_IN_COPYSIGN):
389dd41b 10396 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
c2f47e15 10397
10398 CASE_FLT_FN (BUILT_IN_FMIN):
389dd41b 10399 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
c2f47e15 10400
10401 CASE_FLT_FN (BUILT_IN_FMAX):
389dd41b 10402 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
726069ba 10403
9bc9f15f 10404 case BUILT_IN_ISGREATER:
389dd41b 10405 return fold_builtin_unordered_cmp (loc, fndecl,
10406 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 10407 case BUILT_IN_ISGREATEREQUAL:
389dd41b 10408 return fold_builtin_unordered_cmp (loc, fndecl,
10409 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 10410 case BUILT_IN_ISLESS:
389dd41b 10411 return fold_builtin_unordered_cmp (loc, fndecl,
10412 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 10413 case BUILT_IN_ISLESSEQUAL:
389dd41b 10414 return fold_builtin_unordered_cmp (loc, fndecl,
10415 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 10416 case BUILT_IN_ISLESSGREATER:
389dd41b 10417 return fold_builtin_unordered_cmp (loc, fndecl,
10418 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 10419 case BUILT_IN_ISUNORDERED:
389dd41b 10420 return fold_builtin_unordered_cmp (loc, fndecl,
10421 arg0, arg1, UNORDERED_EXPR,
d5019fe8 10422 NOP_EXPR);
9bc9f15f 10423
7c2f0500 10424 /* We do the folding for va_start in the expander. */
10425 case BUILT_IN_VA_START:
10426 break;
f0613857 10427
0a39fd54 10428 case BUILT_IN_OBJECT_SIZE:
c2f47e15 10429 return fold_builtin_object_size (arg0, arg1);
0a39fd54 10430
1cd6e20d 10431 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10432 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10433
10434 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10435 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10436
c2f47e15 10437 default:
10438 break;
10439 }
10440 return NULL_TREE;
10441}
10442
10443/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 10444 and ARG2.
c2f47e15 10445 This function returns NULL_TREE if no simplification was possible. */
10446
10447static tree
389dd41b 10448fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 10449 tree arg0, tree arg1, tree arg2)
c2f47e15 10450{
10451 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10452 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10453 switch (fcode)
10454 {
10455
10456 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 10457 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 10458
10459 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 10460 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 10461 break;
10462
e5407ca6 10463 CASE_FLT_FN (BUILT_IN_REMQUO):
10464 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10465 && validate_arg (arg1, REAL_TYPE)
10466 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 10467 return do_mpfr_remquo (arg0, arg1, arg2);
10468 break;
e5407ca6 10469
c2f47e15 10470 case BUILT_IN_STRNCMP:
389dd41b 10471 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
c2f47e15 10472
7959b13b 10473 case BUILT_IN_MEMCHR:
389dd41b 10474 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
7959b13b 10475
c2f47e15 10476 case BUILT_IN_BCMP:
10477 case BUILT_IN_MEMCMP:
389dd41b 10478      return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 10479
c83059be 10480 case BUILT_IN_EXPECT:
10481 return fold_builtin_expect (loc, arg0, arg1, arg2);
10482
0c93c8a9 10483 case BUILT_IN_ADD_OVERFLOW:
10484 case BUILT_IN_SUB_OVERFLOW:
10485 case BUILT_IN_MUL_OVERFLOW:
10486 case BUILT_IN_SADD_OVERFLOW:
10487 case BUILT_IN_SADDL_OVERFLOW:
10488 case BUILT_IN_SADDLL_OVERFLOW:
10489 case BUILT_IN_SSUB_OVERFLOW:
10490 case BUILT_IN_SSUBL_OVERFLOW:
10491 case BUILT_IN_SSUBLL_OVERFLOW:
10492 case BUILT_IN_SMUL_OVERFLOW:
10493 case BUILT_IN_SMULL_OVERFLOW:
10494 case BUILT_IN_SMULLL_OVERFLOW:
10495 case BUILT_IN_UADD_OVERFLOW:
10496 case BUILT_IN_UADDL_OVERFLOW:
10497 case BUILT_IN_UADDLL_OVERFLOW:
10498 case BUILT_IN_USUB_OVERFLOW:
10499 case BUILT_IN_USUBL_OVERFLOW:
10500 case BUILT_IN_USUBLL_OVERFLOW:
10501 case BUILT_IN_UMUL_OVERFLOW:
10502 case BUILT_IN_UMULL_OVERFLOW:
10503 case BUILT_IN_UMULLL_OVERFLOW:
10504 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10505
650e4c94 10506 default:
10507 break;
10508 }
c2f47e15 10509 return NULL_TREE;
10510}
650e4c94 10511
c2f47e15 10512/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 10513   arguments.  The trailing bool parameter (true if the result of the
 10514   call is ignored) is currently unused.  This function returns NULL_TREE if no
10515 simplification was possible. */
48e1416a 10516
2165588a 10517tree
e80cc485 10518fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 10519{
10520 tree ret = NULL_TREE;
a7f5bb2d 10521
c2f47e15 10522 switch (nargs)
10523 {
10524 case 0:
e80cc485 10525 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 10526 break;
10527 case 1:
e80cc485 10528 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 10529 break;
10530 case 2:
e80cc485 10531 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 10532 break;
10533 case 3:
e80cc485 10534 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 10535 break;
c2f47e15 10536 default:
e80cc485 10537 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 10538 break;
10539 }
10540 if (ret)
10541 {
75a70cf9 10542 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 10543 SET_EXPR_LOCATION (ret, loc);
c2f47e15 10544 TREE_NO_WARNING (ret) = 1;
10545 return ret;
10546 }
10547 return NULL_TREE;
10548}
10549
0e80b01d 10550/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10551 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10552 of arguments in ARGS to be omitted. OLDNARGS is the number of
10553 elements in ARGS. */
c2f47e15 10554
10555static tree
0e80b01d 10556rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10557 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 10558{
0e80b01d 10559 int nargs = oldnargs - skip + n;
10560 tree *buffer;
c2f47e15 10561
0e80b01d 10562 if (n > 0)
c2f47e15 10563 {
0e80b01d 10564 int i, j;
c2f47e15 10565
0e80b01d 10566 buffer = XALLOCAVEC (tree, nargs);
10567 for (i = 0; i < n; i++)
10568 buffer[i] = va_arg (newargs, tree);
10569 for (j = skip; j < oldnargs; j++, i++)
10570 buffer[i] = args[j];
10571 }
10572 else
10573 buffer = args + skip;
19fbe3a4 10574
0e80b01d 10575 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10576}
c2f47e15 10577
198622c0 10578/* Return true if FNDECL shouldn't be folded right now.
10579 If a built-in function has an inline attribute always_inline
 10580   wrapper, defer folding it until after always_inline functions have
 10581   been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10582 might not be performed. */
10583
51d2c51e 10584bool
198622c0 10585avoid_folding_inline_builtin (tree fndecl)
10586{
10587 return (DECL_DECLARED_INLINE_P (fndecl)
10588 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10589 && cfun
10590 && !cfun->always_inline_functions_inlined
10591 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10592}
10593
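/* Illustration only (not part of builtins.c): the kind of always_inline
   wrapper the check above protects, modeled loosely on glibc's
   _FORTIFY_SOURCE headers (simplified, not the actual glibc code).
   Folding the inner call before this wrapper is inlined would lose the
   object-size check.  */
extern __inline __attribute__ ((__always_inline__, __gnu_inline__)) void *
memcpy (void *dest, const void *src, __SIZE_TYPE__ n)
{
  return __builtin___memcpy_chk (dest, src, n,
				 __builtin_object_size (dest, 0));
}
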
4ee9c684 10594/* A wrapper function for builtin folding that prevents warnings for
491e04ef 10595 "statement without effect" and the like, caused by removing the
4ee9c684 10596   call node before the warning is generated.  */
10597
10598tree
389dd41b 10599fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 10600{
c2f47e15 10601 tree ret = NULL_TREE;
10602 tree fndecl = get_callee_fndecl (exp);
10603 if (fndecl
10604 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 10605 && DECL_BUILT_IN (fndecl)
10606 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10607 yet. Defer folding until we see all the arguments
10608 (after inlining). */
10609 && !CALL_EXPR_VA_ARG_PACK (exp))
10610 {
10611 int nargs = call_expr_nargs (exp);
10612
10613 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10614 instead last argument is __builtin_va_arg_pack (). Defer folding
10615 even in that case, until arguments are finalized. */
10616 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10617 {
10618 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10619 if (fndecl2
10620 && TREE_CODE (fndecl2) == FUNCTION_DECL
10621 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10622 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10623 return NULL_TREE;
10624 }
10625
198622c0 10626 if (avoid_folding_inline_builtin (fndecl))
10627 return NULL_TREE;
10628
c2f47e15 10629 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 10630 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10631 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 10632 else
10633 {
9d884767 10634 tree *args = CALL_EXPR_ARGP (exp);
10635 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 10636 if (ret)
389dd41b 10637 return ret;
c2f47e15 10638 }
4ee9c684 10639 }
c2f47e15 10640 return NULL_TREE;
10641}
48e1416a 10642
9d884767 10643/* Fold a CALL_EXPR of type TYPE, with FN as the function expression.
10644 N arguments are passed in the array ARGARRAY. Return a folded
10645 expression or NULL_TREE if no simplification was possible. */
805e22b2 10646
10647tree
9d884767 10648fold_builtin_call_array (location_t loc, tree,
d01f58f9 10649 tree fn,
10650 int n,
10651 tree *argarray)
7e15618b 10652{
9d884767 10653 if (TREE_CODE (fn) != ADDR_EXPR)
10654 return NULL_TREE;
c2f47e15 10655
9d884767 10656 tree fndecl = TREE_OPERAND (fn, 0);
10657 if (TREE_CODE (fndecl) == FUNCTION_DECL
10658 && DECL_BUILT_IN (fndecl))
10659 {
10660 /* If last argument is __builtin_va_arg_pack (), arguments to this
10661 function are not finalized yet. Defer folding until they are. */
10662 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10663 {
10664 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10665 if (fndecl2
10666 && TREE_CODE (fndecl2) == FUNCTION_DECL
10667 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10668 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10669 return NULL_TREE;
10670 }
10671 if (avoid_folding_inline_builtin (fndecl))
10672 return NULL_TREE;
10673 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10674 return targetm.fold_builtin (fndecl, n, argarray, false);
10675 else
10676 return fold_builtin_n (loc, fndecl, argarray, n, false);
10677 }
c2f47e15 10678
9d884767 10679 return NULL_TREE;
c2f47e15 10680}
10681
af1409ad 10682/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10683 along with N new arguments specified as the "..." parameters. SKIP
10684 is the number of arguments in EXP to be omitted. This function is used
10685 to do varargs-to-varargs transformations. */
10686
10687static tree
10688rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10689{
10690 va_list ap;
10691 tree t;
10692
10693 va_start (ap, n);
10694 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10695 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10696 va_end (ap);
c2f47e15 10697
af1409ad 10698 return t;
c2f47e15 10699}
10700
10701/* Validate a single argument ARG against a tree code CODE representing
10702 a type. */
48e1416a 10703
c2f47e15 10704static bool
b7bf20db 10705validate_arg (const_tree arg, enum tree_code code)
c2f47e15 10706{
10707 if (!arg)
10708 return false;
10709 else if (code == POINTER_TYPE)
10710 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 10711 else if (code == INTEGER_TYPE)
10712 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 10713 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 10714}
0eb671f7 10715
75a70cf9 10716/* This function validates the types of a function call argument list
10717 against a specified list of tree_codes. If the last specifier is a 0,
10718 that represents an ellipses, otherwise the last specifier must be a
10719 VOID_TYPE.
10720
10721 This is the GIMPLE version of validate_arglist. Eventually we want to
10722 completely convert builtins.c to work from GIMPLEs and the tree based
10723 validate_arglist will then be removed. */
10724
10725bool
1a91d914 10726validate_gimple_arglist (const gcall *call, ...)
75a70cf9 10727{
10728 enum tree_code code;
10729 bool res = 0;
10730 va_list ap;
10731 const_tree arg;
10732 size_t i;
10733
10734 va_start (ap, call);
10735 i = 0;
10736
10737 do
10738 {
d62e827b 10739 code = (enum tree_code) va_arg (ap, int);
75a70cf9 10740 switch (code)
10741 {
10742 case 0:
 10743	  /* This signifies an ellipsis; any further arguments are all ok.  */
10744 res = true;
10745 goto end;
10746 case VOID_TYPE:
 10747	  /* This signifies an endlink: if no arguments remain, return
 10748	     true; otherwise return false.  */
10749 res = (i == gimple_call_num_args (call));
10750 goto end;
10751 default:
10752 /* If no parameters remain or the parameter's code does not
10753 match the specified code, return false. Otherwise continue
10754 checking any remaining arguments. */
10755 arg = gimple_call_arg (call, i++);
10756 if (!validate_arg (arg, code))
10757 goto end;
10758 break;
10759 }
10760 }
10761 while (1);
10762
10763 /* We need gotos here since we can only have one VA_CLOSE in a
10764 function. */
10765 end: ;
10766 va_end (ap);
10767
10768 return res;
10769}
10770
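/* Usage sketch (hypothetical, not a call lifted from GCC): how a caller
   typically drives the checker above.  The trailing VOID_TYPE terminates
   the list; passing 0 instead would accept any further arguments.  */
static bool
example_check_memcpy_args (const gcall *call)
{
  return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
				  INTEGER_TYPE, VOID_TYPE);
}
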
fc2a2dcb 10771/* Default target-specific builtin expander that does nothing. */
10772
10773rtx
aecda0d6 10774default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10775 rtx target ATTRIBUTE_UNUSED,
10776 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 10777 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 10778 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 10779{
10780 return NULL_RTX;
10781}
c7926a82 10782
01537105 10783/* Returns true if EXP represents data that would potentially reside
10784 in a readonly section. */
10785
b9ea678c 10786bool
01537105 10787readonly_data_expr (tree exp)
10788{
10789 STRIP_NOPS (exp);
10790
9ff0637e 10791 if (TREE_CODE (exp) != ADDR_EXPR)
10792 return false;
10793
10794 exp = get_base_address (TREE_OPERAND (exp, 0));
10795 if (!exp)
10796 return false;
10797
10798 /* Make sure we call decl_readonly_section only for trees it
10799 can handle (since it returns true for everything it doesn't
10800 understand). */
491e04ef 10801 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10802 || TREE_CODE (exp) == CONSTRUCTOR
10803 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10804 return decl_readonly_section (exp, 0);
01537105 10805 else
10806 return false;
10807}
4ee9c684 10808
c2f47e15 10809/* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10810 to the call, and TYPE is its return type.
4ee9c684 10811
c2f47e15 10812 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10813 simplified form of the call as a tree.
10814
10815 The simplified form may be a constant or other expression which
10816 computes the same value, but in a more efficient manner (including
10817 calls to other builtin functions).
10818
10819 The call may contain arguments which need to be evaluated, but
10820 which are not useful to determine the result of the call. In
10821 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10822 COMPOUND_EXPR will be an argument which must be evaluated.
10823 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10824 COMPOUND_EXPR in the chain will contain the tree for the simplified
10825 form of the builtin function call. */
10826
10827static tree
389dd41b 10828fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10829{
c2f47e15 10830 if (!validate_arg (s1, POINTER_TYPE)
10831 || !validate_arg (s2, POINTER_TYPE))
10832 return NULL_TREE;
4ee9c684 10833 else
10834 {
4ee9c684 10835 tree fn;
10836 const char *p1, *p2;
10837
10838 p2 = c_getstr (s2);
10839 if (p2 == NULL)
c2f47e15 10840 return NULL_TREE;
4ee9c684 10841
10842 p1 = c_getstr (s1);
10843 if (p1 != NULL)
10844 {
10845 const char *r = strstr (p1, p2);
daa1d5f5 10846 tree tem;
4ee9c684 10847
4ee9c684 10848 if (r == NULL)
779b4c41 10849 return build_int_cst (TREE_TYPE (s1), 0);
c0c67e38 10850
10851 /* Return an offset into the constant string argument. */
2cc66f2a 10852 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10853 return fold_convert_loc (loc, type, tem);
4ee9c684 10854 }
10855
7efa231c 10856 /* The argument is const char *, and the result is char *, so we need
10857 a type conversion here to avoid a warning. */
4ee9c684 10858 if (p2[0] == '\0')
389dd41b 10859 return fold_convert_loc (loc, type, s1);
4ee9c684 10860
10861 if (p2[1] != '\0')
c2f47e15 10862 return NULL_TREE;
4ee9c684 10863
b9a16870 10864 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10865 if (!fn)
c2f47e15 10866 return NULL_TREE;
4ee9c684 10867
10868 /* New argument list transforming strstr(s1, s2) to
10869 strchr(s1, s2[0]). */
7002a1c8 10870 return build_call_expr_loc (loc, fn, 2, s1,
10871 build_int_cst (integer_type_node, p2[0]));
4ee9c684 10872 }
10873}
10874
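/* Illustration only (not part of builtins.c): the source-level rewrites
   the strstr fold above performs.  Sketch; the example_* names are
   ours.  */
#include <string.h>

const char *example_strstr_single_char (const char *s)
{
  /* One-character constant needle: rewritten to strchr (s, 'l').  */
  return strstr (s, "l");
}

const char *example_strstr_both_const (void)
{
  /* Both arguments constant: folds to an offset into the literal,
     &"hello"[2].  */
  return strstr ("hello", "ll");
}
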
c2f47e15 10875/* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10876 the call, and TYPE is its return type.
4ee9c684 10877
c2f47e15 10878 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10879 simplified form of the call as a tree.
10880
10881 The simplified form may be a constant or other expression which
10882 computes the same value, but in a more efficient manner (including
10883 calls to other builtin functions).
10884
10885 The call may contain arguments which need to be evaluated, but
10886 which are not useful to determine the result of the call. In
10887 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10888 COMPOUND_EXPR will be an argument which must be evaluated.
10889 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10890 COMPOUND_EXPR in the chain will contain the tree for the simplified
10891 form of the builtin function call. */
10892
10893static tree
389dd41b 10894fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10895{
c2f47e15 10896 if (!validate_arg (s1, POINTER_TYPE)
10897 || !validate_arg (s2, INTEGER_TYPE))
10898 return NULL_TREE;
4ee9c684 10899 else
10900 {
4ee9c684 10901 const char *p1;
10902
10903 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10904 return NULL_TREE;
4ee9c684 10905
10906 p1 = c_getstr (s1);
10907 if (p1 != NULL)
10908 {
10909 char c;
10910 const char *r;
daa1d5f5 10911 tree tem;
4ee9c684 10912
10913 if (target_char_cast (s2, &c))
c2f47e15 10914 return NULL_TREE;
4ee9c684 10915
10916 r = strchr (p1, c);
10917
10918 if (r == NULL)
779b4c41 10919 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10920
10921 /* Return an offset into the constant string argument. */
2cc66f2a 10922 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10923 return fold_convert_loc (loc, type, tem);
4ee9c684 10924 }
c2f47e15 10925 return NULL_TREE;
4ee9c684 10926 }
10927}
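
/* Illustrative sketch (not part of builtins.c): with a constant string and
   a constant character, the folding above yields a pointer into the string,
   or a null pointer when the character does not occur.  The function names
   are hypothetical.  */

static const char *
example_strchr_constant_fold (void)
{
  /* 'l' first occurs at offset 2, so this folds to "llo".  */
  return __builtin_strchr ("hello", 'l');
}

static const char *
example_strchr_no_match (void)
{
  /* 'z' never occurs, so this folds to a null pointer constant.  */
  return __builtin_strchr ("hello", 'z');
}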
10928
c2f47e15 10929/* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10930 the call, and TYPE is its return type.
4ee9c684 10931
c2f47e15 10932 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10933 simplified form of the call as a tree.
10934
10935 The simplified form may be a constant or other expression which
10936 computes the same value, but in a more efficient manner (including
10937 calls to other builtin functions).
10938
10939 The call may contain arguments which need to be evaluated, but
10940 which are not useful to determine the result of the call. In
10941 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10942 COMPOUND_EXPR will be an argument which must be evaluated.
10943 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10944 COMPOUND_EXPR in the chain will contain the tree for the simplified
10945 form of the builtin function call. */
10946
10947static tree
389dd41b 10948fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10949{
c2f47e15 10950 if (!validate_arg (s1, POINTER_TYPE)
10951 || !validate_arg (s2, INTEGER_TYPE))
10952 return NULL_TREE;
4ee9c684 10953 else
10954 {
4ee9c684 10955 tree fn;
10956 const char *p1;
10957
10958 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10959 return NULL_TREE;
4ee9c684 10960
10961 p1 = c_getstr (s1);
10962 if (p1 != NULL)
10963 {
10964 char c;
10965 const char *r;
daa1d5f5 10966 tree tem;
4ee9c684 10967
10968 if (target_char_cast (s2, &c))
c2f47e15 10969 return NULL_TREE;
4ee9c684 10970
10971 r = strrchr (p1, c);
10972
10973 if (r == NULL)
779b4c41 10974 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10975
10976 /* Return an offset into the constant string argument. */
2cc66f2a 10977 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10978 return fold_convert_loc (loc, type, tem);
4ee9c684 10979 }
10980
10981 if (! integer_zerop (s2))
c2f47e15 10982 return NULL_TREE;
4ee9c684 10983
b9a16870 10984 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10985 if (!fn)
c2f47e15 10986 return NULL_TREE;
4ee9c684 10987
10988 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
389dd41b 10989 return build_call_expr_loc (loc, fn, 2, s1, s2);
4ee9c684 10990 }
10991}
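
/* Illustrative sketch (not part of builtins.c): searching a constant string
   folds to the offset of the last occurrence, and searching for '\0' is
   rewritten as strchr, since both return a pointer to the terminating NUL.
   The function names are hypothetical.  */

static const char *
example_strrchr_constant_fold (void)
{
  /* The last 'l' in "hello" is at offset 3, so this folds to "lo".  */
  return __builtin_strrchr ("hello", 'l');
}

static const char *
example_strrchr_nul_to_strchr (const char *s)
{
  /* Rewritten as __builtin_strchr (s, '\0'), i.e. s + strlen (s).  */
  return __builtin_strrchr (s, '\0');
}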
10992
c2f47e15 10993/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10994 to the call, and TYPE is its return type.
4ee9c684 10995
c2f47e15 10996 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10997 simplified form of the call as a tree.
10998
10999 The simplified form may be a constant or other expression which
11000 computes the same value, but in a more efficient manner (including
11001 calls to other builtin functions).
11002
11003 The call may contain arguments which need to be evaluated, but
11004 which are not useful to determine the result of the call. In
11005 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11006 COMPOUND_EXPR will be an argument which must be evaluated.
11007 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11008 COMPOUND_EXPR in the chain will contain the tree for the simplified
11009 form of the builtin function call. */
11010
11011static tree
389dd41b 11012fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 11013{
c2f47e15 11014 if (!validate_arg (s1, POINTER_TYPE)
11015 || !validate_arg (s2, POINTER_TYPE))
11016 return NULL_TREE;
4ee9c684 11017 else
11018 {
4ee9c684 11019 tree fn;
11020 const char *p1, *p2;
11021
11022 p2 = c_getstr (s2);
11023 if (p2 == NULL)
c2f47e15 11024 return NULL_TREE;
4ee9c684 11025
11026 p1 = c_getstr (s1);
11027 if (p1 != NULL)
11028 {
11029 const char *r = strpbrk (p1, p2);
daa1d5f5 11030 tree tem;
4ee9c684 11031
11032 if (r == NULL)
779b4c41 11033 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 11034
11035 /* Return an offset into the constant string argument. */
2cc66f2a 11036 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 11037 return fold_convert_loc (loc, type, tem);
4ee9c684 11038 }
11039
11040 if (p2[0] == '\0')
05abc81b 11041 /* strpbrk(x, "") == NULL.
11042 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 11043 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 11044
11045 if (p2[1] != '\0')
c2f47e15 11046 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 11047
b9a16870 11048 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 11049 if (!fn)
c2f47e15 11050 return NULL_TREE;
4ee9c684 11051
11052 /* New argument list transforming strpbrk(s1, s2) to
11053 strchr(s1, s2[0]). */
7002a1c8 11054 return build_call_expr_loc (loc, fn, 2, s1,
11055 build_int_cst (integer_type_node, p2[0]));
4ee9c684 11056 }
11057}
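
/* Illustrative sketch (not part of builtins.c): strpbrk with an empty
   accept set folds to a null pointer, and a one-character accept set is
   rewritten as strchr.  The function name is hypothetical.  */

static const char *
example_strpbrk_folds (const char *s)
{
  /* Folds to a null pointer constant; S is still evaluated in case it
     has side effects.  */
  const char *none = __builtin_strpbrk (s, "");

  /* Rewritten as __builtin_strchr (s, ',').  */
  const char *comma = __builtin_strpbrk (s, ",");

  return none ? none : comma;
}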
11058
c2f47e15 11059/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11060 to the call.
4ee9c684 11061
c2f47e15 11062 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11063 simplified form of the call as a tree.
11064
11065 The simplified form may be a constant or other expression which
11066 computes the same value, but in a more efficient manner (including
11067 calls to other builtin functions).
11068
11069 The call may contain arguments which need to be evaluated, but
11070 which are not useful to determine the result of the call. In
11071 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11072 COMPOUND_EXPR will be an argument which must be evaluated.
11073 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11074 COMPOUND_EXPR in the chain will contain the tree for the simplified
11075 form of the builtin function call. */
11076
11077static tree
389dd41b 11078fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 11079{
c2f47e15 11080 if (!validate_arg (s1, POINTER_TYPE)
11081 || !validate_arg (s2, POINTER_TYPE))
11082 return NULL_TREE;
4ee9c684 11083 else
11084 {
4ee9c684 11085 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11086
11087 /* If both arguments are constants, evaluate at compile-time. */
11088 if (p1 && p2)
11089 {
11090 const size_t r = strspn (p1, p2);
547b938d 11091 return build_int_cst (size_type_node, r);
4ee9c684 11092 }
11093
c2f47e15 11094 /* If either argument is "", the result is zero. */
4ee9c684 11095 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 11096 /* Evaluate and ignore both arguments in case either one has
11097 side-effects. */
389dd41b 11098 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 11099 s1, s2);
c2f47e15 11100 return NULL_TREE;
4ee9c684 11101 }
11102}
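
/* Illustrative sketch (not part of builtins.c): when both arguments are
   string literals the strspn call folds to a compile-time constant, and an
   empty argument folds to zero.  The function name is hypothetical.  */

static __SIZE_TYPE__
example_strspn_folds (const char *s)
{
  /* "abcab" consists entirely of characters from "abc", so this folds
     to the constant 5.  */
  __SIZE_TYPE__ k = __builtin_strspn ("abcab", "abc");

  /* An empty accept set gives a span of zero; S is still evaluated.  */
  return k + __builtin_strspn (s, "");
}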
11103
c2f47e15 11104/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11105 to the call.
4ee9c684 11106
c2f47e15 11107 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11108 simplified form of the call as a tree.
11109
11110 The simplified form may be a constant or other expression which
11111 computes the same value, but in a more efficient manner (including
11112 calls to other builtin functions).
11113
11114 The call may contain arguments which need to be evaluated, but
11115 which are not useful to determine the result of the call. In
11116 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11117 COMPOUND_EXPR will be an argument which must be evaluated.
11118 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11119 COMPOUND_EXPR in the chain will contain the tree for the simplified
11120 form of the builtin function call. */
11121
11122static tree
389dd41b 11123fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 11124{
c2f47e15 11125 if (!validate_arg (s1, POINTER_TYPE)
11126 || !validate_arg (s2, POINTER_TYPE))
11127 return NULL_TREE;
4ee9c684 11128 else
11129 {
4ee9c684 11130 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11131
11132 /* If both arguments are constants, evaluate at compile-time. */
11133 if (p1 && p2)
11134 {
11135 const size_t r = strcspn (p1, p2);
547b938d 11136 return build_int_cst (size_type_node, r);
4ee9c684 11137 }
11138
c2f47e15 11139 /* If the first argument is "", the result is zero. */
4ee9c684 11140 if (p1 && *p1 == '\0')
11141 {
11142 /* Evaluate and ignore argument s2 in case it has
11143 side-effects. */
389dd41b 11144 return omit_one_operand_loc (loc, size_type_node,
39761420 11145 size_zero_node, s2);
4ee9c684 11146 }
11147
11148 /* If the second argument is "", return __builtin_strlen(s1). */
11149 if (p2 && *p2 == '\0')
11150 {
b9a16870 11151 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 11152
11153 /* If the replacement _DECL isn't initialized, don't do the
11154 transformation. */
11155 if (!fn)
c2f47e15 11156 return NULL_TREE;
4ee9c684 11157
389dd41b 11158 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 11159 }
c2f47e15 11160 return NULL_TREE;
4ee9c684 11161 }
11162}
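
/* Illustrative sketch (not part of builtins.c): strcspn folds to a constant
   for literal arguments, to zero when the first argument is "", and to a
   strlen call when the reject set is empty.  The function name is
   hypothetical.  */

static __SIZE_TYPE__
example_strcspn_folds (const char *s)
{
  /* The initial segment of "hello" containing neither 'l' nor 'o' is
     "he", so this folds to the constant 2.  */
  __SIZE_TYPE__ k = __builtin_strcspn ("hello", "lo");

  /* An empty reject set is rewritten as __builtin_strlen (s).  */
  return k + __builtin_strcspn (s, "");
}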
11163
c2f47e15 11164/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 11165 produced. False otherwise. This is done so that we don't output the error
11166 or warning twice or three times. */
75a70cf9 11167
743b0c6a 11168bool
c2f47e15 11169fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 11170{
11171 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 11172 int nargs = call_expr_nargs (exp);
11173 tree arg;
d98fd4a4 11174 /* There is a good chance the current input_location points inside the
11175 definition of the va_start macro (perhaps on the token for
11176 builtin) in a system header, so warnings will not be emitted.
11177 Use the location in real source code. */
11178 source_location current_location =
11179 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11180 NULL);
4ee9c684 11181
257d99c3 11182 if (!stdarg_p (fntype))
743b0c6a 11183 {
11184 error ("%<va_start%> used in function with fixed args");
11185 return true;
11186 }
c2f47e15 11187
11188 if (va_start_p)
79012a9d 11189 {
c2f47e15 11190 if (va_start_p && (nargs != 2))
11191 {
11192 error ("wrong number of arguments to function %<va_start%>");
11193 return true;
11194 }
11195 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 11196 }
11197 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11198 when we checked the arguments and if needed issued a warning. */
c2f47e15 11199 else
4ee9c684 11200 {
c2f47e15 11201 if (nargs == 0)
11202 {
11203 /* Evidently an out of date version of <stdarg.h>; can't validate
11204 va_start's second argument, but can still work as intended. */
d98fd4a4 11205 warning_at (current_location,
7edb1062 11206 OPT_Wvarargs,
11207 "%<__builtin_next_arg%> called without an argument");
c2f47e15 11208 return true;
11209 }
11210 else if (nargs > 1)
a0c938f0 11211 {
c2f47e15 11212 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 11213 return true;
11214 }
c2f47e15 11215 arg = CALL_EXPR_ARG (exp, 0);
11216 }
11217
a8dd994c 11218 if (TREE_CODE (arg) == SSA_NAME)
11219 arg = SSA_NAME_VAR (arg);
11220
c2f47e15 11221 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 11222 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 11223 the arguments and if needed issuing a warning. */
11224 if (!integer_zerop (arg))
11225 {
11226 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 11227
4ee9c684 11228 /* Strip off all nops for the sake of the comparison. This
11229 is not quite the same as STRIP_NOPS. It does more.
11230 We must also strip off INDIRECT_EXPR for C++ reference
11231 parameters. */
72dd6141 11232 while (CONVERT_EXPR_P (arg)
4ee9c684 11233 || TREE_CODE (arg) == INDIRECT_REF)
11234 arg = TREE_OPERAND (arg, 0);
11235 if (arg != last_parm)
a0c938f0 11236 {
b08cf617 11237 /* FIXME: Sometimes with the tree optimizers we can get
 11238 something other than the last argument even though the user
 11239 used the last argument. We just warn and set the arg to be the
 11240 last argument so that we will get wrong-code because of
 11241 it. */
d98fd4a4 11242 warning_at (current_location,
7edb1062 11243 OPT_Wvarargs,
d98fd4a4 11244 "second parameter of %<va_start%> not last named argument");
743b0c6a 11245 }
24158ad7 11246
11247 /* Undefined by C99 7.15.1.4p4 (va_start):
11248 "If the parameter parmN is declared with the register storage
11249 class, with a function or array type, or with a type that is
11250 not compatible with the type that results after application of
11251 the default argument promotions, the behavior is undefined."
11252 */
11253 else if (DECL_REGISTER (arg))
d98fd4a4 11254 {
11255 warning_at (current_location,
7edb1062 11256 OPT_Wvarargs,
d98fd4a4 11257 "undefined behaviour when second parameter of "
11258 "%<va_start%> is declared with %<register%> storage");
11259 }
24158ad7 11260
79012a9d 11261 /* We want to verify the second parameter just once before the tree
a0c938f0 11262 optimizers are run and then avoid keeping it in the tree,
11263 as otherwise we could warn even for correct code like:
11264 void foo (int i, ...)
11265 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 11266 if (va_start_p)
11267 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11268 else
11269 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 11270 }
11271 return false;
4ee9c684 11272}
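
/* Illustrative sketch (not part of builtins.c): the checks above accept a
   va_start whose second argument is the last named parameter of a variadic
   function.  The function name is hypothetical; the builtins stand in for
   the <stdarg.h> macros.  */

static int
example_va_start_use (int count, ...)
{
  __builtin_va_list ap;
  int i, sum = 0;

  /* COUNT is the last named parameter, so this passes the checks and is
     then rewritten internally as __builtin_va_start (ap, 0).  */
  __builtin_va_start (ap, count);
  for (i = 0; i < count; i++)
    sum += __builtin_va_arg (ap, int);
  __builtin_va_end (ap);
  return sum;
}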
11273
11274
c2f47e15 11275/* Expand a call EXP to __builtin_object_size. */
0a39fd54 11276
f7715905 11277static rtx
0a39fd54 11278expand_builtin_object_size (tree exp)
11279{
11280 tree ost;
11281 int object_size_type;
11282 tree fndecl = get_callee_fndecl (exp);
0a39fd54 11283
c2f47e15 11284 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 11285 {
b8c23db3 11286 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11287 exp, fndecl);
0a39fd54 11288 expand_builtin_trap ();
11289 return const0_rtx;
11290 }
11291
c2f47e15 11292 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 11293 STRIP_NOPS (ost);
11294
11295 if (TREE_CODE (ost) != INTEGER_CST
11296 || tree_int_cst_sgn (ost) < 0
11297 || compare_tree_int (ost, 3) > 0)
11298 {
b8c23db3 11299 error ("%Klast argument of %D is not integer constant between 0 and 3",
11300 exp, fndecl);
0a39fd54 11301 expand_builtin_trap ();
11302 return const0_rtx;
11303 }
11304
e913b5cd 11305 object_size_type = tree_to_shwi (ost);
0a39fd54 11306
11307 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11308}
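
/* Illustrative sketch (not part of builtins.c): when the object size cannot
   be determined, the expansion above yields (size_t) -1 for types 0 and 1
   and (size_t) 0 for types 2 and 3.  The function name is hypothetical.  */

static __SIZE_TYPE__
example_object_size_unknown (void *p)
{
  /* Nothing is known about P's pointee, so these expand to -1 and 0
     respectively.  */
  __SIZE_TYPE__ max_size = __builtin_object_size (p, 0);
  __SIZE_TYPE__ min_size = __builtin_object_size (p, 2);
  return max_size - min_size;
}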
11309
11310/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11311 FCODE is the BUILT_IN_* to use.
c2f47e15 11312 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 11313 otherwise try to get the result in TARGET, if convenient (and in
11314 mode MODE if that's convenient). */
11315
11316static rtx
3754d046 11317expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 11318 enum built_in_function fcode)
11319{
0a39fd54 11320 tree dest, src, len, size;
11321
c2f47e15 11322 if (!validate_arglist (exp,
0a39fd54 11323 POINTER_TYPE,
11324 fcode == BUILT_IN_MEMSET_CHK
11325 ? INTEGER_TYPE : POINTER_TYPE,
11326 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 11327 return NULL_RTX;
0a39fd54 11328
c2f47e15 11329 dest = CALL_EXPR_ARG (exp, 0);
11330 src = CALL_EXPR_ARG (exp, 1);
11331 len = CALL_EXPR_ARG (exp, 2);
11332 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11333
e913b5cd 11334 if (! tree_fits_uhwi_p (size))
c2f47e15 11335 return NULL_RTX;
0a39fd54 11336
e913b5cd 11337 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 11338 {
11339 tree fn;
11340
11341 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11342 {
b430e8d9 11343 warning_at (tree_nonartificial_location (exp),
11344 0, "%Kcall to %D will always overflow destination buffer",
11345 exp, get_callee_fndecl (exp));
c2f47e15 11346 return NULL_RTX;
0a39fd54 11347 }
11348
0a39fd54 11349 fn = NULL_TREE;
11350 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11351 mem{cpy,pcpy,move,set} is available. */
11352 switch (fcode)
11353 {
11354 case BUILT_IN_MEMCPY_CHK:
b9a16870 11355 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 11356 break;
11357 case BUILT_IN_MEMPCPY_CHK:
b9a16870 11358 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 11359 break;
11360 case BUILT_IN_MEMMOVE_CHK:
b9a16870 11361 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 11362 break;
11363 case BUILT_IN_MEMSET_CHK:
b9a16870 11364 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 11365 break;
11366 default:
11367 break;
11368 }
11369
11370 if (! fn)
c2f47e15 11371 return NULL_RTX;
0a39fd54 11372
0568e9c1 11373 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 11374 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11375 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11376 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11377 }
11378 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 11379 return NULL_RTX;
0a39fd54 11380 else
11381 {
957d0361 11382 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 11383
11384 /* If DEST is not a pointer type, call the normal function. */
11385 if (dest_align == 0)
c2f47e15 11386 return NULL_RTX;
0a39fd54 11387
11388 /* If SRC and DEST are the same (and not volatile), do nothing. */
11389 if (operand_equal_p (src, dest, 0))
11390 {
11391 tree expr;
11392
11393 if (fcode != BUILT_IN_MEMPCPY_CHK)
11394 {
11395 /* Evaluate and ignore LEN in case it has side-effects. */
11396 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11397 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11398 }
11399
2cc66f2a 11400 expr = fold_build_pointer_plus (dest, len);
0a39fd54 11401 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11402 }
11403
11404 /* __memmove_chk special case. */
11405 if (fcode == BUILT_IN_MEMMOVE_CHK)
11406 {
957d0361 11407 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 11408
11409 if (src_align == 0)
c2f47e15 11410 return NULL_RTX;
0a39fd54 11411
11412 /* If src is categorized for a readonly section we can use
11413 normal __memcpy_chk. */
11414 if (readonly_data_expr (src))
11415 {
b9a16870 11416 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 11417 if (!fn)
c2f47e15 11418 return NULL_RTX;
0568e9c1 11419 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11420 dest, src, len, size);
a65c4d64 11421 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11422 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11423 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11424 }
11425 }
c2f47e15 11426 return NULL_RTX;
0a39fd54 11427 }
11428}
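
/* Illustrative sketch (not part of builtins.c): a checked copy whose
   constant length fits in the destination is expanded as a plain memcpy,
   and an unknown object size (-1) makes the check a no-op.  The function
   name is hypothetical.  */

static void
example_memcpy_chk (char *dst, const char *src)
{
  char buf[16];

  /* The object size is 16 and the constant length 8 fits, so this is
     expanded as an ordinary __builtin_memcpy.  */
  __builtin___memcpy_chk (buf, src, 8, __builtin_object_size (buf, 0));

  /* DST's object size is unknown (-1), so the check is dropped and the
     call is likewise expanded as memcpy.  */
  __builtin___memcpy_chk (dst, buf, 8, __builtin_object_size (dst, 0));
}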
11429
11430/* Emit warning if a buffer overflow is detected at compile time. */
11431
11432static void
11433maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11434{
c2f47e15 11435 int is_strlen = 0;
0a39fd54 11436 tree len, size;
b430e8d9 11437 location_t loc = tree_nonartificial_location (exp);
0a39fd54 11438
11439 switch (fcode)
11440 {
11441 case BUILT_IN_STRCPY_CHK:
11442 case BUILT_IN_STPCPY_CHK:
11443 /* For __strcat_chk the warning will be emitted only if overflowing
11444 by at least strlen (dest) + 1 bytes. */
11445 case BUILT_IN_STRCAT_CHK:
c2f47e15 11446 len = CALL_EXPR_ARG (exp, 1);
11447 size = CALL_EXPR_ARG (exp, 2);
0a39fd54 11448 is_strlen = 1;
11449 break;
b356dfef 11450 case BUILT_IN_STRNCAT_CHK:
0a39fd54 11451 case BUILT_IN_STRNCPY_CHK:
1063acde 11452 case BUILT_IN_STPNCPY_CHK:
c2f47e15 11453 len = CALL_EXPR_ARG (exp, 2);
11454 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11455 break;
11456 case BUILT_IN_SNPRINTF_CHK:
11457 case BUILT_IN_VSNPRINTF_CHK:
c2f47e15 11458 len = CALL_EXPR_ARG (exp, 1);
11459 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11460 break;
11461 default:
11462 gcc_unreachable ();
11463 }
11464
0a39fd54 11465 if (!len || !size)
11466 return;
11467
e913b5cd 11468 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11469 return;
11470
11471 if (is_strlen)
11472 {
11473 len = c_strlen (len, 1);
e913b5cd 11474 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
0a39fd54 11475 return;
11476 }
b356dfef 11477 else if (fcode == BUILT_IN_STRNCAT_CHK)
11478 {
c2f47e15 11479 tree src = CALL_EXPR_ARG (exp, 1);
e913b5cd 11480 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
b356dfef 11481 return;
11482 src = c_strlen (src, 1);
e913b5cd 11483 if (! src || ! tree_fits_uhwi_p (src))
b356dfef 11484 {
b430e8d9 11485 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11486 exp, get_callee_fndecl (exp));
b356dfef 11487 return;
11488 }
11489 else if (tree_int_cst_lt (src, size))
11490 return;
11491 }
e913b5cd 11492 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
0a39fd54 11493 return;
11494
b430e8d9 11495 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11496 exp, get_callee_fndecl (exp));
0a39fd54 11497}
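
/* Illustrative sketch (not part of builtins.c): a call that draws the
   warning above, because the constant source needs strlen ("overflow") + 1
   == 9 bytes while the destination object is only 4 bytes.  The function
   name is hypothetical.  */

static void
example_strcpy_chk_overflow (void)
{
  char buf[4];
  /* Diagnosed as a call that will always overflow the destination
     buffer.  */
  __builtin___strcpy_chk (buf, "overflow", __builtin_object_size (buf, 0));
}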
11498
11499/* Emit warning if a buffer overflow is detected at compile time
11500 in __sprintf_chk/__vsprintf_chk calls. */
11501
11502static void
11503maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11504{
1e4adcfc 11505 tree size, len, fmt;
0a39fd54 11506 const char *fmt_str;
c2f47e15 11507 int nargs = call_expr_nargs (exp);
0a39fd54 11508
11509 /* Verify the required arguments in the original call. */
48e1416a 11510
c2f47e15 11511 if (nargs < 4)
0a39fd54 11512 return;
c2f47e15 11513 size = CALL_EXPR_ARG (exp, 2);
11514 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 11515
e913b5cd 11516 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11517 return;
11518
11519 /* Check whether the format is a literal string constant. */
11520 fmt_str = c_getstr (fmt);
11521 if (fmt_str == NULL)
11522 return;
11523
d4473c84 11524 if (!init_target_chars ())
99eabcc1 11525 return;
11526
0a39fd54 11527 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 11528 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 11529 len = build_int_cstu (size_type_node, strlen (fmt_str));
11530 /* If the format is "%s" and first ... argument is a string literal,
11531 we know it too. */
c2f47e15 11532 else if (fcode == BUILT_IN_SPRINTF_CHK
11533 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 11534 {
11535 tree arg;
11536
c2f47e15 11537 if (nargs < 5)
0a39fd54 11538 return;
c2f47e15 11539 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 11540 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11541 return;
11542
11543 len = c_strlen (arg, 1);
e913b5cd 11544 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 11545 return;
11546 }
11547 else
11548 return;
11549
11550 if (! tree_int_cst_lt (len, size))
b430e8d9 11551 warning_at (tree_nonartificial_location (exp),
11552 0, "%Kcall to %D will always overflow destination buffer",
11553 exp, get_callee_fndecl (exp));
0a39fd54 11554}
11555
2c281b15 11556/* Emit warning if a free is called with address of a variable. */
11557
11558static void
11559maybe_emit_free_warning (tree exp)
11560{
11561 tree arg = CALL_EXPR_ARG (exp, 0);
11562
11563 STRIP_NOPS (arg);
11564 if (TREE_CODE (arg) != ADDR_EXPR)
11565 return;
11566
11567 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 11568 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 11569 return;
11570
11571 if (SSA_VAR_P (arg))
f74ea1c2 11572 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11573 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 11574 else
f74ea1c2 11575 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11576 "%Kattempt to free a non-heap object", exp);
2c281b15 11577}
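
/* Illustrative sketch (not part of builtins.c): the warning above fires
   when free is applied to the address of a declared object rather than to
   heap memory.  The function name is hypothetical.  */

static void
example_free_nonheap (void)
{
  int local;
  /* The argument is the ADDR_EXPR of a variable, so this is diagnosed as
     an attempt to free a non-heap object.  */
  __builtin_free (&local);
}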
11578
c2f47e15 11579/* Fold a call to __builtin_object_size with arguments PTR and OST,
11580 if possible. */
0a39fd54 11581
f7715905 11582static tree
c2f47e15 11583fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 11584{
a6caa15f 11585 unsigned HOST_WIDE_INT bytes;
0a39fd54 11586 int object_size_type;
11587
c2f47e15 11588 if (!validate_arg (ptr, POINTER_TYPE)
11589 || !validate_arg (ost, INTEGER_TYPE))
11590 return NULL_TREE;
0a39fd54 11591
0a39fd54 11592 STRIP_NOPS (ost);
11593
11594 if (TREE_CODE (ost) != INTEGER_CST
11595 || tree_int_cst_sgn (ost) < 0
11596 || compare_tree_int (ost, 3) > 0)
c2f47e15 11597 return NULL_TREE;
0a39fd54 11598
e913b5cd 11599 object_size_type = tree_to_shwi (ost);
0a39fd54 11600
11601 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11602 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11603 and (size_t) 0 for types 2 and 3. */
11604 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 11605 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 11606
11607 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 11608 {
6da74b21 11609 bytes = compute_builtin_object_size (ptr, object_size_type);
11610 if (wi::fits_to_tree_p (bytes, size_type_node))
11611 return build_int_cstu (size_type_node, bytes);
a6caa15f 11612 }
0a39fd54 11613 else if (TREE_CODE (ptr) == SSA_NAME)
11614 {
0a39fd54 11615 /* If object size is not known yet, delay folding until
11616 later. Maybe subsequent passes will help determining
11617 it. */
11618 bytes = compute_builtin_object_size (ptr, object_size_type);
a6caa15f 11619 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
6da74b21 11620 && wi::fits_to_tree_p (bytes, size_type_node))
11621 return build_int_cstu (size_type_node, bytes);
0a39fd54 11622 }
11623
a6caa15f 11624 return NULL_TREE;
0a39fd54 11625}
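
/* Illustrative sketch (not part of builtins.c): when the pointer is the
   address of a known object, the fold above produces the object's size as
   a compile-time constant.  The function name is hypothetical.  */

static __SIZE_TYPE__
example_object_size_known (void)
{
  static char buf[32];
  /* Both the maximum (type 0) and minimum (type 2) estimates fold to 32,
     since BUF is a complete 32-byte object.  */
  return __builtin_object_size (&buf, 0) + __builtin_object_size (buf, 2);
}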
11626
0e80b01d 11627/* Builtins with folding operations that operate on "..." arguments
11628 need special handling; we need to store the arguments in a convenient
11629 data structure before attempting any folding. Fortunately there are
11630 only a few builtins that fall into this category. FNDECL is the
e80cc485 11631 function, EXP is the CALL_EXPR for the call. */
0e80b01d 11632
11633static tree
e80cc485 11634fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
0e80b01d 11635{
11636 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11637 tree ret = NULL_TREE;
11638
11639 switch (fcode)
11640 {
0e80b01d 11641 case BUILT_IN_FPCLASSIFY:
9d884767 11642 ret = fold_builtin_fpclassify (loc, args, nargs);
0e80b01d 11643 break;
11644
11645 default:
11646 break;
11647 }
11648 if (ret)
11649 {
11650 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11651 SET_EXPR_LOCATION (ret, loc);
11652 TREE_NO_WARNING (ret) = 1;
11653 return ret;
11654 }
11655 return NULL_TREE;
11656}
11657
99eabcc1 11658/* Initialize format string characters in the target charset. */
11659
b9ea678c 11660bool
99eabcc1 11661init_target_chars (void)
11662{
11663 static bool init;
11664 if (!init)
11665 {
11666 target_newline = lang_hooks.to_target_charset ('\n');
11667 target_percent = lang_hooks.to_target_charset ('%');
11668 target_c = lang_hooks.to_target_charset ('c');
11669 target_s = lang_hooks.to_target_charset ('s');
11670 if (target_newline == 0 || target_percent == 0 || target_c == 0
11671 || target_s == 0)
11672 return false;
11673
11674 target_percent_c[0] = target_percent;
11675 target_percent_c[1] = target_c;
11676 target_percent_c[2] = '\0';
11677
11678 target_percent_s[0] = target_percent;
11679 target_percent_s[1] = target_s;
11680 target_percent_s[2] = '\0';
11681
11682 target_percent_s_newline[0] = target_percent;
11683 target_percent_s_newline[1] = target_s;
11684 target_percent_s_newline[2] = target_newline;
11685 target_percent_s_newline[3] = '\0';
a0c938f0 11686
99eabcc1 11687 init = true;
11688 }
11689 return true;
11690}
bffb7645 11691
f0c477f2 11692/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11693 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 11694 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 11695 function assumes that you cleared the MPFR flags and then
11696 calculated M to see if anything subsequently set a flag prior to
11697 entering this function. Return NULL_TREE if any checks fail. */
11698
11699static tree
d4473c84 11700do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 11701{
11702 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11703 overflow/underflow occurred. If -frounding-math, proceed iff the
11704 result of calling FUNC was exact. */
d4473c84 11705 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 11706 && (!flag_rounding_math || !inexact))
11707 {
11708 REAL_VALUE_TYPE rr;
11709
66fa16e6 11710 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 11711 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11712 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11713 but the mpfr_t is not, then we underflowed in the
11714 conversion. */
776a7bab 11715 if (real_isfinite (&rr)
f0c477f2 11716 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11717 {
11718 REAL_VALUE_TYPE rmode;
11719
11720 real_convert (&rmode, TYPE_MODE (type), &rr);
11721 /* Proceed iff the specified mode can hold the value. */
11722 if (real_identical (&rmode, &rr))
11723 return build_real (type, rmode);
11724 }
11725 }
11726 return NULL_TREE;
11727}
11728
239d491a 11729/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11730 number and no overflow/underflow occurred. INEXACT is true if M
11731 was not exactly calculated. TYPE is the tree type for the result.
11732 This function assumes that you cleared the MPFR flags and then
11733 calculated M to see if anything subsequently set a flag prior to
652d9409 11734 entering this function. Return NULL_TREE if any checks fail, if
11735 FORCE_CONVERT is true, then bypass the checks. */
239d491a 11736
11737static tree
652d9409 11738do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 11739{
11740 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11741 overflow/underflow occurred. If -frounding-math, proceed iff the
11742 result of calling FUNC was exact. */
652d9409 11743 if (force_convert
11744 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11745 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11746 && (!flag_rounding_math || !inexact)))
239d491a 11747 {
11748 REAL_VALUE_TYPE re, im;
11749
b0e7c4d4 11750 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11751 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 11752 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11753 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11754 but the mpfr_t is not, then we underflowed in the
11755 conversion. */
652d9409 11756 if (force_convert
11757 || (real_isfinite (&re) && real_isfinite (&im)
11758 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11759 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 11760 {
11761 REAL_VALUE_TYPE re_mode, im_mode;
11762
11763 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11764 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11765 /* Proceed iff the specified mode can hold the value. */
652d9409 11766 if (force_convert
11767 || (real_identical (&re_mode, &re)
11768 && real_identical (&im_mode, &im)))
239d491a 11769 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11770 build_real (TREE_TYPE (type), im_mode));
11771 }
11772 }
11773 return NULL_TREE;
11774}
239d491a 11775
bffb7645 11776/* If argument ARG is a REAL_CST, call the one-argument mpfr function
11777 FUNC on it and return the resulting value as a tree with type TYPE.
728bac60 11778 If MIN and/or MAX are not NULL, then the supplied ARG must be
11779 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11780 acceptable values, otherwise they are not. The mpfr precision is
11781 set to the precision of TYPE. We assume that function FUNC returns
11782 zero if the result could be calculated exactly within the requested
11783 precision. */
bffb7645 11784
11785static tree
728bac60 11786do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11787 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11788 bool inclusive)
bffb7645 11789{
11790 tree result = NULL_TREE;
48e1416a 11791
bffb7645 11792 STRIP_NOPS (arg);
11793
bd7d6fa4 11794 /* To proceed, MPFR must exactly represent the target floating point
11795 format, which only happens when the target base equals two. */
11796 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11797 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
bffb7645 11798 {
f0c477f2 11799 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
bffb7645 11800
776a7bab 11801 if (real_isfinite (ra)
f0c477f2 11802 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11803 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
bffb7645 11804 {
e2eb2b7f 11805 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11806 const int prec = fmt->p;
11807 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
debf9994 11808 int inexact;
bffb7645 11809 mpfr_t m;
11810
11811 mpfr_init2 (m, prec);
66fa16e6 11812 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11813 mpfr_clear_flags ();
e2eb2b7f 11814 inexact = func (m, m, rnd);
f0c477f2 11815 result = do_mpfr_ckconv (m, type, inexact);
bffb7645 11816 mpfr_clear (m);
11817 }
11818 }
48e1416a 11819
bffb7645 11820 return result;
11821}
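
/* Illustrative sketch (not part of builtins.c): the MPFR calling pattern
   used above, shown standalone.  Assumes <mpfr.h>; mpfr_set_d and
   mpfr_get_d stand in for GCC's mpfr_from_real/real_from_mpfr, and the
   fixed 53-bit precision stands in for the precision of TYPE.  Call it as,
   e.g., example_mpfr_arg1 (0.5, mpfr_sin).  */

static double
example_mpfr_arg1 (double x, int (*func) (mpfr_ptr, mpfr_srcptr, mp_rnd_t))
{
  mpfr_t m;
  double result;

  mpfr_init2 (m, 53);
  mpfr_set_d (m, x, GMP_RNDN);
  mpfr_clear_flags ();
  /* FUNC returns nonzero if the result could not be computed exactly at
     this precision; do_mpfr_ckconv above uses that to decide whether the
     fold is valid under -frounding-math.  */
  func (m, m, GMP_RNDN);
  result = mpfr_get_d (m, GMP_RNDN);
  mpfr_clear (m);
  return result;
}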
f0c477f2 11822
11823/* If argument ARG is a REAL_CST, call the two-argument mpfr function
11824 FUNC on it and return the resulting value as a tree with type TYPE.
11825 The mpfr precision is set to the precision of TYPE. We assume that
11826 function FUNC returns zero if the result could be calculated
11827 exactly within the requested precision. */
11828
11829static tree
11830do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11831 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11832{
11833 tree result = NULL_TREE;
48e1416a 11834
f0c477f2 11835 STRIP_NOPS (arg1);
11836 STRIP_NOPS (arg2);
11837
bd7d6fa4 11838 /* To proceed, MPFR must exactly represent the target floating point
11839 format, which only happens when the target base equals two. */
11840 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11841 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11842 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
f0c477f2 11843 {
11844 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11845 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11846
776a7bab 11847 if (real_isfinite (ra1) && real_isfinite (ra2))
f0c477f2 11848 {
e2eb2b7f 11849 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11850 const int prec = fmt->p;
11851 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
f0c477f2 11852 int inexact;
11853 mpfr_t m1, m2;
11854
11855 mpfr_inits2 (prec, m1, m2, NULL);
66fa16e6 11856 mpfr_from_real (m1, ra1, GMP_RNDN);
11857 mpfr_from_real (m2, ra2, GMP_RNDN);
d4473c84 11858 mpfr_clear_flags ();
e2eb2b7f 11859 inexact = func (m1, m1, m2, rnd);
f0c477f2 11860 result = do_mpfr_ckconv (m1, type, inexact);
11861 mpfr_clears (m1, m2, NULL);
11862 }
11863 }
48e1416a 11864
f0c477f2 11865 return result;
11866}
d92f994c 11867
9917422b 11868/* If argument ARG is a REAL_CST, call the three-argument mpfr function
11869 FUNC on it and return the resulting value as a tree with type TYPE.
11870 The mpfr precision is set to the precision of TYPE. We assume that
11871 function FUNC returns zero if the result could be calculated
11872 exactly within the requested precision. */
11873
11874static tree
11875do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11876 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11877{
11878 tree result = NULL_TREE;
48e1416a 11879
9917422b 11880 STRIP_NOPS (arg1);
11881 STRIP_NOPS (arg2);
11882 STRIP_NOPS (arg3);
11883
bd7d6fa4 11884 /* To proceed, MPFR must exactly represent the target floating point
11885 format, which only happens when the target base equals two. */
11886 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11887 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11888 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11889 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
9917422b 11890 {
11891 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11892 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11893 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11894
776a7bab 11895 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
9917422b 11896 {
e2eb2b7f 11897 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11898 const int prec = fmt->p;
11899 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9917422b 11900 int inexact;
11901 mpfr_t m1, m2, m3;
11902
11903 mpfr_inits2 (prec, m1, m2, m3, NULL);
66fa16e6 11904 mpfr_from_real (m1, ra1, GMP_RNDN);
11905 mpfr_from_real (m2, ra2, GMP_RNDN);
11906 mpfr_from_real (m3, ra3, GMP_RNDN);
d4473c84 11907 mpfr_clear_flags ();
e2eb2b7f 11908 inexact = func (m1, m1, m2, m3, rnd);
9917422b 11909 result = do_mpfr_ckconv (m1, type, inexact);
11910 mpfr_clears (m1, m2, m3, NULL);
11911 }
11912 }
48e1416a 11913
9917422b 11914 return result;
11915}
11916
d92f994c 11917/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11918 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
d735c391 11919 If ARG_SINP and ARG_COSP are NULL then the result is returned
11920 as a complex value.
d92f994c 11921 The type is taken from the type of ARG and is used for setting the
11922 precision of the calculation and results. */
11923
11924static tree
11925do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11926{
bd7d6fa4 11927 tree const type = TREE_TYPE (arg);
d92f994c 11928 tree result = NULL_TREE;
48e1416a 11929
d92f994c 11930 STRIP_NOPS (arg);
48e1416a 11931
bd7d6fa4 11932 /* To proceed, MPFR must exactly represent the target floating point
11933 format, which only happens when the target base equals two. */
11934 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11935 && TREE_CODE (arg) == REAL_CST
11936 && !TREE_OVERFLOW (arg))
d92f994c 11937 {
11938 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11939
776a7bab 11940 if (real_isfinite (ra))
d92f994c 11941 {
e2eb2b7f 11942 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11943 const int prec = fmt->p;
11944 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
d92f994c 11945 tree result_s, result_c;
11946 int inexact;
11947 mpfr_t m, ms, mc;
11948
11949 mpfr_inits2 (prec, m, ms, mc, NULL);
66fa16e6 11950 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11951 mpfr_clear_flags ();
e2eb2b7f 11952 inexact = mpfr_sin_cos (ms, mc, m, rnd);
d92f994c 11953 result_s = do_mpfr_ckconv (ms, type, inexact);
11954 result_c = do_mpfr_ckconv (mc, type, inexact);
11955 mpfr_clears (m, ms, mc, NULL);
11956 if (result_s && result_c)
11957 {
d735c391 11958 /* If we are to return the result as a complex value, do so. */
11959 if (!arg_sinp && !arg_cosp)
11960 return build_complex (build_complex_type (type),
11961 result_c, result_s);
11962
d92f994c 11963 /* Dereference the sin/cos pointer arguments. */
11964 arg_sinp = build_fold_indirect_ref (arg_sinp);
11965 arg_cosp = build_fold_indirect_ref (arg_cosp);
11966 /* Proceed iff valid pointer types were passed in. */
11967 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11968 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11969 {
11970 /* Set the values. */
41076ef6 11971 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
35cc02b5 11972 result_s);
d92f994c 11973 TREE_SIDE_EFFECTS (result_s) = 1;
41076ef6 11974 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
35cc02b5 11975 result_c);
d92f994c 11976 TREE_SIDE_EFFECTS (result_c) = 1;
11977 /* Combine the assignments into a compound expr. */
11978 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11979 result_s, result_c));
11980 }
11981 }
11982 }
11983 }
11984 return result;
11985}
65dd1378 11986
65dd1378 11987/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11988 two-argument mpfr order N Bessel function FUNC on them and return
11989 the resulting value as a tree with type TYPE. The mpfr precision
11990 is set to the precision of TYPE. We assume that function FUNC
11991 returns zero if the result could be calculated exactly within the
11992 requested precision. */
11993static tree
11994do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11995 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11996 const REAL_VALUE_TYPE *min, bool inclusive)
11997{
11998 tree result = NULL_TREE;
11999
12000 STRIP_NOPS (arg1);
12001 STRIP_NOPS (arg2);
12002
12003 /* To proceed, MPFR must exactly represent the target floating point
12004 format, which only happens when the target base equals two. */
12005 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
e913b5cd 12006 && tree_fits_shwi_p (arg1)
65dd1378 12007 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12008 {
e913b5cd 12009 const HOST_WIDE_INT n = tree_to_shwi (arg1);
65dd1378 12010 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12011
12012 if (n == (long)n
776a7bab 12013 && real_isfinite (ra)
65dd1378 12014 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12015 {
e2eb2b7f 12016 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12017 const int prec = fmt->p;
12018 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
65dd1378 12019 int inexact;
12020 mpfr_t m;
12021
12022 mpfr_init2 (m, prec);
12023 mpfr_from_real (m, ra, GMP_RNDN);
12024 mpfr_clear_flags ();
e2eb2b7f 12025 inexact = func (m, n, m, rnd);
65dd1378 12026 result = do_mpfr_ckconv (m, type, inexact);
12027 mpfr_clear (m);
12028 }
12029 }
48e1416a 12030
65dd1378 12031 return result;
12032}
e5407ca6 12033
12034/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12035 the pointer *(ARG_QUO) and return the result. The type is taken
12036 from the type of ARG0 and is used for setting the precision of the
12037 calculation and results. */
12038
12039static tree
12040do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12041{
12042 tree const type = TREE_TYPE (arg0);
12043 tree result = NULL_TREE;
48e1416a 12044
e5407ca6 12045 STRIP_NOPS (arg0);
12046 STRIP_NOPS (arg1);
48e1416a 12047
e5407ca6 12048 /* To proceed, MPFR must exactly represent the target floating point
12049 format, which only happens when the target base equals two. */
12050 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12051 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12052 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12053 {
12054 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12055 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12056
776a7bab 12057 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 12058 {
e2eb2b7f 12059 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12060 const int prec = fmt->p;
12061 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e5407ca6 12062 tree result_rem;
12063 long integer_quo;
12064 mpfr_t m0, m1;
12065
12066 mpfr_inits2 (prec, m0, m1, NULL);
12067 mpfr_from_real (m0, ra0, GMP_RNDN);
12068 mpfr_from_real (m1, ra1, GMP_RNDN);
12069 mpfr_clear_flags ();
e2eb2b7f 12070 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 12071 /* Remquo is independent of the rounding mode, so pass
12072 inexact=0 to do_mpfr_ckconv(). */
12073 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12074 mpfr_clears (m0, m1, NULL);
12075 if (result_rem)
12076 {
12077 /* MPFR calculates quo in the host's long so it may
12078 return more bits in quo than the target int can hold
12079 if sizeof(host long) > sizeof(target int). This can
12080 happen even for native compilers in LP64 mode. In
12081 these cases, modulo the quo value with the largest
12082 number that the target int can hold while leaving one
12083 bit for the sign. */
12084 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12085 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12086
12087 /* Dereference the quo pointer argument. */
12088 arg_quo = build_fold_indirect_ref (arg_quo);
12089 /* Proceed iff a valid pointer type was passed in. */
12090 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12091 {
12092 /* Set the value. */
7002a1c8 12093 tree result_quo
12094 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12095 build_int_cst (TREE_TYPE (arg_quo),
12096 integer_quo));
e5407ca6 12097 TREE_SIDE_EFFECTS (result_quo) = 1;
12098 /* Combine the quo assignment with the rem. */
12099 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12100 result_quo, result_rem));
12101 }
12102 }
12103 }
12104 }
12105 return result;
12106}
e84da7c1 12107
12108/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12109 resulting value as a tree with type TYPE. The mpfr precision is
12110 set to the precision of TYPE. We assume that this mpfr function
12111 returns zero if the result could be calculated exactly within the
12112 requested precision. In addition, the integer pointer represented
12113 by ARG_SG will be dereferenced and set to the appropriate signgam
12114 (-1,1) value. */
12115
12116static tree
12117do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12118{
12119 tree result = NULL_TREE;
12120
12121 STRIP_NOPS (arg);
48e1416a 12122
e84da7c1 12123 /* To proceed, MPFR must exactly represent the target floating point
12124 format, which only happens when the target base equals two. Also
12125 verify ARG is a constant and that ARG_SG is an int pointer. */
12126 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12127 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12128 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12129 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12130 {
12131 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12132
12133 /* In addition to NaN and Inf, the argument cannot be zero or a
12134 negative integer. */
776a7bab 12135 if (real_isfinite (ra)
e84da7c1 12136 && ra->cl != rvc_zero
9af5ce0c 12137 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 12138 {
e2eb2b7f 12139 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12140 const int prec = fmt->p;
12141 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e84da7c1 12142 int inexact, sg;
12143 mpfr_t m;
12144 tree result_lg;
12145
12146 mpfr_init2 (m, prec);
12147 mpfr_from_real (m, ra, GMP_RNDN);
12148 mpfr_clear_flags ();
e2eb2b7f 12149 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 12150 result_lg = do_mpfr_ckconv (m, type, inexact);
12151 mpfr_clear (m);
12152 if (result_lg)
12153 {
12154 tree result_sg;
12155
12156 /* Dereference the arg_sg pointer argument. */
12157 arg_sg = build_fold_indirect_ref (arg_sg);
12158 /* Assign the signgam value into *arg_sg. */
12159 result_sg = fold_build2 (MODIFY_EXPR,
12160 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 12161 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 12162 TREE_SIDE_EFFECTS (result_sg) = 1;
12163 /* Combine the signgam assignment with the lgamma result. */
12164 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12165 result_sg, result_lg));
12166 }
12167 }
12168 }
12169
12170 return result;
12171}
75a70cf9 12172
239d491a 12173/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12174 function FUNC on it and return the resulting value as a tree with
12175 type TYPE. The mpfr precision is set to the precision of TYPE. We
12176 assume that function FUNC returns zero if the result could be
12177 calculated exactly within the requested precision. */
12178
12179static tree
12180do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12181{
12182 tree result = NULL_TREE;
48e1416a 12183
239d491a 12184 STRIP_NOPS (arg);
12185
12186 /* To proceed, MPFR must exactly represent the target floating point
12187 format, which only happens when the target base equals two. */
12188 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12189 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12190 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12191 {
12192 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12193 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12194
12195 if (real_isfinite (re) && real_isfinite (im))
12196 {
12197 const struct real_format *const fmt =
12198 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12199 const int prec = fmt->p;
12200 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
44d89feb 12201 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
239d491a 12202 int inexact;
12203 mpc_t m;
48e1416a 12204
239d491a 12205 mpc_init2 (m, prec);
9af5ce0c 12206 mpfr_from_real (mpc_realref (m), re, rnd);
12207 mpfr_from_real (mpc_imagref (m), im, rnd);
239d491a 12208 mpfr_clear_flags ();
44d89feb 12209 inexact = func (m, m, crnd);
652d9409 12210 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
239d491a 12211 mpc_clear (m);
12212 }
12213 }
12214
12215 return result;
12216}
c699fab8 12217
12218/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12219 mpc function FUNC on it and return the resulting value as a tree
12220 with type TYPE. The mpfr precision is set to the precision of
12221 TYPE. We assume that function FUNC returns zero if the result
652d9409 12222 could be calculated exactly within the requested precision. If
12223 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12224 in the arguments and/or results. */
c699fab8 12225
63e89698 12226tree
652d9409 12227do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 12228 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12229{
12230 tree result = NULL_TREE;
48e1416a 12231
c699fab8 12232 STRIP_NOPS (arg0);
12233 STRIP_NOPS (arg1);
12234
12235 /* To proceed, MPFR must exactly represent the target floating point
12236 format, which only happens when the target base equals two. */
12237 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12238 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12239 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12240 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12241 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12242 {
12243 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12244 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12245 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12246 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12247
652d9409 12248 if (do_nonfinite
12249 || (real_isfinite (re0) && real_isfinite (im0)
12250 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 12251 {
12252 const struct real_format *const fmt =
12253 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12254 const int prec = fmt->p;
12255 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12256 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12257 int inexact;
12258 mpc_t m0, m1;
48e1416a 12259
c699fab8 12260 mpc_init2 (m0, prec);
12261 mpc_init2 (m1, prec);
9af5ce0c 12262 mpfr_from_real (mpc_realref (m0), re0, rnd);
12263 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12264 mpfr_from_real (mpc_realref (m1), re1, rnd);
12265 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 12266 mpfr_clear_flags ();
12267 inexact = func (m0, m0, m1, crnd);
652d9409 12268 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 12269 mpc_clear (m0);
12270 mpc_clear (m1);
12271 }
12272 }
12273
12274 return result;
12275}
239d491a 12276
75a70cf9 12277/* A wrapper function for builtin folding that prevents warnings for
12278 "statement without effect" and the like, caused by removing the
12279 call node earlier than the warning is generated. */
12280
12281tree
1a91d914 12282fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 12283{
12284 tree ret = NULL_TREE;
12285 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 12286 location_t loc = gimple_location (stmt);
75a70cf9 12287 if (fndecl
12288 && TREE_CODE (fndecl) == FUNCTION_DECL
12289 && DECL_BUILT_IN (fndecl)
12290 && !gimple_call_va_arg_pack_p (stmt))
12291 {
12292 int nargs = gimple_call_num_args (stmt);
9845fb99 12293 tree *args = (nargs > 0
12294 ? gimple_call_arg_ptr (stmt, 0)
12295 : &error_mark_node);
75a70cf9 12296
198622c0 12297 if (avoid_folding_inline_builtin (fndecl))
12298 return NULL_TREE;
75a70cf9 12299 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12300 {
9845fb99 12301 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 12302 }
12303 else
12304 {
9d884767 12305 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 12306 if (ret)
12307 {
12308 /* Propagate location information from original call to
12309 expansion of builtin. Otherwise things like
12310 maybe_emit_chk_warning, that operate on the expansion
12311 of a builtin, will use the wrong location information. */
12312 if (gimple_has_location (stmt))
12313 {
12314 tree realret = ret;
12315 if (TREE_CODE (ret) == NOP_EXPR)
12316 realret = TREE_OPERAND (ret, 0);
12317 if (CAN_HAVE_LOCATION_P (realret)
12318 && !EXPR_HAS_LOCATION (realret))
389dd41b 12319 SET_EXPR_LOCATION (realret, loc);
75a70cf9 12320 return realret;
12321 }
12322 return ret;
12323 }
12324 }
12325 }
12326 return NULL_TREE;
12327}
7bfefa9d 12328
b9a16870 12329/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 12330 and set ASMSPEC as its user assembler name. DECL must be a
12331 function decl that declares a builtin. */
12332
12333void
12334set_builtin_user_assembler_name (tree decl, const char *asmspec)
12335{
12336 tree builtin;
12337 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12338 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12339 && asmspec != 0);
12340
b9a16870 12341 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 12342 set_user_assembler_name (builtin, asmspec);
7bfefa9d 12343 switch (DECL_FUNCTION_CODE (decl))
12344 {
12345 case BUILT_IN_MEMCPY:
12346 init_block_move_fn (asmspec);
12347 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12348 break;
12349 case BUILT_IN_MEMSET:
12350 init_block_clear_fn (asmspec);
12351 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12352 break;
12353 case BUILT_IN_MEMMOVE:
12354 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12355 break;
12356 case BUILT_IN_MEMCMP:
12357 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12358 break;
12359 case BUILT_IN_ABORT:
12360 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12361 break;
5a80a58b 12362 case BUILT_IN_FFS:
12363 if (INT_TYPE_SIZE < BITS_PER_WORD)
12364 {
12365 set_user_assembler_libfunc ("ffs", asmspec);
12366 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12367 MODE_INT, 0), "ffs");
12368 }
12369 break;
7bfefa9d 12370 default:
12371 break;
12372 }
12373}
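
/* Illustrative sketch (not part of builtins.c): user-level source that
   triggers the renaming above, assuming GCC's asm-label extension.  After
   this declaration, explicit memcpy calls and compiler-generated block
   moves both reference the chosen symbol; the name "xmemcpy" is
   hypothetical.  */

extern void *memcpy (void *, const void *, __SIZE_TYPE__) __asm__ ("xmemcpy");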
a6b74a67 12374
12375/* Return true if DECL is a builtin that expands to a constant or similarly
12376 simple code. */
12377bool
12378is_simple_builtin (tree decl)
12379{
12380 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12381 switch (DECL_FUNCTION_CODE (decl))
12382 {
12383 /* Builtins that expand to constants. */
12384 case BUILT_IN_CONSTANT_P:
12385 case BUILT_IN_EXPECT:
12386 case BUILT_IN_OBJECT_SIZE:
12387 case BUILT_IN_UNREACHABLE:
12388 /* Simple register moves or loads from stack. */
fca0886c 12389 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 12390 case BUILT_IN_RETURN_ADDRESS:
12391 case BUILT_IN_EXTRACT_RETURN_ADDR:
12392 case BUILT_IN_FROB_RETURN_ADDR:
12393 case BUILT_IN_RETURN:
12394 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12395 case BUILT_IN_FRAME_ADDRESS:
12396 case BUILT_IN_VA_END:
12397 case BUILT_IN_STACK_SAVE:
12398 case BUILT_IN_STACK_RESTORE:
12399 /* Exception state returns or moves registers around. */
12400 case BUILT_IN_EH_FILTER:
12401 case BUILT_IN_EH_POINTER:
12402 case BUILT_IN_EH_COPY_VALUES:
12403 return true;
12404
12405 default:
12406 return false;
12407 }
12408
12409 return false;
12410}
12411
12412/* Return true if DECL is a builtin that is not expensive, i.e., one that is
 12413 most probably expanded inline into reasonably simple code. This is a
12414 superset of is_simple_builtin. */
12415bool
12416is_inexpensive_builtin (tree decl)
12417{
12418 if (!decl)
12419 return false;
12420 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12421 return true;
12422 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12423 switch (DECL_FUNCTION_CODE (decl))
12424 {
12425 case BUILT_IN_ABS:
12426 case BUILT_IN_ALLOCA:
581bf1c2 12427 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 12428 case BUILT_IN_BSWAP16:
a6b74a67 12429 case BUILT_IN_BSWAP32:
12430 case BUILT_IN_BSWAP64:
12431 case BUILT_IN_CLZ:
12432 case BUILT_IN_CLZIMAX:
12433 case BUILT_IN_CLZL:
12434 case BUILT_IN_CLZLL:
12435 case BUILT_IN_CTZ:
12436 case BUILT_IN_CTZIMAX:
12437 case BUILT_IN_CTZL:
12438 case BUILT_IN_CTZLL:
12439 case BUILT_IN_FFS:
12440 case BUILT_IN_FFSIMAX:
12441 case BUILT_IN_FFSL:
12442 case BUILT_IN_FFSLL:
12443 case BUILT_IN_IMAXABS:
12444 case BUILT_IN_FINITE:
12445 case BUILT_IN_FINITEF:
12446 case BUILT_IN_FINITEL:
12447 case BUILT_IN_FINITED32:
12448 case BUILT_IN_FINITED64:
12449 case BUILT_IN_FINITED128:
12450 case BUILT_IN_FPCLASSIFY:
12451 case BUILT_IN_ISFINITE:
12452 case BUILT_IN_ISINF_SIGN:
12453 case BUILT_IN_ISINF:
12454 case BUILT_IN_ISINFF:
12455 case BUILT_IN_ISINFL:
12456 case BUILT_IN_ISINFD32:
12457 case BUILT_IN_ISINFD64:
12458 case BUILT_IN_ISINFD128:
12459 case BUILT_IN_ISNAN:
12460 case BUILT_IN_ISNANF:
12461 case BUILT_IN_ISNANL:
12462 case BUILT_IN_ISNAND32:
12463 case BUILT_IN_ISNAND64:
12464 case BUILT_IN_ISNAND128:
12465 case BUILT_IN_ISNORMAL:
12466 case BUILT_IN_ISGREATER:
12467 case BUILT_IN_ISGREATEREQUAL:
12468 case BUILT_IN_ISLESS:
12469 case BUILT_IN_ISLESSEQUAL:
12470 case BUILT_IN_ISLESSGREATER:
12471 case BUILT_IN_ISUNORDERED:
12472 case BUILT_IN_VA_ARG_PACK:
12473 case BUILT_IN_VA_ARG_PACK_LEN:
12474 case BUILT_IN_VA_COPY:
12475 case BUILT_IN_TRAP:
12476 case BUILT_IN_SAVEREGS:
12477 case BUILT_IN_POPCOUNTL:
12478 case BUILT_IN_POPCOUNTLL:
12479 case BUILT_IN_POPCOUNTIMAX:
12480 case BUILT_IN_POPCOUNT:
12481 case BUILT_IN_PARITYL:
12482 case BUILT_IN_PARITYLL:
12483 case BUILT_IN_PARITYIMAX:
12484 case BUILT_IN_PARITY:
12485 case BUILT_IN_LABS:
12486 case BUILT_IN_LLABS:
12487 case BUILT_IN_PREFETCH:
ca4c3545 12488 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 12489 return true;
12490
12491 default:
12492 return is_simple_builtin (decl);
12493 }
12494
12495 return false;
12496}