gcc/builtins.c
53800dbe 1/* Expand builtin functions.
d353bf18 2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
20#include "config.h"
21#include "system.h"
805e22b2 22#include "coretypes.h"
23#include "tm.h"
53800dbe 24#include "rtl.h"
b20a8bb4 25#include "alias.h"
26#include "symtab.h"
53800dbe 27#include "tree.h"
b20a8bb4 28#include "fold-const.h"
9ed99284 29#include "stringpool.h"
30#include "stor-layout.h"
31#include "calls.h"
32#include "varasm.h"
33#include "tree-object-size.h"
dae0b5cb 34#include "realmpfr.h"
94ea8568 35#include "predict.h"
94ea8568 36#include "hard-reg-set.h"
94ea8568 37#include "function.h"
38#include "cfgrtl.h"
bc61cadb 39#include "basic-block.h"
40#include "tree-ssa-alias.h"
41#include "internal-fn.h"
42#include "gimple-expr.h"
75a70cf9 43#include "gimple.h"
53800dbe 44#include "flags.h"
45#include "regs.h"
53800dbe 46#include "except.h"
53800dbe 47#include "insn-config.h"
d53441c8 48#include "expmed.h"
49#include "dojump.h"
50#include "explow.h"
51#include "emit-rtl.h"
52#include "stmt.h"
53800dbe 53#include "expr.h"
34517c64 54#include "insn-codes.h"
d8fc4d0b 55#include "optabs.h"
56#include "libfuncs.h"
53800dbe 57#include "recog.h"
58#include "output.h"
59#include "typeclass.h"
1dd6c958 60#include "tm_p.h"
fc2a2dcb 61#include "target.h"
63c62881 62#include "langhooks.h"
073c1fd5 63#include "tree-ssanames.h"
64#include "tree-dfa.h"
162719b3 65#include "value-prof.h"
852f689e 66#include "diagnostic-core.h"
3b9c3a16 67#include "builtins.h"
f9acf11a 68#include "asan.h"
d037099f 69#include "cilk.h"
058a1b7a 70#include "lto-streamer.h"
71#include "cgraph.h"
72#include "tree-chkp.h"
73#include "rtl-chkp.h"
ca4c3545 74#include "gomp-constants.h"
53800dbe 75
5383fb56 76
239d491a 77static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
726e2588 78
3b9c3a16 79struct target_builtins default_target_builtins;
80#if SWITCHABLE_TARGET
81struct target_builtins *this_target_builtins = &default_target_builtins;
82#endif
83
ab7943b9 84/* Define the names of the builtin function types and codes. */
96423453 85const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 86 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
87
9cfddb70 88#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 89const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 90{
91#include "builtins.def"
92};
93#undef DEF_BUILTIN
ab7943b9 94
cffdfb3d 95/* Set up an array of builtin_info_type, making sure each element's decl is
df94cd3b 96 initialized to NULL_TREE. */
cffdfb3d 97builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 98
0b049e15 99/* Non-zero if __builtin_constant_p should be folded right away. */
100bool force_folding_builtin_constant_p;
101
3754d046 102static rtx c_readstr (const char *, machine_mode);
aecda0d6 103static int target_char_cast (tree, char *);
d8ae1baa 104static rtx get_memory_rtx (tree, tree);
aecda0d6 105static int apply_args_size (void);
106static int apply_result_size (void);
d8c9779c 107#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
aecda0d6 108static rtx result_vector (int, rtx);
d8c9779c 109#endif
aecda0d6 110static void expand_builtin_prefetch (tree);
111static rtx expand_builtin_apply_args (void);
112static rtx expand_builtin_apply_args_1 (void);
113static rtx expand_builtin_apply (rtx, rtx, rtx);
114static void expand_builtin_return (rtx);
115static enum type_class type_to_class (tree);
116static rtx expand_builtin_classify_type (tree);
117static void expand_errno_check (tree, rtx);
118static rtx expand_builtin_mathfn (tree, rtx, rtx);
119static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
6b43bae4 120static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 121static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 122static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 123static rtx expand_builtin_sincos (tree);
f97eea22 124static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 125static rtx expand_builtin_int_roundingfn (tree, rtx);
126static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 127static rtx expand_builtin_next_arg (void);
aecda0d6 128static rtx expand_builtin_va_start (tree);
129static rtx expand_builtin_va_end (tree);
130static rtx expand_builtin_va_copy (tree);
3754d046 131static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
a65c4d64 132static rtx expand_builtin_strcmp (tree, rtx);
3754d046 133static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
134static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
a65c4d64 135static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 136static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
137static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
3754d046 138static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
f21337ef 139static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
48e1416a 140static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
f21337ef 141 machine_mode, int, tree);
a65c4d64 142static rtx expand_builtin_strcpy (tree, rtx);
143static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 144static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
a65c4d64 145static rtx expand_builtin_strncpy (tree, rtx);
3754d046 146static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
147static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 148static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 149static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 150static rtx expand_builtin_bzero (tree);
3754d046 151static rtx expand_builtin_strlen (tree, rtx, machine_mode);
5be42b39 152static rtx expand_builtin_alloca (tree, bool);
3754d046 153static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 154static rtx expand_builtin_frame_address (tree, tree);
389dd41b 155static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 156static rtx expand_builtin_expect (tree, rtx);
157static tree fold_builtin_constant_p (tree);
158static tree fold_builtin_classify_type (tree);
c7cbde74 159static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 160static tree fold_builtin_inf (location_t, tree, int);
aecda0d6 161static tree fold_builtin_nan (tree, tree, int);
389dd41b 162static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
b7bf20db 163static bool validate_arg (const_tree, enum tree_code code);
277f8dd2 164static bool integer_valued_real_p (tree);
389dd41b 165static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
aecda0d6 166static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 167static rtx expand_builtin_signbit (tree, rtx);
389dd41b 168static tree fold_builtin_sqrt (location_t, tree, tree);
169static tree fold_builtin_cbrt (location_t, tree, tree);
170static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
171static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
172static tree fold_builtin_cos (location_t, tree, tree, tree);
173static tree fold_builtin_cosh (location_t, tree, tree, tree);
bffb7645 174static tree fold_builtin_tan (tree, tree);
389dd41b 175static tree fold_builtin_trunc (location_t, tree, tree);
176static tree fold_builtin_floor (location_t, tree, tree);
177static tree fold_builtin_ceil (location_t, tree, tree);
178static tree fold_builtin_round (location_t, tree, tree);
179static tree fold_builtin_int_roundingfn (location_t, tree, tree);
10b9666f 180static tree fold_builtin_bitop (tree, tree);
389dd41b 181static tree fold_builtin_strchr (location_t, tree, tree, tree);
182static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
183static tree fold_builtin_memcmp (location_t, tree, tree, tree);
184static tree fold_builtin_strcmp (location_t, tree, tree);
185static tree fold_builtin_strncmp (location_t, tree, tree, tree);
186static tree fold_builtin_signbit (location_t, tree, tree);
187static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
188static tree fold_builtin_isascii (location_t, tree);
189static tree fold_builtin_toascii (location_t, tree);
190static tree fold_builtin_isdigit (location_t, tree);
191static tree fold_builtin_fabs (location_t, tree, tree);
192static tree fold_builtin_abs (location_t, tree, tree);
193static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 194 enum tree_code);
e80cc485 195static tree fold_builtin_0 (location_t, tree);
196static tree fold_builtin_1 (location_t, tree, tree);
197static tree fold_builtin_2 (location_t, tree, tree, tree);
198static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
199static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 200
201static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
202static tree fold_builtin_strstr (location_t, tree, tree, tree);
203static tree fold_builtin_strrchr (location_t, tree, tree, tree);
389dd41b 204static tree fold_builtin_strspn (location_t, tree, tree);
205static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 206
0a39fd54 207static rtx expand_builtin_object_size (tree);
3754d046 208static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 209 enum built_in_function);
210static void maybe_emit_chk_warning (tree, enum built_in_function);
211static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 212static void maybe_emit_free_warning (tree);
c2f47e15 213static tree fold_builtin_object_size (tree, tree);
99eabcc1 214
e788f202 215unsigned HOST_WIDE_INT target_newline;
b9ea678c 216unsigned HOST_WIDE_INT target_percent;
99eabcc1 217static unsigned HOST_WIDE_INT target_c;
218static unsigned HOST_WIDE_INT target_s;
aea88c77 219char target_percent_c[3];
b9ea678c 220char target_percent_s[3];
e788f202 221char target_percent_s_newline[4];
728bac60 222static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
223 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
f0c477f2 224static tree do_mpfr_arg2 (tree, tree, tree,
225 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
9917422b 226static tree do_mpfr_arg3 (tree, tree, tree, tree,
227 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
d92f994c 228static tree do_mpfr_sincos (tree, tree, tree);
65dd1378 229static tree do_mpfr_bessel_n (tree, tree, tree,
230 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
231 const REAL_VALUE_TYPE *, bool);
e5407ca6 232static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 233static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 234static void expand_builtin_sync_synchronize (void);
0a39fd54 235
7bfefa9d 236/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or names a Cilk Plus runtime helper when Cilk Plus is enabled. */
237
b29139ad 238static bool
1c47b3e8 239is_builtin_name (const char *name)
b6a5fc45 240{
b6a5fc45 241 if (strncmp (name, "__builtin_", 10) == 0)
242 return true;
243 if (strncmp (name, "__sync_", 7) == 0)
244 return true;
1cd6e20d 245 if (strncmp (name, "__atomic_", 9) == 0)
246 return true;
a89e6c15 247 if (flag_cilkplus
d037099f 248 && (!strcmp (name, "__cilkrts_detach")
249 || !strcmp (name, "__cilkrts_pop_frame")))
250 return true;
b6a5fc45 251 return false;
252}
4ee9c684 253
7bfefa9d 254
255/* Return true if DECL is a function symbol representing a built-in. */
256
257bool
258is_builtin_fn (tree decl)
259{
260 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
261}
262
1c47b3e8 263/* Return true if NODE should be considered for inline expansion regardless
264 of the optimization level. This means whenever a function is invoked with
265 its "internal" name, which normally contains the prefix "__builtin". */
266
267static bool
268called_as_built_in (tree node)
269{
270 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
271 we want the name used to call the function, not the name it
272 will have. */
273 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
274 return is_builtin_name (name);
275}
276
ceea063b 277/* Compute values M and N such that M divides (address of EXP - N) and such
 278 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
 279 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
 280 *ALIGNP and any bit-offset to *BITPOSP.
0d8f7716 281
282 Note that the address (and thus the alignment) computed here is based
283 on the address to which a symbol resolves, whereas DECL_ALIGN is based
284 on the address at which an object is actually located. These two
285 addresses are not always the same. For example, on ARM targets,
286 the address &foo of a Thumb function foo() has the lowest bit set,
3482bf13 287 whereas foo() itself starts on an even address.
698537d1 288
3482bf13 289 If ADDR_P is true we are taking the address of the memory reference EXP
290 and thus cannot rely on the access taking place. */
291
292static bool
293get_object_alignment_2 (tree exp, unsigned int *alignp,
294 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
698537d1 295{
98ab9e8f 296 HOST_WIDE_INT bitsize, bitpos;
297 tree offset;
3754d046 298 machine_mode mode;
98ab9e8f 299 int unsignedp, volatilep;
c8a2b4ff 300 unsigned int align = BITS_PER_UNIT;
ceea063b 301 bool known_alignment = false;
698537d1 302
98ab9e8f 303 /* Get the innermost object and the constant (bitpos) and possibly
304 variable (offset) offset of the access. */
305 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
306 &mode, &unsignedp, &volatilep, true);
307
308 /* Extract alignment information from the innermost object and
309 possibly adjust bitpos and offset. */
3482bf13 310 if (TREE_CODE (exp) == FUNCTION_DECL)
0d8f7716 311 {
3482bf13 312 /* Function addresses can encode extra information besides their
313 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
314 allows the low bit to be used as a virtual bit, we know
315 that the address itself must be at least 2-byte aligned. */
316 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
317 align = 2 * BITS_PER_UNIT;
0d8f7716 318 }
3482bf13 319 else if (TREE_CODE (exp) == LABEL_DECL)
320 ;
321 else if (TREE_CODE (exp) == CONST_DECL)
98ab9e8f 322 {
3482bf13 323 /* The alignment of a CONST_DECL is determined by its initializer. */
324 exp = DECL_INITIAL (exp);
98ab9e8f 325 align = TYPE_ALIGN (TREE_TYPE (exp));
326#ifdef CONSTANT_ALIGNMENT
3482bf13 327 if (CONSTANT_CLASS_P (exp))
328 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
698537d1 329#endif
3482bf13 330 known_alignment = true;
98ab9e8f 331 }
3482bf13 332 else if (DECL_P (exp))
ceea063b 333 {
3482bf13 334 align = DECL_ALIGN (exp);
ceea063b 335 known_alignment = true;
ceea063b 336 }
3482bf13 337 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
ceea063b 338 {
ceea063b 339 align = TYPE_ALIGN (TREE_TYPE (exp));
340 }
3482bf13 341 else if (TREE_CODE (exp) == INDIRECT_REF
342 || TREE_CODE (exp) == MEM_REF
343 || TREE_CODE (exp) == TARGET_MEM_REF)
98ab9e8f 344 {
345 tree addr = TREE_OPERAND (exp, 0);
ceea063b 346 unsigned ptr_align;
347 unsigned HOST_WIDE_INT ptr_bitpos;
ab1e78e5 348 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
ceea063b 349
ab1e78e5 350 /* If the address is explicitly aligned, handle that. */
98ab9e8f 351 if (TREE_CODE (addr) == BIT_AND_EXPR
352 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
353 {
ab1e78e5 354 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
355 ptr_bitmask *= BITS_PER_UNIT;
356 align = ptr_bitmask & -ptr_bitmask;
98ab9e8f 357 addr = TREE_OPERAND (addr, 0);
358 }
ceea063b 359
3482bf13 360 known_alignment
361 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
3482bf13 362 align = MAX (ptr_align, align);
363
ab1e78e5 364 /* Re-apply explicit alignment to the bitpos. */
365 ptr_bitpos &= ptr_bitmask;
366
4083990a 367 /* The alignment of the pointer operand in a TARGET_MEM_REF
368 has to take the variable offset parts into account. */
3482bf13 369 if (TREE_CODE (exp) == TARGET_MEM_REF)
153c3b50 370 {
3482bf13 371 if (TMR_INDEX (exp))
372 {
373 unsigned HOST_WIDE_INT step = 1;
374 if (TMR_STEP (exp))
f9ae6f95 375 step = TREE_INT_CST_LOW (TMR_STEP (exp));
3482bf13 376 align = MIN (align, (step & -step) * BITS_PER_UNIT);
377 }
378 if (TMR_INDEX2 (exp))
379 align = BITS_PER_UNIT;
380 known_alignment = false;
153c3b50 381 }
ceea063b 382
3482bf13 383 /* When EXP is an actual memory reference then we can use
384 TYPE_ALIGN of a pointer indirection to derive alignment.
385 Do so only if get_pointer_alignment_1 did not reveal absolute
4083990a 386 alignment knowledge and if using that alignment would
387 improve the situation. */
388 if (!addr_p && !known_alignment
389 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
390 align = TYPE_ALIGN (TREE_TYPE (exp));
391 else
392 {
393 /* Else adjust bitpos accordingly. */
394 bitpos += ptr_bitpos;
395 if (TREE_CODE (exp) == MEM_REF
396 || TREE_CODE (exp) == TARGET_MEM_REF)
e913b5cd 397 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
4083990a 398 }
98ab9e8f 399 }
3482bf13 400 else if (TREE_CODE (exp) == STRING_CST)
153c3b50 401 {
3482bf13 402 /* STRING_CST are the only constant objects we allow to be not
403 wrapped inside a CONST_DECL. */
404 align = TYPE_ALIGN (TREE_TYPE (exp));
405#ifdef CONSTANT_ALIGNMENT
406 if (CONSTANT_CLASS_P (exp))
407 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
408#endif
409 known_alignment = true;
98ab9e8f 410 }
98ab9e8f 411
412 /* If there is a non-constant offset part extract the maximum
413 alignment that can prevail. */
c8a2b4ff 414 if (offset)
98ab9e8f 415 {
ad464c56 416 unsigned int trailing_zeros = tree_ctz (offset);
c8a2b4ff 417 if (trailing_zeros < HOST_BITS_PER_INT)
98ab9e8f 418 {
c8a2b4ff 419 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
420 if (inner)
421 align = MIN (align, inner);
98ab9e8f 422 }
98ab9e8f 423 }
424
3482bf13 425 *alignp = align;
426 *bitposp = bitpos & (*alignp - 1);
ceea063b 427 return known_alignment;
0c883ef3 428}
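
/* Worked example of the M/N contract described above (values are purely
   illustrative; what the analysis can prove depends on EXP): if EXP is
   known to sit at an address of the form 16 * k + 4, i.e. 4 bytes past a
   16-byte boundary, a successful call leaves

     *alignp  == 128   (16 bytes, expressed in bits)
     *bitposp == 32    (4 bytes, expressed in bits)

   so that (address of EXP) modulo *alignp equals *bitposp.  */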
429
3482bf13 430/* For a memory reference expression EXP compute values M and N such that M
431 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 432 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 433 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
434
435bool
436get_object_alignment_1 (tree exp, unsigned int *alignp,
437 unsigned HOST_WIDE_INT *bitposp)
438{
439 return get_object_alignment_2 (exp, alignp, bitposp, false);
440}
441
957d0361 442/* Return the alignment in bits of EXP, an object. */
0c883ef3 443
444unsigned int
957d0361 445get_object_alignment (tree exp)
0c883ef3 446{
447 unsigned HOST_WIDE_INT bitpos = 0;
448 unsigned int align;
449
ceea063b 450 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 451
98ab9e8f 452 /* align and bitpos now specify known low bits of the pointer.
453 ptr & (align - 1) == bitpos. */
454
455 if (bitpos != 0)
456 align = (bitpos & -bitpos);
957d0361 457 return align;
698537d1 458}
459
ceea063b 460/* For a pointer valued expression EXP compute values M and N such that M
461 divides (EXP - N) and such that N < M. If these numbers can be determined,
3482bf13 462 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
463 the results are just a conservative approximation.
53800dbe 464
ceea063b 465 If EXP is not a pointer, false is returned too. */
53800dbe 466
ceea063b 467bool
468get_pointer_alignment_1 (tree exp, unsigned int *alignp,
469 unsigned HOST_WIDE_INT *bitposp)
53800dbe 470{
153c3b50 471 STRIP_NOPS (exp);
535e2026 472
153c3b50 473 if (TREE_CODE (exp) == ADDR_EXPR)
3482bf13 474 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
475 alignp, bitposp, true);
153c3b50 476 else if (TREE_CODE (exp) == SSA_NAME
477 && POINTER_TYPE_P (TREE_TYPE (exp)))
53800dbe 478 {
ceea063b 479 unsigned int ptr_align, ptr_misalign;
153c3b50 480 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
ceea063b 481
482 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
483 {
484 *bitposp = ptr_misalign * BITS_PER_UNIT;
485 *alignp = ptr_align * BITS_PER_UNIT;
3482bf13 486 /* We cannot really tell whether this result is an approximation. */
ceea063b 487 return true;
488 }
489 else
69fbc3aa 490 {
491 *bitposp = 0;
ceea063b 492 *alignp = BITS_PER_UNIT;
493 return false;
69fbc3aa 494 }
53800dbe 495 }
0bb8b39a 496 else if (TREE_CODE (exp) == INTEGER_CST)
497 {
498 *alignp = BIGGEST_ALIGNMENT;
f9ae6f95 499 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
0bb8b39a 500 & (BIGGEST_ALIGNMENT - 1));
501 return true;
502 }
153c3b50 503
69fbc3aa 504 *bitposp = 0;
ceea063b 505 *alignp = BITS_PER_UNIT;
506 return false;
53800dbe 507}
508
69fbc3aa 509/* Return the alignment in bits of EXP, a pointer valued expression.
510 The alignment returned is, by default, the alignment of the thing that
511 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
512
513 Otherwise, look at the expression to see if we can do better, i.e., if the
514 expression is actually pointing at an object whose alignment is tighter. */
515
516unsigned int
517get_pointer_alignment (tree exp)
518{
519 unsigned HOST_WIDE_INT bitpos = 0;
520 unsigned int align;
ceea063b 521
522 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 523
524 /* align and bitpos now specify known low bits of the pointer.
525 ptr & (align - 1) == bitpos. */
526
527 if (bitpos != 0)
528 align = (bitpos & -bitpos);
529
530 return align;
531}
532
53800dbe 533/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
534 way, because it could contain a zero byte in the middle.
535 TREE_STRING_LENGTH is the size of the character array, not the string.
536
4172d65e 537 ONLY_VALUE should be nonzero if the result is not going to be emitted
c09841f6 538 into the instruction stream and zero if it is going to be expanded.
4172d65e 539 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
681fab1e 540 is returned, otherwise NULL, since
541 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
542 evaluate the side-effects.
543
6bda159e 544 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
545 accesses. Note that this implies the result is not going to be emitted
546 into the instruction stream.
547
902de8ed 548 The value returned is of type `ssizetype'.
549
53800dbe 550 Unfortunately, string_constant can't access the values of const char
551 arrays with initializers, so neither can we do so here. */
552
4ee9c684 553tree
681fab1e 554c_strlen (tree src, int only_value)
53800dbe 555{
556 tree offset_node;
27d0c333 557 HOST_WIDE_INT offset;
558 int max;
44acf429 559 const char *ptr;
da136652 560 location_t loc;
53800dbe 561
681fab1e 562 STRIP_NOPS (src);
563 if (TREE_CODE (src) == COND_EXPR
564 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
565 {
566 tree len1, len2;
567
568 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
569 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
0862b7e9 570 if (tree_int_cst_equal (len1, len2))
681fab1e 571 return len1;
572 }
573
574 if (TREE_CODE (src) == COMPOUND_EXPR
575 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
576 return c_strlen (TREE_OPERAND (src, 1), only_value);
577
3df42822 578 loc = EXPR_LOC_OR_LOC (src, input_location);
da136652 579
53800dbe 580 src = string_constant (src, &offset_node);
581 if (src == 0)
c2f47e15 582 return NULL_TREE;
902de8ed 583
83d79705 584 max = TREE_STRING_LENGTH (src) - 1;
53800dbe 585 ptr = TREE_STRING_POINTER (src);
902de8ed 586
53800dbe 587 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
588 {
589 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
590 compute the offset to the following null if we don't know where to
591 start searching for it. */
592 int i;
902de8ed 593
53800dbe 594 for (i = 0; i < max; i++)
595 if (ptr[i] == 0)
c2f47e15 596 return NULL_TREE;
902de8ed 597
53800dbe 598 /* We don't know the starting offset, but we do know that the string
599 has no internal zero bytes. We can assume that the offset falls
600 within the bounds of the string; otherwise, the programmer deserves
601 what he gets. Subtract the offset from the length of the string,
902de8ed 602 and return that. This would perhaps not be valid if we were dealing
603 with named arrays in addition to literal string constants. */
604
da136652 605 return size_diffop_loc (loc, size_int (max), offset_node);
53800dbe 606 }
607
608 /* We have a known offset into the string. Start searching there for
27d0c333 609 a null character if we can represent it as a single HOST_WIDE_INT. */
dabc4084 610 if (offset_node == 0)
53800dbe 611 offset = 0;
35ec552a 612 else if (! tree_fits_shwi_p (offset_node))
dabc4084 613 offset = -1;
53800dbe 614 else
e913b5cd 615 offset = tree_to_shwi (offset_node);
902de8ed 616
1f63a7d6 617 /* If the offset is known to be out of bounds, warn, and call strlen at
618 runtime. */
2f1c4f17 619 if (offset < 0 || offset > max)
53800dbe 620 {
1f63a7d6 621 /* Suppress multiple warnings for propagated constant strings. */
2f1c4f17 622 if (only_value != 2
623 && !TREE_NO_WARNING (src))
1f63a7d6 624 {
da136652 625 warning_at (loc, 0, "offset outside bounds of constant string");
1f63a7d6 626 TREE_NO_WARNING (src) = 1;
627 }
c2f47e15 628 return NULL_TREE;
53800dbe 629 }
902de8ed 630
53800dbe 631 /* Use strlen to search for the first zero byte. Since any strings
632 constructed with build_string will have nulls appended, we win even
633 if we get handed something like (char[4])"abcd".
634
635 Since OFFSET is our starting index into the string, no further
636 calculation is needed. */
902de8ed 637 return ssize_int (strlen (ptr + offset));
53800dbe 638}
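
/* Illustration of the cases handled above (hypothetical inputs): for the
   constant string "hello" with a constant offset of 2 the result is
   ssize_int (3); for "foo\0bar" with a non-constant offset the result is
   NULL_TREE, because the internal zero byte makes the length depend on
   where the search starts; and for an out-of-bounds constant offset a
   warning is issued and NULL_TREE is returned so that strlen is called at
   run time instead.  */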
639
83d79705 640/* Return a char pointer for a C string if it is a string constant
641 or sum of string constant and integer constant. */
642
b9ea678c 643const char *
aecda0d6 644c_getstr (tree src)
83d79705 645{
646 tree offset_node;
83d79705 647
648 src = string_constant (src, &offset_node);
649 if (src == 0)
650 return 0;
651
8c85fcb7 652 if (offset_node == 0)
653 return TREE_STRING_POINTER (src);
e913b5cd 654 else if (!tree_fits_uhwi_p (offset_node)
8c85fcb7 655 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
83d79705 656 return 0;
83d79705 657
e913b5cd 658 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
83d79705 659}
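
/* For illustration: given the address of the string constant "hello",
   c_getstr returns the host pointer to "hello"; with a constant offset of
   2 folded into the address it returns a pointer to "llo"; for a
   non-constant or out-of-range offset it returns 0.  */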
660
e913b5cd 661/* Return a constant integer corresponding to target reading
8c85fcb7 662 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
ecc318ff 663
6840589f 664static rtx
3754d046 665c_readstr (const char *str, machine_mode mode)
6840589f 666{
6840589f 667 HOST_WIDE_INT ch;
668 unsigned int i, j;
e913b5cd 669 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 670
671 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 672 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
673 / HOST_BITS_PER_WIDE_INT;
674
a12aa4cc 675 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 676 for (i = 0; i < len; i++)
677 tmp[i] = 0;
6840589f 678
6840589f 679 ch = 1;
680 for (i = 0; i < GET_MODE_SIZE (mode); i++)
681 {
682 j = i;
683 if (WORDS_BIG_ENDIAN)
684 j = GET_MODE_SIZE (mode) - i - 1;
685 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 686 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 687 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
688 j *= BITS_PER_UNIT;
7d3f6cc7 689
6840589f 690 if (ch)
691 ch = (unsigned char) str[i];
e913b5cd 692 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 693 }
ddb1be65 694
ab2c1de8 695 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 696 return immed_wide_int_const (c, mode);
6840589f 697}
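
/* A sketch of what the routine above produces, assuming an 8-bit byte:
   c_readstr ("abcd", SImode) yields the SImode constant whose in-memory
   image on the target is the bytes 'a' 'b' 'c' 'd' in that order, e.g.
   0x64636261 on a little-endian target and 0x61626364 on a big-endian
   one.  Once a zero byte is seen, the remaining bytes are filled with
   zeros.  */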
698
ecc318ff 699/* Cast a target constant CST to a target CHAR and, if that value fits into
5206b159 700 the host char type, return zero and put that value into the variable
ecc318ff 701 pointed to by P. */
702
703static int
aecda0d6 704target_char_cast (tree cst, char *p)
ecc318ff 705{
706 unsigned HOST_WIDE_INT val, hostval;
707
c19686c5 708 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 709 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
710 return 1;
711
e913b5cd 712 /* Do not care if it fits or not right here. */
f9ae6f95 713 val = TREE_INT_CST_LOW (cst);
e913b5cd 714
ecc318ff 715 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
716 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
717
718 hostval = val;
719 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
720 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
721
722 if (val != hostval)
723 return 1;
724
725 *p = hostval;
726 return 0;
727}
728
4ee9c684 729/* Similar to save_expr, but assumes that arbitrary code is not executed
730 in between the multiple evaluations. In particular, we assume that a
731 non-addressable local variable will not be modified. */
732
733static tree
734builtin_save_expr (tree exp)
735{
f6c35aa4 736 if (TREE_CODE (exp) == SSA_NAME
737 || (TREE_ADDRESSABLE (exp) == 0
738 && (TREE_CODE (exp) == PARM_DECL
739 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
4ee9c684 740 return exp;
741
742 return save_expr (exp);
743}
744
53800dbe 745/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
746 times to get the address of either a higher stack frame, or a return
747 address located within it (depending on FNDECL_CODE). */
902de8ed 748
c626df3d 749static rtx
869d0ef0 750expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 751{
752 int i;
753
869d0ef0 754#ifdef INITIAL_FRAME_ADDRESS_RTX
755 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
756#else
e3e15c50 757 rtx tem;
758
1b74fde7 759 /* For a zero count with __builtin_return_address, we don't care what
760 frame address we return, because target-specific definitions will
761 override us. Therefore frame pointer elimination is OK, and using
762 the soft frame pointer is OK.
763
fa7637bd 764 For a nonzero count, or a zero count with __builtin_frame_address,
1b74fde7 765 we require a stable offset from the current frame pointer to the
766 previous one, so we must use the hard frame pointer, and
e3e15c50 767 we must disable frame pointer elimination. */
1b74fde7 768 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
e3e15c50 769 tem = frame_pointer_rtx;
a0c938f0 770 else
e3e15c50 771 {
772 tem = hard_frame_pointer_rtx;
773
774 /* Tell reload not to eliminate the frame pointer. */
18d50ae6 775 crtl->accesses_prior_frames = 1;
e3e15c50 776 }
869d0ef0 777#endif
778
53800dbe 779 /* Some machines need special handling before we can access
3a69c60c 780 arbitrary frames. For example, on the SPARC, we must first flush
53800dbe 781 all register windows to the stack. */
782#ifdef SETUP_FRAME_ADDRESSES
783 if (count > 0)
784 SETUP_FRAME_ADDRESSES ();
785#endif
786
3a69c60c 787 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 788 register. There is no way to access it off of the current frame
789 pointer, but it can be accessed off the previous frame pointer by
790 reading the value from the register window save area. */
a26d6c60 791 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 792 count--;
53800dbe 793
794 /* Scan back COUNT frames to the specified frame. */
795 for (i = 0; i < count; i++)
796 {
797 /* Assume the dynamic chain pointer is in the word that the
798 frame address points to, unless otherwise specified. */
799#ifdef DYNAMIC_CHAIN_ADDRESS
800 tem = DYNAMIC_CHAIN_ADDRESS (tem);
801#endif
802 tem = memory_address (Pmode, tem);
00060fc2 803 tem = gen_frame_mem (Pmode, tem);
83fc1478 804 tem = copy_to_reg (tem);
53800dbe 805 }
806
3a69c60c 807 /* For __builtin_frame_address, return what we've got. But, on
808 the SPARC for example, we may have to add a bias. */
53800dbe 809 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 810#ifdef FRAME_ADDR_RTX
811 return FRAME_ADDR_RTX (tem);
812#else
53800dbe 813 return tem;
3a69c60c 814#endif
53800dbe 815
3a69c60c 816 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 817#ifdef RETURN_ADDR_RTX
818 tem = RETURN_ADDR_RTX (count, tem);
819#else
820 tem = memory_address (Pmode,
29c05e22 821 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 822 tem = gen_frame_mem (Pmode, tem);
53800dbe 823#endif
824 return tem;
825}
826
f7c44134 827/* Alias set used for setjmp buffer. */
32c2fdea 828static alias_set_type setjmp_alias_set = -1;
f7c44134 829
6b7f6858 830/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 831 return to RECEIVER_LABEL. This is also called directly by the SJLJ
832 exception handling code. */
53800dbe 833
6b7f6858 834void
aecda0d6 835expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 836{
3754d046 837 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 838 rtx stack_save;
f7c44134 839 rtx mem;
53800dbe 840
f7c44134 841 if (setjmp_alias_set == -1)
842 setjmp_alias_set = new_alias_set ();
843
85d654dd 844 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 845
37ae8504 846 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 847
6b7f6858 848 /* We store the frame pointer and the address of receiver_label in
849 the buffer and use the rest of it for the stack save area, which
850 is machine-dependent. */
53800dbe 851
f7c44134 852 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 853 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 854 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 855
29c05e22 856 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
857 GET_MODE_SIZE (Pmode))),
ab6ab77e 858 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 859
860 emit_move_insn (validize_mem (mem),
6b7f6858 861 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 862
863 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 864 plus_constant (Pmode, buf_addr,
53800dbe 865 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 866 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 867 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 868
869 /* If there is further processing to do, do it. */
870#ifdef HAVE_builtin_setjmp_setup
871 if (HAVE_builtin_setjmp_setup)
872 emit_insn (gen_builtin_setjmp_setup (buf_addr));
873#endif
874
29f09705 875 /* We have a nonlocal label. */
18d50ae6 876 cfun->has_nonlocal_label = 1;
6b7f6858 877}
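
/* A sketch of the buffer laid out by the code above, with slot offsets in
   units of GET_MODE_SIZE (Pmode):

     slot 0    frame value from targetm.builtin_setjmp_frame_value ()
     slot 1    address of RECEIVER_LABEL
     slot 2..  stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   expand_builtin_longjmp below reads the buffer back in the same order.  */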
53800dbe 878
2c8a1497 879/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 880 also called directly by the SJLJ exception handling code.
881 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
6b7f6858 882
883void
aecda0d6 884expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
6b7f6858 885{
82c7907c 886 rtx chain;
887
4598ade9 888 /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 889 marked as used by this function. */
18b42941 890 emit_use (hard_frame_pointer_rtx);
53800dbe 891
892 /* Mark the static chain as clobbered here so life information
893 doesn't get messed up for it. */
82c7907c 894 chain = targetm.calls.static_chain (current_function_decl, true);
895 if (chain && REG_P (chain))
896 emit_clobber (chain);
53800dbe 897
898 /* Now put in the code to restore the frame pointer, and argument
491e04ef 899 pointer, if needed. */
53800dbe 900#ifdef HAVE_nonlocal_goto
901 if (! HAVE_nonlocal_goto)
902#endif
62dcb5c8 903 {
904 /* First adjust our frame pointer to its actual value. It was
905 previously set to the start of the virtual area corresponding to
906 the stacked variables when we branched here and now needs to be
907 adjusted to the actual hardware fp value.
908
909 Assignments to virtual registers are converted by
910 instantiate_virtual_regs into the corresponding assignment
911 to the underlying register (fp in this case) that makes
912 the original assignment true.
913 So the following insn will actually be decrementing fp by
914 STARTING_FRAME_OFFSET. */
915 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
916
917 /* Restoring the frame pointer also modifies the hard frame pointer.
918 Mark it used (so that the previous assignment remains live once
919 the frame pointer is eliminated) and clobbered (to represent the
920 implicit update from the assignment). */
921 emit_use (hard_frame_pointer_rtx);
922 emit_clobber (hard_frame_pointer_rtx);
923 }
53800dbe 924
5ae82d58 925#if !HARD_FRAME_POINTER_IS_ARG_POINTER
53800dbe 926 if (fixed_regs[ARG_POINTER_REGNUM])
927 {
928#ifdef ELIMINABLE_REGS
4598ade9 929 /* If the argument pointer can be eliminated in favor of the
930 frame pointer, we don't need to restore it. We assume here
931 that if such an elimination is present, it can always be used.
932 This is the case on all known machines; if we don't make this
933 assumption, we do unnecessary saving on many machines. */
53800dbe 934 size_t i;
e99c3a1d 935 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 936
3098b2d3 937 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 938 if (elim_regs[i].from == ARG_POINTER_REGNUM
939 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
940 break;
941
3098b2d3 942 if (i == ARRAY_SIZE (elim_regs))
53800dbe 943#endif
944 {
945 /* Now restore our arg pointer from the address at which it
05927e40 946 was saved in our stack frame. */
27a7a23a 947 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 948 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 949 }
950 }
951#endif
952
953#ifdef HAVE_builtin_setjmp_receiver
4598ade9 954 if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
6b7f6858 955 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
53800dbe 956 else
957#endif
958#ifdef HAVE_nonlocal_goto_receiver
959 if (HAVE_nonlocal_goto_receiver)
960 emit_insn (gen_nonlocal_goto_receiver ());
961 else
962#endif
6b7f6858 963 { /* Nothing */ }
57f6bb94 964
3072d30e 965 /* We must not allow the code we just generated to be reordered by
966 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 967 happen immediately, not later. */
3072d30e 968 emit_insn (gen_blockage ());
6b7f6858 969}
53800dbe 970
53800dbe 971/* __builtin_longjmp is passed a pointer to an array of five words (not
972 all will be used on all machines). It operates similarly to the C
973 library function of the same name, but is more efficient. Much of
2c8a1497 974 the code below is copied from the handling of non-local gotos. */
53800dbe 975
c626df3d 976static void
aecda0d6 977expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 978{
1e0c0b35 979 rtx fp, lab, stack;
980 rtx_insn *insn, *last;
3754d046 981 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 982
48e1416a 983 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 984 function */
985 if (SUPPORTS_STACK_ALIGNMENT)
986 crtl->need_drap = true;
987
f7c44134 988 if (setjmp_alias_set == -1)
989 setjmp_alias_set = new_alias_set ();
990
85d654dd 991 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 992
53800dbe 993 buf_addr = force_reg (Pmode, buf_addr);
994
82c7907c 995 /* We require that the user must pass a second argument of 1, because
996 that is what builtin_setjmp will return. */
64db345d 997 gcc_assert (value == const1_rtx);
53800dbe 998
4712c7d6 999 last = get_last_insn ();
53800dbe 1000#ifdef HAVE_builtin_longjmp
1001 if (HAVE_builtin_longjmp)
1002 emit_insn (gen_builtin_longjmp (buf_addr));
1003 else
1004#endif
1005 {
1006 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1007 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1008 GET_MODE_SIZE (Pmode)));
1009
29c05e22 1010 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1011 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1012 set_mem_alias_set (fp, setjmp_alias_set);
1013 set_mem_alias_set (lab, setjmp_alias_set);
1014 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1015
1016 /* Pick up FP, label, and SP from the block and jump. This code is
1017 from expand_goto in stmt.c; see there for detailed comments. */
03fd9d2c 1018#ifdef HAVE_nonlocal_goto
53800dbe 1019 if (HAVE_nonlocal_goto)
1020 /* We have to pass a value to the nonlocal_goto pattern that will
1021 get copied into the static_chain pointer, but it does not matter
1022 what that value is, because builtin_setjmp does not use it. */
28d202a8 1023 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1024 else
1025#endif
1026 {
1027 lab = copy_to_reg (lab);
1028
18b42941 1029 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1030 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1031
53800dbe 1032 emit_move_insn (hard_frame_pointer_rtx, fp);
e9c97615 1033 emit_stack_restore (SAVE_NONLOCAL, stack);
53800dbe 1034
18b42941 1035 emit_use (hard_frame_pointer_rtx);
1036 emit_use (stack_pointer_rtx);
53800dbe 1037 emit_indirect_jump (lab);
1038 }
1039 }
615166bb 1040
1041 /* Search backwards and mark the jump insn as a non-local goto.
1042 Note that this precludes the use of __builtin_longjmp to a
1043 __builtin_setjmp target in the same function. However, we've
1044 already cautioned the user that these functions are for
1045 internal exception handling use only. */
449c0509 1046 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1047 {
64db345d 1048 gcc_assert (insn != last);
7d3f6cc7 1049
6d7dc5b9 1050 if (JUMP_P (insn))
449c0509 1051 {
a1ddb869 1052 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1053 break;
1054 }
6d7dc5b9 1055 else if (CALL_P (insn))
9342ee68 1056 break;
449c0509 1057 }
53800dbe 1058}
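
/* Usage note: the call being expanded here is always of the form

     __builtin_longjmp (buf, 1);

   the second argument must be the constant 1 (enforced by the gcc_assert
   above), since that is the value __builtin_setjmp returns on the second
   return.  */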
1059
0e80b01d 1060static inline bool
1061more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1062{
1063 return (iter->i < iter->n);
1064}
1065
1066/* This function validates the types of a function call argument list
1067 against a specified list of tree_codes. If the last specifier is a 0,
 1068 that represents an ellipsis; otherwise the last specifier must be a
1069 VOID_TYPE. */
1070
1071static bool
1072validate_arglist (const_tree callexpr, ...)
1073{
1074 enum tree_code code;
1075 bool res = 0;
1076 va_list ap;
1077 const_call_expr_arg_iterator iter;
1078 const_tree arg;
1079
1080 va_start (ap, callexpr);
1081 init_const_call_expr_arg_iterator (callexpr, &iter);
1082
1083 do
1084 {
1085 code = (enum tree_code) va_arg (ap, int);
1086 switch (code)
1087 {
1088 case 0:
 1089 /* This signifies an ellipsis; any further arguments are all OK. */
1090 res = true;
1091 goto end;
1092 case VOID_TYPE:
 1093 /* This signifies an endlink: if no arguments remain, return
 1094 true; otherwise return false. */
1095 res = !more_const_call_expr_args_p (&iter);
1096 goto end;
1097 default:
1098 /* If no parameters remain or the parameter's code does not
1099 match the specified code, return false. Otherwise continue
1100 checking any remaining arguments. */
1101 arg = next_const_call_expr_arg (&iter);
1102 if (!validate_arg (arg, code))
1103 goto end;
1104 break;
1105 }
1106 }
1107 while (1);
1108
1109 /* We need gotos here since we can only have one VA_CLOSE in a
1110 function. */
1111 end: ;
1112 va_end (ap);
1113
1114 return res;
1115}
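
/* Example uses, taken from calls later in this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   requires exactly two pointer arguments, while

     validate_arglist (exp, POINTER_TYPE, 0)

   only checks that the first argument is a pointer and accepts anything
   after it.  */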
1116
4ee9c684 1117/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1118 and the address of the save area. */
1119
1120static rtx
c2f47e15 1121expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1122{
1123 tree t_label, t_save_area;
1e0c0b35 1124 rtx r_label, r_save_area, r_fp, r_sp;
1125 rtx_insn *insn;
4ee9c684 1126
c2f47e15 1127 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1128 return NULL_RTX;
1129
c2f47e15 1130 t_label = CALL_EXPR_ARG (exp, 0);
1131 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1132
8ec3c5c2 1133 r_label = expand_normal (t_label);
3dce56cc 1134 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1135 r_save_area = expand_normal (t_save_area);
3dce56cc 1136 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1137 /* Copy the address of the save location to a register just in case it was
1138 based on the frame pointer. */
51adbc8a 1139 r_save_area = copy_to_reg (r_save_area);
4ee9c684 1140 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1141 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1142 plus_constant (Pmode, r_save_area,
1143 GET_MODE_SIZE (Pmode)));
4ee9c684 1144
18d50ae6 1145 crtl->has_nonlocal_goto = 1;
4ee9c684 1146
03fd9d2c 1147#ifdef HAVE_nonlocal_goto
4ee9c684 1148 /* ??? We no longer need to pass the static chain value, afaik. */
1149 if (HAVE_nonlocal_goto)
1150 emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1151 else
1152#endif
1153 {
1154 r_label = copy_to_reg (r_label);
1155
18b42941 1156 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1157 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1158
d1ff492e 1159 /* Restore frame pointer for containing function. */
4ee9c684 1160 emit_move_insn (hard_frame_pointer_rtx, r_fp);
e9c97615 1161 emit_stack_restore (SAVE_NONLOCAL, r_sp);
491e04ef 1162
4ee9c684 1163 /* USE of hard_frame_pointer_rtx added for consistency;
1164 not clear if really needed. */
18b42941 1165 emit_use (hard_frame_pointer_rtx);
1166 emit_use (stack_pointer_rtx);
ad0d0af8 1167
1168 /* If the architecture is using a GP register, we must
1169 conservatively assume that the target function makes use of it.
1170 The prologue of functions with nonlocal gotos must therefore
1171 initialize the GP register to the appropriate value, and we
1172 must then make sure that this value is live at the point
1173 of the jump. (Note that this doesn't necessarily apply
1174 to targets with a nonlocal_goto pattern; they are free
1175 to implement it in their own way. Note also that this is
1176 a no-op if the GP register is a global invariant.) */
1177 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1178 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
18b42941 1179 emit_use (pic_offset_table_rtx);
ad0d0af8 1180
4ee9c684 1181 emit_indirect_jump (r_label);
1182 }
491e04ef 1183
4ee9c684 1184 /* Search backwards to the jump insn and mark it as a
1185 non-local goto. */
1186 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1187 {
6d7dc5b9 1188 if (JUMP_P (insn))
4ee9c684 1189 {
a1ddb869 1190 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1191 break;
1192 }
6d7dc5b9 1193 else if (CALL_P (insn))
4ee9c684 1194 break;
1195 }
1196
1197 return const0_rtx;
1198}
1199
843d08a9 1200/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1201 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1202 It updates the stack pointer in that block to the current value. This is
1203 also called directly by the SJLJ exception handling code. */
843d08a9 1204
97354ae4 1205void
843d08a9 1206expand_builtin_update_setjmp_buf (rtx buf_addr)
1207{
3754d046 1208 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
d1ff492e 1209 rtx stack_save
843d08a9 1210 = gen_rtx_MEM (sa_mode,
1211 memory_address
1212 (sa_mode,
29c05e22 1213 plus_constant (Pmode, buf_addr,
1214 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1215
e9c97615 1216 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1217}
1218
5e3608d8 1219/* Expand a call to __builtin_prefetch. For a target that does not support
1220 data prefetch, evaluate the memory address argument in case it has side
1221 effects. */
1222
1223static void
c2f47e15 1224expand_builtin_prefetch (tree exp)
5e3608d8 1225{
1226 tree arg0, arg1, arg2;
c2f47e15 1227 int nargs;
5e3608d8 1228 rtx op0, op1, op2;
1229
c2f47e15 1230 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1231 return;
1232
c2f47e15 1233 arg0 = CALL_EXPR_ARG (exp, 0);
1234
26a5cadb 1235 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1236 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1237 locality). */
c2f47e15 1238 nargs = call_expr_nargs (exp);
1239 if (nargs > 1)
1240 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1241 else
c2f47e15 1242 arg1 = integer_zero_node;
1243 if (nargs > 2)
1244 arg2 = CALL_EXPR_ARG (exp, 2);
1245 else
2512209b 1246 arg2 = integer_three_node;
5e3608d8 1247
1248 /* Argument 0 is an address. */
1249 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1250
1251 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1252 if (TREE_CODE (arg1) != INTEGER_CST)
1253 {
07e3a3d2 1254 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1255 arg1 = integer_zero_node;
5e3608d8 1256 }
8ec3c5c2 1257 op1 = expand_normal (arg1);
5e3608d8 1258 /* Argument 1 must be either zero or one. */
1259 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1260 {
c3ceba8e 1261 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1262 " using zero");
5e3608d8 1263 op1 = const0_rtx;
1264 }
1265
1266 /* Argument 2 (locality) must be a compile-time constant int. */
1267 if (TREE_CODE (arg2) != INTEGER_CST)
1268 {
07e3a3d2 1269 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1270 arg2 = integer_zero_node;
1271 }
8ec3c5c2 1272 op2 = expand_normal (arg2);
5e3608d8 1273 /* Argument 2 must be 0, 1, 2, or 3. */
1274 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1275 {
c3ceba8e 1276 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1277 op2 = const0_rtx;
1278 }
1279
1280#ifdef HAVE_prefetch
1281 if (HAVE_prefetch)
1282 {
8786db1e 1283 struct expand_operand ops[3];
1284
1285 create_address_operand (&ops[0], op0);
1286 create_integer_operand (&ops[1], INTVAL (op1));
1287 create_integer_operand (&ops[2], INTVAL (op2));
1288 if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
1289 return;
5e3608d8 1290 }
5e3608d8 1291#endif
0a534ba7 1292
f0ce3b1f 1293 /* Don't do anything with direct references to volatile memory, but
1294 generate code to handle other side effects. */
e16ceb8e 1295 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1296 emit_insn (op0);
5e3608d8 1297}
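
/* For reference, the source forms handled above are e.g.

     __builtin_prefetch (p);         read/write defaults to 0, locality to 3
     __builtin_prefetch (p, 1, 1);   prefetch for a write with low locality

   where the second and third arguments must be compile-time integer
   constants.  */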
1298
f7c44134 1299/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1300 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1301 the maximum length of the block of memory that might be accessed or
1302 NULL if unknown. */
f7c44134 1303
53800dbe 1304static rtx
d8ae1baa 1305get_memory_rtx (tree exp, tree len)
53800dbe 1306{
ad0a178f 1307 tree orig_exp = exp;
1308 rtx addr, mem;
ad0a178f 1309
 1310 /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
 1311 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1312 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1313 exp = TREE_OPERAND (exp, 0);
1314
1315 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1316 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1317
f7c44134 1318 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1319 First remove any nops. */
72dd6141 1320 while (CONVERT_EXPR_P (exp)
f7c44134 1321 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1322 exp = TREE_OPERAND (exp, 0);
1323
5dd3f78f 1324 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1325 (as builtin stringops may alias with anything). */
1326 exp = fold_build2 (MEM_REF,
1327 build_array_type (char_type_node,
1328 build_range_type (sizetype,
1329 size_one_node, len)),
1330 exp, build_int_cst (ptr_type_node, 0));
1331
1332 /* If the MEM_REF has no acceptable address, try to get the base object
1333 from the original address we got, and build an all-aliasing
1334 unknown-sized access to that one. */
1335 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1336 set_mem_attributes (mem, exp, 0);
1337 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1338 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1339 0))))
eec8e941 1340 {
5dd3f78f 1341 exp = build_fold_addr_expr (exp);
1342 exp = fold_build2 (MEM_REF,
1343 build_array_type (char_type_node,
1344 build_range_type (sizetype,
1345 size_zero_node,
1346 NULL)),
1347 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1348 set_mem_attributes (mem, exp, 0);
eec8e941 1349 }
5dd3f78f 1350 set_mem_alias_set (mem, 0);
53800dbe 1351 return mem;
1352}
1353\f
1354/* Built-in functions to perform an untyped call and return. */
1355
3b9c3a16 1356#define apply_args_mode \
1357 (this_target_builtins->x_apply_args_mode)
1358#define apply_result_mode \
1359 (this_target_builtins->x_apply_result_mode)
53800dbe 1360
53800dbe 1361/* Return the size required for the block returned by __builtin_apply_args,
1362 and initialize apply_args_mode. */
1363
1364static int
aecda0d6 1365apply_args_size (void)
53800dbe 1366{
1367 static int size = -1;
58e9ce8f 1368 int align;
1369 unsigned int regno;
3754d046 1370 machine_mode mode;
53800dbe 1371
1372 /* The values computed by this function never change. */
1373 if (size < 0)
1374 {
1375 /* The first value is the incoming arg-pointer. */
1376 size = GET_MODE_SIZE (Pmode);
1377
1378 /* The second value is the structure value address unless this is
1379 passed as an "invisible" first argument. */
6812c89e 1380 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1381 size += GET_MODE_SIZE (Pmode);
1382
1383 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1384 if (FUNCTION_ARG_REGNO_P (regno))
1385 {
4bac51c9 1386 mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1387
64db345d 1388 gcc_assert (mode != VOIDmode);
53800dbe 1389
1390 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1391 if (size % align != 0)
1392 size = CEIL (size, align) * align;
53800dbe 1393 size += GET_MODE_SIZE (mode);
1394 apply_args_mode[regno] = mode;
1395 }
1396 else
1397 {
1398 apply_args_mode[regno] = VOIDmode;
53800dbe 1399 }
1400 }
1401 return size;
1402}
1403
1404/* Return the size required for the block returned by __builtin_apply,
1405 and initialize apply_result_mode. */
1406
1407static int
aecda0d6 1408apply_result_size (void)
53800dbe 1409{
1410 static int size = -1;
1411 int align, regno;
3754d046 1412 machine_mode mode;
53800dbe 1413
1414 /* The values computed by this function never change. */
1415 if (size < 0)
1416 {
1417 size = 0;
1418
1419 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1420 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1421 {
4bac51c9 1422 mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1423
64db345d 1424 gcc_assert (mode != VOIDmode);
53800dbe 1425
1426 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1427 if (size % align != 0)
1428 size = CEIL (size, align) * align;
1429 size += GET_MODE_SIZE (mode);
1430 apply_result_mode[regno] = mode;
1431 }
1432 else
1433 apply_result_mode[regno] = VOIDmode;
1434
1435 /* Allow targets that use untyped_call and untyped_return to override
1436 the size so that machine-specific information can be stored here. */
1437#ifdef APPLY_RESULT_SIZE
1438 size = APPLY_RESULT_SIZE;
1439#endif
1440 }
1441 return size;
1442}
1443
1444#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1445/* Create a vector describing the result block RESULT. If SAVEP is true,
1446 the result block is used to save the values; otherwise it is used to
1447 restore the values. */
1448
1449static rtx
aecda0d6 1450result_vector (int savep, rtx result)
53800dbe 1451{
1452 int regno, size, align, nelts;
3754d046 1453 machine_mode mode;
53800dbe 1454 rtx reg, mem;
364c0c59 1455 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1456
53800dbe 1457 size = nelts = 0;
1458 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1459 if ((mode = apply_result_mode[regno]) != VOIDmode)
1460 {
1461 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1462 if (size % align != 0)
1463 size = CEIL (size, align) * align;
1464 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1465 mem = adjust_address (result, mode, size);
53800dbe 1466 savevec[nelts++] = (savep
d1f9b275 1467 ? gen_rtx_SET (mem, reg)
1468 : gen_rtx_SET (reg, mem));
53800dbe 1469 size += GET_MODE_SIZE (mode);
1470 }
1471 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1472}
1473#endif /* HAVE_untyped_call or HAVE_untyped_return */
1474
1475/* Save the state required to perform an untyped call with the same
1476 arguments as were passed to the current function. */
1477
1478static rtx
aecda0d6 1479expand_builtin_apply_args_1 (void)
53800dbe 1480{
1c7e61a7 1481 rtx registers, tem;
53800dbe 1482 int size, align, regno;
3754d046 1483 machine_mode mode;
6812c89e 1484 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1485
1486 /* Create a block where the arg-pointer, structure value address,
1487 and argument registers can be saved. */
1488 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1489
1490 /* Walk past the arg-pointer and structure value address. */
1491 size = GET_MODE_SIZE (Pmode);
6812c89e 1492 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1493 size += GET_MODE_SIZE (Pmode);
1494
1495 /* Save each register used in calling a function to the block. */
1496 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1497 if ((mode = apply_args_mode[regno]) != VOIDmode)
1498 {
53800dbe 1499 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1500 if (size % align != 0)
1501 size = CEIL (size, align) * align;
1502
1503 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1504
e513d163 1505 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1506 size += GET_MODE_SIZE (mode);
1507 }
1508
1509 /* Save the arg pointer to the block. */
27a7a23a 1510 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1511   /* We need the pointer to the arguments as the caller actually passed them to us, not
9d4b544c 1512 as we might have pretended they were passed. Make sure it's a valid
1513 operand, as emit_move_insn isn't expected to handle a PLUS. */
3764c94e 1514 if (STACK_GROWS_DOWNWARD)
1515 tem
1516 = force_operand (plus_constant (Pmode, tem,
1517 crtl->args.pretend_args_size),
1518 NULL_RTX);
1c7e61a7 1519 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1520
53800dbe 1521 size = GET_MODE_SIZE (Pmode);
1522
1523 /* Save the structure value address unless this is passed as an
1524 "invisible" first argument. */
45550790 1525 if (struct_incoming_value)
53800dbe 1526 {
e513d163 1527 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1528 copy_to_reg (struct_incoming_value));
53800dbe 1529 size += GET_MODE_SIZE (Pmode);
1530 }
1531
1532 /* Return the address of the block. */
1533 return copy_addr_to_reg (XEXP (registers, 0));
1534}
1535
1536/* __builtin_apply_args returns block of memory allocated on
1537 the stack into which is stored the arg pointer, structure
1538 value address, static chain, and all the registers that might
1539 possibly be used in performing a function call. The code is
1540 moved to the start of the function so the incoming values are
1541 saved. */
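/* Illustrative GNU C usage of the three untyped-call builtins expanded in
   this file (a sketch only; other_fn is hypothetical and 64 stands for the
   size in bytes of the stack argument data):

       void *args = __builtin_apply_args ();
       void *res  = __builtin_apply ((void (*)()) other_fn, args, 64);
       __builtin_return (res);

   This forwards the current function's arguments to other_fn and then
   returns whatever other_fn returned.  */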
27d0c333 1542
53800dbe 1543static rtx
aecda0d6 1544expand_builtin_apply_args (void)
53800dbe 1545{
1546 /* Don't do __builtin_apply_args more than once in a function.
1547 Save the result of the first call and reuse it. */
1548 if (apply_args_value != 0)
1549 return apply_args_value;
1550 {
1551 /* When this function is called, it means that registers must be
1552 saved on entry to this function. So we migrate the
1553 call to the first insn of this function. */
1554 rtx temp;
53800dbe 1555
1556 start_sequence ();
1557 temp = expand_builtin_apply_args_1 ();
9ed997be 1558 rtx_insn *seq = get_insns ();
53800dbe 1559 end_sequence ();
1560
1561 apply_args_value = temp;
1562
31d3e01c 1563 /* Put the insns after the NOTE that starts the function.
1564 If this is inside a start_sequence, make the outer-level insn
53800dbe 1565 chain current, so the code is placed at the start of the
0ef1a651 1566 function. If internal_arg_pointer is a non-virtual pseudo,
1567 it needs to be placed after the function that initializes
1568 that pseudo. */
53800dbe 1569 push_topmost_sequence ();
0ef1a651 1570 if (REG_P (crtl->args.internal_arg_pointer)
1571 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1572 emit_insn_before (seq, parm_birth_insn);
1573 else
1574 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1575 pop_topmost_sequence ();
1576 return temp;
1577 }
1578}
1579
1580/* Perform an untyped call and save the state required to perform an
1581 untyped return of whatever value was returned by the given function. */
1582
1583static rtx
aecda0d6 1584expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1585{
1586 int size, align, regno;
3754d046 1587 machine_mode mode;
1e0c0b35 1588 rtx incoming_args, result, reg, dest, src;
1589 rtx_call_insn *call_insn;
53800dbe 1590 rtx old_stack_level = 0;
1591 rtx call_fusage = 0;
6812c89e 1592 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1593
85d654dd 1594 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1595
53800dbe 1596 /* Create a block where the return registers can be saved. */
1597 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1598
53800dbe 1599 /* Fetch the arg pointer from the ARGUMENTS block. */
1600 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1601 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1602 if (!STACK_GROWS_DOWNWARD)
1603 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1604 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1605
04a46d40 1606 /* Push a new argument block and copy the arguments. Do not allow
1607 the (potential) memcpy call below to interfere with our stack
1608 manipulations. */
53800dbe 1609 do_pending_stack_adjust ();
04a46d40 1610 NO_DEFER_POP;
53800dbe 1611
2358393e 1612 /* Save the stack with nonlocal if available. */
53800dbe 1613#ifdef HAVE_save_stack_nonlocal
1614 if (HAVE_save_stack_nonlocal)
e9c97615 1615 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1616 else
1617#endif
e9c97615 1618 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1619
59647703 1620 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1621 arguments to the outgoing arguments address. We can pass TRUE
1622 as the 4th argument because we just saved the stack pointer
1623 and will restore it right after the call. */
5be42b39 1624 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1625
1626 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1627 may have already set current_function_calls_alloca to true.
1628 current_function_calls_alloca won't be set if argsize is zero,
1629 so we have to guarantee need_drap is true here. */
1630 if (SUPPORTS_STACK_ALIGNMENT)
1631 crtl->need_drap = true;
1632
59647703 1633 dest = virtual_outgoing_args_rtx;
3764c94e 1634 if (!STACK_GROWS_DOWNWARD)
1635 {
1636 if (CONST_INT_P (argsize))
1637 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1638 else
1639 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1640 }
2a631e19 1641 dest = gen_rtx_MEM (BLKmode, dest);
1642 set_mem_align (dest, PARM_BOUNDARY);
1643 src = gen_rtx_MEM (BLKmode, incoming_args);
1644 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1645 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1646
1647 /* Refer to the argument block. */
1648 apply_args_size ();
1649 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1650 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1651
1652 /* Walk past the arg-pointer and structure value address. */
1653 size = GET_MODE_SIZE (Pmode);
45550790 1654 if (struct_value)
53800dbe 1655 size += GET_MODE_SIZE (Pmode);
1656
1657 /* Restore each of the registers previously saved. Make USE insns
1658 for each of these registers for use in making the call. */
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_args_mode[regno]) != VOIDmode)
1661 {
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665 reg = gen_rtx_REG (mode, regno);
e513d163 1666 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1667 use_reg (&call_fusage, reg);
1668 size += GET_MODE_SIZE (mode);
1669 }
1670
1671 /* Restore the structure value address unless this is passed as an
1672 "invisible" first argument. */
1673 size = GET_MODE_SIZE (Pmode);
45550790 1674 if (struct_value)
53800dbe 1675 {
1676 rtx value = gen_reg_rtx (Pmode);
e513d163 1677 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1678 emit_move_insn (struct_value, value);
8ad4c111 1679 if (REG_P (struct_value))
45550790 1680 use_reg (&call_fusage, struct_value);
53800dbe 1681 size += GET_MODE_SIZE (Pmode);
1682 }
1683
1684 /* All arguments and registers used for the call are set up by now! */
82c7907c 1685 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1686
1687 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1688 and we don't want to load it into a register as an optimization,
1689 because prepare_call_address already did it if it should be done. */
1690 if (GET_CODE (function) != SYMBOL_REF)
1691 function = memory_address (FUNCTION_MODE, function);
1692
1693 /* Generate the actual call instruction and save the return value. */
1694#ifdef HAVE_untyped_call
1695 if (HAVE_untyped_call)
1696 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1697 result, result_vector (1, result)));
1698 else
1699#endif
1700#ifdef HAVE_call_value
1701 if (HAVE_call_value)
1702 {
1703 rtx valreg = 0;
1704
1705 /* Locate the unique return register. It is not possible to
1706 express a call that sets more than one return register using
1707 call_value; use untyped_call for that. In fact, untyped_call
1708 only needs to save the return registers in the given block. */
1709 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1710 if ((mode = apply_result_mode[regno]) != VOIDmode)
1711 {
64db345d 1712 gcc_assert (!valreg); /* HAVE_untyped_call required. */
7d3f6cc7 1713
53800dbe 1714 valreg = gen_rtx_REG (mode, regno);
1715 }
1716
2ed6c343 1717 emit_call_insn (GEN_CALL_VALUE (valreg,
53800dbe 1718 gen_rtx_MEM (FUNCTION_MODE, function),
1719 const0_rtx, NULL_RTX, const0_rtx));
1720
e513d163 1721 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1722 }
1723 else
1724#endif
64db345d 1725 gcc_unreachable ();
53800dbe 1726
d5f9786f 1727 /* Find the CALL insn we just emitted, and attach the register usage
1728 information. */
1729 call_insn = last_call_insn ();
1730 add_function_usage_to (call_insn, call_fusage);
53800dbe 1731
1732 /* Restore the stack. */
1733#ifdef HAVE_save_stack_nonlocal
1734 if (HAVE_save_stack_nonlocal)
e9c97615 1735 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1736 else
1737#endif
e9c97615 1738 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1739 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1740
04a46d40 1741 OK_DEFER_POP;
1742
53800dbe 1743 /* Return the address of the result block. */
85d654dd 1744 result = copy_addr_to_reg (XEXP (result, 0));
1745 return convert_memory_address (ptr_mode, result);
53800dbe 1746}
1747
1748/* Perform an untyped return. */
1749
1750static void
aecda0d6 1751expand_builtin_return (rtx result)
53800dbe 1752{
1753 int size, align, regno;
3754d046 1754 machine_mode mode;
53800dbe 1755 rtx reg;
57c26b3a 1756 rtx_insn *call_fusage = 0;
53800dbe 1757
85d654dd 1758 result = convert_memory_address (Pmode, result);
726ec87c 1759
53800dbe 1760 apply_result_size ();
1761 result = gen_rtx_MEM (BLKmode, result);
1762
1763#ifdef HAVE_untyped_return
1764 if (HAVE_untyped_return)
1765 {
1766 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1767 emit_barrier ();
1768 return;
1769 }
1770#endif
1771
1772 /* Restore the return value and note that each value is used. */
1773 size = 0;
1774 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1775 if ((mode = apply_result_mode[regno]) != VOIDmode)
1776 {
1777 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1778 if (size % align != 0)
1779 size = CEIL (size, align) * align;
1780 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1781 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1782
1783 push_to_sequence (call_fusage);
18b42941 1784 emit_use (reg);
53800dbe 1785 call_fusage = get_insns ();
1786 end_sequence ();
1787 size += GET_MODE_SIZE (mode);
1788 }
1789
1790 /* Put the USE insns before the return. */
31d3e01c 1791 emit_insn (call_fusage);
53800dbe 1792
 1793   /* Return whatever value was restored by jumping directly to the end
1794 of the function. */
62380d2d 1795 expand_naked_return ();
53800dbe 1796}
1797
539a3a92 1798/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1799
539a3a92 1800static enum type_class
aecda0d6 1801type_to_class (tree type)
539a3a92 1802{
1803 switch (TREE_CODE (type))
1804 {
1805 case VOID_TYPE: return void_type_class;
1806 case INTEGER_TYPE: return integer_type_class;
539a3a92 1807 case ENUMERAL_TYPE: return enumeral_type_class;
1808 case BOOLEAN_TYPE: return boolean_type_class;
1809 case POINTER_TYPE: return pointer_type_class;
1810 case REFERENCE_TYPE: return reference_type_class;
1811 case OFFSET_TYPE: return offset_type_class;
1812 case REAL_TYPE: return real_type_class;
1813 case COMPLEX_TYPE: return complex_type_class;
1814 case FUNCTION_TYPE: return function_type_class;
1815 case METHOD_TYPE: return method_type_class;
1816 case RECORD_TYPE: return record_type_class;
1817 case UNION_TYPE:
1818 case QUAL_UNION_TYPE: return union_type_class;
1819 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1820 ? string_type_class : array_type_class);
539a3a92 1821 case LANG_TYPE: return lang_type_class;
1822 default: return no_type_class;
1823 }
1824}
bf8e3599 1825
c2f47e15 1826/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1827
53800dbe 1828static rtx
c2f47e15 1829expand_builtin_classify_type (tree exp)
53800dbe 1830{
c2f47e15 1831 if (call_expr_nargs (exp))
1832 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1833 return GEN_INT (no_type_class);
1834}
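/* For illustration (not from the original source): at the source level,
   __builtin_classify_type (1) evaluates to integer_type_class and
   __builtin_classify_type (1.0) to real_type_class, so stdarg-style macros
   can dispatch on the class of an argument at compile time.  */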
1835
07976da7 1836/* This helper macro, meant to be used in mathfn_built_in_1 below,
1837 determines which among a set of three builtin math functions is
1838 appropriate for a given type mode. The `F' and `L' cases are
1839 automatically generated from the `double' case. */
1840#define CASE_MATHFN(BUILT_IN_MATHFN) \
1841 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1842 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1843 fcodel = BUILT_IN_MATHFN##L ; break;
cd2656b0 1844/* Similar to above, but appends _R after any F/L suffix. */
1845#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1846 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1847 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1848 fcodel = BUILT_IN_MATHFN##L_R ; break;
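/* For example (illustrative), CASE_MATHFN (BUILT_IN_SIN) expands to

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;

   and CASE_MATHFN_REENT (BUILT_IN_LGAMMA) produces the same pattern for the
   _R-suffixed reentrant variants.  */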
07976da7 1849
b9a16870 1850/* Return the mathematical function equivalent to FN but operating directly on TYPE,
1851 if available. If IMPLICIT is true use the implicit builtin declaration,
1852 otherwise use the explicit declaration. If we can't do the conversion,
1853 return zero. */
c319d56a 1854
1855static tree
b9a16870 1856mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
0a68165a 1857{
b9a16870 1858 enum built_in_function fcode, fcodef, fcodel, fcode2;
07976da7 1859
1860 switch (fn)
1861 {
746114e8 1862 CASE_MATHFN (BUILT_IN_ACOS)
1863 CASE_MATHFN (BUILT_IN_ACOSH)
1864 CASE_MATHFN (BUILT_IN_ASIN)
1865 CASE_MATHFN (BUILT_IN_ASINH)
07976da7 1866 CASE_MATHFN (BUILT_IN_ATAN)
746114e8 1867 CASE_MATHFN (BUILT_IN_ATAN2)
1868 CASE_MATHFN (BUILT_IN_ATANH)
1869 CASE_MATHFN (BUILT_IN_CBRT)
07976da7 1870 CASE_MATHFN (BUILT_IN_CEIL)
d735c391 1871 CASE_MATHFN (BUILT_IN_CEXPI)
746114e8 1872 CASE_MATHFN (BUILT_IN_COPYSIGN)
07976da7 1873 CASE_MATHFN (BUILT_IN_COS)
746114e8 1874 CASE_MATHFN (BUILT_IN_COSH)
1875 CASE_MATHFN (BUILT_IN_DREM)
1876 CASE_MATHFN (BUILT_IN_ERF)
1877 CASE_MATHFN (BUILT_IN_ERFC)
07976da7 1878 CASE_MATHFN (BUILT_IN_EXP)
746114e8 1879 CASE_MATHFN (BUILT_IN_EXP10)
1880 CASE_MATHFN (BUILT_IN_EXP2)
1881 CASE_MATHFN (BUILT_IN_EXPM1)
1882 CASE_MATHFN (BUILT_IN_FABS)
1883 CASE_MATHFN (BUILT_IN_FDIM)
07976da7 1884 CASE_MATHFN (BUILT_IN_FLOOR)
746114e8 1885 CASE_MATHFN (BUILT_IN_FMA)
1886 CASE_MATHFN (BUILT_IN_FMAX)
1887 CASE_MATHFN (BUILT_IN_FMIN)
1888 CASE_MATHFN (BUILT_IN_FMOD)
1889 CASE_MATHFN (BUILT_IN_FREXP)
1890 CASE_MATHFN (BUILT_IN_GAMMA)
cd2656b0 1891 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
746114e8 1892 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1893 CASE_MATHFN (BUILT_IN_HYPOT)
1894 CASE_MATHFN (BUILT_IN_ILOGB)
80ff6494 1895 CASE_MATHFN (BUILT_IN_ICEIL)
1896 CASE_MATHFN (BUILT_IN_IFLOOR)
746114e8 1897 CASE_MATHFN (BUILT_IN_INF)
80ff6494 1898 CASE_MATHFN (BUILT_IN_IRINT)
1899 CASE_MATHFN (BUILT_IN_IROUND)
69b779ea 1900 CASE_MATHFN (BUILT_IN_ISINF)
746114e8 1901 CASE_MATHFN (BUILT_IN_J0)
1902 CASE_MATHFN (BUILT_IN_J1)
1903 CASE_MATHFN (BUILT_IN_JN)
ac148751 1904 CASE_MATHFN (BUILT_IN_LCEIL)
746114e8 1905 CASE_MATHFN (BUILT_IN_LDEXP)
ad52b9b7 1906 CASE_MATHFN (BUILT_IN_LFLOOR)
746114e8 1907 CASE_MATHFN (BUILT_IN_LGAMMA)
cd2656b0 1908 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
ac148751 1909 CASE_MATHFN (BUILT_IN_LLCEIL)
ad52b9b7 1910 CASE_MATHFN (BUILT_IN_LLFLOOR)
746114e8 1911 CASE_MATHFN (BUILT_IN_LLRINT)
1912 CASE_MATHFN (BUILT_IN_LLROUND)
07976da7 1913 CASE_MATHFN (BUILT_IN_LOG)
746114e8 1914 CASE_MATHFN (BUILT_IN_LOG10)
1915 CASE_MATHFN (BUILT_IN_LOG1P)
1916 CASE_MATHFN (BUILT_IN_LOG2)
1917 CASE_MATHFN (BUILT_IN_LOGB)
1918 CASE_MATHFN (BUILT_IN_LRINT)
1919 CASE_MATHFN (BUILT_IN_LROUND)
1920 CASE_MATHFN (BUILT_IN_MODF)
1921 CASE_MATHFN (BUILT_IN_NAN)
1922 CASE_MATHFN (BUILT_IN_NANS)
07976da7 1923 CASE_MATHFN (BUILT_IN_NEARBYINT)
746114e8 1924 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1925 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1926 CASE_MATHFN (BUILT_IN_POW)
757c219d 1927 CASE_MATHFN (BUILT_IN_POWI)
746114e8 1928 CASE_MATHFN (BUILT_IN_POW10)
1929 CASE_MATHFN (BUILT_IN_REMAINDER)
1930 CASE_MATHFN (BUILT_IN_REMQUO)
1931 CASE_MATHFN (BUILT_IN_RINT)
07976da7 1932 CASE_MATHFN (BUILT_IN_ROUND)
746114e8 1933 CASE_MATHFN (BUILT_IN_SCALB)
1934 CASE_MATHFN (BUILT_IN_SCALBLN)
1935 CASE_MATHFN (BUILT_IN_SCALBN)
c319d56a 1936 CASE_MATHFN (BUILT_IN_SIGNBIT)
746114e8 1937 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
07976da7 1938 CASE_MATHFN (BUILT_IN_SIN)
746114e8 1939 CASE_MATHFN (BUILT_IN_SINCOS)
1940 CASE_MATHFN (BUILT_IN_SINH)
07976da7 1941 CASE_MATHFN (BUILT_IN_SQRT)
1942 CASE_MATHFN (BUILT_IN_TAN)
746114e8 1943 CASE_MATHFN (BUILT_IN_TANH)
1944 CASE_MATHFN (BUILT_IN_TGAMMA)
07976da7 1945 CASE_MATHFN (BUILT_IN_TRUNC)
746114e8 1946 CASE_MATHFN (BUILT_IN_Y0)
1947 CASE_MATHFN (BUILT_IN_Y1)
1948 CASE_MATHFN (BUILT_IN_YN)
07976da7 1949
0a68165a 1950 default:
c2f47e15 1951 return NULL_TREE;
0a68165a 1952 }
07976da7 1953
96b9f485 1954 if (TYPE_MAIN_VARIANT (type) == double_type_node)
b9a16870 1955 fcode2 = fcode;
96b9f485 1956 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
b9a16870 1957 fcode2 = fcodef;
96b9f485 1958 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
b9a16870 1959 fcode2 = fcodel;
07976da7 1960 else
c2f47e15 1961 return NULL_TREE;
b9a16870 1962
1963 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1964 return NULL_TREE;
1965
1966 return builtin_decl_explicit (fcode2);
0a68165a 1967}
1968
c319d56a 1969/* Like mathfn_built_in_1(), but always use the implicit builtin declarations.  */
1970
1971tree
1972mathfn_built_in (tree type, enum built_in_function fn)
1973{
1974 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1975}
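/* Illustrative example (typical use, hedged): mathfn_built_in
   (float_type_node, BUILT_IN_SQRT) yields the decl for sqrtf, and
   long_double_type_node yields sqrtl, provided the corresponding implicit
   builtin declarations are available.  */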
1976
0fd605a5 1977/* If errno must be maintained, expand the RTL to check if the result,
1978 TARGET, of a built-in function call, EXP, is NaN, and if so set
1979 errno to EDOM. */
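/* Roughly, the emitted check behaves like the following illustrative
   pseudo-C (a sketch only; the code below works on RTL):

       if (target == target)     -- true unless TARGET is a NaN
         goto done;
       errno = EDOM;             -- or, re-issue the library call
     done:;

   The branch to DONE is taken in the common non-NaN case.  */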
1980
1981static void
aecda0d6 1982expand_errno_check (tree exp, rtx target)
0fd605a5 1983{
1e0c0b35 1984 rtx_code_label *lab = gen_label_rtx ();
0fd605a5 1985
7f05340e 1986 /* Test the result; if it is NaN, set errno=EDOM because
1987 the argument was not in the domain. */
3fcf767f 1988 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
f9a00e9e 1989 NULL_RTX, NULL, lab,
79ab74cc 1990 /* The jump is very likely. */
1991 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
0fd605a5 1992
1993#ifdef TARGET_EDOM
7f05340e 1994 /* If this built-in doesn't throw an exception, set errno directly. */
c2f47e15 1995 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7f05340e 1996 {
0fd605a5 1997#ifdef GEN_ERRNO_RTX
7f05340e 1998 rtx errno_rtx = GEN_ERRNO_RTX;
0fd605a5 1999#else
7f05340e 2000 rtx errno_rtx
0fd605a5 2001 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2002#endif
d11aedc7 2003 emit_move_insn (errno_rtx,
2004 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
0fd605a5 2005 emit_label (lab);
7f05340e 2006 return;
0fd605a5 2007 }
7f05340e 2008#endif
2009
08491912 2010 /* Make sure the library call isn't expanded as a tail call. */
2011 CALL_EXPR_TAILCALL (exp) = 0;
2012
7f05340e 2013 /* We can't set errno=EDOM directly; let the library call do it.
2014 Pop the arguments right away in case the call gets deleted. */
2015 NO_DEFER_POP;
2016 expand_call (exp, target, 0);
2017 OK_DEFER_POP;
2018 emit_label (lab);
0fd605a5 2019}
2020
6b43bae4 2021/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
c2f47e15 2022 Return NULL_RTX if a normal call should be emitted rather than expanding
2023 the function in-line. EXP is the expression that is a call to the builtin
53800dbe 2024 function; if convenient, the result should be placed in TARGET.
2025 SUBTARGET may be used as the target for computing one of EXP's operands. */
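/* An illustrative sketch of the strategy (assumptions, not guarantees): for

       double y = __builtin_sqrt (x);

   compiled with -fno-math-errno, the code below expands the call through
   sqrt_optab to the target's sqrt pattern; with errno handling enabled it
   additionally runs the result through expand_errno_check above, and when
   no suitable pattern exists it simply emits an ordinary library call.  */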
27d0c333 2026
53800dbe 2027static rtx
aecda0d6 2028expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
53800dbe 2029{
bf8e3599 2030 optab builtin_optab;
1e0c0b35 2031 rtx op0;
2032 rtx_insn *insns;
c6e6ecb1 2033 tree fndecl = get_callee_fndecl (exp);
3754d046 2034 machine_mode mode;
528ee710 2035 bool errno_set = false;
d6a0a4b0 2036 bool try_widening = false;
abfea505 2037 tree arg;
53800dbe 2038
c2f47e15 2039 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2040 return NULL_RTX;
53800dbe 2041
c2f47e15 2042 arg = CALL_EXPR_ARG (exp, 0);
53800dbe 2043
2044 switch (DECL_FUNCTION_CODE (fndecl))
2045 {
4f35b1fc 2046 CASE_FLT_FN (BUILT_IN_SQRT):
7f05340e 2047 errno_set = ! tree_expr_nonnegative_p (arg);
d6a0a4b0 2048 try_widening = true;
7f05340e 2049 builtin_optab = sqrt_optab;
2050 break;
4f35b1fc 2051 CASE_FLT_FN (BUILT_IN_EXP):
528ee710 2052 errno_set = true; builtin_optab = exp_optab; break;
4f35b1fc 2053 CASE_FLT_FN (BUILT_IN_EXP10):
2054 CASE_FLT_FN (BUILT_IN_POW10):
750ef9f5 2055 errno_set = true; builtin_optab = exp10_optab; break;
4f35b1fc 2056 CASE_FLT_FN (BUILT_IN_EXP2):
750ef9f5 2057 errno_set = true; builtin_optab = exp2_optab; break;
4f35b1fc 2058 CASE_FLT_FN (BUILT_IN_EXPM1):
a6b4eed2 2059 errno_set = true; builtin_optab = expm1_optab; break;
4f35b1fc 2060 CASE_FLT_FN (BUILT_IN_LOGB):
4efbc641 2061 errno_set = true; builtin_optab = logb_optab; break;
4f35b1fc 2062 CASE_FLT_FN (BUILT_IN_LOG):
528ee710 2063 errno_set = true; builtin_optab = log_optab; break;
4f35b1fc 2064 CASE_FLT_FN (BUILT_IN_LOG10):
d3cd9bde 2065 errno_set = true; builtin_optab = log10_optab; break;
4f35b1fc 2066 CASE_FLT_FN (BUILT_IN_LOG2):
d3cd9bde 2067 errno_set = true; builtin_optab = log2_optab; break;
4f35b1fc 2068 CASE_FLT_FN (BUILT_IN_LOG1P):
f474cd93 2069 errno_set = true; builtin_optab = log1p_optab; break;
4f35b1fc 2070 CASE_FLT_FN (BUILT_IN_ASIN):
8de2f465 2071 builtin_optab = asin_optab; break;
4f35b1fc 2072 CASE_FLT_FN (BUILT_IN_ACOS):
8de2f465 2073 builtin_optab = acos_optab; break;
4f35b1fc 2074 CASE_FLT_FN (BUILT_IN_TAN):
528ee710 2075 builtin_optab = tan_optab; break;
4f35b1fc 2076 CASE_FLT_FN (BUILT_IN_ATAN):
528ee710 2077 builtin_optab = atan_optab; break;
4f35b1fc 2078 CASE_FLT_FN (BUILT_IN_FLOOR):
528ee710 2079 builtin_optab = floor_optab; break;
4f35b1fc 2080 CASE_FLT_FN (BUILT_IN_CEIL):
528ee710 2081 builtin_optab = ceil_optab; break;
4f35b1fc 2082 CASE_FLT_FN (BUILT_IN_TRUNC):
a7cc195f 2083 builtin_optab = btrunc_optab; break;
4f35b1fc 2084 CASE_FLT_FN (BUILT_IN_ROUND):
528ee710 2085 builtin_optab = round_optab; break;
4f35b1fc 2086 CASE_FLT_FN (BUILT_IN_NEARBYINT):
0ddf4ad9 2087 builtin_optab = nearbyint_optab;
2088 if (flag_trapping_math)
2089 break;
2090 /* Else fallthrough and expand as rint. */
4f35b1fc 2091 CASE_FLT_FN (BUILT_IN_RINT):
aef94a0f 2092 builtin_optab = rint_optab; break;
b3154a1f 2093 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2094 builtin_optab = significand_optab; break;
42721db0 2095 default:
64db345d 2096 gcc_unreachable ();
53800dbe 2097 }
2098
7f05340e 2099 /* Make a suitable register to place result in. */
2100 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2101
7f05340e 2102 if (! flag_errno_math || ! HONOR_NANS (mode))
2103 errno_set = false;
2104
d6a0a4b0 2105 /* Before working hard, check whether the instruction is available, but try
2106 to widen the mode for specific operations. */
2107 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2108 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
f2aca212 2109 && (!errno_set || !optimize_insn_for_size_p ()))
68e6cb9d 2110 {
de2e453e 2111 rtx result = gen_reg_rtx (mode);
7f05340e 2112
bd421108 2113 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2114 need to expand the argument again. This way, we will not perform
 2115	 side-effects more than once.  */
abfea505 2116 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7f05340e 2117
1db6d067 2118 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7f05340e 2119
bd421108 2120 start_sequence ();
53800dbe 2121
de2e453e 2122 /* Compute into RESULT.
2123 Set RESULT to wherever the result comes back. */
2124 result = expand_unop (mode, builtin_optab, op0, result, 0);
bd421108 2125
de2e453e 2126 if (result != 0)
bd421108 2127 {
2128 if (errno_set)
de2e453e 2129 expand_errno_check (exp, result);
bd421108 2130
2131 /* Output the entire sequence. */
2132 insns = get_insns ();
2133 end_sequence ();
2134 emit_insn (insns);
de2e453e 2135 return result;
bd421108 2136 }
2137
2138 /* If we were unable to expand via the builtin, stop the sequence
2139 (without outputting the insns) and call to the library function
2140 with the stabilized argument list. */
53800dbe 2141 end_sequence ();
53800dbe 2142 }
2143
1e5b92fa 2144 return expand_call (exp, target, target == const0_rtx);
0fd605a5 2145}
2146
2147/* Expand a call to the builtin binary math functions (pow and atan2).
c2f47e15 2148 Return NULL_RTX if a normal call should be emitted rather than expanding the
0fd605a5 2149 function in-line. EXP is the expression that is a call to the builtin
2150 function; if convenient, the result should be placed in TARGET.
2151 SUBTARGET may be used as the target for computing one of EXP's
2152 operands. */
2153
2154static rtx
aecda0d6 2155expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
0fd605a5 2156{
2157 optab builtin_optab;
1e0c0b35 2158 rtx op0, op1, result;
2159 rtx_insn *insns;
4737caf2 2160 int op1_type = REAL_TYPE;
c6e6ecb1 2161 tree fndecl = get_callee_fndecl (exp);
abfea505 2162 tree arg0, arg1;
3754d046 2163 machine_mode mode;
0fd605a5 2164 bool errno_set = true;
0fd605a5 2165
73a954a1 2166 switch (DECL_FUNCTION_CODE (fndecl))
2167 {
2168 CASE_FLT_FN (BUILT_IN_SCALBN):
2169 CASE_FLT_FN (BUILT_IN_SCALBLN):
2170 CASE_FLT_FN (BUILT_IN_LDEXP):
2171 op1_type = INTEGER_TYPE;
2172 default:
2173 break;
2174 }
4737caf2 2175
c2f47e15 2176 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2177 return NULL_RTX;
0fd605a5 2178
c2f47e15 2179 arg0 = CALL_EXPR_ARG (exp, 0);
2180 arg1 = CALL_EXPR_ARG (exp, 1);
0fd605a5 2181
0fd605a5 2182 switch (DECL_FUNCTION_CODE (fndecl))
2183 {
4f35b1fc 2184 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 2185 builtin_optab = pow_optab; break;
4f35b1fc 2186 CASE_FLT_FN (BUILT_IN_ATAN2):
0fd605a5 2187 builtin_optab = atan2_optab; break;
73a954a1 2188 CASE_FLT_FN (BUILT_IN_SCALB):
2189 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2190 return 0;
2191 builtin_optab = scalb_optab; break;
2192 CASE_FLT_FN (BUILT_IN_SCALBN):
2193 CASE_FLT_FN (BUILT_IN_SCALBLN):
2194 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2195 return 0;
2196 /* Fall through... */
4f35b1fc 2197 CASE_FLT_FN (BUILT_IN_LDEXP):
4737caf2 2198 builtin_optab = ldexp_optab; break;
4f35b1fc 2199 CASE_FLT_FN (BUILT_IN_FMOD):
80ed5c06 2200 builtin_optab = fmod_optab; break;
ef722005 2201 CASE_FLT_FN (BUILT_IN_REMAINDER):
4f35b1fc 2202 CASE_FLT_FN (BUILT_IN_DREM):
ef722005 2203 builtin_optab = remainder_optab; break;
0fd605a5 2204 default:
64db345d 2205 gcc_unreachable ();
0fd605a5 2206 }
2207
7f05340e 2208 /* Make a suitable register to place result in. */
2209 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2210
2211 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2212 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
c2f47e15 2213 return NULL_RTX;
fc4eef90 2214
de2e453e 2215 result = gen_reg_rtx (mode);
7f05340e 2216
2217 if (! flag_errno_math || ! HONOR_NANS (mode))
2218 errno_set = false;
2219
f2aca212 2220 if (errno_set && optimize_insn_for_size_p ())
2221 return 0;
2222
4ee9c684 2223 /* Always stabilize the argument list. */
abfea505 2224 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2225 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
7f05340e 2226
8ec3c5c2 2227 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2228 op1 = expand_normal (arg1);
7f05340e 2229
7f05340e 2230 start_sequence ();
2231
de2e453e 2232 /* Compute into RESULT.
2233 Set RESULT to wherever the result comes back. */
2234 result = expand_binop (mode, builtin_optab, op0, op1,
2235 result, 0, OPTAB_DIRECT);
53800dbe 2236
68e6cb9d 2237 /* If we were unable to expand via the builtin, stop the sequence
2238 (without outputting the insns) and call to the library function
2239 with the stabilized argument list. */
de2e453e 2240 if (result == 0)
0fd605a5 2241 {
2242 end_sequence ();
68e6cb9d 2243 return expand_call (exp, target, target == const0_rtx);
53800dbe 2244 }
2245
a4356fb9 2246 if (errno_set)
de2e453e 2247 expand_errno_check (exp, result);
0fd605a5 2248
53800dbe 2249 /* Output the entire sequence. */
2250 insns = get_insns ();
2251 end_sequence ();
31d3e01c 2252 emit_insn (insns);
bf8e3599 2253
de2e453e 2254 return result;
53800dbe 2255}
2256
7e0713b1 2257/* Expand a call to the builtin trinary math functions (fma).
2258 Return NULL_RTX if a normal call should be emitted rather than expanding the
2259 function in-line. EXP is the expression that is a call to the builtin
2260 function; if convenient, the result should be placed in TARGET.
2261 SUBTARGET may be used as the target for computing one of EXP's
2262 operands. */
2263
2264static rtx
2265expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2266{
2267 optab builtin_optab;
1e0c0b35 2268 rtx op0, op1, op2, result;
2269 rtx_insn *insns;
7e0713b1 2270 tree fndecl = get_callee_fndecl (exp);
2271 tree arg0, arg1, arg2;
3754d046 2272 machine_mode mode;
7e0713b1 2273
2274 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2275 return NULL_RTX;
2276
2277 arg0 = CALL_EXPR_ARG (exp, 0);
2278 arg1 = CALL_EXPR_ARG (exp, 1);
2279 arg2 = CALL_EXPR_ARG (exp, 2);
2280
2281 switch (DECL_FUNCTION_CODE (fndecl))
2282 {
2283 CASE_FLT_FN (BUILT_IN_FMA):
2284 builtin_optab = fma_optab; break;
2285 default:
2286 gcc_unreachable ();
2287 }
2288
2289 /* Make a suitable register to place result in. */
2290 mode = TYPE_MODE (TREE_TYPE (exp));
2291
2292 /* Before working hard, check whether the instruction is available. */
2293 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2294 return NULL_RTX;
2295
de2e453e 2296 result = gen_reg_rtx (mode);
7e0713b1 2297
2298 /* Always stabilize the argument list. */
2299 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2300 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2301 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2302
2303 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2304 op1 = expand_normal (arg1);
2305 op2 = expand_normal (arg2);
2306
2307 start_sequence ();
2308
de2e453e 2309 /* Compute into RESULT.
2310 Set RESULT to wherever the result comes back. */
2311 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2312 result, 0);
7e0713b1 2313
2314 /* If we were unable to expand via the builtin, stop the sequence
2315 (without outputting the insns) and call to the library function
2316 with the stabilized argument list. */
de2e453e 2317 if (result == 0)
7e0713b1 2318 {
2319 end_sequence ();
2320 return expand_call (exp, target, target == const0_rtx);
2321 }
2322
2323 /* Output the entire sequence. */
2324 insns = get_insns ();
2325 end_sequence ();
2326 emit_insn (insns);
2327
de2e453e 2328 return result;
7e0713b1 2329}
2330
6b43bae4 2331/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2332 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2333 function in-line. EXP is the expression that is a call to the builtin
2334 function; if convenient, the result should be placed in TARGET.
2335 SUBTARGET may be used as the target for computing one of EXP's
2336 operands. */
2337
2338static rtx
2339expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2340{
2341 optab builtin_optab;
1e0c0b35 2342 rtx op0;
2343 rtx_insn *insns;
6b43bae4 2344 tree fndecl = get_callee_fndecl (exp);
3754d046 2345 machine_mode mode;
abfea505 2346 tree arg;
6b43bae4 2347
c2f47e15 2348 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2349 return NULL_RTX;
6b43bae4 2350
c2f47e15 2351 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2352
2353 switch (DECL_FUNCTION_CODE (fndecl))
2354 {
4f35b1fc 2355 CASE_FLT_FN (BUILT_IN_SIN):
2356 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2357 builtin_optab = sincos_optab; break;
2358 default:
64db345d 2359 gcc_unreachable ();
6b43bae4 2360 }
2361
2362 /* Make a suitable register to place result in. */
2363 mode = TYPE_MODE (TREE_TYPE (exp));
2364
6b43bae4 2365   /* Check if sincos insn is available, otherwise fall back
0bed3869 2366 to sin or cos insn. */
d6bf3b14 2367 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2368 switch (DECL_FUNCTION_CODE (fndecl))
2369 {
4f35b1fc 2370 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2371 builtin_optab = sin_optab; break;
4f35b1fc 2372 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2373 builtin_optab = cos_optab; break;
2374 default:
64db345d 2375 gcc_unreachable ();
6b43bae4 2376 }
6b43bae4 2377
2378 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2379 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2380 {
de2e453e 2381 rtx result = gen_reg_rtx (mode);
6b43bae4 2382
2383 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2384 need to expand the argument again. This way, we will not perform
 2385	 side-effects more than once.  */
abfea505 2386 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2387
1db6d067 2388 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2389
6b43bae4 2390 start_sequence ();
2391
de2e453e 2392 /* Compute into RESULT.
2393 Set RESULT to wherever the result comes back. */
6b43bae4 2394 if (builtin_optab == sincos_optab)
2395 {
de2e453e 2396 int ok;
7d3f6cc7 2397
6b43bae4 2398 switch (DECL_FUNCTION_CODE (fndecl))
2399 {
4f35b1fc 2400 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2401 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2402 break;
4f35b1fc 2403 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2404 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2405 break;
2406 default:
64db345d 2407 gcc_unreachable ();
6b43bae4 2408 }
de2e453e 2409 gcc_assert (ok);
6b43bae4 2410 }
2411 else
de2e453e 2412 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2413
de2e453e 2414 if (result != 0)
6b43bae4 2415 {
6b43bae4 2416 /* Output the entire sequence. */
2417 insns = get_insns ();
2418 end_sequence ();
2419 emit_insn (insns);
de2e453e 2420 return result;
6b43bae4 2421 }
2422
2423 /* If we were unable to expand via the builtin, stop the sequence
2424 (without outputting the insns) and call to the library function
2425 with the stabilized argument list. */
2426 end_sequence ();
2427 }
2428
de2e453e 2429 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2430}
2431
a65c4d64 2432/* Given an interclass math builtin decl FNDECL and its argument ARG
2433 return an RTL instruction code that implements the functionality.
2434 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2435
a65c4d64 2436static enum insn_code
2437interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2438{
a65c4d64 2439 bool errno_set = false;
6cdd383a 2440 optab builtin_optab = unknown_optab;
3754d046 2441 machine_mode mode;
a67a90e5 2442
2443 switch (DECL_FUNCTION_CODE (fndecl))
2444 {
2445 CASE_FLT_FN (BUILT_IN_ILOGB):
2446 errno_set = true; builtin_optab = ilogb_optab; break;
69b779ea 2447 CASE_FLT_FN (BUILT_IN_ISINF):
2448 builtin_optab = isinf_optab; break;
8a1a9cb7 2449 case BUILT_IN_ISNORMAL:
cde061c1 2450 case BUILT_IN_ISFINITE:
2451 CASE_FLT_FN (BUILT_IN_FINITE):
a65c4d64 2452 case BUILT_IN_FINITED32:
2453 case BUILT_IN_FINITED64:
2454 case BUILT_IN_FINITED128:
2455 case BUILT_IN_ISINFD32:
2456 case BUILT_IN_ISINFD64:
2457 case BUILT_IN_ISINFD128:
cde061c1 2458 /* These builtins have no optabs (yet). */
2459 break;
a67a90e5 2460 default:
2461 gcc_unreachable ();
2462 }
2463
2464 /* There's no easy way to detect the case we need to set EDOM. */
2465 if (flag_errno_math && errno_set)
a65c4d64 2466 return CODE_FOR_nothing;
a67a90e5 2467
2468 /* Optab mode depends on the mode of the input argument. */
2469 mode = TYPE_MODE (TREE_TYPE (arg));
2470
cde061c1 2471 if (builtin_optab)
d6bf3b14 2472 return optab_handler (builtin_optab, mode);
a65c4d64 2473 return CODE_FOR_nothing;
2474}
2475
2476/* Expand a call to one of the builtin math functions that operate on
2477 floating point argument and output an integer result (ilogb, isinf,
2478 isnan, etc).
2479 Return 0 if a normal call should be emitted rather than expanding the
2480 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2481 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2482
2483static rtx
f97eea22 2484expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2485{
2486 enum insn_code icode = CODE_FOR_nothing;
2487 rtx op0;
2488 tree fndecl = get_callee_fndecl (exp);
3754d046 2489 machine_mode mode;
a65c4d64 2490 tree arg;
2491
2492 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2493 return NULL_RTX;
2494
2495 arg = CALL_EXPR_ARG (exp, 0);
2496 icode = interclass_mathfn_icode (arg, fndecl);
2497 mode = TYPE_MODE (TREE_TYPE (arg));
2498
a67a90e5 2499 if (icode != CODE_FOR_nothing)
2500 {
8786db1e 2501 struct expand_operand ops[1];
1e0c0b35 2502 rtx_insn *last = get_last_insn ();
4e2a2fb4 2503 tree orig_arg = arg;
a67a90e5 2504
2505 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2506 need to expand the argument again. This way, we will not perform
 2507	 side-effects more than once.  */
abfea505 2508 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2509
f97eea22 2510 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2511
2512 if (mode != GET_MODE (op0))
2513 op0 = convert_to_mode (mode, op0, 0);
2514
8786db1e 2515 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2516 if (maybe_legitimize_operands (icode, 0, 1, ops)
2517 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2518 return ops[0].value;
2519
4e2a2fb4 2520 delete_insns_since (last);
2521 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2522 }
2523
a65c4d64 2524 return NULL_RTX;
a67a90e5 2525}
2526
c3147c1a 2527/* Expand a call to the builtin sincos math function.
c2f47e15 2528 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2529 function in-line. EXP is the expression that is a call to the builtin
2530 function. */
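/* Illustrative use (hedged): on a target providing a sincos<mode> pattern,
   a source-level call such as

       sincos (x, &s, &c);

   is expanded here into one operation that stores both results through the
   two pointers, rather than separate sin and cos library calls.  */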
2531
2532static rtx
2533expand_builtin_sincos (tree exp)
2534{
2535 rtx op0, op1, op2, target1, target2;
3754d046 2536 machine_mode mode;
c3147c1a 2537 tree arg, sinp, cosp;
2538 int result;
389dd41b 2539 location_t loc = EXPR_LOCATION (exp);
be5575b2 2540 tree alias_type, alias_off;
c3147c1a 2541
c2f47e15 2542 if (!validate_arglist (exp, REAL_TYPE,
2543 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2544 return NULL_RTX;
c3147c1a 2545
c2f47e15 2546 arg = CALL_EXPR_ARG (exp, 0);
2547 sinp = CALL_EXPR_ARG (exp, 1);
2548 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2549
2550 /* Make a suitable register to place result in. */
2551 mode = TYPE_MODE (TREE_TYPE (arg));
2552
2553 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2554 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2555 return NULL_RTX;
2556
2557 target1 = gen_reg_rtx (mode);
2558 target2 = gen_reg_rtx (mode);
2559
8ec3c5c2 2560 op0 = expand_normal (arg);
be5575b2 2561 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2562 alias_off = build_int_cst (alias_type, 0);
2563 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2564 sinp, alias_off));
2565 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2566 cosp, alias_off));
c3147c1a 2567
2568 /* Compute into target1 and target2.
2569 Set TARGET to wherever the result comes back. */
2570 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2571 gcc_assert (result);
2572
2573 /* Move target1 and target2 to the memory locations indicated
2574 by op1 and op2. */
2575 emit_move_insn (op1, target1);
2576 emit_move_insn (op2, target2);
2577
2578 return const0_rtx;
2579}
2580
d735c391 2581/* Expand a call to the internal cexpi builtin to the sincos math function.
2582 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2583 the result should be placed in TARGET. */
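/* Background note (illustrative): __builtin_cexpi (x) represents
   cos (x) + i * sin (x), i.e. cexp (i * x).  The expansion below tries a
   sincos optab first, then a libc sincos call, and finally rewrites the
   call as cexp (0.0 + x * i) when only cexp is available.  */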
d735c391 2584
2585static rtx
f97eea22 2586expand_builtin_cexpi (tree exp, rtx target)
d735c391 2587{
2588 tree fndecl = get_callee_fndecl (exp);
d735c391 2589 tree arg, type;
3754d046 2590 machine_mode mode;
d735c391 2591 rtx op0, op1, op2;
389dd41b 2592 location_t loc = EXPR_LOCATION (exp);
d735c391 2593
c2f47e15 2594 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2595 return NULL_RTX;
d735c391 2596
c2f47e15 2597 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2598 type = TREE_TYPE (arg);
2599 mode = TYPE_MODE (TREE_TYPE (arg));
2600
2601 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2602 to sincos or cexp. We are sure we have sincos or cexp because cexpi
 2603    is only generated from sincos or cexp, or if we have either of them.  */
d6bf3b14 2604 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2605 {
2606 op1 = gen_reg_rtx (mode);
2607 op2 = gen_reg_rtx (mode);
2608
f97eea22 2609 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2610
2611 /* Compute into op1 and op2. */
2612 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2613 }
30f690e0 2614 else if (targetm.libc_has_function (function_sincos))
d735c391 2615 {
c2f47e15 2616 tree call, fn = NULL_TREE;
d735c391 2617 tree top1, top2;
2618 rtx op1a, op2a;
2619
2620 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2621 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2622 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2623 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2624 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2625 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2626 else
2627 gcc_unreachable ();
48e1416a 2628
0ab48139 2629 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2630 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2631 op1a = copy_addr_to_reg (XEXP (op1, 0));
2632 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2633 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2634 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2635
d735c391 2636 /* Make sure not to fold the sincos call again. */
2637 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2638 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2639 call, 3, arg, top1, top2));
d735c391 2640 }
18b8d8ae 2641 else
2642 {
0ecbc158 2643 tree call, fn = NULL_TREE, narg;
18b8d8ae 2644 tree ctype = build_complex_type (type);
2645
0ecbc158 2646 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2647 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2648 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2649 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2650 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2651 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2652 else
2653 gcc_unreachable ();
fc0dfa6e 2654
2655 /* If we don't have a decl for cexp create one. This is the
2656 friendliest fallback if the user calls __builtin_cexpi
2657 without full target C99 function support. */
2658 if (fn == NULL_TREE)
2659 {
2660 tree fntype;
2661 const char *name = NULL;
2662
2663 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2664 name = "cexpf";
2665 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2666 name = "cexp";
2667 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2668 name = "cexpl";
2669
2670 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2671 fn = build_fn_decl (name, fntype);
2672 }
2673
389dd41b 2674 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2675 build_real (type, dconst0), arg);
2676
2677 /* Make sure not to fold the cexp call again. */
2678 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2679 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2680 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2681 }
d735c391 2682
2683 /* Now build the proper return type. */
2684 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2685 make_tree (TREE_TYPE (arg), op2),
2686 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2687 target, VOIDmode, EXPAND_NORMAL);
d735c391 2688}
2689
a65c4d64 2690/* Conveniently construct a function call expression. FNDECL names the
2691 function to be called, N is the number of arguments, and the "..."
 2692   parameters are the argument expressions.  Unlike build_call_expr
2693 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2694
2695static tree
2696build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2697{
2698 va_list ap;
2699 tree fntype = TREE_TYPE (fndecl);
2700 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2701
2702 va_start (ap, n);
2703 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2704 va_end (ap);
2705 SET_EXPR_LOCATION (fn, loc);
2706 return fn;
2707}
a65c4d64 2708
7d3afc77 2709/* Expand a call to one of the builtin rounding functions gcc defines
2710 as an extension (lfloor and lceil). As these are gcc extensions we
2711 do not need to worry about setting errno to EDOM.
ad52b9b7 2712 If expanding via optab fails, lower expression to (int)(floor(x)).
2713 EXP is the expression that is a call to the builtin function;
ff1b14e4 2714 if convenient, the result should be placed in TARGET. */
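/* Illustrative fallback (an example, not a guarantee): on a target without
   an lfloor<mode> pattern,

       long l = __builtin_lfloor (x);

   is expanded as a call to floor followed by a float-to-integer conversion,
   i.e. roughly (long) floor (x), as arranged below.  */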
ad52b9b7 2715
2716static rtx
ff1b14e4 2717expand_builtin_int_roundingfn (tree exp, rtx target)
ad52b9b7 2718{
9c42dd28 2719 convert_optab builtin_optab;
1e0c0b35 2720 rtx op0, tmp;
2721 rtx_insn *insns;
ad52b9b7 2722 tree fndecl = get_callee_fndecl (exp);
ad52b9b7 2723 enum built_in_function fallback_fn;
2724 tree fallback_fndecl;
3754d046 2725 machine_mode mode;
4de0924f 2726 tree arg;
ad52b9b7 2727
c2f47e15 2728 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
ad52b9b7 2729 gcc_unreachable ();
2730
c2f47e15 2731 arg = CALL_EXPR_ARG (exp, 0);
ad52b9b7 2732
2733 switch (DECL_FUNCTION_CODE (fndecl))
2734 {
80ff6494 2735 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 2736 CASE_FLT_FN (BUILT_IN_LCEIL):
2737 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 2738 builtin_optab = lceil_optab;
2739 fallback_fn = BUILT_IN_CEIL;
2740 break;
2741
80ff6494 2742 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 2743 CASE_FLT_FN (BUILT_IN_LFLOOR):
2744 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 2745 builtin_optab = lfloor_optab;
2746 fallback_fn = BUILT_IN_FLOOR;
2747 break;
2748
2749 default:
2750 gcc_unreachable ();
2751 }
2752
2753 /* Make a suitable register to place result in. */
2754 mode = TYPE_MODE (TREE_TYPE (exp));
2755
9c42dd28 2756 target = gen_reg_rtx (mode);
ad52b9b7 2757
9c42dd28 2758 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2759 need to expand the argument again. This way, we will not perform
 2760	 side-effects more than once.  */
abfea505 2761 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
ad52b9b7 2762
ff1b14e4 2763 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
ad52b9b7 2764
9c42dd28 2765 start_sequence ();
ad52b9b7 2766
9c42dd28 2767 /* Compute into TARGET. */
2768 if (expand_sfix_optab (target, op0, builtin_optab))
2769 {
2770 /* Output the entire sequence. */
2771 insns = get_insns ();
ad52b9b7 2772 end_sequence ();
9c42dd28 2773 emit_insn (insns);
2774 return target;
ad52b9b7 2775 }
2776
9c42dd28 2777 /* If we were unable to expand via the builtin, stop the sequence
2778 (without outputting the insns). */
2779 end_sequence ();
2780
ad52b9b7 2781 /* Fall back to floating point rounding optab. */
2782 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
fc0dfa6e 2783
2784 /* For non-C99 targets we may end up without a fallback fndecl here
2785 if the user called __builtin_lfloor directly. In this case emit
2786 a call to the floor/ceil variants nevertheless. This should result
 2787     in the best user experience for targets without full C99 support.  */
2788 if (fallback_fndecl == NULL_TREE)
2789 {
2790 tree fntype;
2791 const char *name = NULL;
2792
2793 switch (DECL_FUNCTION_CODE (fndecl))
2794 {
80ff6494 2795 case BUILT_IN_ICEIL:
fc0dfa6e 2796 case BUILT_IN_LCEIL:
2797 case BUILT_IN_LLCEIL:
2798 name = "ceil";
2799 break;
80ff6494 2800 case BUILT_IN_ICEILF:
fc0dfa6e 2801 case BUILT_IN_LCEILF:
2802 case BUILT_IN_LLCEILF:
2803 name = "ceilf";
2804 break;
80ff6494 2805 case BUILT_IN_ICEILL:
fc0dfa6e 2806 case BUILT_IN_LCEILL:
2807 case BUILT_IN_LLCEILL:
2808 name = "ceill";
2809 break;
80ff6494 2810 case BUILT_IN_IFLOOR:
fc0dfa6e 2811 case BUILT_IN_LFLOOR:
2812 case BUILT_IN_LLFLOOR:
2813 name = "floor";
2814 break;
80ff6494 2815 case BUILT_IN_IFLOORF:
fc0dfa6e 2816 case BUILT_IN_LFLOORF:
2817 case BUILT_IN_LLFLOORF:
2818 name = "floorf";
2819 break;
80ff6494 2820 case BUILT_IN_IFLOORL:
fc0dfa6e 2821 case BUILT_IN_LFLOORL:
2822 case BUILT_IN_LLFLOORL:
2823 name = "floorl";
2824 break;
2825 default:
2826 gcc_unreachable ();
2827 }
2828
2829 fntype = build_function_type_list (TREE_TYPE (arg),
2830 TREE_TYPE (arg), NULL_TREE);
2831 fallback_fndecl = build_fn_decl (name, fntype);
2832 }
2833
0568e9c1 2834 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
ad52b9b7 2835
d4c690af 2836 tmp = expand_normal (exp);
933eb13a 2837 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
ad52b9b7 2838
 2839   /* Truncate the result of the floating point optab to an integer
2840 via expand_fix (). */
2841 target = gen_reg_rtx (mode);
2842 expand_fix (target, tmp, 0);
2843
2844 return target;
2845}
2846
7d3afc77 2847/* Expand a call to one of the builtin math functions doing integer
2848 conversion (lrint).
2849 Return 0 if a normal call should be emitted rather than expanding the
2850 function in-line. EXP is the expression that is a call to the builtin
ff1b14e4 2851 function; if convenient, the result should be placed in TARGET. */
7d3afc77 2852
2853static rtx
ff1b14e4 2854expand_builtin_int_roundingfn_2 (tree exp, rtx target)
7d3afc77 2855{
5f51ee59 2856 convert_optab builtin_optab;
1e0c0b35 2857 rtx op0;
2858 rtx_insn *insns;
7d3afc77 2859 tree fndecl = get_callee_fndecl (exp);
4de0924f 2860 tree arg;
3754d046 2861 machine_mode mode;
e951f9a4 2862 enum built_in_function fallback_fn = BUILT_IN_NONE;
7d3afc77 2863
c2f47e15 2864 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2865 gcc_unreachable ();
48e1416a 2866
c2f47e15 2867 arg = CALL_EXPR_ARG (exp, 0);
7d3afc77 2868
2869 switch (DECL_FUNCTION_CODE (fndecl))
2870 {
80ff6494 2871 CASE_FLT_FN (BUILT_IN_IRINT):
e951f9a4 2872 fallback_fn = BUILT_IN_LRINT;
2873 /* FALLTHRU */
7d3afc77 2874 CASE_FLT_FN (BUILT_IN_LRINT):
2875 CASE_FLT_FN (BUILT_IN_LLRINT):
e951f9a4 2876 builtin_optab = lrint_optab;
2877 break;
80ff6494 2878
2879 CASE_FLT_FN (BUILT_IN_IROUND):
e951f9a4 2880 fallback_fn = BUILT_IN_LROUND;
2881 /* FALLTHRU */
ef2f1a10 2882 CASE_FLT_FN (BUILT_IN_LROUND):
2883 CASE_FLT_FN (BUILT_IN_LLROUND):
e951f9a4 2884 builtin_optab = lround_optab;
2885 break;
80ff6494 2886
7d3afc77 2887 default:
2888 gcc_unreachable ();
2889 }
2890
e951f9a4 2891 /* There's no easy way to detect the case we need to set EDOM. */
2892 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2893 return NULL_RTX;
2894
7d3afc77 2895 /* Make a suitable register to place result in. */
2896 mode = TYPE_MODE (TREE_TYPE (exp));
2897
e951f9a4 2898   /* When errno does not need to be maintained, try expanding inline via the optab.  */
2899 if (!flag_errno_math)
2900 {
de2e453e 2901 rtx result = gen_reg_rtx (mode);
7d3afc77 2902
e951f9a4 2903 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2904 need to expand the argument again. This way, we will not perform
 2905	 side-effects more than once.  */
2906 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7d3afc77 2907
e951f9a4 2908 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
7d3afc77 2909
e951f9a4 2910 start_sequence ();
7d3afc77 2911
de2e453e 2912 if (expand_sfix_optab (result, op0, builtin_optab))
e951f9a4 2913 {
2914 /* Output the entire sequence. */
2915 insns = get_insns ();
2916 end_sequence ();
2917 emit_insn (insns);
de2e453e 2918 return result;
e951f9a4 2919 }
2920
2921 /* If we were unable to expand via the builtin, stop the sequence
2922 (without outputting the insns) and call to the library function
2923 with the stabilized argument list. */
7d3afc77 2924 end_sequence ();
2925 }
2926
e951f9a4 2927 if (fallback_fn != BUILT_IN_NONE)
2928 {
2929 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2930 targets, (int) round (x) should never be transformed into
2931 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2932 a call to lround in the hope that the target provides at least some
2933 C99 functions. This should result in the best user experience for
 2934	 targets without full C99 support.  */
2935 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2936 fallback_fn, 0);
2937
2938 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2939 fallback_fndecl, 1, arg);
2940
2941 target = expand_call (exp, NULL_RTX, target == const0_rtx);
933eb13a 2942 target = maybe_emit_group_store (target, TREE_TYPE (exp));
e951f9a4 2943 return convert_to_mode (mode, target, 0);
2944 }
5f51ee59 2945
de2e453e 2946 return expand_call (exp, target, target == const0_rtx);
7d3afc77 2947}
2948
c2f47e15 2949/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2950 a normal call should be emitted rather than expanding the function
2951 in-line. EXP is the expression that is a call to the builtin
2952 function; if convenient, the result should be placed in TARGET. */
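/* Illustrative note: __builtin_powi (x, n) raises x to the integer power N.
   This expander always emits a libcall through powi_optab's libfunc (for
   example __powidf2 for double, assuming the usual libgcc naming).  */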
2953
2954static rtx
f97eea22 2955expand_builtin_powi (tree exp, rtx target)
757c219d 2956{
757c219d 2957 tree arg0, arg1;
2958 rtx op0, op1;
3754d046 2959 machine_mode mode;
2960 machine_mode mode2;
757c219d 2961
c2f47e15 2962 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2963 return NULL_RTX;
757c219d 2964
c2f47e15 2965 arg0 = CALL_EXPR_ARG (exp, 0);
2966 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2967 mode = TYPE_MODE (TREE_TYPE (exp));
2968
757c219d 2969 /* Emit a libcall to libgcc. */
2970
c2f47e15 2971 /* Mode of the 2nd argument must match that of an int. */
d0405f40 2972 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2973
757c219d 2974 if (target == NULL_RTX)
2975 target = gen_reg_rtx (mode);
2976
f97eea22 2977 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2978 if (GET_MODE (op0) != mode)
2979 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2980 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2981 if (GET_MODE (op1) != mode2)
2982 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2983
f36b9f69 2984 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2dd6f9ed 2985 target, LCT_CONST, mode, 2,
d0405f40 2986 op0, mode, op1, mode2);
757c219d 2987
2988 return target;
2989}
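
/* A user-level sketch (assuming the usual libgcc naming, e.g. __powidf2
   for double; the exact symbol comes from optab_libfunc):

	double d = __builtin_powi (x, n);

   behaves like

	extern double __powidf2 (double, int);
	double d = __powidf2 (x, n);

   with the integer argument first converted to the mode of an int.  */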
2990
48e1416a 2991/* Expand expression EXP, which is a call to the strlen builtin.  Return
c2f47e15 2992   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
aed0bd19 2993 try to get the result in TARGET, if convenient. */
f7c44134 2994
53800dbe 2995static rtx
c2f47e15 2996expand_builtin_strlen (tree exp, rtx target,
3754d046 2997 machine_mode target_mode)
53800dbe 2998{
c2f47e15 2999 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3000 return NULL_RTX;
53800dbe 3001 else
3002 {
8786db1e 3003 struct expand_operand ops[4];
911c0150 3004 rtx pat;
c2f47e15 3005 tree len;
3006 tree src = CALL_EXPR_ARG (exp, 0);
1e0c0b35 3007 rtx src_reg;
3008 rtx_insn *before_strlen;
3754d046 3009 machine_mode insn_mode = target_mode;
ef2c4a29 3010 enum insn_code icode = CODE_FOR_nothing;
153c3b50 3011 unsigned int align;
6248e345 3012
3013 /* If the length can be computed at compile-time, return it. */
681fab1e 3014 len = c_strlen (src, 0);
6248e345 3015 if (len)
80cd7a5e 3016 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
6248e345 3017
681fab1e 3018	 /* If the length can be computed at compile-time and is a constant
3019 integer, but there are side-effects in src, evaluate
3020 src for side-effects, then return len.
3021 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3022 can be optimized into: i++; x = 3; */
3023 len = c_strlen (src, 1);
3024 if (len && TREE_CODE (len) == INTEGER_CST)
3025 {
3026 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3027 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3028 }
3029
957d0361 3030 align = get_pointer_alignment (src) / BITS_PER_UNIT;
53800dbe 3031
53800dbe 3032 /* If SRC is not a pointer type, don't do this operation inline. */
3033 if (align == 0)
c2f47e15 3034 return NULL_RTX;
53800dbe 3035
911c0150 3036 /* Bail out if we can't compute strlen in the right mode. */
53800dbe 3037 while (insn_mode != VOIDmode)
3038 {
d6bf3b14 3039 icode = optab_handler (strlen_optab, insn_mode);
53800dbe 3040 if (icode != CODE_FOR_nothing)
c28ae87f 3041 break;
53800dbe 3042
3043 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3044 }
3045 if (insn_mode == VOIDmode)
c2f47e15 3046 return NULL_RTX;
53800dbe 3047
911c0150 3048 /* Make a place to hold the source address. We will not expand
3049 the actual source until we are sure that the expansion will
3050 not fail -- there are trees that cannot be expanded twice. */
3051 src_reg = gen_reg_rtx (Pmode);
53800dbe 3052
911c0150 3053 /* Mark the beginning of the strlen sequence so we can emit the
3054 source operand later. */
f0ce3b1f 3055 before_strlen = get_last_insn ();
53800dbe 3056
8786db1e 3057 create_output_operand (&ops[0], target, insn_mode);
3058 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3059 create_integer_operand (&ops[2], 0);
3060 create_integer_operand (&ops[3], align);
3061 if (!maybe_expand_insn (icode, 4, ops))
c2f47e15 3062 return NULL_RTX;
911c0150 3063
3064 /* Now that we are assured of success, expand the source. */
3065 start_sequence ();
499eee58 3066 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
911c0150 3067 if (pat != src_reg)
499eee58 3068 {
3069#ifdef POINTERS_EXTEND_UNSIGNED
3070 if (GET_MODE (pat) != Pmode)
3071 pat = convert_to_mode (Pmode, pat,
3072 POINTERS_EXTEND_UNSIGNED);
3073#endif
3074 emit_move_insn (src_reg, pat);
3075 }
31d3e01c 3076 pat = get_insns ();
911c0150 3077 end_sequence ();
bceb0d1f 3078
3079 if (before_strlen)
3080 emit_insn_after (pat, before_strlen);
3081 else
3082 emit_insn_before (pat, get_insns ());
53800dbe 3083
3084 /* Return the value in the proper mode for this function. */
8786db1e 3085 if (GET_MODE (ops[0].value) == target_mode)
3086 target = ops[0].value;
53800dbe 3087 else if (target != 0)
8786db1e 3088 convert_move (target, ops[0].value, 0);
53800dbe 3089 else
8786db1e 3090 target = convert_to_mode (target_mode, ops[0].value, 0);
911c0150 3091
3092 return target;
53800dbe 3093 }
3094}
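
/* A short sketch of why the c_strlen example above folds: "xfoo" + 1
   points at the string "foo", so both arms of

	x = strlen (i++ ? "xfoo" + 1 : "bar");

   have the known length 3; only the side effect i++ must be preserved,
   giving the equivalent of

	i++;
	x = 3;  */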
3095
6840589f 3096/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3097 bytes from constant string DATA + OFFSET and return it as target
3098 constant. */
3099
3100static rtx
aecda0d6 3101builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3102 machine_mode mode)
6840589f 3103{
3104 const char *str = (const char *) data;
3105
64db345d 3106 gcc_assert (offset >= 0
3107 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3108 <= strlen (str) + 1));
6840589f 3109
3110 return c_readstr (str + offset, mode);
3111}
3112
36d63243 3113/* LEN specifies the length of the block for a memcpy/memset operation.
9db0f34d 3114   Figure out its range and put it into MIN_SIZE/MAX_SIZE.
 3115   In some cases we can make a very likely guess about the maximum size;
 3116   in that case we store it in PROBABLE_MAX_SIZE.  */
36d63243 3117
3118static void
3119determine_block_size (tree len, rtx len_rtx,
3120 unsigned HOST_WIDE_INT *min_size,
9db0f34d 3121 unsigned HOST_WIDE_INT *max_size,
3122 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 3123{
3124 if (CONST_INT_P (len_rtx))
3125 {
4e140a5c 3126 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 3127 return;
3128 }
3129 else
3130 {
9c1be15e 3131 wide_int min, max;
9db0f34d 3132 enum value_range_type range_type = VR_UNDEFINED;
3133
3134 /* Determine bounds from the type. */
3135 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3136 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3137 else
3138 *min_size = 0;
3139 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 3140 *probable_max_size = *max_size
3141 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 3142 else
3143 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3144
3145 if (TREE_CODE (len) == SSA_NAME)
3146 range_type = get_range_info (len, &min, &max);
3147 if (range_type == VR_RANGE)
36d63243 3148 {
fe5ad926 3149 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 3150 *min_size = min.to_uhwi ();
fe5ad926 3151 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 3152 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 3153 }
9db0f34d 3154 else if (range_type == VR_ANTI_RANGE)
36d63243 3155 {
4a474a5a 3156	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
fe5ad926 3157 if (min == 0)
9db0f34d 3158 {
9c1be15e 3159 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3160 *min_size = max.to_uhwi () + 1;
9db0f34d 3161 }
3162 /* Code like
3163
3164 int n;
3165 if (n < 100)
4a474a5a 3166 memcpy (a, b, n)
9db0f34d 3167
 3168	     produces an anti-range allowing negative values of N.  We can
 3169	     still use that information and guess that N is not negative.
 3170	     */
fe5ad926 3171 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3172 *probable_max_size = min.to_uhwi () - 1;
36d63243 3173 }
3174 }
3175 gcc_checking_assert (*max_size <=
3176 (unsigned HOST_WIDE_INT)
3177 GET_MODE_MASK (GET_MODE (len_rtx)));
3178}
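
/* Two illustrative cases for the range handling above (sketches, not
   taken from real range data):

   1) A guard such as

	if (n != 0)
	  memset (p, 0, n);

      gives N the anti-range ~[0, 0], so MIN_SIZE can be raised to 1.

   2) A signed guard such as

	int n;
	if (n < 100)
	  memcpy (a, b, n);

      gives an anti-range that still allows huge (formerly negative)
      values of N, so MAX_SIZE stays pessimistic while PROBABLE_MAX_SIZE
      is lowered to 99 on the guess that N is in fact non-negative.  */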
3179
f21337ef 3180/* Helper function to do the actual work for expand_builtin_memcpy. */
3181
3182static rtx
3183expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3184{
3185 const char *src_str;
3186 unsigned int src_align = get_pointer_alignment (src);
3187 unsigned int dest_align = get_pointer_alignment (dest);
3188 rtx dest_mem, src_mem, dest_addr, len_rtx;
3189 HOST_WIDE_INT expected_size = -1;
3190 unsigned int expected_align = 0;
3191 unsigned HOST_WIDE_INT min_size;
3192 unsigned HOST_WIDE_INT max_size;
3193 unsigned HOST_WIDE_INT probable_max_size;
3194
3195 /* If DEST is not a pointer type, call the normal function. */
3196 if (dest_align == 0)
3197 return NULL_RTX;
3198
 3199  /* If SRC is not a pointer type, don't do this
3200 operation in-line. */
3201 if (src_align == 0)
3202 return NULL_RTX;
3203
3204 if (currently_expanding_gimple_stmt)
3205 stringop_block_profile (currently_expanding_gimple_stmt,
3206 &expected_align, &expected_size);
3207
3208 if (expected_align < dest_align)
3209 expected_align = dest_align;
3210 dest_mem = get_memory_rtx (dest, len);
3211 set_mem_align (dest_mem, dest_align);
3212 len_rtx = expand_normal (len);
3213 determine_block_size (len, len_rtx, &min_size, &max_size,
3214 &probable_max_size);
3215 src_str = c_getstr (src);
3216
3217 /* If SRC is a string constant and block move would be done
3218 by pieces, we can avoid loading the string from memory
 3219     and only store the computed constants.  */
3220 if (src_str
3221 && CONST_INT_P (len_rtx)
3222 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3223 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3224 CONST_CAST (char *, src_str),
3225 dest_align, false))
3226 {
3227 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3228 builtin_memcpy_read_str,
3229 CONST_CAST (char *, src_str),
3230 dest_align, false, 0);
3231 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3232 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3233 return dest_mem;
3234 }
3235
3236 src_mem = get_memory_rtx (src, len);
3237 set_mem_align (src_mem, src_align);
3238
3239 /* Copy word part most expediently. */
3240 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3241 CALL_EXPR_TAILCALL (exp)
3242 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3243 expected_align, expected_size,
3244 min_size, max_size, probable_max_size);
3245
3246 if (dest_addr == 0)
3247 {
3248 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3249 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3250 }
3251
3252 return dest_addr;
3253}
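
/* A user-level sketch of the store-by-pieces path above (illustrative
   only, not a guaranteed transformation): when SRC is a string literal
   and LEN is a small constant, e.g.

	char buf[8];
	memcpy (buf, "abc", 4);

   the expansion can emit direct stores of the constant bytes (fetched
   via builtin_memcpy_read_str) instead of loading the literal from
   memory and doing a block move.  */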
3254
c2f47e15 3255/* Expand a call EXP to the memcpy builtin.
 3256   Return NULL_RTX if we failed; the caller should emit a normal call.
3b824fa6 3257   Otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3258 mode MODE if that's convenient). */
c2f47e15 3259
53800dbe 3260static rtx
a65c4d64 3261expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3262{
c2f47e15 3263 if (!validate_arglist (exp,
3264 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3265 return NULL_RTX;
53800dbe 3266 else
3267 {
c2f47e15 3268 tree dest = CALL_EXPR_ARG (exp, 0);
3269 tree src = CALL_EXPR_ARG (exp, 1);
3270 tree len = CALL_EXPR_ARG (exp, 2);
f21337ef 3271 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3272 }
3273}
6840589f 3274
f21337ef 3275/* Expand an instrumented call EXP to the memcpy builtin.
 3276   Return NULL_RTX if we failed; the caller should emit a normal call.
 3277   Otherwise try to get the result in TARGET, if convenient (and in
3278 mode MODE if that's convenient). */
53800dbe 3279
f21337ef 3280static rtx
3281expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3282{
3283 if (!validate_arglist (exp,
3284 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3285 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3286 INTEGER_TYPE, VOID_TYPE))
3287 return NULL_RTX;
3288 else
3289 {
3290 tree dest = CALL_EXPR_ARG (exp, 0);
3291 tree src = CALL_EXPR_ARG (exp, 2);
3292 tree len = CALL_EXPR_ARG (exp, 4);
3293 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
53800dbe 3294
f21337ef 3295 /* Return src bounds with the result. */
3296 if (res)
e5716f7e 3297 {
17d388d8 3298 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3299 expand_normal (CALL_EXPR_ARG (exp, 1)));
3300 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3301 }
f21337ef 3302 return res;
53800dbe 3303 }
3304}
3305
c2f47e15 3306/* Expand a call EXP to the mempcpy builtin.
 3307   Return NULL_RTX if we failed; the caller should emit a normal call.
647661c6 3308   Otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3309 mode MODE if that's convenient). If ENDP is 0 return the
3310 destination pointer, if ENDP is 1 return the end pointer ala
3311 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3312 stpcpy. */
647661c6 3313
3314static rtx
3754d046 3315expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3316{
c2f47e15 3317 if (!validate_arglist (exp,
3318 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3319 return NULL_RTX;
3320 else
3321 {
3322 tree dest = CALL_EXPR_ARG (exp, 0);
3323 tree src = CALL_EXPR_ARG (exp, 1);
3324 tree len = CALL_EXPR_ARG (exp, 2);
3325 return expand_builtin_mempcpy_args (dest, src, len,
f21337ef 3326 target, mode, /*endp=*/ 1,
3327 exp);
3328 }
3329}
3330
3331/* Expand an instrumented call EXP to the mempcpy builtin.
 3332   Return NULL_RTX if we failed; the caller should emit a normal call.
 3333   Otherwise try to get the result in TARGET, if convenient (and in
3334 mode MODE if that's convenient). */
3335
3336static rtx
3337expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3338{
3339 if (!validate_arglist (exp,
3340 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3341 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3342 INTEGER_TYPE, VOID_TYPE))
3343 return NULL_RTX;
3344 else
3345 {
3346 tree dest = CALL_EXPR_ARG (exp, 0);
3347 tree src = CALL_EXPR_ARG (exp, 2);
3348 tree len = CALL_EXPR_ARG (exp, 4);
3349 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3350 mode, 1, exp);
3351
3352 /* Return src bounds with the result. */
3353 if (res)
3354 {
17d388d8 3355 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3356 expand_normal (CALL_EXPR_ARG (exp, 1)));
3357 res = chkp_join_splitted_slot (res, bnd);
3358 }
3359 return res;
c2f47e15 3360 }
3361}
3362
3363/* Helper function to do the actual work for expand_builtin_mempcpy. The
3364 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3365 so that this can also be called without constructing an actual CALL_EXPR.
a65c4d64 3366 The other arguments and return value are the same as for
3367 expand_builtin_mempcpy. */
c2f47e15 3368
3369static rtx
a65c4d64 3370expand_builtin_mempcpy_args (tree dest, tree src, tree len,
f21337ef 3371 rtx target, machine_mode mode, int endp,
3372 tree orig_exp)
c2f47e15 3373{
f21337ef 3374 tree fndecl = get_callee_fndecl (orig_exp);
3375
c2f47e15 3376 /* If return value is ignored, transform mempcpy into memcpy. */
f21337ef 3377 if (target == const0_rtx
3378 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3379 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3380 {
3381 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3382 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3383 dest, src, len);
3384 return expand_expr (result, target, mode, EXPAND_NORMAL);
3385 }
3386 else if (target == const0_rtx
3387 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
9fe0e1b8 3388 {
b9a16870 3389 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
0568e9c1 3390 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3391 dest, src, len);
c8b17b2e 3392 return expand_expr (result, target, mode, EXPAND_NORMAL);
9fe0e1b8 3393 }
647661c6 3394 else
3395 {
9fe0e1b8 3396 const char *src_str;
957d0361 3397 unsigned int src_align = get_pointer_alignment (src);
3398 unsigned int dest_align = get_pointer_alignment (dest);
9fe0e1b8 3399 rtx dest_mem, src_mem, len_rtx;
a0c938f0 3400
7da1412b 3401 /* If either SRC or DEST is not a pointer type, don't do this
a0c938f0 3402 operation in-line. */
7da1412b 3403 if (dest_align == 0 || src_align == 0)
c2f47e15 3404 return NULL_RTX;
9fe0e1b8 3405
6217c238 3406 /* If LEN is not constant, call the normal function. */
e913b5cd 3407 if (! tree_fits_uhwi_p (len))
c2f47e15 3408 return NULL_RTX;
0862b7e9 3409
8ec3c5c2 3410 len_rtx = expand_normal (len);
9fe0e1b8 3411 src_str = c_getstr (src);
647661c6 3412
9fe0e1b8 3413 /* If SRC is a string constant and block move would be done
3414 by pieces, we can avoid loading the string from memory
 3415	 and only store the computed constants.  */
3416 if (src_str
971ba038 3417 && CONST_INT_P (len_rtx)
9fe0e1b8 3418 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3419 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
364c0c59 3420 CONST_CAST (char *, src_str),
3421 dest_align, false))
9fe0e1b8 3422 {
d8ae1baa 3423 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3424 set_mem_align (dest_mem, dest_align);
3425 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3426 builtin_memcpy_read_str,
364c0c59 3427 CONST_CAST (char *, src_str),
3428 dest_align, false, endp);
9fe0e1b8 3429 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3430 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3431 return dest_mem;
647661c6 3432 }
3433
971ba038 3434 if (CONST_INT_P (len_rtx)
9fe0e1b8 3435 && can_move_by_pieces (INTVAL (len_rtx),
3436 MIN (dest_align, src_align)))
3437 {
d8ae1baa 3438 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3439 set_mem_align (dest_mem, dest_align);
d8ae1baa 3440 src_mem = get_memory_rtx (src, len);
9fe0e1b8 3441 set_mem_align (src_mem, src_align);
3442 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3443 MIN (dest_align, src_align), endp);
3444 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3445 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3446 return dest_mem;
3447 }
3448
c2f47e15 3449 return NULL_RTX;
647661c6 3450 }
3451}
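
/* The ENDP convention used by the expanders above, in user-level terms
   (standard library semantics, shown only as a reminder):

	ENDP == 0:  memcpy (dst, src, n)   returns dst
	ENDP == 1:  mempcpy (dst, src, n)  returns (char *) dst + n
	ENDP == 2:  stpcpy (dst, src)      returns dst + strlen (src),
		    i.e. the address of the copied terminating NUL
		    (the end pointer minus one).  */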
3452
727c62dd 3453#ifndef HAVE_movstr
3454# define HAVE_movstr 0
3455# define CODE_FOR_movstr CODE_FOR_nothing
3456#endif
3457
c2f47e15 3458/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3459 we failed, the caller should emit a normal call, otherwise try to
3460 get the result in TARGET, if convenient. If ENDP is 0 return the
3461 destination pointer, if ENDP is 1 return the end pointer ala
3462 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3463 stpcpy. */
3464
3465static rtx
3466expand_movstr (tree dest, tree src, rtx target, int endp)
3467{
8786db1e 3468 struct expand_operand ops[3];
727c62dd 3469 rtx dest_mem;
3470 rtx src_mem;
727c62dd 3471
3472 if (!HAVE_movstr)
c2f47e15 3473 return NULL_RTX;
727c62dd 3474
d8ae1baa 3475 dest_mem = get_memory_rtx (dest, NULL);
3476 src_mem = get_memory_rtx (src, NULL);
727c62dd 3477 if (!endp)
3478 {
3479 target = force_reg (Pmode, XEXP (dest_mem, 0));
3480 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3481 }
3482
8786db1e 3483 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3484 create_fixed_operand (&ops[1], dest_mem);
3485 create_fixed_operand (&ops[2], src_mem);
1e1d5623 3486 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3487 return NULL_RTX;
727c62dd 3488
8786db1e 3489 if (endp && target != const0_rtx)
c5aba89c 3490 {
8786db1e 3491 target = ops[0].value;
3492 /* movstr is supposed to set end to the address of the NUL
3493 terminator. If the caller requested a mempcpy-like return value,
3494 adjust it. */
3495 if (endp == 1)
3496 {
29c05e22 3497 rtx tem = plus_constant (GET_MODE (target),
3498 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3499 emit_move_insn (target, force_operand (tem, NULL_RTX));
3500 }
c5aba89c 3501 }
727c62dd 3502 return target;
3503}
3504
48e1416a 3505/* Expand expression EXP, which is a call to the strcpy builtin. Return
 3506   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
c2f47e15 3507 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3508 convenient). */
902de8ed 3509
53800dbe 3510static rtx
a65c4d64 3511expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3512{
c2f47e15 3513 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3514 {
3515 tree dest = CALL_EXPR_ARG (exp, 0);
3516 tree src = CALL_EXPR_ARG (exp, 1);
a65c4d64 3517 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3518 }
3519 return NULL_RTX;
3520}
3521
3522/* Helper function to do the actual work for expand_builtin_strcpy. The
3523 arguments to the builtin_strcpy call DEST and SRC are broken out
3524 so that this can also be called without constructing an actual CALL_EXPR.
3525 The other arguments and return value are the same as for
3526 expand_builtin_strcpy. */
3527
3528static rtx
a65c4d64 3529expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3530{
c2f47e15 3531 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3532}
3533
c2f47e15 3534/* Expand a call EXP to the stpcpy builtin.
 3535   Return NULL_RTX if we failed; the caller should emit a normal call.
3b824fa6 3536   Otherwise try to get the result in TARGET, if convenient (and in
3537 mode MODE if that's convenient). */
3538
3539static rtx
3754d046 3540expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3541{
c2f47e15 3542 tree dst, src;
389dd41b 3543 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3544
3545 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3546 return NULL_RTX;
3547
3548 dst = CALL_EXPR_ARG (exp, 0);
3549 src = CALL_EXPR_ARG (exp, 1);
3550
727c62dd 3551 /* If return value is ignored, transform stpcpy into strcpy. */
b9a16870 3552 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3553 {
b9a16870 3554 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3555 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3556 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3557 }
3b824fa6 3558 else
3559 {
c2f47e15 3560 tree len, lenp1;
727c62dd 3561 rtx ret;
647661c6 3562
9fe0e1b8 3563 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3564 compile-time, not an expression containing a string. This is
3565 because the latter will potentially produce pessimized code
3566 when used to produce the return value. */
681fab1e 3567 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3568 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3569
389dd41b 3570 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3571 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
f21337ef 3572 target, mode, /*endp=*/2,
3573 exp);
727c62dd 3574
3575 if (ret)
3576 return ret;
3577
3578 if (TREE_CODE (len) == INTEGER_CST)
3579 {
8ec3c5c2 3580 rtx len_rtx = expand_normal (len);
727c62dd 3581
971ba038 3582 if (CONST_INT_P (len_rtx))
727c62dd 3583 {
a65c4d64 3584 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3585
3586 if (ret)
3587 {
3588 if (! target)
7ac87324 3589 {
3590 if (mode != VOIDmode)
3591 target = gen_reg_rtx (mode);
3592 else
3593 target = gen_reg_rtx (GET_MODE (ret));
3594 }
727c62dd 3595 if (GET_MODE (target) != GET_MODE (ret))
3596 ret = gen_lowpart (GET_MODE (target), ret);
3597
29c05e22 3598 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3599 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3600 gcc_assert (ret);
727c62dd 3601
3602 return target;
3603 }
3604 }
3605 }
3606
c2f47e15 3607 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3608 }
3609}
3610
6840589f 3611/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3612 bytes from constant string DATA + OFFSET and return it as target
3613 constant. */
3614
09879952 3615rtx
aecda0d6 3616builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3617 machine_mode mode)
6840589f 3618{
3619 const char *str = (const char *) data;
3620
3621 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3622 return const0_rtx;
3623
3624 return c_readstr (str + offset, mode);
3625}
3626
48e1416a 3627/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 3628   NULL_RTX if we failed; the caller should emit a normal call.  */
ed09096d 3629
3630static rtx
a65c4d64 3631expand_builtin_strncpy (tree exp, rtx target)
ed09096d 3632{
389dd41b 3633 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3634
3635 if (validate_arglist (exp,
3636 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 3637 {
c2f47e15 3638 tree dest = CALL_EXPR_ARG (exp, 0);
3639 tree src = CALL_EXPR_ARG (exp, 1);
3640 tree len = CALL_EXPR_ARG (exp, 2);
3641 tree slen = c_strlen (src, 1);
6840589f 3642
8ff6a5cd 3643 /* We must be passed a constant len and src parameter. */
e913b5cd 3644 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 3645 return NULL_RTX;
ed09096d 3646
389dd41b 3647 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 3648
3649 /* We're required to pad with trailing zeros if the requested
a0c938f0 3650 len is greater than strlen(s2)+1. In that case try to
6840589f 3651	   use store_by_pieces; if it fails, punt.  */
ed09096d 3652 if (tree_int_cst_lt (slen, len))
6840589f 3653 {
957d0361 3654 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 3655 const char *p = c_getstr (src);
6840589f 3656 rtx dest_mem;
3657
e913b5cd 3658 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3659 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 3660 builtin_strncpy_read_str,
364c0c59 3661 CONST_CAST (char *, p),
3662 dest_align, false))
c2f47e15 3663 return NULL_RTX;
6840589f 3664
d8ae1baa 3665 dest_mem = get_memory_rtx (dest, len);
e913b5cd 3666 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 3667 builtin_strncpy_read_str,
364c0c59 3668 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 3669 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 3670 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 3671 return dest_mem;
6840589f 3672 }
ed09096d 3673 }
c2f47e15 3674 return NULL_RTX;
ed09096d 3675}
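
/* A worked example of the padding requirement handled above (standard
   strncpy semantics):

	char buf[6];
	strncpy (buf, "ab", 6);

   The source length 2 is smaller than the requested length 6, so the
   expansion must store 'a', 'b' and then four NUL bytes; the
   store_by_pieces path does exactly that because
   builtin_strncpy_read_str returns zero for offsets past the end of
   the source string.  */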
3676
ecc318ff 3677/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3678 bytes from constant string DATA + OFFSET and return it as target
3679 constant. */
3680
f656b751 3681rtx
aecda0d6 3682builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3683 machine_mode mode)
ecc318ff 3684{
3685 const char *c = (const char *) data;
364c0c59 3686 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3687
3688 memset (p, *c, GET_MODE_SIZE (mode));
3689
3690 return c_readstr (p, mode);
3691}
3692
a7ec6974 3693/* Callback routine for store_by_pieces. Return the RTL of a register
3694 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3695 char value given in the RTL register data. For example, if mode is
3696 4 bytes wide, return the RTL for 0x01010101*data. */
3697
3698static rtx
aecda0d6 3699builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3700 machine_mode mode)
a7ec6974 3701{
3702 rtx target, coeff;
3703 size_t size;
3704 char *p;
3705
3706 size = GET_MODE_SIZE (mode);
f0ce3b1f 3707 if (size == 1)
3708 return (rtx) data;
a7ec6974 3709
364c0c59 3710 p = XALLOCAVEC (char, size);
a7ec6974 3711 memset (p, 1, size);
3712 coeff = c_readstr (p, mode);
3713
f0ce3b1f 3714 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3715 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3716 return force_reg (mode, target);
3717}
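
/* A small worked example of the trick above, assuming a 4-byte mode:
   the buffer of 0x01 bytes reads back as the coefficient 0x01010101,
   so for a runtime byte value C the resulting register holds

	(unsigned char) C * 0x01010101

   e.g. C == 0xAB gives 0xABABABAB, four copies of the byte.  */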
3718
48e1416a 3719/* Expand expression EXP, which is a call to the memset builtin. Return
 3720   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
c2f47e15 3721 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3722 convenient). */
902de8ed 3723
53800dbe 3724static rtx
3754d046 3725expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3726{
c2f47e15 3727 if (!validate_arglist (exp,
3728 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3729 return NULL_RTX;
53800dbe 3730 else
3731 {
c2f47e15 3732 tree dest = CALL_EXPR_ARG (exp, 0);
3733 tree val = CALL_EXPR_ARG (exp, 1);
3734 tree len = CALL_EXPR_ARG (exp, 2);
3735 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3736 }
3737}
53800dbe 3738
f21337ef 3739/* Expand expression EXP, which is an instrumented call to the memset builtin.
 3740   Return NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
3741 try to get the result in TARGET, if convenient (and in mode MODE if that's
3742 convenient). */
3743
3744static rtx
3745expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3746{
3747 if (!validate_arglist (exp,
3748 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3749 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3750 return NULL_RTX;
3751 else
3752 {
3753 tree dest = CALL_EXPR_ARG (exp, 0);
3754 tree val = CALL_EXPR_ARG (exp, 2);
3755 tree len = CALL_EXPR_ARG (exp, 3);
3756 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3757
3758 /* Return src bounds with the result. */
3759 if (res)
3760 {
17d388d8 3761 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3762 expand_normal (CALL_EXPR_ARG (exp, 1)));
3763 res = chkp_join_splitted_slot (res, bnd);
3764 }
3765 return res;
3766 }
3767}
3768
c2f47e15 3769/* Helper function to do the actual work for expand_builtin_memset. The
3770 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3771 so that this can also be called without constructing an actual CALL_EXPR.
3772 The other arguments and return value are the same as for
3773 expand_builtin_memset. */
6b961939 3774
c2f47e15 3775static rtx
3776expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 3777 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 3778{
3779 tree fndecl, fn;
3780 enum built_in_function fcode;
3754d046 3781 machine_mode val_mode;
c2f47e15 3782 char c;
3783 unsigned int dest_align;
3784 rtx dest_mem, dest_addr, len_rtx;
3785 HOST_WIDE_INT expected_size = -1;
3786 unsigned int expected_align = 0;
36d63243 3787 unsigned HOST_WIDE_INT min_size;
3788 unsigned HOST_WIDE_INT max_size;
9db0f34d 3789 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 3790
957d0361 3791 dest_align = get_pointer_alignment (dest);
162719b3 3792
c2f47e15 3793 /* If DEST is not a pointer type, don't do this operation in-line. */
3794 if (dest_align == 0)
3795 return NULL_RTX;
6f428e8b 3796
8cee8dc0 3797 if (currently_expanding_gimple_stmt)
3798 stringop_block_profile (currently_expanding_gimple_stmt,
3799 &expected_align, &expected_size);
75a70cf9 3800
c2f47e15 3801 if (expected_align < dest_align)
3802 expected_align = dest_align;
6b961939 3803
c2f47e15 3804 /* If the LEN parameter is zero, return DEST. */
3805 if (integer_zerop (len))
3806 {
3807 /* Evaluate and ignore VAL in case it has side-effects. */
3808 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3809 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3810 }
7a3e5564 3811
c2f47e15 3812 /* Stabilize the arguments in case we fail. */
3813 dest = builtin_save_expr (dest);
3814 val = builtin_save_expr (val);
3815 len = builtin_save_expr (len);
a7ec6974 3816
c2f47e15 3817 len_rtx = expand_normal (len);
9db0f34d 3818 determine_block_size (len, len_rtx, &min_size, &max_size,
3819 &probable_max_size);
c2f47e15 3820 dest_mem = get_memory_rtx (dest, len);
03a5dda9 3821 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 3822
c2f47e15 3823 if (TREE_CODE (val) != INTEGER_CST)
3824 {
3825 rtx val_rtx;
a7ec6974 3826
c2f47e15 3827 val_rtx = expand_normal (val);
03a5dda9 3828 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 3829
c2f47e15 3830      /* Assume that we can memset by pieces if we can store
 3831	 the coefficients by pieces (in the required modes).
 3832	 We can't pass builtin_memset_gen_str as that emits RTL.  */
3833 c = 1;
e913b5cd 3834 if (tree_fits_uhwi_p (len)
3835 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3836 builtin_memset_read_str, &c, dest_align,
3837 true))
c2f47e15 3838 {
03a5dda9 3839 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 3840 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3841 builtin_memset_gen_str, val_rtx, dest_align,
3842 true, 0);
c2f47e15 3843 }
3844 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3845 dest_align, expected_align,
9db0f34d 3846 expected_size, min_size, max_size,
3847 probable_max_size))
6b961939 3848 goto do_libcall;
48e1416a 3849
c2f47e15 3850 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3851 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3852 return dest_mem;
3853 }
53800dbe 3854
c2f47e15 3855 if (target_char_cast (val, &c))
3856 goto do_libcall;
ecc318ff 3857
c2f47e15 3858 if (c)
3859 {
e913b5cd 3860 if (tree_fits_uhwi_p (len)
3861 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3862 builtin_memset_read_str, &c, dest_align,
3863 true))
e913b5cd 3864 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3865 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 3866 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3867 gen_int_mode (c, val_mode),
c2f47e15 3868 dest_align, expected_align,
9db0f34d 3869 expected_size, min_size, max_size,
3870 probable_max_size))
c2f47e15 3871 goto do_libcall;
48e1416a 3872
c2f47e15 3873 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3874 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3875 return dest_mem;
3876 }
ecc318ff 3877
c2f47e15 3878 set_mem_align (dest_mem, dest_align);
3879 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3880 CALL_EXPR_TAILCALL (orig_exp)
3881 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 3882 expected_align, expected_size,
9db0f34d 3883 min_size, max_size,
3884 probable_max_size);
53800dbe 3885
c2f47e15 3886 if (dest_addr == 0)
3887 {
3888 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3889 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3890 }
53800dbe 3891
c2f47e15 3892 return dest_addr;
6b961939 3893
c2f47e15 3894 do_libcall:
3895 fndecl = get_callee_fndecl (orig_exp);
3896 fcode = DECL_FUNCTION_CODE (fndecl);
f21337ef 3897 if (fcode == BUILT_IN_MEMSET
3898 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
0568e9c1 3899 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3900 dest, val, len);
c2f47e15 3901 else if (fcode == BUILT_IN_BZERO)
0568e9c1 3902 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3903 dest, len);
c2f47e15 3904 else
3905 gcc_unreachable ();
a65c4d64 3906 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3907 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 3908 return expand_call (fn, target, target == const0_rtx);
53800dbe 3909}
3910
48e1416a 3911/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 3912   NULL_RTX if we failed; the caller should emit a normal call.  */
27d0c333 3913
ffc83088 3914static rtx
0b25db21 3915expand_builtin_bzero (tree exp)
ffc83088 3916{
c2f47e15 3917 tree dest, size;
389dd41b 3918 location_t loc = EXPR_LOCATION (exp);
ffc83088 3919
c2f47e15 3920 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 3921 return NULL_RTX;
ffc83088 3922
c2f47e15 3923 dest = CALL_EXPR_ARG (exp, 0);
3924 size = CALL_EXPR_ARG (exp, 1);
bf8e3599 3925
7369e7ba 3926 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 3927 memset(ptr x, int 0, size_t y). This is done this way
 3928     so that if it isn't expanded inline, we fall back to
3929 calling bzero instead of memset. */
bf8e3599 3930
c2f47e15 3931 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 3932 fold_convert_loc (loc,
3933 size_type_node, size),
c2f47e15 3934 const0_rtx, VOIDmode, exp);
ffc83088 3935}
3936
7a3f89b5 3937/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 3938 Return NULL_RTX if we failed and the caller should emit a normal call,
3939 otherwise try to get the result in TARGET, if convenient (and in mode
3940 MODE, if that's convenient). */
27d0c333 3941
53800dbe 3942static rtx
a65c4d64 3943expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 3944 ATTRIBUTE_UNUSED machine_mode mode)
53800dbe 3945{
a65c4d64 3946 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 3947
c2f47e15 3948 if (!validate_arglist (exp,
3949 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3950 return NULL_RTX;
6f428e8b 3951
bd021c1c 3952 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3953 implementing memcmp because it will stop if it encounters two
3954 zero bytes. */
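
/* A concrete case (standard memcmp semantics): memcmp must compare raw
   bytes and must not stop at NUL, e.g.

	memcmp ("a\0b", "a\0c", 3)

   is nonzero because 'b' != 'c', whereas a string-compare pattern would
   stop at the matching NUL bytes and wrongly report equality.  */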
3955#if defined HAVE_cmpmemsi
53800dbe 3956 {
0cd832f0 3957 rtx arg1_rtx, arg2_rtx, arg3_rtx;
53800dbe 3958 rtx result;
0cd832f0 3959 rtx insn;
c2f47e15 3960 tree arg1 = CALL_EXPR_ARG (exp, 0);
3961 tree arg2 = CALL_EXPR_ARG (exp, 1);
3962 tree len = CALL_EXPR_ARG (exp, 2);
53800dbe 3963
957d0361 3964 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3965 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3754d046 3966 machine_mode insn_mode;
b428c0a5 3967
b428c0a5 3968 if (HAVE_cmpmemsi)
3969 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3970 else
c2f47e15 3971 return NULL_RTX;
53800dbe 3972
3973 /* If we don't have POINTER_TYPE, call the function. */
3974 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 3975 return NULL_RTX;
53800dbe 3976
3977 /* Make a place to write the result of the instruction. */
3978 result = target;
3979 if (! (result != 0
8ad4c111 3980 && REG_P (result) && GET_MODE (result) == insn_mode
53800dbe 3981 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3982 result = gen_reg_rtx (insn_mode);
3983
d8ae1baa 3984 arg1_rtx = get_memory_rtx (arg1, len);
3985 arg2_rtx = get_memory_rtx (arg2, len);
389dd41b 3986 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
83f88f8e 3987
3988 /* Set MEM_SIZE as appropriate. */
971ba038 3989 if (CONST_INT_P (arg3_rtx))
83f88f8e 3990 {
5b2a69fa 3991 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3992 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
83f88f8e 3993 }
3994
b428c0a5 3995 if (HAVE_cmpmemsi)
3996 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3997 GEN_INT (MIN (arg1_align, arg2_align)));
0cd832f0 3998 else
64db345d 3999 gcc_unreachable ();
0cd832f0 4000
4001 if (insn)
4002 emit_insn (insn);
4003 else
2dd6f9ed 4004 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
0cd832f0 4005 TYPE_MODE (integer_type_node), 3,
4006 XEXP (arg1_rtx, 0), Pmode,
4007 XEXP (arg2_rtx, 0), Pmode,
4008 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
78a8ed03 4009 TYPE_UNSIGNED (sizetype)),
0cd832f0 4010 TYPE_MODE (sizetype));
53800dbe 4011
4012 /* Return the value in the proper mode for this function. */
4013 mode = TYPE_MODE (TREE_TYPE (exp));
4014 if (GET_MODE (result) == mode)
4015 return result;
4016 else if (target != 0)
4017 {
4018 convert_move (target, result, 0);
4019 return target;
4020 }
4021 else
4022 return convert_to_mode (mode, result, 0);
4023 }
bd021c1c 4024#endif /* HAVE_cmpmemsi. */
53800dbe 4025
c2f47e15 4026 return NULL_RTX;
6f428e8b 4027}
4028
c2f47e15 4029/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
53800dbe 4030   if we failed; the caller should emit a normal call.  Otherwise try to get
4031 the result in TARGET, if convenient. */
902de8ed 4032
53800dbe 4033static rtx
a65c4d64 4034expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 4035{
c2f47e15 4036 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4037 return NULL_RTX;
bf8e3599 4038
6ac5504b 4039#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
6b531606 4040 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4041 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
6ac5504b 4042 {
4043 rtx arg1_rtx, arg2_rtx;
4044 rtx result, insn = NULL_RTX;
4045 tree fndecl, fn;
c2f47e15 4046 tree arg1 = CALL_EXPR_ARG (exp, 0);
4047 tree arg2 = CALL_EXPR_ARG (exp, 1);
a0c938f0 4048
957d0361 4049 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4050 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6ac5504b 4051
4052 /* If we don't have POINTER_TYPE, call the function. */
4053 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4054 return NULL_RTX;
7a3f89b5 4055
6ac5504b 4056 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4057 arg1 = builtin_save_expr (arg1);
4058 arg2 = builtin_save_expr (arg2);
7a3f89b5 4059
d8ae1baa 4060 arg1_rtx = get_memory_rtx (arg1, NULL);
4061 arg2_rtx = get_memory_rtx (arg2, NULL);
53800dbe 4062
6ac5504b 4063#ifdef HAVE_cmpstrsi
4064 /* Try to call cmpstrsi. */
4065 if (HAVE_cmpstrsi)
4066 {
3754d046 4067 machine_mode insn_mode
6ac5504b 4068 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4069
4070 /* Make a place to write the result of the instruction. */
4071 result = target;
4072 if (! (result != 0
4073 && REG_P (result) && GET_MODE (result) == insn_mode
4074 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4075 result = gen_reg_rtx (insn_mode);
4076
4077 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4078 GEN_INT (MIN (arg1_align, arg2_align)));
4079 }
4080#endif
03fd9d2c 4081#ifdef HAVE_cmpstrnsi
6ac5504b 4082 /* Try to determine at least one length and call cmpstrnsi. */
a0c938f0 4083 if (!insn && HAVE_cmpstrnsi)
6ac5504b 4084 {
4085 tree len;
4086 rtx arg3_rtx;
4087
3754d046 4088 machine_mode insn_mode
6ac5504b 4089 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4090 tree len1 = c_strlen (arg1, 1);
4091 tree len2 = c_strlen (arg2, 1);
4092
4093 if (len1)
4094 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4095 if (len2)
4096 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4097
4098 /* If we don't have a constant length for the first, use the length
4099 of the second, if we know it. We don't require a constant for
4100 this case; some cost analysis could be done if both are available
4101 but neither is constant. For now, assume they're equally cheap,
4102 unless one has side effects. If both strings have constant lengths,
4103 use the smaller. */
4104
4105 if (!len1)
4106 len = len2;
4107 else if (!len2)
4108 len = len1;
4109 else if (TREE_SIDE_EFFECTS (len1))
4110 len = len2;
4111 else if (TREE_SIDE_EFFECTS (len2))
4112 len = len1;
4113 else if (TREE_CODE (len1) != INTEGER_CST)
4114 len = len2;
4115 else if (TREE_CODE (len2) != INTEGER_CST)
4116 len = len1;
4117 else if (tree_int_cst_lt (len1, len2))
4118 len = len1;
4119 else
4120 len = len2;
4121
4122 /* If both arguments have side effects, we cannot optimize. */
4123 if (!len || TREE_SIDE_EFFECTS (len))
6b961939 4124 goto do_libcall;
53800dbe 4125
8ec3c5c2 4126 arg3_rtx = expand_normal (len);
902de8ed 4127
6ac5504b 4128 /* Make a place to write the result of the instruction. */
4129 result = target;
4130 if (! (result != 0
4131 && REG_P (result) && GET_MODE (result) == insn_mode
4132 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4133 result = gen_reg_rtx (insn_mode);
53800dbe 4134
6ac5504b 4135 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4136 GEN_INT (MIN (arg1_align, arg2_align)));
4137 }
4138#endif
3f8aefe2 4139
6ac5504b 4140 if (insn)
4141 {
3754d046 4142 machine_mode mode;
6ac5504b 4143 emit_insn (insn);
3f8aefe2 4144
6ac5504b 4145 /* Return the value in the proper mode for this function. */
4146 mode = TYPE_MODE (TREE_TYPE (exp));
4147 if (GET_MODE (result) == mode)
4148 return result;
4149 if (target == 0)
4150 return convert_to_mode (mode, result, 0);
4151 convert_move (target, result, 0);
4152 return target;
4153 }
902de8ed 4154
6ac5504b 4155 /* Expand the library call ourselves using a stabilized argument
4156 list to avoid re-evaluating the function's arguments twice. */
2694880e 4157#ifdef HAVE_cmpstrnsi
6b961939 4158 do_libcall:
2694880e 4159#endif
6ac5504b 4160 fndecl = get_callee_fndecl (exp);
0568e9c1 4161 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
a65c4d64 4162 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4163 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6ac5504b 4164 return expand_call (fn, target, target == const0_rtx);
4165 }
7a3f89b5 4166#endif
c2f47e15 4167 return NULL_RTX;
83d79705 4168}
53800dbe 4169
48e1416a 4170/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 4171   NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
ed09096d 4172 the result in TARGET, if convenient. */
27d0c333 4173
ed09096d 4174static rtx
a65c4d64 4175expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4176 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4177{
a65c4d64 4178 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 4179
c2f47e15 4180 if (!validate_arglist (exp,
4181 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4182 return NULL_RTX;
ed09096d 4183
6e34e617 4184 /* If c_strlen can determine an expression for one of the string
6ac5504b 4185 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4186 using length MIN(strlen(string)+1, arg3). */
6ac5504b 4187#ifdef HAVE_cmpstrnsi
4188 if (HAVE_cmpstrnsi)
7a3f89b5 4189 {
4190 tree len, len1, len2;
4191 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4192 rtx result, insn;
0b25db21 4193 tree fndecl, fn;
c2f47e15 4194 tree arg1 = CALL_EXPR_ARG (exp, 0);
4195 tree arg2 = CALL_EXPR_ARG (exp, 1);
4196 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 4197
957d0361 4198 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4199 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3754d046 4200 machine_mode insn_mode
6ac5504b 4201 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
bf8e3599 4202
681fab1e 4203 len1 = c_strlen (arg1, 1);
4204 len2 = c_strlen (arg2, 1);
7a3f89b5 4205
4206 if (len1)
389dd41b 4207 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 4208 if (len2)
389dd41b 4209 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 4210
4211 /* If we don't have a constant length for the first, use the length
4212 of the second, if we know it. We don't require a constant for
4213 this case; some cost analysis could be done if both are available
4214 but neither is constant. For now, assume they're equally cheap,
4215 unless one has side effects. If both strings have constant lengths,
4216 use the smaller. */
4217
4218 if (!len1)
4219 len = len2;
4220 else if (!len2)
4221 len = len1;
4222 else if (TREE_SIDE_EFFECTS (len1))
4223 len = len2;
4224 else if (TREE_SIDE_EFFECTS (len2))
4225 len = len1;
4226 else if (TREE_CODE (len1) != INTEGER_CST)
4227 len = len2;
4228 else if (TREE_CODE (len2) != INTEGER_CST)
4229 len = len1;
4230 else if (tree_int_cst_lt (len1, len2))
4231 len = len1;
4232 else
4233 len = len2;
6e34e617 4234
7a3f89b5 4235 /* If both arguments have side effects, we cannot optimize. */
4236 if (!len || TREE_SIDE_EFFECTS (len))
c2f47e15 4237 return NULL_RTX;
bf8e3599 4238
7a3f89b5 4239 /* The actual new length parameter is MIN(len,arg3). */
389dd41b 4240 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4241 fold_convert_loc (loc, TREE_TYPE (len), arg3));
7a3f89b5 4242
4243 /* If we don't have POINTER_TYPE, call the function. */
4244 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4245 return NULL_RTX;
7a3f89b5 4246
4247 /* Make a place to write the result of the instruction. */
4248 result = target;
4249 if (! (result != 0
8ad4c111 4250 && REG_P (result) && GET_MODE (result) == insn_mode
7a3f89b5 4251 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4252 result = gen_reg_rtx (insn_mode);
4253
a65c4d64 4254 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4255 arg1 = builtin_save_expr (arg1);
4256 arg2 = builtin_save_expr (arg2);
4257 len = builtin_save_expr (len);
27d0c333 4258
a65c4d64 4259 arg1_rtx = get_memory_rtx (arg1, len);
4260 arg2_rtx = get_memory_rtx (arg2, len);
4261 arg3_rtx = expand_normal (len);
4262 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4263 GEN_INT (MIN (arg1_align, arg2_align)));
4264 if (insn)
4265 {
4266 emit_insn (insn);
49f0327b 4267
a65c4d64 4268 /* Return the value in the proper mode for this function. */
4269 mode = TYPE_MODE (TREE_TYPE (exp));
4270 if (GET_MODE (result) == mode)
4271 return result;
4272 if (target == 0)
4273 return convert_to_mode (mode, result, 0);
4274 convert_move (target, result, 0);
4275 return target;
4276 }
27d0c333 4277
a65c4d64 4278 /* Expand the library call ourselves using a stabilized argument
4279 list to avoid re-evaluating the function's arguments twice. */
4280 fndecl = get_callee_fndecl (exp);
0568e9c1 4281 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4282 arg1, arg2, len);
a65c4d64 4283 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4284 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4285 return expand_call (fn, target, target == const0_rtx);
4286 }
4287#endif
c2f47e15 4288 return NULL_RTX;
49f0327b 4289}
4290
a66c9326 4291/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4292 if that's convenient. */
902de8ed 4293
a66c9326 4294rtx
aecda0d6 4295expand_builtin_saveregs (void)
53800dbe 4296{
1e0c0b35 4297 rtx val;
4298 rtx_insn *seq;
53800dbe 4299
4300 /* Don't do __builtin_saveregs more than once in a function.
4301 Save the result of the first call and reuse it. */
4302 if (saveregs_value != 0)
4303 return saveregs_value;
53800dbe 4304
a66c9326 4305 /* When this function is called, it means that registers must be
4306 saved on entry to this function. So we migrate the call to the
4307 first insn of this function. */
4308
4309 start_sequence ();
53800dbe 4310
a66c9326 4311 /* Do whatever the machine needs done in this case. */
45550790 4312 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4313
a66c9326 4314 seq = get_insns ();
4315 end_sequence ();
53800dbe 4316
a66c9326 4317 saveregs_value = val;
53800dbe 4318
31d3e01c 4319 /* Put the insns after the NOTE that starts the function. If this
4320 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4321 the code is placed at the start of the function. */
4322 push_topmost_sequence ();
0ec80471 4323 emit_insn_after (seq, entry_of_function ());
a66c9326 4324 pop_topmost_sequence ();
4325
4326 return val;
53800dbe 4327}
4328
79012a9d 4329/* Expand a call to __builtin_next_arg. */
27d0c333 4330
53800dbe 4331static rtx
79012a9d 4332expand_builtin_next_arg (void)
53800dbe 4333{
79012a9d 4334 /* Checking arguments is already done in fold_builtin_next_arg
4335 that must be called before this function. */
940ddc5c 4336 return expand_binop (ptr_mode, add_optab,
abe32cce 4337 crtl->args.internal_arg_pointer,
4338 crtl->args.arg_offset_rtx,
53800dbe 4339 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4340}
4341
a66c9326 4342/* Make it easier for the backends by protecting the valist argument
4343 from multiple evaluations. */
4344
4345static tree
389dd41b 4346stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4347{
5f57a8b1 4348 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4349
182cf5a9 4350 /* The current way of determining the type of valist is completely
4351 bogus. We should have the information on the va builtin instead. */
4352 if (!vatype)
4353 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4354
4355 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4356 {
2d47cc32 4357 if (TREE_SIDE_EFFECTS (valist))
4358 valist = save_expr (valist);
11a61dea 4359
2d47cc32 4360 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4361 vatype, but it's possible we've actually been given an array
4362 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4363 So fix it. */
4364 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4365 {
5f57a8b1 4366 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4367 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4368 }
a66c9326 4369 }
11a61dea 4370 else
a66c9326 4371 {
182cf5a9 4372 tree pt = build_pointer_type (vatype);
11a61dea 4373
2d47cc32 4374 if (! needs_lvalue)
4375 {
11a61dea 4376 if (! TREE_SIDE_EFFECTS (valist))
4377 return valist;
bf8e3599 4378
389dd41b 4379 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4380 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4381 }
2d47cc32 4382
11a61dea 4383 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4384 valist = save_expr (valist);
182cf5a9 4385 valist = fold_build2_loc (loc, MEM_REF,
4386 vatype, valist, build_int_cst (pt, 0));
a66c9326 4387 }
4388
4389 return valist;
4390}
4391
2e15d750 4392/* The "standard" definition of va_list is void*. */
4393
4394tree
4395std_build_builtin_va_list (void)
4396{
4397 return ptr_type_node;
4398}
4399
5f57a8b1 4400/* The "standard" abi va_list is va_list_type_node. */
4401
4402tree
4403std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4404{
4405 return va_list_type_node;
4406}
4407
4408/* The "standard" type of va_list is va_list_type_node. */
4409
4410tree
4411std_canonical_va_list_type (tree type)
4412{
4413 tree wtype, htype;
4414
4415 if (INDIRECT_REF_P (type))
4416 type = TREE_TYPE (type);
9af5ce0c 4417 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
5f57a8b1 4418 type = TREE_TYPE (type);
5f57a8b1 4419 wtype = va_list_type_node;
4420 htype = type;
7b36f9ab 4421 /* Treat structure va_list types. */
4422 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4423 htype = TREE_TYPE (htype);
4424 else if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4425 {
4426 /* If va_list is an array type, the argument may have decayed
4427 to a pointer type, e.g. by being passed to another function.
4428 In that case, unwrap both types so that we can compare the
4429 underlying records. */
4430 if (TREE_CODE (htype) == ARRAY_TYPE
4431 || POINTER_TYPE_P (htype))
4432 {
4433 wtype = TREE_TYPE (wtype);
4434 htype = TREE_TYPE (htype);
4435 }
4436 }
4437 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4438 return va_list_type_node;
4439
4440 return NULL_TREE;
4441}
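
/* Illustrative sketch of the array-decay case handled above (assuming a
   target whose va_list is an array of a record type, as on x86-64):

	void vlog (const char *fmt, va_list ap);

   Inside VLOG the parameter AP has decayed to a pointer to the record,
   so both the canonical va_list type and the argument's type are
   unwrapped one level before their main variants are compared.  */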
4442
a66c9326 4443/* The "standard" implementation of va_start: just assign `nextarg' to
4444 the variable. */
27d0c333 4445
a66c9326 4446void
aecda0d6 4447std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4448{
f03c17bc 4449 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4450 convert_move (va_r, nextarg, 0);
058a1b7a 4451
4452 /* We do not have any valid bounds for the pointer, so
4453 just store zero bounds for it. */
4454 if (chkp_function_instrumented_p (current_function_decl))
4455 chkp_expand_bounds_reset_for_mem (valist,
4456 make_tree (TREE_TYPE (valist),
4457 nextarg));
a66c9326 4458}
4459
c2f47e15 4460/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4461
a66c9326 4462static rtx
c2f47e15 4463expand_builtin_va_start (tree exp)
a66c9326 4464{
4465 rtx nextarg;
c2f47e15 4466 tree valist;
389dd41b 4467 location_t loc = EXPR_LOCATION (exp);
a66c9326 4468
c2f47e15 4469 if (call_expr_nargs (exp) < 2)
cb166087 4470 {
389dd41b 4471 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4472 return const0_rtx;
4473 }
a66c9326 4474
c2f47e15 4475 if (fold_builtin_next_arg (exp, true))
79012a9d 4476 return const0_rtx;
7c2f0500 4477
79012a9d 4478 nextarg = expand_builtin_next_arg ();
389dd41b 4479 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4480
8a58ed0a 4481 if (targetm.expand_builtin_va_start)
4482 targetm.expand_builtin_va_start (valist, nextarg);
4483 else
4484 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4485
4486 return const0_rtx;
4487}
4488
c2f47e15 4489/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4490
a66c9326 4491static rtx
c2f47e15 4492expand_builtin_va_end (tree exp)
a66c9326 4493{
c2f47e15 4494 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4495
8a15c04a 4496 /* Evaluate for side effects, if needed. I hate macros that don't
4497 do that. */
4498 if (TREE_SIDE_EFFECTS (valist))
4499 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4500
4501 return const0_rtx;
4502}
4503
c2f47e15 4504/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4505 builtin rather than just as an assignment in stdarg.h because of the
4506 nastiness of array-type va_list types. */
f7c44134 4507
a66c9326 4508static rtx
c2f47e15 4509expand_builtin_va_copy (tree exp)
a66c9326 4510{
4511 tree dst, src, t;
389dd41b 4512 location_t loc = EXPR_LOCATION (exp);
a66c9326 4513
c2f47e15 4514 dst = CALL_EXPR_ARG (exp, 0);
4515 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4516
389dd41b 4517 dst = stabilize_va_list_loc (loc, dst, 1);
4518 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4519
5f57a8b1 4520 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4521
4522 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4523 {
5f57a8b1 4524 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4525 TREE_SIDE_EFFECTS (t) = 1;
4526 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4527 }
4528 else
4529 {
11a61dea 4530 rtx dstb, srcb, size;
4531
4532 /* Evaluate to pointers. */
4533 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4534 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4535 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4536 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4537
85d654dd 4538 dstb = convert_memory_address (Pmode, dstb);
4539 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4540
11a61dea 4541 /* "Dereference" to BLKmode memories. */
4542 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4543 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4544 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4545 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4546 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4547 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4548
4549 /* Copy. */
0378dbdc 4550 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4551 }
4552
4553 return const0_rtx;
4554}
4555
53800dbe 4556/* Expand a call to one of the builtin functions __builtin_frame_address or
4557 __builtin_return_address. */
27d0c333 4558
53800dbe 4559static rtx
c2f47e15 4560expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4561{
53800dbe 4562 /* The argument must be a nonnegative integer constant.
4563 It counts the number of frames to scan up the stack.
4564 The value is the return address saved in that frame. */
c2f47e15 4565 if (call_expr_nargs (exp) == 0)
53800dbe 4566 /* Warning about missing arg was already issued. */
4567 return const0_rtx;
e913b5cd 4568 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4569 {
4570 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
07e3a3d2 4571 error ("invalid argument to %<__builtin_frame_address%>");
53800dbe 4572 else
07e3a3d2 4573 error ("invalid argument to %<__builtin_return_address%>");
53800dbe 4574 return const0_rtx;
4575 }
4576 else
4577 {
27d0c333 4578 rtx tem
4579 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
e913b5cd 4580 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
53800dbe 4581
4582 /* Some ports cannot access arbitrary stack frames. */
4583 if (tem == NULL)
4584 {
4585 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
c3ceba8e 4586 warning (0, "unsupported argument to %<__builtin_frame_address%>");
53800dbe 4587 else
c3ceba8e 4588 warning (0, "unsupported argument to %<__builtin_return_address%>");
53800dbe 4589 return const0_rtx;
4590 }
4591
4592 /* For __builtin_frame_address, return what we've got. */
4593 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4594 return tem;
4595
8ad4c111 4596 if (!REG_P (tem)
53800dbe 4597 && ! CONSTANT_P (tem))
99182918 4598 tem = copy_addr_to_reg (tem);
53800dbe 4599 return tem;
4600 }
4601}
4602
990495a7 4603/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5be42b39 4604 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4605 is the same as for allocate_dynamic_stack_space. */
15c6cf6b 4606
53800dbe 4607static rtx
5be42b39 4608expand_builtin_alloca (tree exp, bool cannot_accumulate)
53800dbe 4609{
4610 rtx op0;
15c6cf6b 4611 rtx result;
581bf1c2 4612 bool valid_arglist;
4613 unsigned int align;
4614 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4615 == BUILT_IN_ALLOCA_WITH_ALIGN);
53800dbe 4616
581bf1c2 4617 valid_arglist
4618 = (alloca_with_align
4619 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4620 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4621
4622 if (!valid_arglist)
c2f47e15 4623 return NULL_RTX;
53800dbe 4624
4625 /* Compute the argument. */
c2f47e15 4626 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4627
581bf1c2 4628 /* Compute the alignment. */
4629 align = (alloca_with_align
f9ae6f95 4630 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4631 : BIGGEST_ALIGNMENT);
4632
53800dbe 4633 /* Allocate the desired space. */
581bf1c2 4634 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
85d654dd 4635 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4636
4637 return result;
53800dbe 4638}
4639
74bdbe96 4640/* Expand a call to a bswap builtin in EXP.
4641 Return NULL_RTX if a normal call should be emitted rather than expanding the
4642 function in-line. If convenient, the result should be placed in TARGET.
4643 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4644
4645static rtx
3754d046 4646expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4647 rtx subtarget)
42791117 4648{
42791117 4649 tree arg;
4650 rtx op0;
4651
c2f47e15 4652 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4653 return NULL_RTX;
42791117 4654
c2f47e15 4655 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4656 op0 = expand_expr (arg,
4657 subtarget && GET_MODE (subtarget) == target_mode
4658 ? subtarget : NULL_RTX,
4659 target_mode, EXPAND_NORMAL);
4660 if (GET_MODE (op0) != target_mode)
4661 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4662
74bdbe96 4663 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4664
4665 gcc_assert (target);
4666
74bdbe96 4667 return convert_to_mode (target_mode, target, 1);
42791117 4668}
4669
c2f47e15 4670/* Expand a call to a unary builtin in EXP.
4671 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4672 function in-line. If convenient, the result should be placed in TARGET.
4673 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4674
53800dbe 4675static rtx
3754d046 4676expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4677 rtx subtarget, optab op_optab)
53800dbe 4678{
4679 rtx op0;
c2f47e15 4680
4681 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4682 return NULL_RTX;
53800dbe 4683
4684 /* Compute the argument. */
f97eea22 4685 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4686 (subtarget
4687 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4688 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4689 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4690 /* Compute op, into TARGET if possible.
53800dbe 4691 Set TARGET to wherever the result comes back. */
c2f47e15 4692 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4693 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4694 gcc_assert (target);
7d3f6cc7 4695
efb070c8 4696 return convert_to_mode (target_mode, target, 0);
53800dbe 4697}
89cfe6e5 4698
48e1416a 4699/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4700 as the builtin_expect semantics should already have been executed by
 4701 the tree branch prediction pass.  */
89cfe6e5 4702
4703static rtx
c2f47e15 4704expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4705{
1e4adcfc 4706 tree arg;
89cfe6e5 4707
c2f47e15 4708 if (call_expr_nargs (exp) < 2)
89cfe6e5 4709 return const0_rtx;
c2f47e15 4710 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4711
c2f47e15 4712 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4713 /* When guessing was done, the hints should be already stripped away. */
07311427 4714 gcc_assert (!flag_guess_branch_prob
852f689e 4715 || optimize == 0 || seen_error ());
89cfe6e5 4716 return target;
4717}
689df48e 4718
fca0886c 4719/* Expand a call to __builtin_assume_aligned. We just return our first
 4720 argument as the builtin_assume_aligned semantics should already have been
 4721 executed by CCP.  */
4722
4723static rtx
4724expand_builtin_assume_aligned (tree exp, rtx target)
4725{
4726 if (call_expr_nargs (exp) < 2)
4727 return const0_rtx;
4728 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4729 EXPAND_NORMAL);
4730 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4731 && (call_expr_nargs (exp) < 3
4732 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4733 return target;
4734}
4735
c22de3f0 4736void
aecda0d6 4737expand_builtin_trap (void)
a0ef1725 4738{
4739#ifdef HAVE_trap
4740 if (HAVE_trap)
f73960eb 4741 {
bf79ca12 4742 rtx_insn *insn = emit_insn (gen_trap ());
f73960eb 4743 /* For trap insns when not accumulating outgoing args force
4744 REG_ARGS_SIZE note to prevent crossjumping of calls with
4745 different args sizes. */
4746 if (!ACCUMULATE_OUTGOING_ARGS)
4747 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4748 }
a0ef1725 4749 else
4750#endif
4751 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4752 emit_barrier ();
4753}
78a74442 4754
d2b48f0c 4755/* Expand a call to __builtin_unreachable. We do nothing except emit
4756 a barrier saying that control flow will not pass here.
4757
4758 It is the responsibility of the program being compiled to ensure
 4759 that control flow never reaches __builtin_unreachable.  */
4760static void
4761expand_builtin_unreachable (void)
4762{
4763 emit_barrier ();
4764}
4765
c2f47e15 4766/* Expand EXP, a call to fabs, fabsf or fabsl.
4767 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 4768 the function inline. If convenient, the result should be placed
4769 in TARGET. SUBTARGET may be used as the target for computing
4770 the operand. */
4771
4772static rtx
c2f47e15 4773expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 4774{
3754d046 4775 machine_mode mode;
78a74442 4776 tree arg;
4777 rtx op0;
4778
c2f47e15 4779 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4780 return NULL_RTX;
78a74442 4781
c2f47e15 4782 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 4783 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 4784 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 4785 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 4786 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4787}
4788
c2f47e15 4789/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 4790 Return NULL if a normal call should be emitted rather than expanding the
4791 function inline. If convenient, the result should be placed in TARGET.
4792 SUBTARGET may be used as the target for computing the operand. */
4793
4794static rtx
c2f47e15 4795expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 4796{
4797 rtx op0, op1;
4798 tree arg;
4799
c2f47e15 4800 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4801 return NULL_RTX;
270436f3 4802
c2f47e15 4803 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 4804 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 4805
c2f47e15 4806 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 4807 op1 = expand_normal (arg);
270436f3 4808
4809 return expand_copysign (op0, op1, target);
4810}
4811
ac8fb6db 4812/* Expand a call to __builtin___clear_cache. */
4813
4814static rtx
4815expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4816{
4817#ifndef HAVE_clear_cache
4818#ifdef CLEAR_INSN_CACHE
4819 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4820 does something. Just do the default expansion to a call to
4821 __clear_cache(). */
4822 return NULL_RTX;
4823#else
4824 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4825 does nothing. There is no need to call it. Do nothing. */
4826 return const0_rtx;
4827#endif /* CLEAR_INSN_CACHE */
4828#else
4829 /* We have a "clear_cache" insn, and it will handle everything. */
4830 tree begin, end;
4831 rtx begin_rtx, end_rtx;
ac8fb6db 4832
4833 /* We must not expand to a library call. If we did, any
4834 fallback library function in libgcc that might contain a call to
4835 __builtin___clear_cache() would recurse infinitely. */
4836 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4837 {
4838 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4839 return const0_rtx;
4840 }
4841
4842 if (HAVE_clear_cache)
4843 {
8786db1e 4844 struct expand_operand ops[2];
ac8fb6db 4845
4846 begin = CALL_EXPR_ARG (exp, 0);
4847 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4848
4849 end = CALL_EXPR_ARG (exp, 1);
4850 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4851
8786db1e 4852 create_address_operand (&ops[0], begin_rtx);
4853 create_address_operand (&ops[1], end_rtx);
4854 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4855 return const0_rtx;
ac8fb6db 4856 }
4857 return const0_rtx;
4858#endif /* HAVE_clear_cache */
4859}
4860
4ee9c684 4861/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4862
4863static rtx
4864round_trampoline_addr (rtx tramp)
4865{
4866 rtx temp, addend, mask;
4867
4868 /* If we don't need too much alignment, we'll have been guaranteed
4869 proper alignment by get_trampoline_type. */
4870 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4871 return tramp;
4872
4873 /* Round address up to desired boundary. */
4874 temp = gen_reg_rtx (Pmode);
0359f9f5 4875 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4876 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 4877
4878 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4879 temp, 0, OPTAB_LIB_WIDEN);
4880 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4881 temp, 0, OPTAB_LIB_WIDEN);
4882
4883 return tramp;
4884}
4885
4886static rtx
c307f106 4887expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 4888{
4889 tree t_tramp, t_func, t_chain;
82c7907c 4890 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 4891
c2f47e15 4892 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 4893 POINTER_TYPE, VOID_TYPE))
4894 return NULL_RTX;
4895
c2f47e15 4896 t_tramp = CALL_EXPR_ARG (exp, 0);
4897 t_func = CALL_EXPR_ARG (exp, 1);
4898 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 4899
8ec3c5c2 4900 r_tramp = expand_normal (t_tramp);
82c7907c 4901 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4902 MEM_NOTRAP_P (m_tramp) = 1;
4903
c307f106 4904 /* If ONSTACK, the TRAMP argument should be the address of a field
4905 within the local function's FRAME decl. Either way, let's see if
4906 we can fill in the MEM_ATTRs for this memory. */
82c7907c 4907 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 4908 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 4909
c307f106 4910 /* Creator of a heap trampoline is responsible for making sure the
4911 address is aligned to at least STACK_BOUNDARY. Normally malloc
4912 will ensure this anyhow. */
82c7907c 4913 tmp = round_trampoline_addr (r_tramp);
4914 if (tmp != r_tramp)
4915 {
4916 m_tramp = change_address (m_tramp, BLKmode, tmp);
4917 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 4918 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 4919 }
4920
4921 /* The FUNC argument should be the address of the nested function.
4922 Extract the actual function decl to pass to the hook. */
4923 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4924 t_func = TREE_OPERAND (t_func, 0);
4925 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4926
8ec3c5c2 4927 r_chain = expand_normal (t_chain);
4ee9c684 4928
4929 /* Generate insns to initialize the trampoline. */
82c7907c 4930 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 4931
c307f106 4932 if (onstack)
4933 {
4934 trampolines_created = 1;
8bc8a8f4 4935
c307f106 4936 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4937 "trampoline generated for nested function %qD", t_func);
4938 }
8bc8a8f4 4939
4ee9c684 4940 return const0_rtx;
4941}
4942
4943static rtx
c2f47e15 4944expand_builtin_adjust_trampoline (tree exp)
4ee9c684 4945{
4946 rtx tramp;
4947
c2f47e15 4948 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 4949 return NULL_RTX;
4950
c2f47e15 4951 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 4952 tramp = round_trampoline_addr (tramp);
82c7907c 4953 if (targetm.calls.trampoline_adjust_address)
4954 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 4955
4956 return tramp;
4957}
4958
93f564d6 4959/* Expand the call EXP to the built-in signbit, signbitf or signbitl
4960 function. The function first checks whether the back end provides
4961 an insn to implement signbit for the respective mode. If not, it
4962 checks whether the floating point format of the value is such that
4963 the sign bit can be extracted. If that is not the case, the
4964 function returns NULL_RTX to indicate that a normal call should be
4965 emitted rather than expanding the function in-line. EXP is the
4966 expression that is a call to the builtin function; if convenient,
4967 the result should be placed in TARGET. */
27f261ef 4968static rtx
4969expand_builtin_signbit (tree exp, rtx target)
4970{
4971 const struct real_format *fmt;
3754d046 4972 machine_mode fmode, imode, rmode;
c2f47e15 4973 tree arg;
ca4f1f5b 4974 int word, bitpos;
27eda240 4975 enum insn_code icode;
27f261ef 4976 rtx temp;
389dd41b 4977 location_t loc = EXPR_LOCATION (exp);
27f261ef 4978
c2f47e15 4979 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4980 return NULL_RTX;
27f261ef 4981
c2f47e15 4982 arg = CALL_EXPR_ARG (exp, 0);
27f261ef 4983 fmode = TYPE_MODE (TREE_TYPE (arg));
4984 rmode = TYPE_MODE (TREE_TYPE (exp));
4985 fmt = REAL_MODE_FORMAT (fmode);
4986
93f564d6 4987 arg = builtin_save_expr (arg);
4988
4989 /* Expand the argument yielding a RTX expression. */
4990 temp = expand_normal (arg);
4991
4992 /* Check if the back end provides an insn that handles signbit for the
4993 argument's mode. */
d6bf3b14 4994 icode = optab_handler (signbit_optab, fmode);
27eda240 4995 if (icode != CODE_FOR_nothing)
93f564d6 4996 {
1e0c0b35 4997 rtx_insn *last = get_last_insn ();
93f564d6 4998 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 4999 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5000 return target;
5001 delete_insns_since (last);
93f564d6 5002 }
5003
27f261ef 5004 /* For floating point formats without a sign bit, implement signbit
5005 as "ARG < 0.0". */
8d564692 5006 bitpos = fmt->signbit_ro;
ca4f1f5b 5007 if (bitpos < 0)
27f261ef 5008 {
5009 /* But we can't do this if the format supports signed zero. */
5010 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
c2f47e15 5011 return NULL_RTX;
27f261ef 5012
389dd41b 5013 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5014 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5015 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5016 }
5017
ca4f1f5b 5018 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5019 {
ca4f1f5b 5020 imode = int_mode_for_mode (fmode);
5021 if (imode == BLKmode)
c2f47e15 5022 return NULL_RTX;
ca4f1f5b 5023 temp = gen_lowpart (imode, temp);
24fd4260 5024 }
5025 else
5026 {
ca4f1f5b 5027 imode = word_mode;
5028 /* Handle targets with different FP word orders. */
5029 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5030 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5031 else
a0c938f0 5032 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5033 temp = operand_subword_force (temp, word, fmode);
5034 bitpos = bitpos % BITS_PER_WORD;
5035 }
5036
44b0f1d0 5037 /* Force the intermediate word_mode (or narrower) result into a
5038 register. This avoids attempting to create paradoxical SUBREGs
5039 of floating point modes below. */
5040 temp = force_reg (imode, temp);
5041
ca4f1f5b 5042 /* If the bitpos is within the "result mode" lowpart, the operation
 5043 can be implemented with a single bitwise AND.  Otherwise, we need
5044 a right shift and an AND. */
5045
5046 if (bitpos < GET_MODE_BITSIZE (rmode))
5047 {
796b6678 5048 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5049
4a46f016 5050 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5051 temp = gen_lowpart (rmode, temp);
24fd4260 5052 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5053 immed_wide_int_const (mask, rmode),
ca4f1f5b 5054 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5055 }
ca4f1f5b 5056 else
5057 {
5058 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5059 significant bit, then truncate the result to the desired mode
ca4f1f5b 5060 and mask just this bit. */
f5ff0b21 5061 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5062 temp = gen_lowpart (rmode, temp);
5063 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5064 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5065 }
5066
27f261ef 5067 return temp;
5068}
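/* Worked examples of the word/bit arithmetic above (both hypothetical and
   not tied to a particular port): for IEEE double with signbit_ro == 63 on
   a 32-bit little-endian-word target, word = 63 / 32 = 1 selects the high
   word and bitpos becomes 63 % 32 = 31, which fits in the 32-bit result
   mode, so a single AND is emitted:
     sign = high_word & 0x80000000;
   On a 64-bit target the value stays in DImode with bitpos == 63, which
   does not fit in a 32-bit result, so the shift-then-mask path is used:
     sign = (int) (bits >> 63) & 1;  */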
73673831 5069
5070/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5071 call. EXP is the call. FN is the
73673831 5072 identifier of the actual function.  IGNORE is nonzero if the
5073 value is to be ignored. */
5074
5075static rtx
c2f47e15 5076expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5077{
5078 tree id, decl;
5079 tree call;
5080
5081 /* If we are not profiling, just call the function. */
5082 if (!profile_arc_flag)
5083 return NULL_RTX;
5084
5085 /* Otherwise call the wrapper. This should be equivalent for the rest of
 5086 the compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5087 code necessary for keeping the profiling sane. */
73673831 5088
5089 switch (DECL_FUNCTION_CODE (fn))
5090 {
5091 case BUILT_IN_FORK:
5092 id = get_identifier ("__gcov_fork");
5093 break;
5094
5095 case BUILT_IN_EXECL:
5096 id = get_identifier ("__gcov_execl");
5097 break;
5098
5099 case BUILT_IN_EXECV:
5100 id = get_identifier ("__gcov_execv");
5101 break;
5102
5103 case BUILT_IN_EXECLP:
5104 id = get_identifier ("__gcov_execlp");
5105 break;
5106
5107 case BUILT_IN_EXECLE:
5108 id = get_identifier ("__gcov_execle");
5109 break;
5110
5111 case BUILT_IN_EXECVP:
5112 id = get_identifier ("__gcov_execvp");
5113 break;
5114
5115 case BUILT_IN_EXECVE:
5116 id = get_identifier ("__gcov_execve");
5117 break;
5118
5119 default:
64db345d 5120 gcc_unreachable ();
73673831 5121 }
5122
e60a6f7b 5123 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5124 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5125 DECL_EXTERNAL (decl) = 1;
5126 TREE_PUBLIC (decl) = 1;
5127 DECL_ARTIFICIAL (decl) = 1;
5128 TREE_NOTHROW (decl) = 1;
e82d310b 5129 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5130 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5131 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5132 return expand_call (call, target, ignore);
c2f47e15 5133 }
48e1416a 5134
b6a5fc45 5135
5136\f
3e272de8 5137/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5138 the pointer in these functions is void*, the tree optimizers may remove
5139 casts. The mode computed in expand_builtin isn't reliable either, due
5140 to __sync_bool_compare_and_swap.
5141
5142 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5143 group of builtins. This gives us log2 of the mode size. */
5144
3754d046 5145static inline machine_mode
3e272de8 5146get_builtin_sync_mode (int fcode_diff)
5147{
ad3a13b5 5148 /* The size is not negotiable, so ask not to get BLKmode in return
5149 if the target indicates that a smaller size would be better. */
5150 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 5151}
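/* For example (illustrative only): the FOO_1/FOO_2/FOO_4/FOO_8/FOO_16
   members of a __sync group give FCODE_DIFF values 0 through 4, and the
   requested sizes are 8 << 0 = 8 up to 8 << 4 = 128 bits, i.e. QImode
   through TImode on typical targets.  */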
5152
041e0215 5153/* Expand the memory expression LOC and return the appropriate memory operand
5154 for the builtin_sync operations. */
5155
5156static rtx
3754d046 5157get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5158{
5159 rtx addr, mem;
5160
7f4d56ad 5161 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5162 addr = convert_memory_address (Pmode, addr);
041e0215 5163
5164 /* Note that we explicitly do not want any alias information for this
5165 memory, so that we kill all other live memories. Otherwise we don't
5166 satisfy the full barrier semantics of the intrinsic. */
5167 mem = validize_mem (gen_rtx_MEM (mode, addr));
5168
153c3b50 5169 /* The alignment needs to be at least that of the mode.  */
5170 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5171 get_pointer_alignment (loc)));
c94cfd1c 5172 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5173 MEM_VOLATILE_P (mem) = 1;
5174
5175 return mem;
5176}
5177
1cd6e20d 5178/* Make sure an argument is in the right mode.
5179 EXP is the tree argument.
5180 MODE is the mode it should be in. */
5181
5182static rtx
3754d046 5183expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5184{
5185 rtx val;
3754d046 5186 machine_mode old_mode;
1cd6e20d 5187
5188 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5189 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5190 of CONST_INTs, where we know the old_mode only from the call argument. */
5191
5192 old_mode = GET_MODE (val);
5193 if (old_mode == VOIDmode)
5194 old_mode = TYPE_MODE (TREE_TYPE (exp));
5195 val = convert_modes (mode, old_mode, val, 1);
5196 return val;
5197}
5198
5199
b6a5fc45 5200/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5201 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5202 that corresponds to the arithmetic or logical operation from the name;
5203 an exception here is that NOT actually means NAND. TARGET is an optional
5204 place for us to store the results; AFTER is true if this is the
1cd6e20d 5205 xxx_and_fetch form.  */
b6a5fc45 5206
5207static rtx
3754d046 5208expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5209 enum rtx_code code, bool after,
1cd6e20d 5210 rtx target)
b6a5fc45 5211{
041e0215 5212 rtx val, mem;
e60a6f7b 5213 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5214
cf73e559 5215 if (code == NOT && warn_sync_nand)
5216 {
5217 tree fndecl = get_callee_fndecl (exp);
5218 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5219
5220 static bool warned_f_a_n, warned_n_a_f;
5221
5222 switch (fcode)
5223 {
2797f13a 5224 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5225 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5226 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5227 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5228 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5229 if (warned_f_a_n)
5230 break;
5231
b9a16870 5232 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5233 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5234 warned_f_a_n = true;
5235 break;
5236
2797f13a 5237 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5238 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5239 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5240 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5241 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5242 if (warned_n_a_f)
5243 break;
5244
b9a16870 5245 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5246 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5247 warned_n_a_f = true;
5248 break;
5249
5250 default:
5251 gcc_unreachable ();
5252 }
5253 }
5254
b6a5fc45 5255 /* Expand the operands. */
c2f47e15 5256 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5257 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5258
a372f7ca 5259 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 5260 after);
b6a5fc45 5261}
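/* For reference, the semantic change the warnings above refer to (as
   documented in the GCC 4.4 release notes): before GCC 4.4,
   __sync_fetch_and_nand (p, v) updated *p as ~*p & v, whereas since GCC
   4.4 it computes the standard NAND, i.e. ~(*p & v).  */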
5262
5263/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5264 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5265 true if this is the boolean form. TARGET is a place for us to store the
5266 results; this is NOT optional if IS_BOOL is true. */
5267
5268static rtx
3754d046 5269expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5270 bool is_bool, rtx target)
b6a5fc45 5271{
041e0215 5272 rtx old_val, new_val, mem;
ba885f6a 5273 rtx *pbool, *poval;
b6a5fc45 5274
5275 /* Expand the operands. */
c2f47e15 5276 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5277 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5278 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5279
ba885f6a 5280 pbool = poval = NULL;
5281 if (target != const0_rtx)
5282 {
5283 if (is_bool)
5284 pbool = &target;
5285 else
5286 poval = &target;
5287 }
5288 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 5289 false, MEMMODEL_SYNC_SEQ_CST,
5290 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 5291 return NULL_RTX;
c2f47e15 5292
1cd6e20d 5293 return target;
b6a5fc45 5294}
5295
5296/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5297 general form is actually an atomic exchange, and some targets only
5298 support a reduced form with the second argument being a constant 1.
48e1416a 5299 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5300 the results. */
b6a5fc45 5301
5302static rtx
3754d046 5303expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5304 rtx target)
b6a5fc45 5305{
041e0215 5306 rtx val, mem;
b6a5fc45 5307
5308 /* Expand the operands. */
c2f47e15 5309 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5310 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5311
7821cde1 5312 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5313}
5314
5315/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5316
5317static void
3754d046 5318expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5319{
5320 rtx mem;
5321
5322 /* Expand the operands. */
5323 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5324
a372f7ca 5325 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5326}
5327
5328/* Given an integer representing an ``enum memmodel'', verify its
5329 correctness and return the memory model enum. */
5330
5331static enum memmodel
5332get_memmodel (tree exp)
5333{
5334 rtx op;
7f738025 5335 unsigned HOST_WIDE_INT val;
1cd6e20d 5336
5337 /* If the parameter is not a constant, it's a run time value so we'll just
5338 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5339 if (TREE_CODE (exp) != INTEGER_CST)
5340 return MEMMODEL_SEQ_CST;
5341
5342 op = expand_normal (exp);
7f738025 5343
5344 val = INTVAL (op);
5345 if (targetm.memmodel_check)
5346 val = targetm.memmodel_check (val);
5347 else if (val & ~MEMMODEL_MASK)
5348 {
5349 warning (OPT_Winvalid_memory_model,
5350 "Unknown architecture specifier in memory model to builtin.");
5351 return MEMMODEL_SEQ_CST;
5352 }
5353
a372f7ca 5354 /* Should never see an explicit user SYNC memory model, so >= LAST works.  */
5355 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5356 {
5357 warning (OPT_Winvalid_memory_model,
5358 "invalid memory model argument to builtin");
5359 return MEMMODEL_SEQ_CST;
5360 }
7f738025 5361
3070f133 5362 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5363 be conservative and promote consume to acquire. */
5364 if (val == MEMMODEL_CONSUME)
5365 val = MEMMODEL_ACQUIRE;
5366
7f738025 5367 return (enum memmodel) val;
1cd6e20d 5368}
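/* As a hypothetical illustration of the consume promotion above: a
   source-level call such as
     __atomic_load_n (p, __ATOMIC_CONSUME);
   reaches this point with the constant __ATOMIC_CONSUME, which is mapped
   to MEMMODEL_ACQUIRE before the load is expanded.  */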
5369
5370/* Expand the __atomic_exchange intrinsic:
5371 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5372 EXP is the CALL_EXPR.
5373 TARGET is an optional place for us to store the results. */
5374
5375static rtx
3754d046 5376expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5377{
5378 rtx val, mem;
5379 enum memmodel model;
5380
5381 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5382
5383 if (!flag_inline_atomics)
5384 return NULL_RTX;
5385
5386 /* Expand the operands. */
5387 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5388 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5389
7821cde1 5390 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5391}
5392
5393/* Expand the __atomic_compare_exchange intrinsic:
5394 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5395 TYPE desired, BOOL weak,
5396 enum memmodel success,
5397 enum memmodel failure)
5398 EXP is the CALL_EXPR.
5399 TARGET is an optional place for us to store the results. */
5400
5401static rtx
3754d046 5402expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5403 rtx target)
5404{
1e0c0b35 5405 rtx expect, desired, mem, oldval;
5406 rtx_code_label *label;
1cd6e20d 5407 enum memmodel success, failure;
5408 tree weak;
5409 bool is_weak;
5410
5411 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5412 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5413
086f4e33 5414 if (failure > success)
5415 {
5416 warning (OPT_Winvalid_memory_model,
5417 "failure memory model cannot be stronger than success memory "
5418 "model for %<__atomic_compare_exchange%>");
5419 success = MEMMODEL_SEQ_CST;
5420 }
5421
a372f7ca 5422 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5423 {
086f4e33 5424 warning (OPT_Winvalid_memory_model,
5425 "invalid failure memory model for "
5426 "%<__atomic_compare_exchange%>");
5427 failure = MEMMODEL_SEQ_CST;
5428 success = MEMMODEL_SEQ_CST;
1cd6e20d 5429 }
5430
086f4e33 5431
1cd6e20d 5432 if (!flag_inline_atomics)
5433 return NULL_RTX;
5434
5435 /* Expand the operands. */
5436 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5437
5438 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5439 expect = convert_memory_address (Pmode, expect);
c401b131 5440 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5441 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5442
5443 weak = CALL_EXPR_ARG (exp, 3);
5444 is_weak = false;
e913b5cd 5445 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5446 is_weak = true;
5447
d86e3752 5448 if (target == const0_rtx)
5449 target = NULL;
d86e3752 5450
3c29a9ea 5451 /* Lest the rtl backend create a race condition with an improper store
5452 to memory, always create a new pseudo for OLDVAL. */
5453 oldval = NULL;
5454
5455 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5456 is_weak, success, failure))
1cd6e20d 5457 return NULL_RTX;
5458
d86e3752 5459 /* Conditionally store back to EXPECT, lest we create a race condition
5460 with an improper store to memory. */
5461 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5462 the normal case where EXPECT is totally private, i.e. a register. At
5463 which point the store can be unconditional. */
5464 label = gen_label_rtx ();
62589f76 5465 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5466 GET_MODE (target), 1, label);
d86e3752 5467 emit_move_insn (expect, oldval);
5468 emit_label (label);
c401b131 5469
1cd6e20d 5470 return target;
5471}
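/* The conditional store-back above mirrors the documented source-level
   behaviour (the call below is purely illustrative, and handle_failure is
   a hypothetical user function):
     if (!__atomic_compare_exchange_n (p, &expected, desired, 0,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
       handle_failure (expected);
   On failure EXPECTED has been updated with the value actually found in
   *P, which is why EXPECT is written back only on the failing path around
   emit_move_insn above.  */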
5472
5473/* Expand the __atomic_load intrinsic:
5474 TYPE __atomic_load (TYPE *object, enum memmodel)
5475 EXP is the CALL_EXPR.
5476 TARGET is an optional place for us to store the results. */
5477
5478static rtx
3754d046 5479expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5480{
5481 rtx mem;
5482 enum memmodel model;
5483
5484 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5485 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5486 {
086f4e33 5487 warning (OPT_Winvalid_memory_model,
5488 "invalid memory model for %<__atomic_load%>");
5489 model = MEMMODEL_SEQ_CST;
1cd6e20d 5490 }
5491
5492 if (!flag_inline_atomics)
5493 return NULL_RTX;
5494
5495 /* Expand the operand. */
5496 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5497
5498 return expand_atomic_load (target, mem, model);
5499}
5500
5501
5502/* Expand the __atomic_store intrinsic:
5503 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5504 EXP is the CALL_EXPR.
5505 TARGET is an optional place for us to store the results. */
5506
5507static rtx
3754d046 5508expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5509{
5510 rtx mem, val;
5511 enum memmodel model;
5512
5513 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 5514 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5515 || is_mm_release (model)))
1cd6e20d 5516 {
086f4e33 5517 warning (OPT_Winvalid_memory_model,
5518 "invalid memory model for %<__atomic_store%>");
5519 model = MEMMODEL_SEQ_CST;
1cd6e20d 5520 }
5521
5522 if (!flag_inline_atomics)
5523 return NULL_RTX;
5524
5525 /* Expand the operands. */
5526 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5527 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5528
8808bf16 5529 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5530}
5531
5532/* Expand the __atomic_fetch_XXX intrinsic:
5533 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5534 EXP is the CALL_EXPR.
5535 TARGET is an optional place for us to store the results.
 5536 CODE is the operation, PLUS, MINUS, AND, XOR, or IOR; NOT means NAND.
5537 FETCH_AFTER is true if returning the result of the operation.
5538 FETCH_AFTER is false if returning the value before the operation.
5539 IGNORE is true if the result is not used.
5540 EXT_CALL is the correct builtin for an external call if this cannot be
5541 resolved to an instruction sequence. */
5542
5543static rtx
3754d046 5544expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 5545 enum rtx_code code, bool fetch_after,
5546 bool ignore, enum built_in_function ext_call)
5547{
5548 rtx val, mem, ret;
5549 enum memmodel model;
5550 tree fndecl;
5551 tree addr;
5552
5553 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5554
5555 /* Expand the operands. */
5556 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5557 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5558
5559 /* Only try generating instructions if inlining is turned on. */
5560 if (flag_inline_atomics)
5561 {
5562 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5563 if (ret)
5564 return ret;
5565 }
5566
5567 /* Return if a different routine isn't needed for the library call. */
5568 if (ext_call == BUILT_IN_NONE)
5569 return NULL_RTX;
5570
5571 /* Change the call to the specified function. */
5572 fndecl = get_callee_fndecl (exp);
5573 addr = CALL_EXPR_FN (exp);
5574 STRIP_NOPS (addr);
5575
5576 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 5577 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 5578
5579 /* Expand the call here so we can emit trailing code. */
5580 ret = expand_call (exp, target, ignore);
5581
5582 /* Replace the original function just in case it matters. */
5583 TREE_OPERAND (addr, 0) = fndecl;
5584
5585 /* Then issue the arithmetic correction to return the right result. */
5586 if (!ignore)
c449f851 5587 {
5588 if (code == NOT)
5589 {
5590 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5591 OPTAB_LIB_WIDEN);
5592 ret = expand_simple_unop (mode, NOT, ret, target, true);
5593 }
5594 else
5595 ret = expand_simple_binop (mode, code, ret, val, target, true,
5596 OPTAB_LIB_WIDEN);
5597 }
1cd6e20d 5598 return ret;
5599}
5600
10b744a3 5601
7821cde1 5602#ifndef HAVE_atomic_clear
5603# define HAVE_atomic_clear 0
5604# define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5605#endif
5606
10b744a3 5607/* Expand an atomic clear operation.
 5608 void __atomic_clear (BOOL *obj, enum memmodel)
5609 EXP is the call expression. */
5610
5611static rtx
5612expand_builtin_atomic_clear (tree exp)
5613{
3754d046 5614 machine_mode mode;
10b744a3 5615 rtx mem, ret;
5616 enum memmodel model;
5617
5618 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5619 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5620 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5621
a372f7ca 5622 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 5623 {
086f4e33 5624 warning (OPT_Winvalid_memory_model,
5625 "invalid memory model for %<__atomic_store%>");
5626 model = MEMMODEL_SEQ_CST;
10b744a3 5627 }
5628
7821cde1 5629 if (HAVE_atomic_clear)
5630 {
5631 emit_insn (gen_atomic_clear (mem, model));
5632 return const0_rtx;
5633 }
5634
10b744a3 5635 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5636 Failing that, a store is issued by __atomic_store. The only way this can
5637 fail is if the bool type is larger than a word size. Unlikely, but
5638 handle it anyway for completeness. Assume a single threaded model since
5639 there is no atomic support in this case, and no barriers are required. */
5640 ret = expand_atomic_store (mem, const0_rtx, model, true);
5641 if (!ret)
5642 emit_move_insn (mem, const0_rtx);
5643 return const0_rtx;
5644}
5645
5646/* Expand an atomic test_and_set operation.
 5647 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5648 EXP is the call expression. */
5649
5650static rtx
7821cde1 5651expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 5652{
7821cde1 5653 rtx mem;
10b744a3 5654 enum memmodel model;
3754d046 5655 machine_mode mode;
10b744a3 5656
5657 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5658 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5659 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5660
7821cde1 5661 return expand_atomic_test_and_set (target, mem, model);
10b744a3 5662}
5663
5664
1cd6e20d 5665/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5666 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5667
5668static tree
5669fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5670{
5671 int size;
3754d046 5672 machine_mode mode;
1cd6e20d 5673 unsigned int mode_align, type_align;
5674
5675 if (TREE_CODE (arg0) != INTEGER_CST)
5676 return NULL_TREE;
b6a5fc45 5677
1cd6e20d 5678 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5679 mode = mode_for_size (size, MODE_INT, 0);
5680 mode_align = GET_MODE_ALIGNMENT (mode);
5681
5682 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5683 type_align = mode_align;
5684 else
5685 {
5686 tree ttype = TREE_TYPE (arg1);
5687
5688 /* This function is usually invoked and folded immediately by the front
5689 end before anything else has a chance to look at it. The pointer
5690 parameter at this point is usually cast to a void *, so check for that
5691 and look past the cast. */
d09ef31a 5692 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
1cd6e20d 5693 && VOID_TYPE_P (TREE_TYPE (ttype)))
5694 arg1 = TREE_OPERAND (arg1, 0);
5695
5696 ttype = TREE_TYPE (arg1);
5697 gcc_assert (POINTER_TYPE_P (ttype));
5698
5699 /* Get the underlying type of the object. */
5700 ttype = TREE_TYPE (ttype);
5701 type_align = TYPE_ALIGN (ttype);
5702 }
5703
 5704 /* If the object has smaller alignment, the lock free routines cannot
5705 be used. */
5706 if (type_align < mode_align)
06308d2a 5707 return boolean_false_node;
1cd6e20d 5708
5709 /* Check if a compare_and_swap pattern exists for the mode which represents
5710 the required size. The pattern is not allowed to fail, so the existence
5711 of the pattern indicates support is present. */
29139cdc 5712 if (can_compare_and_swap_p (mode, true))
06308d2a 5713 return boolean_true_node;
1cd6e20d 5714 else
06308d2a 5715 return boolean_false_node;
1cd6e20d 5716}
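/* An illustrative folding (under hypothetical target assumptions): on a
   target that provides a 32-bit compare-and-swap pattern,
     __atomic_always_lock_free (4, 0);
   has a constant size of 4 bytes and a null object pointer, so mode_align
   and type_align are both the SImode alignment and the call folds to
   boolean_true_node, i.e. the source-level result 1.  */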
5717
5718/* Return true if the parameters to call EXP represent an object which will
5719 always generate lock free instructions. The first argument represents the
5720 size of the object, and the second parameter is a pointer to the object
5721 itself. If NULL is passed for the object, then the result is based on
5722 typical alignment for an object of the specified size. Otherwise return
5723 false. */
5724
5725static rtx
5726expand_builtin_atomic_always_lock_free (tree exp)
5727{
5728 tree size;
5729 tree arg0 = CALL_EXPR_ARG (exp, 0);
5730 tree arg1 = CALL_EXPR_ARG (exp, 1);
5731
5732 if (TREE_CODE (arg0) != INTEGER_CST)
5733 {
5734 error ("non-constant argument 1 to __atomic_always_lock_free");
5735 return const0_rtx;
5736 }
5737
5738 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 5739 if (size == boolean_true_node)
1cd6e20d 5740 return const1_rtx;
5741 return const0_rtx;
5742}
5743
 5744/* Return one or zero if it can be determined that object ARG1 of size ARG0
5745 is lock free on this architecture. */
5746
5747static tree
5748fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5749{
5750 if (!flag_inline_atomics)
5751 return NULL_TREE;
5752
5753 /* If it isn't always lock free, don't generate a result. */
06308d2a 5754 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5755 return boolean_true_node;
1cd6e20d 5756
5757 return NULL_TREE;
5758}
5759
5760/* Return true if the parameters to call EXP represent an object which will
5761 always generate lock free instructions. The first argument represents the
5762 size of the object, and the second parameter is a pointer to the object
5763 itself. If NULL is passed for the object, then the result is based on
5764 typical alignment for an object of the specified size. Otherwise return
 5765 NULL.  */
5766
5767static rtx
5768expand_builtin_atomic_is_lock_free (tree exp)
5769{
5770 tree size;
5771 tree arg0 = CALL_EXPR_ARG (exp, 0);
5772 tree arg1 = CALL_EXPR_ARG (exp, 1);
5773
5774 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5775 {
5776 error ("non-integer argument 1 to __atomic_is_lock_free");
5777 return NULL_RTX;
5778 }
5779
5780 if (!flag_inline_atomics)
5781 return NULL_RTX;
5782
5783 /* If the value is known at compile time, return the RTX for it. */
5784 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 5785 if (size == boolean_true_node)
1cd6e20d 5786 return const1_rtx;
5787
5788 return NULL_RTX;
5789}
5790
1cd6e20d 5791/* Expand the __atomic_thread_fence intrinsic:
5792 void __atomic_thread_fence (enum memmodel)
5793 EXP is the CALL_EXPR. */
5794
5795static void
5796expand_builtin_atomic_thread_fence (tree exp)
5797{
fe54c06b 5798 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5799 expand_mem_thread_fence (model);
1cd6e20d 5800}
5801
5802/* Expand the __atomic_signal_fence intrinsic:
5803 void __atomic_signal_fence (enum memmodel)
5804 EXP is the CALL_EXPR. */
5805
5806static void
5807expand_builtin_atomic_signal_fence (tree exp)
5808{
fe54c06b 5809 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5810 expand_mem_signal_fence (model);
b6a5fc45 5811}
5812
5813/* Expand the __sync_synchronize intrinsic. */
5814
5815static void
2797f13a 5816expand_builtin_sync_synchronize (void)
b6a5fc45 5817{
a372f7ca 5818 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 5819}
5820
badaa04c 5821static rtx
5822expand_builtin_thread_pointer (tree exp, rtx target)
5823{
5824 enum insn_code icode;
5825 if (!validate_arglist (exp, VOID_TYPE))
5826 return const0_rtx;
5827 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5828 if (icode != CODE_FOR_nothing)
5829 {
5830 struct expand_operand op;
3ed779c3 5831 /* If the target is not suitable then create a new target.  */
5832 if (target == NULL_RTX
5833 || !REG_P (target)
5834 || GET_MODE (target) != Pmode)
badaa04c 5835 target = gen_reg_rtx (Pmode);
5836 create_output_operand (&op, target, Pmode);
5837 expand_insn (icode, 1, &op);
5838 return target;
5839 }
5840 error ("__builtin_thread_pointer is not supported on this target");
5841 return const0_rtx;
5842}
5843
5844static void
5845expand_builtin_set_thread_pointer (tree exp)
5846{
5847 enum insn_code icode;
5848 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5849 return;
5850 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5851 if (icode != CODE_FOR_nothing)
5852 {
5853 struct expand_operand op;
5854 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5855 Pmode, EXPAND_NORMAL);
6f343c10 5856 create_input_operand (&op, val, Pmode);
badaa04c 5857 expand_insn (icode, 1, &op);
5858 return;
5859 }
5860 error ("__builtin_set_thread_pointer is not supported on this target");
5861}
5862
53800dbe 5863\f
0e80b01d 5864/* Emit code to restore the current value of stack. */
5865
5866static void
5867expand_stack_restore (tree var)
5868{
1e0c0b35 5869 rtx_insn *prev;
5870 rtx sa = expand_normal (var);
0e80b01d 5871
5872 sa = convert_memory_address (Pmode, sa);
5873
5874 prev = get_last_insn ();
5875 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 5876
5877 record_new_stack_level ();
5878
0e80b01d 5879 fixup_args_size_notes (prev, get_last_insn (), 0);
5880}
5881
0e80b01d 5882/* Emit code to save the current value of stack. */
5883
5884static rtx
5885expand_stack_save (void)
5886{
5887 rtx ret = NULL_RTX;
5888
0e80b01d 5889 emit_stack_save (SAVE_BLOCK, &ret);
5890 return ret;
5891}
5892
ca4c3545 5893
5894/* Expand OpenACC acc_on_device.
5895
5896 This has to happen late (that is, not in early folding; expand_builtin_*,
5897 rather than fold_builtin_*), as we have to act differently for host and
5898 acceleration device (ACCEL_COMPILER conditional). */
5899
5900static rtx
86181b33 5901expand_builtin_acc_on_device (tree exp ATTRIBUTE_UNUSED,
5902 rtx target ATTRIBUTE_UNUSED)
ca4c3545 5903{
071f2c66 5904#ifdef ACCEL_COMPILER
ca4c3545 5905 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5906 return NULL_RTX;
5907
5908 tree arg = CALL_EXPR_ARG (exp, 0);
5909
5910 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5911 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5912 rtx v = expand_normal (arg), v1, v2;
ca4c3545 5913 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5914 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
ca4c3545 5915 machine_mode target_mode = TYPE_MODE (integer_type_node);
15b4214c 5916 if (!target || !register_operand (target, target_mode))
ca4c3545 5917 target = gen_reg_rtx (target_mode);
5918 emit_move_insn (target, const1_rtx);
5919 rtx_code_label *done_label = gen_label_rtx ();
5920 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5921 NULL, done_label, PROB_EVEN);
ca4c3545 5922 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5923 NULL, done_label, PROB_EVEN);
ca4c3545 5924 emit_move_insn (target, const0_rtx);
5925 emit_label (done_label);
5926
5927 return target;
071f2c66 5928#else
5929 return NULL;
5930#endif
ca4c3545 5931}
5932
5933
53800dbe 5934/* Expand an expression EXP that calls a built-in function,
5935 with result going to TARGET if that's convenient
5936 (and in mode MODE if that's convenient).
5937 SUBTARGET may be used as the target for computing one of EXP's operands.
5938 IGNORE is nonzero if the value is to be ignored. */
5939
5940rtx
3754d046 5941expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 5942 int ignore)
53800dbe 5943{
c6e6ecb1 5944 tree fndecl = get_callee_fndecl (exp);
53800dbe 5945 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 5946 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 5947 int flags;
53800dbe 5948
4e2f4ed5 5949 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5950 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5951
f9acf11a 5952 /* When ASan is enabled, we don't want to expand some memory/string
5953 builtins and rely on libsanitizer's hooks. This allows us to avoid
 5954 redundant checks and be sure that possible overflow will be detected
5955 by ASan. */
5956
5957 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5958 return expand_call (exp, target, ignore);
5959
53800dbe 5960 /* When not optimizing, generate calls to library functions for a certain
5961 set of builtins. */
cd9ff771 5962 if (!optimize
b6a5fc45 5963 && !called_as_built_in (fndecl)
73037a1e 5964 && fcode != BUILT_IN_FORK
5965 && fcode != BUILT_IN_EXECL
5966 && fcode != BUILT_IN_EXECV
5967 && fcode != BUILT_IN_EXECLP
5968 && fcode != BUILT_IN_EXECLE
5969 && fcode != BUILT_IN_EXECVP
5970 && fcode != BUILT_IN_EXECVE
2c281b15 5971 && fcode != BUILT_IN_ALLOCA
581bf1c2 5972 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 5973 && fcode != BUILT_IN_FREE
5974 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5975 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5976 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5977 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5978 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5979 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5980 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5981 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5982 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5983 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5984 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5985 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 5986 return expand_call (exp, target, ignore);
53800dbe 5987
8d6d7930 5988 /* The built-in function expanders test for target == const0_rtx
5989 to determine whether the function's result will be ignored. */
5990 if (ignore)
5991 target = const0_rtx;
5992
5993 /* If the result of a pure or const built-in function is ignored, and
5994 none of its arguments are volatile, we can avoid expanding the
5995 built-in call and just evaluate the arguments for side-effects. */
5996 if (target == const0_rtx
67fa4078 5997 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5998 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 5999 {
6000 bool volatilep = false;
6001 tree arg;
c2f47e15 6002 call_expr_arg_iterator iter;
8d6d7930 6003
c2f47e15 6004 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6005 if (TREE_THIS_VOLATILE (arg))
8d6d7930 6006 {
6007 volatilep = true;
6008 break;
6009 }
6010
6011 if (! volatilep)
6012 {
c2f47e15 6013 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6014 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 6015 return const0_rtx;
6016 }
6017 }
6018
f21337ef 6019 /* expand_builtin_with_bounds is supposed to be used for
6020 instrumented builtin calls. */
058a1b7a 6021 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6022
53800dbe 6023 switch (fcode)
6024 {
4f35b1fc 6025 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 6026 case BUILT_IN_FABSD32:
6027 case BUILT_IN_FABSD64:
6028 case BUILT_IN_FABSD128:
c2f47e15 6029 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6030 if (target)
a0c938f0 6031 return target;
78a74442 6032 break;
6033
4f35b1fc 6034 CASE_FLT_FN (BUILT_IN_COPYSIGN):
c2f47e15 6035 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6036 if (target)
6037 return target;
6038 break;
6039
7d3f6cc7 6040 /* Just do a normal library call if we were unable to fold
6041 the values. */
4f35b1fc 6042 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6043 break;
53800dbe 6044
4f35b1fc 6045 CASE_FLT_FN (BUILT_IN_EXP):
6046 CASE_FLT_FN (BUILT_IN_EXP10):
6047 CASE_FLT_FN (BUILT_IN_POW10):
6048 CASE_FLT_FN (BUILT_IN_EXP2):
6049 CASE_FLT_FN (BUILT_IN_EXPM1):
6050 CASE_FLT_FN (BUILT_IN_LOGB):
4f35b1fc 6051 CASE_FLT_FN (BUILT_IN_LOG):
6052 CASE_FLT_FN (BUILT_IN_LOG10):
6053 CASE_FLT_FN (BUILT_IN_LOG2):
6054 CASE_FLT_FN (BUILT_IN_LOG1P):
6055 CASE_FLT_FN (BUILT_IN_TAN):
6056 CASE_FLT_FN (BUILT_IN_ASIN):
6057 CASE_FLT_FN (BUILT_IN_ACOS):
6058 CASE_FLT_FN (BUILT_IN_ATAN):
b3154a1f 6059 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
7f3be425 6060 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6061 because of possible accuracy problems. */
6062 if (! flag_unsafe_math_optimizations)
53800dbe 6063 break;
4f35b1fc 6064 CASE_FLT_FN (BUILT_IN_SQRT):
6065 CASE_FLT_FN (BUILT_IN_FLOOR):
6066 CASE_FLT_FN (BUILT_IN_CEIL):
6067 CASE_FLT_FN (BUILT_IN_TRUNC):
6068 CASE_FLT_FN (BUILT_IN_ROUND):
6069 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6070 CASE_FLT_FN (BUILT_IN_RINT):
53800dbe 6071 target = expand_builtin_mathfn (exp, target, subtarget);
6072 if (target)
6073 return target;
6074 break;
6075
7e0713b1 6076 CASE_FLT_FN (BUILT_IN_FMA):
6077 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6078 if (target)
6079 return target;
6080 break;
6081
a67a90e5 6082 CASE_FLT_FN (BUILT_IN_ILOGB):
6083 if (! flag_unsafe_math_optimizations)
6084 break;
69b779ea 6085 CASE_FLT_FN (BUILT_IN_ISINF):
cde061c1 6086 CASE_FLT_FN (BUILT_IN_FINITE):
6087 case BUILT_IN_ISFINITE:
8a1a9cb7 6088 case BUILT_IN_ISNORMAL:
f97eea22 6089 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6090 if (target)
6091 return target;
6092 break;
6093
80ff6494 6094 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6095 CASE_FLT_FN (BUILT_IN_LCEIL):
6096 CASE_FLT_FN (BUILT_IN_LLCEIL):
6097 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6098 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6099 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6100 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6101 if (target)
6102 return target;
6103 break;
6104
80ff6494 6105 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6106 CASE_FLT_FN (BUILT_IN_LRINT):
6107 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6108 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6109 CASE_FLT_FN (BUILT_IN_LROUND):
6110 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6111 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6112 if (target)
6113 return target;
6114 break;
6115
4f35b1fc 6116 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6117 target = expand_builtin_powi (exp, target);
757c219d 6118 if (target)
6119 return target;
6120 break;
6121
4f35b1fc 6122 CASE_FLT_FN (BUILT_IN_ATAN2):
6123 CASE_FLT_FN (BUILT_IN_LDEXP):
73a954a1 6124 CASE_FLT_FN (BUILT_IN_SCALB):
6125 CASE_FLT_FN (BUILT_IN_SCALBN):
6126 CASE_FLT_FN (BUILT_IN_SCALBLN):
0fd605a5 6127 if (! flag_unsafe_math_optimizations)
6128 break;
ef722005 6129
6130 CASE_FLT_FN (BUILT_IN_FMOD):
6131 CASE_FLT_FN (BUILT_IN_REMAINDER):
6132 CASE_FLT_FN (BUILT_IN_DREM):
0810ff17 6133 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 6134 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6135 if (target)
6136 return target;
6137 break;
6138
d735c391 6139 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6140 target = expand_builtin_cexpi (exp, target);
d735c391 6141 gcc_assert (target);
6142 return target;
6143
4f35b1fc 6144 CASE_FLT_FN (BUILT_IN_SIN):
6145 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6146 if (! flag_unsafe_math_optimizations)
6147 break;
6148 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6149 if (target)
6150 return target;
6151 break;
6152
c3147c1a 6153 CASE_FLT_FN (BUILT_IN_SINCOS):
6154 if (! flag_unsafe_math_optimizations)
6155 break;
6156 target = expand_builtin_sincos (exp);
6157 if (target)
6158 return target;
6159 break;
6160
53800dbe 6161 case BUILT_IN_APPLY_ARGS:
6162 return expand_builtin_apply_args ();
6163
6164 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6165 FUNCTION with a copy of the parameters described by
6166 ARGUMENTS, and ARGSIZE. It returns a block of memory
 6167	     allocated on the stack into which are stored all the registers
6168 that might possibly be used for returning the result of a
6169 function. ARGUMENTS is the value returned by
6170 __builtin_apply_args. ARGSIZE is the number of bytes of
6171 arguments that must be copied. ??? How should this value be
6172 computed? We'll also need a safe worst case value for varargs
6173 functions. */
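    /* An illustrative usage sketch (hypothetical, not part of this file),
       forwarding a call and returning its result verbatim:

	 void *args = __builtin_apply_args ();
	 void *ret = __builtin_apply ((void (*) ()) fn, args, 64);
	 __builtin_return (ret);

       Here "fn" and the 64-byte argument size are placeholders; the size
       must cover all outgoing argument bytes, which is the ??? question
       raised above.  */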
6174 case BUILT_IN_APPLY:
c2f47e15 6175 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6176 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6177 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6178 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6179 return const0_rtx;
6180 else
6181 {
53800dbe 6182 rtx ops[3];
6183
c2f47e15 6184 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6185 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6186 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6187
6188 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6189 }
6190
6191 /* __builtin_return (RESULT) causes the function to return the
6192 value described by RESULT. RESULT is address of the block of
6193 memory returned by __builtin_apply. */
6194 case BUILT_IN_RETURN:
c2f47e15 6195 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6196 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6197 return const0_rtx;
6198
6199 case BUILT_IN_SAVEREGS:
a66c9326 6200 return expand_builtin_saveregs ();
53800dbe 6201
48dc2227 6202 case BUILT_IN_VA_ARG_PACK:
6203 /* All valid uses of __builtin_va_arg_pack () are removed during
6204 inlining. */
b8c23db3 6205 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6206 return const0_rtx;
6207
4e1d7ea4 6208 case BUILT_IN_VA_ARG_PACK_LEN:
6209 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6210 inlining. */
b8c23db3 6211 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6212 return const0_rtx;
6213
53800dbe 6214 /* Return the address of the first anonymous stack arg. */
6215 case BUILT_IN_NEXT_ARG:
c2f47e15 6216 if (fold_builtin_next_arg (exp, false))
a0c938f0 6217 return const0_rtx;
79012a9d 6218 return expand_builtin_next_arg ();
53800dbe 6219
ac8fb6db 6220 case BUILT_IN_CLEAR_CACHE:
6221 target = expand_builtin___clear_cache (exp);
6222 if (target)
6223 return target;
6224 break;
6225
53800dbe 6226 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6227 return expand_builtin_classify_type (exp);
53800dbe 6228
6229 case BUILT_IN_CONSTANT_P:
4ee9c684 6230 return const0_rtx;
53800dbe 6231
6232 case BUILT_IN_FRAME_ADDRESS:
6233 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6234 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6235
6236 /* Returns the address of the area where the structure is returned.
6237 0 otherwise. */
6238 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6239 if (call_expr_nargs (exp) != 0
9342ee68 6240 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6241 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6242 return const0_rtx;
53800dbe 6243 else
9342ee68 6244 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6245
6246 case BUILT_IN_ALLOCA:
581bf1c2 6247 case BUILT_IN_ALLOCA_WITH_ALIGN:
990495a7 6248 /* If the allocation stems from the declaration of a variable-sized
6249 object, it cannot accumulate. */
a882d754 6250 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
53800dbe 6251 if (target)
6252 return target;
6253 break;
6254
4ee9c684 6255 case BUILT_IN_STACK_SAVE:
6256 return expand_stack_save ();
6257
6258 case BUILT_IN_STACK_RESTORE:
c2f47e15 6259 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6260 return const0_rtx;
6261
74bdbe96 6262 case BUILT_IN_BSWAP16:
42791117 6263 case BUILT_IN_BSWAP32:
6264 case BUILT_IN_BSWAP64:
74bdbe96 6265 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6266 if (target)
6267 return target;
6268 break;
6269
4f35b1fc 6270 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6271 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6272 subtarget, ffs_optab);
6a08d0ab 6273 if (target)
6274 return target;
6275 break;
6276
4f35b1fc 6277 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6278 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6279 subtarget, clz_optab);
6a08d0ab 6280 if (target)
6281 return target;
6282 break;
6283
4f35b1fc 6284 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6285 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6286 subtarget, ctz_optab);
6a08d0ab 6287 if (target)
6288 return target;
6289 break;
6290
d8492bd3 6291 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6292 target = expand_builtin_unop (target_mode, exp, target,
6293 subtarget, clrsb_optab);
6294 if (target)
6295 return target;
6296 break;
6297
4f35b1fc 6298 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6299 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6300 subtarget, popcount_optab);
6a08d0ab 6301 if (target)
6302 return target;
6303 break;
6304
4f35b1fc 6305 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6306 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6307 subtarget, parity_optab);
53800dbe 6308 if (target)
6309 return target;
6310 break;
6311
6312 case BUILT_IN_STRLEN:
c2f47e15 6313 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6314 if (target)
6315 return target;
6316 break;
6317
6318 case BUILT_IN_STRCPY:
a65c4d64 6319 target = expand_builtin_strcpy (exp, target);
53800dbe 6320 if (target)
6321 return target;
6322 break;
bf8e3599 6323
ed09096d 6324 case BUILT_IN_STRNCPY:
a65c4d64 6325 target = expand_builtin_strncpy (exp, target);
ed09096d 6326 if (target)
6327 return target;
6328 break;
bf8e3599 6329
3b824fa6 6330 case BUILT_IN_STPCPY:
dc369150 6331 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6332 if (target)
6333 return target;
6334 break;
6335
53800dbe 6336 case BUILT_IN_MEMCPY:
a65c4d64 6337 target = expand_builtin_memcpy (exp, target);
3b824fa6 6338 if (target)
6339 return target;
6340 break;
6341
6342 case BUILT_IN_MEMPCPY:
c2f47e15 6343 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6344 if (target)
6345 return target;
6346 break;
6347
6348 case BUILT_IN_MEMSET:
c2f47e15 6349 target = expand_builtin_memset (exp, target, mode);
53800dbe 6350 if (target)
6351 return target;
6352 break;
6353
ffc83088 6354 case BUILT_IN_BZERO:
0b25db21 6355 target = expand_builtin_bzero (exp);
ffc83088 6356 if (target)
6357 return target;
6358 break;
6359
53800dbe 6360 case BUILT_IN_STRCMP:
a65c4d64 6361 target = expand_builtin_strcmp (exp, target);
53800dbe 6362 if (target)
6363 return target;
6364 break;
6365
ed09096d 6366 case BUILT_IN_STRNCMP:
6367 target = expand_builtin_strncmp (exp, target, mode);
6368 if (target)
6369 return target;
6370 break;
6371
071f1696 6372 case BUILT_IN_BCMP:
53800dbe 6373 case BUILT_IN_MEMCMP:
c2f47e15 6374 target = expand_builtin_memcmp (exp, target, mode);
53800dbe 6375 if (target)
6376 return target;
6377 break;
53800dbe 6378
6379 case BUILT_IN_SETJMP:
2c8a1497 6380 /* This should have been lowered to the builtins below. */
6381 gcc_unreachable ();
6382
6383 case BUILT_IN_SETJMP_SETUP:
6384 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6385 and the receiver label. */
c2f47e15 6386 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6387 {
c2f47e15 6388 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6389 VOIDmode, EXPAND_NORMAL);
c2f47e15 6390 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6391 rtx_insn *label_r = label_rtx (label);
2c8a1497 6392
6393 /* This is copied from the handling of non-local gotos. */
6394 expand_builtin_setjmp_setup (buf_addr, label_r);
6395 nonlocal_goto_handler_labels
a4de1c23 6396 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6397 nonlocal_goto_handler_labels);
6398 /* ??? Do not let expand_label treat us as such since we would
6399 not want to be both on the list of non-local labels and on
6400 the list of forced labels. */
6401 FORCED_LABEL (label) = 0;
6402 return const0_rtx;
6403 }
6404 break;
6405
2c8a1497 6406 case BUILT_IN_SETJMP_RECEIVER:
6407 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6408 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6409 {
c2f47e15 6410 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6411 rtx_insn *label_r = label_rtx (label);
2c8a1497 6412
6413 expand_builtin_setjmp_receiver (label_r);
6414 return const0_rtx;
6415 }
6b7f6858 6416 break;
53800dbe 6417
6418 /* __builtin_longjmp is passed a pointer to an array of five words.
6419 It's similar to the C library longjmp function but works with
6420 __builtin_setjmp above. */
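    /* A minimal usage sketch (hypothetical, not part of this file); the
       buffer is an array of five words and the second argument must be
       literally 1, as checked below:

	 void *buf[5];
	 if (__builtin_setjmp (buf) == 0)
	   __builtin_longjmp (buf, 1);
    */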
6421 case BUILT_IN_LONGJMP:
c2f47e15 6422 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6423 {
c2f47e15 6424 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6425 VOIDmode, EXPAND_NORMAL);
c2f47e15 6426 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6427
6428 if (value != const1_rtx)
6429 {
1e5fcbe2 6430 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6431 return const0_rtx;
6432 }
6433
6434 expand_builtin_longjmp (buf_addr, value);
6435 return const0_rtx;
6436 }
2c8a1497 6437 break;
53800dbe 6438
4ee9c684 6439 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6440 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6441 if (target)
6442 return target;
6443 break;
6444
843d08a9 6445 /* This updates the setjmp buffer that is its argument with the value
6446 of the current stack pointer. */
6447 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6448 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6449 {
6450 rtx buf_addr
c2f47e15 6451 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6452
6453 expand_builtin_update_setjmp_buf (buf_addr);
6454 return const0_rtx;
6455 }
6456 break;
6457
53800dbe 6458 case BUILT_IN_TRAP:
a0ef1725 6459 expand_builtin_trap ();
53800dbe 6460 return const0_rtx;
6461
d2b48f0c 6462 case BUILT_IN_UNREACHABLE:
6463 expand_builtin_unreachable ();
6464 return const0_rtx;
6465
4f35b1fc 6466 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6467 case BUILT_IN_SIGNBITD32:
6468 case BUILT_IN_SIGNBITD64:
6469 case BUILT_IN_SIGNBITD128:
27f261ef 6470 target = expand_builtin_signbit (exp, target);
6471 if (target)
6472 return target;
6473 break;
6474
53800dbe 6475 /* Various hooks for the DWARF 2 __throw routine. */
6476 case BUILT_IN_UNWIND_INIT:
6477 expand_builtin_unwind_init ();
6478 return const0_rtx;
6479 case BUILT_IN_DWARF_CFA:
6480 return virtual_cfa_rtx;
6481#ifdef DWARF2_UNWIND_INFO
f8f023a5 6482 case BUILT_IN_DWARF_SP_COLUMN:
6483 return expand_builtin_dwarf_sp_column ();
695e919b 6484 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6485 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6486 return const0_rtx;
53800dbe 6487#endif
6488 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6489 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6490 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6491 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6492 case BUILT_IN_EH_RETURN:
c2f47e15 6493 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6494 CALL_EXPR_ARG (exp, 1));
53800dbe 6495 return const0_rtx;
df4b504c 6496 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6497 return expand_builtin_eh_return_data_regno (exp);
26093bf4 6498 case BUILT_IN_EXTEND_POINTER:
c2f47e15 6499 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 6500 case BUILT_IN_EH_POINTER:
6501 return expand_builtin_eh_pointer (exp);
6502 case BUILT_IN_EH_FILTER:
6503 return expand_builtin_eh_filter (exp);
6504 case BUILT_IN_EH_COPY_VALUES:
6505 return expand_builtin_eh_copy_values (exp);
26093bf4 6506
7ccc713a 6507 case BUILT_IN_VA_START:
c2f47e15 6508 return expand_builtin_va_start (exp);
a66c9326 6509 case BUILT_IN_VA_END:
c2f47e15 6510 return expand_builtin_va_end (exp);
a66c9326 6511 case BUILT_IN_VA_COPY:
c2f47e15 6512 return expand_builtin_va_copy (exp);
89cfe6e5 6513 case BUILT_IN_EXPECT:
c2f47e15 6514 return expand_builtin_expect (exp, target);
fca0886c 6515 case BUILT_IN_ASSUME_ALIGNED:
6516 return expand_builtin_assume_aligned (exp, target);
5e3608d8 6517 case BUILT_IN_PREFETCH:
c2f47e15 6518 expand_builtin_prefetch (exp);
5e3608d8 6519 return const0_rtx;
6520
4ee9c684 6521 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 6522 return expand_builtin_init_trampoline (exp, true);
6523 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6524 return expand_builtin_init_trampoline (exp, false);
4ee9c684 6525 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 6526 return expand_builtin_adjust_trampoline (exp);
4ee9c684 6527
73673831 6528 case BUILT_IN_FORK:
6529 case BUILT_IN_EXECL:
6530 case BUILT_IN_EXECV:
6531 case BUILT_IN_EXECLP:
6532 case BUILT_IN_EXECLE:
6533 case BUILT_IN_EXECVP:
6534 case BUILT_IN_EXECVE:
c2f47e15 6535 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 6536 if (target)
6537 return target;
6538 break;
53800dbe 6539
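      /* The __sync_* cases below differ only in the rtx operation code passed
	 to expand_builtin_sync_operation and in the boolean flag selecting
	 whether the old or the new value is returned.  For example
	 (illustrative), __sync_fetch_and_add (&x, 1) is expanded with PLUS
	 and the flag false (old value), while __sync_add_and_fetch (&x, 1)
	 uses PLUS with the flag true (new value).  */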
2797f13a 6540 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6541 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6542 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6543 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6544 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6545 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 6546 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 6547 if (target)
6548 return target;
6549 break;
6550
2797f13a 6551 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6552 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6553 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6554 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6555 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6556 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 6557 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 6558 if (target)
6559 return target;
6560 break;
6561
2797f13a 6562 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6563 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6564 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6565 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6566 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6567 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 6568 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 6569 if (target)
6570 return target;
6571 break;
6572
2797f13a 6573 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6574 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6575 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6576 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6577 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6578 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 6579 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 6580 if (target)
6581 return target;
6582 break;
6583
2797f13a 6584 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6585 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6586 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6587 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6588 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6589 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 6590 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 6591 if (target)
6592 return target;
6593 break;
6594
2797f13a 6595 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6596 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6597 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6598 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6599 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6600 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 6601 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 6602 if (target)
6603 return target;
6604 break;
6605
2797f13a 6606 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6607 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6608 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6609 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6610 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6611 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 6612 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 6613 if (target)
6614 return target;
6615 break;
6616
2797f13a 6617 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6618 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6619 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6620 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6621 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6622 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 6623 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 6624 if (target)
6625 return target;
6626 break;
6627
2797f13a 6628 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6629 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6630 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6631 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6632 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6633 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 6634 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 6635 if (target)
6636 return target;
6637 break;
6638
2797f13a 6639 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6640 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6641 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6642 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6643 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6644 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 6645 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 6646 if (target)
6647 return target;
6648 break;
6649
2797f13a 6650 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6651 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6652 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6653 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6654 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6655 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 6656 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 6657 if (target)
6658 return target;
6659 break;
6660
2797f13a 6661 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6662 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6663 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6664 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6665 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6666 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 6667 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 6668 if (target)
6669 return target;
6670 break;
6671
2797f13a 6672 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6673 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6674 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6675 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6676 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
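      /* Note that MODE is used for two different things in this case: first
	 the mode of the boolean result, to validate or create TARGET, and
	 then the memory mode of the operands of the compare-and-swap.  */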
a601d32a 6677 if (mode == VOIDmode)
6678 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 6679 if (!target || !register_operand (target, mode))
6680 target = gen_reg_rtx (mode);
3e272de8 6681
2797f13a 6682 mode = get_builtin_sync_mode
6683 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 6684 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 6685 if (target)
6686 return target;
6687 break;
6688
2797f13a 6689 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6690 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6691 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6692 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6693 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6694 mode = get_builtin_sync_mode
6695 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 6696 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 6697 if (target)
6698 return target;
6699 break;
6700
2797f13a 6701 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6702 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6703 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6704 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6705 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6706 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6707 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 6708 if (target)
6709 return target;
6710 break;
6711
2797f13a 6712 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6713 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6714 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6715 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6716 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6717 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6718 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 6719 return const0_rtx;
6720
2797f13a 6721 case BUILT_IN_SYNC_SYNCHRONIZE:
6722 expand_builtin_sync_synchronize ();
b6a5fc45 6723 return const0_rtx;
6724
1cd6e20d 6725 case BUILT_IN_ATOMIC_EXCHANGE_1:
6726 case BUILT_IN_ATOMIC_EXCHANGE_2:
6727 case BUILT_IN_ATOMIC_EXCHANGE_4:
6728 case BUILT_IN_ATOMIC_EXCHANGE_8:
6729 case BUILT_IN_ATOMIC_EXCHANGE_16:
6730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6731 target = expand_builtin_atomic_exchange (mode, exp, target);
6732 if (target)
6733 return target;
6734 break;
6735
6736 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6737 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6738 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6739 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6740 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 6741 {
6742 unsigned int nargs, z;
f1f41a6c 6743 vec<tree, va_gc> *vec;
2c201ad1 6744
6745 mode =
6746 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6747 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6748 if (target)
6749 return target;
6750
6751 /* If this is turned into an external library call, the weak parameter
6752 must be dropped to match the expected parameter list. */
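	/* Concretely (illustrative): the built-in takes (ptr, expected,
	   desired, weak, success_order, failure_order) while the external
	   library routine takes (ptr, expected, desired, success_order,
	   failure_order), so the loops below copy arguments 0-2 and 4-5,
	   skipping index 3.  */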
6753 nargs = call_expr_nargs (exp);
f1f41a6c 6754 vec_alloc (vec, nargs - 1);
2c201ad1 6755 for (z = 0; z < 3; z++)
f1f41a6c 6756 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6757 /* Skip the boolean weak parameter. */
6758 for (z = 4; z < 6; z++)
f1f41a6c 6759 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6760 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6761 break;
6762 }
1cd6e20d 6763
6764 case BUILT_IN_ATOMIC_LOAD_1:
6765 case BUILT_IN_ATOMIC_LOAD_2:
6766 case BUILT_IN_ATOMIC_LOAD_4:
6767 case BUILT_IN_ATOMIC_LOAD_8:
6768 case BUILT_IN_ATOMIC_LOAD_16:
6769 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6770 target = expand_builtin_atomic_load (mode, exp, target);
6771 if (target)
6772 return target;
6773 break;
6774
6775 case BUILT_IN_ATOMIC_STORE_1:
6776 case BUILT_IN_ATOMIC_STORE_2:
6777 case BUILT_IN_ATOMIC_STORE_4:
6778 case BUILT_IN_ATOMIC_STORE_8:
6779 case BUILT_IN_ATOMIC_STORE_16:
6780 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6781 target = expand_builtin_atomic_store (mode, exp);
6782 if (target)
6783 return const0_rtx;
6784 break;
6785
6786 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6787 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6788 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6789 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6790 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6791 {
6792 enum built_in_function lib;
6793 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6794 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6795 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6796 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6797 ignore, lib);
6798 if (target)
6799 return target;
6800 break;
6801 }
6802 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6803 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6804 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6805 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6806 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6807 {
6808 enum built_in_function lib;
6809 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6810 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6811 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6812 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6813 ignore, lib);
6814 if (target)
6815 return target;
6816 break;
6817 }
6818 case BUILT_IN_ATOMIC_AND_FETCH_1:
6819 case BUILT_IN_ATOMIC_AND_FETCH_2:
6820 case BUILT_IN_ATOMIC_AND_FETCH_4:
6821 case BUILT_IN_ATOMIC_AND_FETCH_8:
6822 case BUILT_IN_ATOMIC_AND_FETCH_16:
6823 {
6824 enum built_in_function lib;
6825 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6826 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6827 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6828 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6829 ignore, lib);
6830 if (target)
6831 return target;
6832 break;
6833 }
6834 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6835 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6836 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6837 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6838 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6839 {
6840 enum built_in_function lib;
6841 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6842 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6843 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6844 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6845 ignore, lib);
6846 if (target)
6847 return target;
6848 break;
6849 }
6850 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6851 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6852 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6853 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6854 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6855 {
6856 enum built_in_function lib;
6857 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6858 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6859 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6860 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6861 ignore, lib);
6862 if (target)
6863 return target;
6864 break;
6865 }
6866 case BUILT_IN_ATOMIC_OR_FETCH_1:
6867 case BUILT_IN_ATOMIC_OR_FETCH_2:
6868 case BUILT_IN_ATOMIC_OR_FETCH_4:
6869 case BUILT_IN_ATOMIC_OR_FETCH_8:
6870 case BUILT_IN_ATOMIC_OR_FETCH_16:
6871 {
6872 enum built_in_function lib;
6873 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6874 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6875 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6876 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6877 ignore, lib);
6878 if (target)
6879 return target;
6880 break;
6881 }
6882 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6883 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6884 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6885 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6886 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6887 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6888 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6889 ignore, BUILT_IN_NONE);
6890 if (target)
6891 return target;
6892 break;
6893
6894 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6895 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6896 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6897 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6898 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6899 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6900 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6901 ignore, BUILT_IN_NONE);
6902 if (target)
6903 return target;
6904 break;
6905
6906 case BUILT_IN_ATOMIC_FETCH_AND_1:
6907 case BUILT_IN_ATOMIC_FETCH_AND_2:
6908 case BUILT_IN_ATOMIC_FETCH_AND_4:
6909 case BUILT_IN_ATOMIC_FETCH_AND_8:
6910 case BUILT_IN_ATOMIC_FETCH_AND_16:
6911 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6912 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6913 ignore, BUILT_IN_NONE);
6914 if (target)
6915 return target;
6916 break;
6917
6918 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6919 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6920 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6921 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6922 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6923 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6924 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6925 ignore, BUILT_IN_NONE);
6926 if (target)
6927 return target;
6928 break;
6929
6930 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6931 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6932 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6933 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6934 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6935 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6936 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6937 ignore, BUILT_IN_NONE);
6938 if (target)
6939 return target;
6940 break;
6941
6942 case BUILT_IN_ATOMIC_FETCH_OR_1:
6943 case BUILT_IN_ATOMIC_FETCH_OR_2:
6944 case BUILT_IN_ATOMIC_FETCH_OR_4:
6945 case BUILT_IN_ATOMIC_FETCH_OR_8:
6946 case BUILT_IN_ATOMIC_FETCH_OR_16:
6947 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6948 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6949 ignore, BUILT_IN_NONE);
6950 if (target)
6951 return target;
6952 break;
10b744a3 6953
6954 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 6955 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 6956
6957 case BUILT_IN_ATOMIC_CLEAR:
6958 return expand_builtin_atomic_clear (exp);
1cd6e20d 6959
6960 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6961 return expand_builtin_atomic_always_lock_free (exp);
6962
6963 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6964 target = expand_builtin_atomic_is_lock_free (exp);
6965 if (target)
6966 return target;
6967 break;
6968
6969 case BUILT_IN_ATOMIC_THREAD_FENCE:
6970 expand_builtin_atomic_thread_fence (exp);
6971 return const0_rtx;
6972
6973 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6974 expand_builtin_atomic_signal_fence (exp);
6975 return const0_rtx;
6976
0a39fd54 6977 case BUILT_IN_OBJECT_SIZE:
6978 return expand_builtin_object_size (exp);
6979
6980 case BUILT_IN_MEMCPY_CHK:
6981 case BUILT_IN_MEMPCPY_CHK:
6982 case BUILT_IN_MEMMOVE_CHK:
6983 case BUILT_IN_MEMSET_CHK:
6984 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6985 if (target)
6986 return target;
6987 break;
6988
6989 case BUILT_IN_STRCPY_CHK:
6990 case BUILT_IN_STPCPY_CHK:
6991 case BUILT_IN_STRNCPY_CHK:
1063acde 6992 case BUILT_IN_STPNCPY_CHK:
0a39fd54 6993 case BUILT_IN_STRCAT_CHK:
b356dfef 6994 case BUILT_IN_STRNCAT_CHK:
0a39fd54 6995 case BUILT_IN_SNPRINTF_CHK:
6996 case BUILT_IN_VSNPRINTF_CHK:
6997 maybe_emit_chk_warning (exp, fcode);
6998 break;
6999
7000 case BUILT_IN_SPRINTF_CHK:
7001 case BUILT_IN_VSPRINTF_CHK:
7002 maybe_emit_sprintf_chk_warning (exp, fcode);
7003 break;
7004
2c281b15 7005 case BUILT_IN_FREE:
f74ea1c2 7006 if (warn_free_nonheap_object)
7007 maybe_emit_free_warning (exp);
2c281b15 7008 break;
7009
badaa04c 7010 case BUILT_IN_THREAD_POINTER:
7011 return expand_builtin_thread_pointer (exp, target);
7012
7013 case BUILT_IN_SET_THREAD_POINTER:
7014 expand_builtin_set_thread_pointer (exp);
7015 return const0_rtx;
7016
d037099f 7017 case BUILT_IN_CILK_DETACH:
7018 expand_builtin_cilk_detach (exp);
7019 return const0_rtx;
7020
7021 case BUILT_IN_CILK_POP_FRAME:
7022 expand_builtin_cilk_pop_frame (exp);
7023 return const0_rtx;
7024
058a1b7a 7025 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7026 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7027 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7028 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7029 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7030 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7031 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7032 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7033 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7034 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7035 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7036 /* We allow user CHKP builtins if Pointer Bounds
7037 Checker is off. */
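      /* In that case the expansions below are trivial: the bounds-setting
	 and bounds-copying builtins simply return their pointer argument,
	 BUILT_IN_CHKP_GET_PTR_LBOUND yields 0, BUILT_IN_CHKP_GET_PTR_UBOUND
	 yields -1 (all ones, i.e. no restriction), and the remaining ones
	 become no-ops returning const0_rtx.  */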
7038 if (!chkp_function_instrumented_p (current_function_decl))
7039 {
7040 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7041 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7042 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7043 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7044 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7045 return expand_normal (CALL_EXPR_ARG (exp, 0));
7046 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7047 return expand_normal (size_zero_node);
7048 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7049 return expand_normal (size_int (-1));
7050 else
7051 return const0_rtx;
7052 }
7053 /* FALLTHROUGH */
7054
7055 case BUILT_IN_CHKP_BNDMK:
7056 case BUILT_IN_CHKP_BNDSTX:
7057 case BUILT_IN_CHKP_BNDCL:
7058 case BUILT_IN_CHKP_BNDCU:
7059 case BUILT_IN_CHKP_BNDLDX:
7060 case BUILT_IN_CHKP_BNDRET:
7061 case BUILT_IN_CHKP_INTERSECT:
7062 case BUILT_IN_CHKP_NARROW:
7063 case BUILT_IN_CHKP_EXTRACT_LOWER:
7064 case BUILT_IN_CHKP_EXTRACT_UPPER:
7065 /* Software implementation of Pointer Bounds Checker is NYI.
7066 Target support is required. */
 7067	      error ("your target platform does not support %<-fcheck-pointer-bounds%>");
7068 break;
7069
ca4c3545 7070 case BUILT_IN_ACC_ON_DEVICE:
7071 target = expand_builtin_acc_on_device (exp, target);
7072 if (target)
7073 return target;
7074 break;
7075
92482ee0 7076 default: /* just do library call, if unknown builtin */
146c1b4f 7077 break;
53800dbe 7078 }
7079
7080 /* The switch statement above can drop through to cause the function
7081 to be called normally. */
7082 return expand_call (exp, target, ignore);
7083}
650e4c94 7084
f21337ef 7085/* Similar to expand_builtin but is used for instrumented calls. */
7086
7087rtx
7088expand_builtin_with_bounds (tree exp, rtx target,
7089 rtx subtarget ATTRIBUTE_UNUSED,
7090 machine_mode mode, int ignore)
7091{
7092 tree fndecl = get_callee_fndecl (exp);
7093 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7094
7095 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7096
7097 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7098 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7099
7100 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7101 && fcode < END_CHKP_BUILTINS);
7102
7103 switch (fcode)
7104 {
7105 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7106 target = expand_builtin_memcpy_with_bounds (exp, target);
7107 if (target)
7108 return target;
7109 break;
7110
7111 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7112 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7113 if (target)
7114 return target;
7115 break;
7116
7117 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7118 target = expand_builtin_memset_with_bounds (exp, target, mode);
7119 if (target)
7120 return target;
7121 break;
7122
7123 default:
7124 break;
7125 }
7126
7127 /* The switch statement above can drop through to cause the function
7128 to be called normally. */
7129 return expand_call (exp, target, ignore);
7130 }
7131
805e22b2 7132/* Determine whether a tree node represents a call to a built-in
52203a9d 7133 function. If the tree T is a call to a built-in function with
7134 the right number of arguments of the appropriate types, return
7135 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7136 Otherwise the return value is END_BUILTINS. */
aecda0d6 7137
805e22b2 7138enum built_in_function
b7bf20db 7139builtin_mathfn_code (const_tree t)
805e22b2 7140{
b7bf20db 7141 const_tree fndecl, arg, parmlist;
7142 const_tree argtype, parmtype;
7143 const_call_expr_arg_iterator iter;
805e22b2 7144
7145 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 7146 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 7147 return END_BUILTINS;
7148
c6e6ecb1 7149 fndecl = get_callee_fndecl (t);
7150 if (fndecl == NULL_TREE
52203a9d 7151 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7152 || ! DECL_BUILT_IN (fndecl)
7153 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7154 return END_BUILTINS;
7155
52203a9d 7156 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7157 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7158 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7159 {
52203a9d 7160 /* If a function doesn't take a variable number of arguments,
7161 the last element in the list will have type `void'. */
7162 parmtype = TREE_VALUE (parmlist);
7163 if (VOID_TYPE_P (parmtype))
7164 {
b7bf20db 7165 if (more_const_call_expr_args_p (&iter))
52203a9d 7166 return END_BUILTINS;
7167 return DECL_FUNCTION_CODE (fndecl);
7168 }
7169
b7bf20db 7170 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7171 return END_BUILTINS;
48e1416a 7172
b7bf20db 7173 arg = next_const_call_expr_arg (&iter);
c2f47e15 7174 argtype = TREE_TYPE (arg);
52203a9d 7175
7176 if (SCALAR_FLOAT_TYPE_P (parmtype))
7177 {
7178 if (! SCALAR_FLOAT_TYPE_P (argtype))
7179 return END_BUILTINS;
7180 }
7181 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7182 {
7183 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7184 return END_BUILTINS;
7185 }
7186 else if (POINTER_TYPE_P (parmtype))
7187 {
7188 if (! POINTER_TYPE_P (argtype))
7189 return END_BUILTINS;
7190 }
7191 else if (INTEGRAL_TYPE_P (parmtype))
7192 {
7193 if (! INTEGRAL_TYPE_P (argtype))
7194 return END_BUILTINS;
7195 }
7196 else
e9f80ff5 7197 return END_BUILTINS;
e9f80ff5 7198 }
7199
52203a9d 7200 /* Variable-length argument list. */
805e22b2 7201 return DECL_FUNCTION_CODE (fndecl);
7202}
7203
c2f47e15 7204/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7205 evaluate to a constant. */
650e4c94 7206
7207static tree
c2f47e15 7208fold_builtin_constant_p (tree arg)
650e4c94 7209{
650e4c94 7210 /* We return 1 for a numeric type that's known to be a constant
7211 value at compile-time or for an aggregate type that's a
7212 literal constant. */
c2f47e15 7213 STRIP_NOPS (arg);
650e4c94 7214
7215 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7216 if (CONSTANT_CLASS_P (arg)
7217 || (TREE_CODE (arg) == CONSTRUCTOR
7218 && TREE_CONSTANT (arg)))
650e4c94 7219 return integer_one_node;
c2f47e15 7220 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7221 {
c2f47e15 7222 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7223 if (TREE_CODE (op) == STRING_CST
7224 || (TREE_CODE (op) == ARRAY_REF
7225 && integer_zerop (TREE_OPERAND (op, 1))
7226 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7227 return integer_one_node;
7228 }
650e4c94 7229
1fb4300c 7230 /* If this expression has side effects, show we don't know it to be a
7231 constant. Likewise if it's a pointer or aggregate type since in
 7232	     those cases we only want literals, since those are only optimized
f97c71a1 7233 when generating RTL, not later.
7234 And finally, if we are compiling an initializer, not code, we
7235 need to return a definite result now; there's not going to be any
7236 more optimization done. */
c2f47e15 7237 if (TREE_SIDE_EFFECTS (arg)
7238 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7239 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7240 || cfun == 0
0b049e15 7241 || folding_initializer
7242 || force_folding_builtin_constant_p)
650e4c94 7243 return integer_zero_node;
7244
c2f47e15 7245 return NULL_TREE;
650e4c94 7246}
7247
76f5a783 7248/* Create a call to builtin_expect with PRED and EXPECTED as its arguments
 7249	   (and PREDICTOR, if non-NULL) and return it as a truthvalue.  */
4ee9c684 7250
7251static tree
c83059be 7252build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7253 tree predictor)
4ee9c684 7254{
76f5a783 7255 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7256
b9a16870 7257 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7258 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7259 ret_type = TREE_TYPE (TREE_TYPE (fn));
7260 pred_type = TREE_VALUE (arg_types);
7261 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7262
389dd41b 7263 pred = fold_convert_loc (loc, pred_type, pred);
7264 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7265 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7266 predictor);
76f5a783 7267
7268 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7269 build_int_cst (ret_type, 0));
7270}
7271
 7272/* Fold a call to builtin_expect with arguments ARG0 and ARG1 (and optional
 7273   outcome predictor ARG2).  Return NULL_TREE if no simplification is possible.  */
7274
c83059be 7275tree
7276fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
76f5a783 7277{
083bada9 7278 tree inner, fndecl, inner_arg0;
76f5a783 7279 enum tree_code code;
7280
083bada9 7281 /* Distribute the expected value over short-circuiting operators.
7282 See through the cast from truthvalue_type_node to long. */
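  /* For example (illustrative), __builtin_expect (a && b, 1) is distributed
     into (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0) by
     build_builtin_expect_predicate below.  */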
7283 inner_arg0 = arg0;
d09ef31a 7284 while (CONVERT_EXPR_P (inner_arg0)
083bada9 7285 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7286 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7287 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7288
76f5a783 7289 /* If this is a builtin_expect within a builtin_expect keep the
7290 inner one. See through a comparison against a constant. It
 7291	     might have been added to create a truthvalue.  */
083bada9 7292 inner = inner_arg0;
7293
76f5a783 7294 if (COMPARISON_CLASS_P (inner)
7295 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7296 inner = TREE_OPERAND (inner, 0);
7297
7298 if (TREE_CODE (inner) == CALL_EXPR
7299 && (fndecl = get_callee_fndecl (inner))
7300 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7301 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7302 return arg0;
7303
083bada9 7304 inner = inner_arg0;
76f5a783 7305 code = TREE_CODE (inner);
7306 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7307 {
7308 tree op0 = TREE_OPERAND (inner, 0);
7309 tree op1 = TREE_OPERAND (inner, 1);
7310
c83059be 7311 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7312 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
76f5a783 7313 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7314
389dd41b 7315 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 7316 }
7317
7318 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 7319 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 7320 return NULL_TREE;
4ee9c684 7321
76f5a783 7322 /* If we expect that a comparison against the argument will fold to
7323 a constant return the constant. In practice, this means a true
7324 constant or the address of a non-weak symbol. */
083bada9 7325 inner = inner_arg0;
4ee9c684 7326 STRIP_NOPS (inner);
7327 if (TREE_CODE (inner) == ADDR_EXPR)
7328 {
7329 do
7330 {
7331 inner = TREE_OPERAND (inner, 0);
7332 }
7333 while (TREE_CODE (inner) == COMPONENT_REF
7334 || TREE_CODE (inner) == ARRAY_REF);
062b4460 7335 if ((TREE_CODE (inner) == VAR_DECL
7336 || TREE_CODE (inner) == FUNCTION_DECL)
7337 && DECL_WEAK (inner))
c2f47e15 7338 return NULL_TREE;
4ee9c684 7339 }
7340
76f5a783 7341 /* Otherwise, ARG0 already has the proper type for the return value. */
7342 return arg0;
4ee9c684 7343}
7344
c2f47e15 7345/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7346
539a3a92 7347static tree
c2f47e15 7348fold_builtin_classify_type (tree arg)
539a3a92 7349{
c2f47e15 7350 if (arg == 0)
7002a1c8 7351 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7352
7002a1c8 7353 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7354}
7355
c2f47e15 7356/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7357
7358static tree
c7cbde74 7359fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7360{
c2f47e15 7361 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7362 return NULL_TREE;
7363 else
7364 {
c2f47e15 7365 tree len = c_strlen (arg, 0);
e6e27594 7366
7367 if (len)
c7cbde74 7368 return fold_convert_loc (loc, type, len);
e6e27594 7369
7370 return NULL_TREE;
7371 }
7372}
7373
92c43e3c 7374/* Fold a call to __builtin_inf or __builtin_huge_val. */
7375
7376static tree
389dd41b 7377fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7378{
aa870c1b 7379 REAL_VALUE_TYPE real;
7380
40f4dbd5 7381 /* __builtin_inff is intended to be usable to define INFINITY on all
7382 targets. If an infinity is not available, INFINITY expands "to a
7383 positive constant of type float that overflows at translation
7384 time", footnote "In this case, using INFINITY will violate the
7385 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7386 Thus we pedwarn to ensure this constraint violation is
7387 diagnosed. */
92c43e3c 7388 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7389 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7390
aa870c1b 7391 real_inf (&real);
7392 return build_real (type, real);
92c43e3c 7393}
7394
c2f47e15 7395/* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
b0db7939 7396
7397static tree
c2f47e15 7398fold_builtin_nan (tree arg, tree type, int quiet)
b0db7939 7399{
7400 REAL_VALUE_TYPE real;
7401 const char *str;
7402
c2f47e15 7403 if (!validate_arg (arg, POINTER_TYPE))
7404 return NULL_TREE;
7405 str = c_getstr (arg);
b0db7939 7406 if (!str)
c2f47e15 7407 return NULL_TREE;
b0db7939 7408
7409 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
c2f47e15 7410 return NULL_TREE;
b0db7939 7411
7412 return build_real (type, real);
7413}
7414
277f8dd2 7415/* Return true if the floating point expression T has an integer value.
7416 We also allow +Inf, -Inf and NaN to be considered integer values. */
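   /* For example, (double) i for integer I, fabs (t) for integer-valued T,
      and floor (x) are all recognized as integer valued by the cases
      below.  */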
7417
7418static bool
7419integer_valued_real_p (tree t)
7420{
7421 switch (TREE_CODE (t))
7422 {
7423 case FLOAT_EXPR:
7424 return true;
7425
7426 case ABS_EXPR:
7427 case SAVE_EXPR:
277f8dd2 7428 return integer_valued_real_p (TREE_OPERAND (t, 0));
7429
7430 case COMPOUND_EXPR:
41076ef6 7431 case MODIFY_EXPR:
277f8dd2 7432 case BIND_EXPR:
75a70cf9 7433 return integer_valued_real_p (TREE_OPERAND (t, 1));
277f8dd2 7434
7435 case PLUS_EXPR:
7436 case MINUS_EXPR:
7437 case MULT_EXPR:
7438 case MIN_EXPR:
7439 case MAX_EXPR:
7440 return integer_valued_real_p (TREE_OPERAND (t, 0))
7441 && integer_valued_real_p (TREE_OPERAND (t, 1));
7442
7443 case COND_EXPR:
7444 return integer_valued_real_p (TREE_OPERAND (t, 1))
7445 && integer_valued_real_p (TREE_OPERAND (t, 2));
7446
7447 case REAL_CST:
0570334c 7448 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
277f8dd2 7449
d09ef31a 7450 CASE_CONVERT:
277f8dd2 7451 {
7452 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7453 if (TREE_CODE (type) == INTEGER_TYPE)
7454 return true;
7455 if (TREE_CODE (type) == REAL_TYPE)
7456 return integer_valued_real_p (TREE_OPERAND (t, 0));
7457 break;
7458 }
7459
7460 case CALL_EXPR:
7461 switch (builtin_mathfn_code (t))
7462 {
4f35b1fc 7463 CASE_FLT_FN (BUILT_IN_CEIL):
7464 CASE_FLT_FN (BUILT_IN_FLOOR):
7465 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7466 CASE_FLT_FN (BUILT_IN_RINT):
7467 CASE_FLT_FN (BUILT_IN_ROUND):
7468 CASE_FLT_FN (BUILT_IN_TRUNC):
277f8dd2 7469 return true;
7470
d4a43a03 7471 CASE_FLT_FN (BUILT_IN_FMIN):
7472 CASE_FLT_FN (BUILT_IN_FMAX):
c2f47e15 7473 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7474 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
d4a43a03 7475
277f8dd2 7476 default:
7477 break;
7478 }
7479 break;
7480
7481 default:
7482 break;
7483 }
7484 return false;
7485}
7486
c2f47e15 7487/* FNDECL is assumed to be a builtin where truncation can be propagated
6528f4f4 7488   across (for instance floor((double)f) == (double)floorf (f)).
c2f47e15 7489 Do the transformation for a call with argument ARG. */
277f8dd2 7490
6528f4f4 7491static tree
389dd41b 7492fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6528f4f4 7493{
6528f4f4 7494 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
277f8dd2 7495
c2f47e15 7496 if (!validate_arg (arg, REAL_TYPE))
7497 return NULL_TREE;
6528f4f4 7498
277f8dd2 7499 /* Integer rounding functions are idempotent. */
7500 if (fcode == builtin_mathfn_code (arg))
7501 return arg;
7502
7503 /* If argument is already integer valued, and we don't need to worry
7504 about setting errno, there's no need to perform rounding. */
7505 if (! flag_errno_math && integer_valued_real_p (arg))
7506 return arg;
7507
7508 if (optimize)
6528f4f4 7509 {
277f8dd2 7510 tree arg0 = strip_float_extensions (arg);
2426241c 7511 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6528f4f4 7512 tree newtype = TREE_TYPE (arg0);
7513 tree decl;
7514
7515 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7516 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7517 return fold_convert_loc (loc, ftype,
7518 build_call_expr_loc (loc, decl, 1,
7519 fold_convert_loc (loc,
7520 newtype,
7521 arg0)));
6528f4f4 7522 }
c2f47e15 7523 return NULL_TREE;
6528f4f4 7524}
7525
c2f47e15 7526/* FNDECL is assumed to be a builtin which can narrow the FP type of
7527 the argument, for instance lround((double)f) -> lroundf (f).
7528 Do the transformation for a call with argument ARG. */
9ed65c7f 7529
7530static tree
389dd41b 7531fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
9ed65c7f 7532{
9ed65c7f 7533 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9ed65c7f 7534
c2f47e15 7535 if (!validate_arg (arg, REAL_TYPE))
7536 return NULL_TREE;
9ed65c7f 7537
7538 /* If argument is already integer valued, and we don't need to worry
7539 about setting errno, there's no need to perform rounding. */
7540 if (! flag_errno_math && integer_valued_real_p (arg))
389dd41b 7541 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7542 TREE_TYPE (TREE_TYPE (fndecl)), arg);
9ed65c7f 7543
7544 if (optimize)
7545 {
7546 tree ftype = TREE_TYPE (arg);
7547 tree arg0 = strip_float_extensions (arg);
7548 tree newtype = TREE_TYPE (arg0);
7549 tree decl;
7550
7551 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7552 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7553 return build_call_expr_loc (loc, decl, 1,
7554 fold_convert_loc (loc, newtype, arg0));
9ed65c7f 7555 }
73a0da56 7556
80ff6494 7557 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7558 sizeof (int) == sizeof (long). */
7559 if (TYPE_PRECISION (integer_type_node)
7560 == TYPE_PRECISION (long_integer_type_node))
7561 {
7562 tree newfn = NULL_TREE;
7563 switch (fcode)
7564 {
7565 CASE_FLT_FN (BUILT_IN_ICEIL):
7566 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7567 break;
7568
7569 CASE_FLT_FN (BUILT_IN_IFLOOR):
7570 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7571 break;
7572
7573 CASE_FLT_FN (BUILT_IN_IROUND):
7574 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7575 break;
7576
7577 CASE_FLT_FN (BUILT_IN_IRINT):
7578 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7579 break;
7580
7581 default:
7582 break;
7583 }
7584
7585 if (newfn)
7586 {
7587 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7588 return fold_convert_loc (loc,
7589 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7590 }
7591 }
7592
73a0da56 7593 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7594 sizeof (long long) == sizeof (long). */
7595 if (TYPE_PRECISION (long_long_integer_type_node)
7596 == TYPE_PRECISION (long_integer_type_node))
7597 {
7598 tree newfn = NULL_TREE;
7599 switch (fcode)
7600 {
7601 CASE_FLT_FN (BUILT_IN_LLCEIL):
7602 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7603 break;
7604
7605 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7606 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7607 break;
7608
7609 CASE_FLT_FN (BUILT_IN_LLROUND):
7610 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7611 break;
7612
7613 CASE_FLT_FN (BUILT_IN_LLRINT):
7614 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7615 break;
7616
7617 default:
7618 break;
7619 }
7620
7621 if (newfn)
7622 {
389dd41b 7623 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7624 return fold_convert_loc (loc,
7625 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
73a0da56 7626 }
7627 }
7628
c2f47e15 7629 return NULL_TREE;
9ed65c7f 7630}
7631
c2f47e15 7632/* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7633 return type. Return NULL_TREE if no simplification can be made. */
c63f4ad3 7634
7635static tree
389dd41b 7636fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
c63f4ad3 7637{
c2f47e15 7638 tree res;
c63f4ad3 7639
b0ce8887 7640 if (!validate_arg (arg, COMPLEX_TYPE)
c63f4ad3 7641 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7642 return NULL_TREE;
7643
b4725390 7644 /* Calculate the result when the argument is a constant. */
7645 if (TREE_CODE (arg) == COMPLEX_CST
7646 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7647 type, mpfr_hypot)))
7648 return res;
48e1416a 7649
1af0d139 7650 if (TREE_CODE (arg) == COMPLEX_EXPR)
7651 {
7652 tree real = TREE_OPERAND (arg, 0);
7653 tree imag = TREE_OPERAND (arg, 1);
48e1416a 7654
1af0d139 7655 /* If either part is zero, cabs is fabs of the other. */
7656 if (real_zerop (real))
389dd41b 7657 return fold_build1_loc (loc, ABS_EXPR, type, imag);
1af0d139 7658 if (real_zerop (imag))
389dd41b 7659 return fold_build1_loc (loc, ABS_EXPR, type, real);
1af0d139 7660
7661 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7662 if (flag_unsafe_math_optimizations
7663 && operand_equal_p (real, imag, OEP_PURE_SAME))
7664 {
2e7ca27b 7665 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 7666 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
1af0d139 7667 STRIP_NOPS (real);
389dd41b 7668 return fold_build2_loc (loc, MULT_EXPR, type,
7669 fold_build1_loc (loc, ABS_EXPR, type, real),
2e7ca27b 7670 build_real (type, sqrt2_trunc));
1af0d139 7671 }
7672 }
c63f4ad3 7673
749891b2 7674 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7675 if (TREE_CODE (arg) == NEGATE_EXPR
7676 || TREE_CODE (arg) == CONJ_EXPR)
389dd41b 7677 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
749891b2 7678
7d3f6cc7 7679 /* Don't do this when optimizing for size. */
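     /* The expansion below open-codes cabs (z) as sqrt (x*x + y*y) with
	x = creal (z) and y = cimag (z), which is larger than a library
	call; hence the size check above.  */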
7680 if (flag_unsafe_math_optimizations
0bfd8d5c 7681 && optimize && optimize_function_for_speed_p (cfun))
c63f4ad3 7682 {
0da0dbfa 7683 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
c63f4ad3 7684
7685 if (sqrtfn != NULL_TREE)
7686 {
c2f47e15 7687 tree rpart, ipart, result;
c63f4ad3 7688
4ee9c684 7689 arg = builtin_save_expr (arg);
29a6518e 7690
389dd41b 7691 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7692 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
c63f4ad3 7693
4ee9c684 7694 rpart = builtin_save_expr (rpart);
7695 ipart = builtin_save_expr (ipart);
c63f4ad3 7696
389dd41b 7697 result = fold_build2_loc (loc, PLUS_EXPR, type,
7698 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7699 rpart, rpart),
389dd41b 7700 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7701 ipart, ipart));
c63f4ad3 7702
389dd41b 7703 return build_call_expr_loc (loc, sqrtfn, 1, result);
c63f4ad3 7704 }
7705 }
7706
7707 return NULL_TREE;
7708}
7709
c2373fdb 7710/* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7711 complex tree type of the result. If NEG is true, the imaginary
7712 zero is negative. */
7713
7714static tree
7715build_complex_cproj (tree type, bool neg)
7716{
7717 REAL_VALUE_TYPE rinf, rzero = dconst0;
7718
7719 real_inf (&rinf);
7720 rzero.sign = neg;
7721 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7722 build_real (TREE_TYPE (type), rzero));
7723}
7724
7725/* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7726 return type. Return NULL_TREE if no simplification can be made. */
7727
7728static tree
7729fold_builtin_cproj (location_t loc, tree arg, tree type)
7730{
7731 if (!validate_arg (arg, COMPLEX_TYPE)
7732 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7733 return NULL_TREE;
7734
7735 /* If there are no infinities, return arg. */
fe994837 7736 if (! HONOR_INFINITIES (type))
c2373fdb 7737 return non_lvalue_loc (loc, arg);
7738
7739 /* Calculate the result when the argument is a constant. */
7740 if (TREE_CODE (arg) == COMPLEX_CST)
7741 {
7742 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7743 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7744
7745 if (real_isinf (real) || real_isinf (imag))
7746 return build_complex_cproj (type, imag->sign);
7747 else
7748 return arg;
7749 }
b4c7e601 7750 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7751 {
7752 tree real = TREE_OPERAND (arg, 0);
7753 tree imag = TREE_OPERAND (arg, 1);
7754
7755 STRIP_NOPS (real);
7756 STRIP_NOPS (imag);
7757
7758 /* If the real part is inf and the imag part is known to be
7759 nonnegative, return (inf + 0i). Remember side-effects are
7760 possible in the imag part. */
7761 if (TREE_CODE (real) == REAL_CST
7762 && real_isinf (TREE_REAL_CST_PTR (real))
7763 && tree_expr_nonnegative_p (imag))
7764 return omit_one_operand_loc (loc, type,
7765 build_complex_cproj (type, false),
7766 arg);
7767
7768 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7769 Remember side-effects are possible in the real part. */
7770 if (TREE_CODE (imag) == REAL_CST
7771 && real_isinf (TREE_REAL_CST_PTR (imag)))
7772 return
7773 omit_one_operand_loc (loc, type,
7774 build_complex_cproj (type, TREE_REAL_CST_PTR
7775 (imag)->sign), arg);
7776 }
c2373fdb 7777
7778 return NULL_TREE;
7779}
7780
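/* Editorial sketch, not part of the original builtins.c: what
   build_complex_cproj and fold_builtin_cproj implement, namely the C99
   cproj semantics.  Finite arguments come back unchanged; any infinite
   part collapses the value to (+Inf, copysign (0, imag)).  Standalone
   C99 program, values arbitrary.  */
#include <stdio.h>
#include <math.h>
#include <complex.h>

int
main (void)
{
  double complex finite = 1.0 + 2.0 * I;
  double complex zinf = INFINITY - 2.0 * I;
  printf ("%g%+gi\n", creal (cproj (finite)), cimag (cproj (finite))); /* 1+2i */
  printf ("%g%+gi\n", creal (cproj (zinf)), cimag (cproj (zinf)));     /* inf-0i */
  return 0;
}
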
c2f47e15 7781/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7782 Return NULL_TREE if no simplification can be made. */
e6e27594 7783
7784static tree
389dd41b 7785fold_builtin_sqrt (location_t loc, tree arg, tree type)
e6e27594 7786{
7787
7788 enum built_in_function fcode;
b4e8ab0c 7789 tree res;
c2f47e15 7790
7791 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7792 return NULL_TREE;
7793
b4e8ab0c 7794 /* Calculate the result when the argument is a constant. */
7795 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7796 return res;
48e1416a 7797
e6e27594 7798 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7799 fcode = builtin_mathfn_code (arg);
7800 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7801 {
c2f47e15 7802 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
389dd41b 7803 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7804 CALL_EXPR_ARG (arg, 0),
49d00087 7805 build_real (type, dconsthalf));
389dd41b 7806 return build_call_expr_loc (loc, expfn, 1, arg);
e6e27594 7807 }
7808
7809 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7810 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7811 {
7812 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7813
7814 if (powfn)
7815 {
c2f47e15 7816 tree arg0 = CALL_EXPR_ARG (arg, 0);
e6e27594 7817 tree tree_root;
7818 /* The inner root was either sqrt or cbrt. */
57510da6 7819 /* This was a conditional expression but it triggered a bug
18381619 7820 in Sun C 5.5. */
ce6cd837 7821 REAL_VALUE_TYPE dconstroot;
7822 if (BUILTIN_SQRT_P (fcode))
7823 dconstroot = dconsthalf;
7824 else
7825 dconstroot = dconst_third ();
e6e27594 7826
7827 /* Adjust for the outer root. */
7828 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7829 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7830 tree_root = build_real (type, dconstroot);
389dd41b 7831 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
e6e27594 7832 }
7833 }
7834
bc33117f 7835 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
e6e27594 7836 if (flag_unsafe_math_optimizations
7837 && (fcode == BUILT_IN_POW
7838 || fcode == BUILT_IN_POWF
7839 || fcode == BUILT_IN_POWL))
7840 {
c2f47e15 7841 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7842 tree arg0 = CALL_EXPR_ARG (arg, 0);
7843 tree arg1 = CALL_EXPR_ARG (arg, 1);
bc33117f 7844 tree narg1;
7845 if (!tree_expr_nonnegative_p (arg0))
7846 arg0 = build1 (ABS_EXPR, type, arg0);
389dd41b 7847 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 7848 build_real (type, dconsthalf));
389dd41b 7849 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
e6e27594 7850 }
7851
7852 return NULL_TREE;
7853}
7854
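/* Editorial sketch, not part of the original builtins.c: the
   -funsafe-math-optimizations identities behind fold_builtin_sqrt,
   checked numerically.  Standalone C99 program; the two sides agree only
   up to rounding, which is exactly why the flag is required.  */
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 1.7, y = 3.25;
  printf ("%g %g\n", sqrt (exp (x)), exp (x * 0.5));        /* sqrt(expN(x)) */
  printf ("%g %g\n", sqrt (cbrt (x)), pow (x, 1.0 / 6.0));  /* sqrt(Nroot(x)) */
  printf ("%g %g\n", sqrt (pow (x, y)),
          pow (fabs (x), y * 0.5));   /* the fabs matters when x may be negative */
  return 0;
}
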
c2f47e15 7855/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7856 Return NULL_TREE if no simplification can be made. */
7857
e6e27594 7858static tree
389dd41b 7859fold_builtin_cbrt (location_t loc, tree arg, tree type)
e6e27594 7860{
e6e27594 7861 const enum built_in_function fcode = builtin_mathfn_code (arg);
29f4cd78 7862 tree res;
e6e27594 7863
c2f47e15 7864 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7865 return NULL_TREE;
7866
29f4cd78 7867 /* Calculate the result when the argument is a constant. */
7868 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7869 return res;
e6e27594 7870
cdfeb715 7871 if (flag_unsafe_math_optimizations)
e6e27594 7872 {
cdfeb715 7873 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7874 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 7875 {
c2f47e15 7876 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7877 const REAL_VALUE_TYPE third_trunc =
7910b2fb 7878 real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7879 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7880 CALL_EXPR_ARG (arg, 0),
49d00087 7881 build_real (type, third_trunc));
389dd41b 7882 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 7883 }
e6e27594 7884
cdfeb715 7885 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7886 if (BUILTIN_SQRT_P (fcode))
a0c938f0 7887 {
cdfeb715 7888 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
e6e27594 7889
cdfeb715 7890 if (powfn)
7891 {
c2f47e15 7892 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7893 tree tree_root;
7910b2fb 7894 REAL_VALUE_TYPE dconstroot = dconst_third ();
cdfeb715 7895
7896 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7897 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7898 tree_root = build_real (type, dconstroot);
389dd41b 7899 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7900 }
e6e27594 7901 }
7902
cdfeb715 7903 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7904 if (BUILTIN_CBRT_P (fcode))
a0c938f0 7905 {
c2f47e15 7906 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7907 if (tree_expr_nonnegative_p (arg0))
7908 {
7909 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7910
7911 if (powfn)
a0c938f0 7912 {
cdfeb715 7913 tree tree_root;
7914 REAL_VALUE_TYPE dconstroot;
a0c938f0 7915
3fa759a9 7916 real_arithmetic (&dconstroot, MULT_EXPR,
7910b2fb 7917 dconst_third_ptr (), dconst_third_ptr ());
cdfeb715 7918 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7919 tree_root = build_real (type, dconstroot);
389dd41b 7920 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7921 }
7922 }
7923 }
a0c938f0 7924
cdfeb715 7925 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
48e1416a 7926 if (fcode == BUILT_IN_POW
c2f47e15 7927 || fcode == BUILT_IN_POWF
cdfeb715 7928 || fcode == BUILT_IN_POWL)
a0c938f0 7929 {
c2f47e15 7930 tree arg00 = CALL_EXPR_ARG (arg, 0);
7931 tree arg01 = CALL_EXPR_ARG (arg, 1);
cdfeb715 7932 if (tree_expr_nonnegative_p (arg00))
7933 {
c2f47e15 7934 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7935 const REAL_VALUE_TYPE dconstroot
7910b2fb 7936 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7937 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
49d00087 7938 build_real (type, dconstroot));
389dd41b 7939 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
cdfeb715 7940 }
7941 }
e6e27594 7942 }
7943 return NULL_TREE;
7944}
7945
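/* Editorial sketch, not part of the original builtins.c: the cbrt
   rewrites above, as a standalone C99 program.  They are exact only in
   real arithmetic, hence the -funsafe-math-optimizations guard (plus the
   nonnegativity checks noted in the code).  */
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 2.6, y = 1.75;
  printf ("%g %g\n", cbrt (exp (x)), exp (x / 3.0));
  printf ("%g %g\n", cbrt (sqrt (x)), pow (x, 1.0 / 6.0));
  printf ("%g %g\n", cbrt (cbrt (x)), pow (x, 1.0 / 9.0));   /* x >= 0 */
  printf ("%g %g\n", cbrt (pow (x, y)), pow (x, y / 3.0));   /* x >= 0 */
  return 0;
}
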
c2f47e15 7946/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7947 TYPE is the type of the return value. Return NULL_TREE if no
7948 simplification can be made. */
7949
e6e27594 7950static tree
389dd41b 7951fold_builtin_cos (location_t loc,
7952 tree arg, tree type, tree fndecl)
e6e27594 7953{
e6ab33d8 7954 tree res, narg;
e6e27594 7955
c2f47e15 7956 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7957 return NULL_TREE;
7958
bffb7645 7959 /* Calculate the result when the argument is a constant. */
728bac60 7960 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
bffb7645 7961 return res;
48e1416a 7962
e6e27594 7963 /* Optimize cos(-x) into cos (x). */
e6ab33d8 7964 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7965 return build_call_expr_loc (loc, fndecl, 1, narg);
e6e27594 7966
7967 return NULL_TREE;
7968}
7969
c2f47e15 7970/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7971 Return NULL_TREE if no simplification can be made. */
7972
cacdc1af 7973static tree
389dd41b 7974fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
cacdc1af 7975{
c2f47e15 7976 if (validate_arg (arg, REAL_TYPE))
cacdc1af 7977 {
cacdc1af 7978 tree res, narg;
7979
7980 /* Calculate the result when the argument is a constant. */
7981 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7982 return res;
48e1416a 7983
cacdc1af 7984 /* Optimize cosh(-x) into cosh (x). */
7985 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7986 return build_call_expr_loc (loc, fndecl, 1, narg);
cacdc1af 7987 }
48e1416a 7988
cacdc1af 7989 return NULL_TREE;
7990}
7991
239d491a 7992/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7993 argument ARG. TYPE is the type of the return value. Return
7994 NULL_TREE if no simplification can be made. */
7995
7996static tree
965d0f29 7997fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7998 bool hyper)
239d491a 7999{
8000 if (validate_arg (arg, COMPLEX_TYPE)
8001 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8002 {
8003 tree tmp;
8004
239d491a 8005 /* Calculate the result when the argument is a constant. */
8006 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
8007 return tmp;
48e1416a 8008
239d491a 8009 /* Optimize fn(-x) into fn(x). */
8010 if ((tmp = fold_strip_sign_ops (arg)))
389dd41b 8011 return build_call_expr_loc (loc, fndecl, 1, tmp);
239d491a 8012 }
8013
8014 return NULL_TREE;
8015}
8016
c2f47e15 8017/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
8018 Return NULL_TREE if no simplification can be made. */
8019
e6e27594 8020static tree
c2f47e15 8021fold_builtin_tan (tree arg, tree type)
e6e27594 8022{
8023 enum built_in_function fcode;
29f4cd78 8024 tree res;
e6e27594 8025
c2f47e15 8026 if (!validate_arg (arg, REAL_TYPE))
e6e27594 8027 return NULL_TREE;
8028
bffb7645 8029 /* Calculate the result when the argument is a constant. */
728bac60 8030 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
bffb7645 8031 return res;
48e1416a 8032
e6e27594 8033 /* Optimize tan(atan(x)) = x. */
8034 fcode = builtin_mathfn_code (arg);
8035 if (flag_unsafe_math_optimizations
8036 && (fcode == BUILT_IN_ATAN
8037 || fcode == BUILT_IN_ATANF
8038 || fcode == BUILT_IN_ATANL))
c2f47e15 8039 return CALL_EXPR_ARG (arg, 0);
e6e27594 8040
8041 return NULL_TREE;
8042}
8043
d735c391 8044/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8045 NULL_TREE if no simplification can be made. */
8046
8047static tree
389dd41b 8048fold_builtin_sincos (location_t loc,
8049 tree arg0, tree arg1, tree arg2)
d735c391 8050{
c2f47e15 8051 tree type;
d735c391 8052 tree res, fn, call;
8053
c2f47e15 8054 if (!validate_arg (arg0, REAL_TYPE)
8055 || !validate_arg (arg1, POINTER_TYPE)
8056 || !validate_arg (arg2, POINTER_TYPE))
d735c391 8057 return NULL_TREE;
8058
d735c391 8059 type = TREE_TYPE (arg0);
d735c391 8060
8061 /* Calculate the result when the argument is a constant. */
8062 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8063 return res;
8064
8065 /* Canonicalize sincos to cexpi. */
30f690e0 8066 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8067 return NULL_TREE;
d735c391 8068 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8069 if (!fn)
8070 return NULL_TREE;
8071
389dd41b 8072 call = build_call_expr_loc (loc, fn, 1, arg0);
d735c391 8073 call = builtin_save_expr (call);
8074
a75b1c71 8075 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 8076 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8077 build_fold_indirect_ref_loc (loc, arg1),
d735c391 8078 build1 (IMAGPART_EXPR, type, call)),
8079 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8080 build_fold_indirect_ref_loc (loc, arg2),
d735c391 8081 build1 (REALPART_EXPR, type, call)));
8082}
8083
c5bb2c4b 8084/* Fold function call to builtin cexp, cexpf, or cexpl. Return
8085 NULL_TREE if no simplification can be made. */
8086
8087static tree
389dd41b 8088fold_builtin_cexp (location_t loc, tree arg0, tree type)
c5bb2c4b 8089{
c2f47e15 8090 tree rtype;
c5bb2c4b 8091 tree realp, imagp, ifn;
239d491a 8092 tree res;
c5bb2c4b 8093
239d491a 8094 if (!validate_arg (arg0, COMPLEX_TYPE)
b0ce8887 8095 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
c5bb2c4b 8096 return NULL_TREE;
8097
239d491a 8098 /* Calculate the result when the argument is a constant. */
8099 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8100 return res;
48e1416a 8101
c5bb2c4b 8102 rtype = TREE_TYPE (TREE_TYPE (arg0));
8103
8104 /* If we can determine that the real part of arg0 is constant zero,
8105 fold to cexpi. */
30f690e0 8106 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8107 return NULL_TREE;
c5bb2c4b 8108 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8109 if (!ifn)
8110 return NULL_TREE;
8111
389dd41b 8112 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
c5bb2c4b 8113 && real_zerop (realp))
8114 {
389dd41b 8115 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8116 return build_call_expr_loc (loc, ifn, 1, narg);
c5bb2c4b 8117 }
8118
8119 /* If we can easily decompose the real and imaginary parts, split cexp
8120 into exp (r) * cexpi (i). */
8121 if (flag_unsafe_math_optimizations
8122 && realp)
8123 {
8124 tree rfn, rcall, icall;
8125
8126 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8127 if (!rfn)
8128 return NULL_TREE;
8129
389dd41b 8130 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
c5bb2c4b 8131 if (!imagp)
8132 return NULL_TREE;
8133
389dd41b 8134 icall = build_call_expr_loc (loc, ifn, 1, imagp);
c5bb2c4b 8135 icall = builtin_save_expr (icall);
389dd41b 8136 rcall = build_call_expr_loc (loc, rfn, 1, realp);
c5bb2c4b 8137 rcall = builtin_save_expr (rcall);
389dd41b 8138 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8139 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8140 rcall,
389dd41b 8141 fold_build1_loc (loc, REALPART_EXPR,
8142 rtype, icall)),
8143 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8144 rcall,
389dd41b 8145 fold_build1_loc (loc, IMAGPART_EXPR,
8146 rtype, icall)));
c5bb2c4b 8147 }
8148
8149 return NULL_TREE;
8150}
8151
c2f47e15 8152/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8153 Return NULL_TREE if no simplification can be made. */
277f8dd2 8154
8155static tree
389dd41b 8156fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
277f8dd2 8157{
c2f47e15 8158 if (!validate_arg (arg, REAL_TYPE))
8159 return NULL_TREE;
277f8dd2 8160
8161 /* Optimize trunc of constant value. */
f96bd2bf 8162 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8163 {
8164 REAL_VALUE_TYPE r, x;
2426241c 8165 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8166
8167 x = TREE_REAL_CST (arg);
8168 real_trunc (&r, TYPE_MODE (type), &x);
8169 return build_real (type, r);
8170 }
8171
389dd41b 8172 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8173}
8174
c2f47e15 8175/* Fold function call to builtin floor, floorf or floorl with argument ARG.
8176 Return NULL_TREE if no simplification can be made. */
277f8dd2 8177
8178static tree
389dd41b 8179fold_builtin_floor (location_t loc, tree fndecl, tree arg)
277f8dd2 8180{
c2f47e15 8181 if (!validate_arg (arg, REAL_TYPE))
8182 return NULL_TREE;
277f8dd2 8183
8184 /* Optimize floor of constant value. */
f96bd2bf 8185 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8186 {
8187 REAL_VALUE_TYPE x;
8188
8189 x = TREE_REAL_CST (arg);
8190 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8191 {
2426241c 8192 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8193 REAL_VALUE_TYPE r;
8194
8195 real_floor (&r, TYPE_MODE (type), &x);
8196 return build_real (type, r);
8197 }
8198 }
8199
acc2b92e 8200 /* Fold floor (x) where x is nonnegative to trunc (x). */
8201 if (tree_expr_nonnegative_p (arg))
30fe8286 8202 {
8203 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8204 if (truncfn)
389dd41b 8205 return build_call_expr_loc (loc, truncfn, 1, arg);
30fe8286 8206 }
acc2b92e 8207
389dd41b 8208 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8209}
8210
c2f47e15 8211/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8212 Return NULL_TREE if no simplification can be made. */
277f8dd2 8213
8214static tree
389dd41b 8215fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
277f8dd2 8216{
c2f47e15 8217 if (!validate_arg (arg, REAL_TYPE))
8218 return NULL_TREE;
277f8dd2 8219
8220 /* Optimize ceil of constant value. */
f96bd2bf 8221 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8222 {
8223 REAL_VALUE_TYPE x;
8224
8225 x = TREE_REAL_CST (arg);
8226 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8227 {
2426241c 8228 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8229 REAL_VALUE_TYPE r;
8230
8231 real_ceil (&r, TYPE_MODE (type), &x);
8232 return build_real (type, r);
8233 }
8234 }
8235
389dd41b 8236 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8237}
8238
c2f47e15 8239/* Fold function call to builtin round, roundf or roundl with argument ARG.
8240 Return NULL_TREE if no simplification can be made. */
89ab3887 8241
8242static tree
389dd41b 8243fold_builtin_round (location_t loc, tree fndecl, tree arg)
89ab3887 8244{
c2f47e15 8245 if (!validate_arg (arg, REAL_TYPE))
8246 return NULL_TREE;
89ab3887 8247
34f17811 8248 /* Optimize round of constant value. */
f96bd2bf 8249 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
89ab3887 8250 {
8251 REAL_VALUE_TYPE x;
8252
8253 x = TREE_REAL_CST (arg);
8254 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8255 {
2426241c 8256 tree type = TREE_TYPE (TREE_TYPE (fndecl));
89ab3887 8257 REAL_VALUE_TYPE r;
8258
8259 real_round (&r, TYPE_MODE (type), &x);
8260 return build_real (type, r);
8261 }
8262 }
8263
389dd41b 8264 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
89ab3887 8265}
8266
34f17811 8267/* Fold function call to builtin lround, lroundf or lroundl (or the
c2f47e15 8268 corresponding long long versions) and other rounding functions. ARG
8269 is the argument to the call. Return NULL_TREE if no simplification
8270 can be made. */
34f17811 8271
8272static tree
389dd41b 8273fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
34f17811 8274{
c2f47e15 8275 if (!validate_arg (arg, REAL_TYPE))
8276 return NULL_TREE;
34f17811 8277
8278 /* Optimize lround of constant value. */
f96bd2bf 8279 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
34f17811 8280 {
8281 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8282
776a7bab 8283 if (real_isfinite (&x))
34f17811 8284 {
2426241c 8285 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
ca9b061d 8286 tree ftype = TREE_TYPE (arg);
34f17811 8287 REAL_VALUE_TYPE r;
e913b5cd 8288 bool fail = false;
34f17811 8289
ad52b9b7 8290 switch (DECL_FUNCTION_CODE (fndecl))
8291 {
80ff6494 8292 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 8293 CASE_FLT_FN (BUILT_IN_LFLOOR):
8294 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 8295 real_floor (&r, TYPE_MODE (ftype), &x);
8296 break;
8297
80ff6494 8298 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 8299 CASE_FLT_FN (BUILT_IN_LCEIL):
8300 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 8301 real_ceil (&r, TYPE_MODE (ftype), &x);
8302 break;
8303
80ff6494 8304 CASE_FLT_FN (BUILT_IN_IROUND):
4f35b1fc 8305 CASE_FLT_FN (BUILT_IN_LROUND):
8306 CASE_FLT_FN (BUILT_IN_LLROUND):
ad52b9b7 8307 real_round (&r, TYPE_MODE (ftype), &x);
8308 break;
8309
8310 default:
8311 gcc_unreachable ();
8312 }
8313
ab2c1de8 8314 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
e913b5cd 8315 if (!fail)
8316 return wide_int_to_tree (itype, val);
34f17811 8317 }
8318 }
8319
acc2b92e 8320 switch (DECL_FUNCTION_CODE (fndecl))
8321 {
8322 CASE_FLT_FN (BUILT_IN_LFLOOR):
8323 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8324 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8325 if (tree_expr_nonnegative_p (arg))
389dd41b 8326 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8327 TREE_TYPE (TREE_TYPE (fndecl)), arg);
acc2b92e 8328 break;
8329 default:;
8330 }
8331
389dd41b 8332 return fold_fixed_mathfn (loc, fndecl, arg);
34f17811 8333}
8334
70fb4c07 8335/* Fold function call to builtin ffs, clz, ctz, popcount and parity
c2f47e15 8336 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8337 the argument to the call. Return NULL_TREE if no simplification can
8338 be made. */
70fb4c07 8339
8340static tree
c2f47e15 8341fold_builtin_bitop (tree fndecl, tree arg)
70fb4c07 8342{
c2f47e15 8343 if (!validate_arg (arg, INTEGER_TYPE))
70fb4c07 8344 return NULL_TREE;
8345
8346 /* Optimize for constant argument. */
f96bd2bf 8347 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
70fb4c07 8348 {
1cee90ad 8349 tree type = TREE_TYPE (arg);
796b6678 8350 int result;
70fb4c07 8351
8352 switch (DECL_FUNCTION_CODE (fndecl))
8353 {
4f35b1fc 8354 CASE_INT_FN (BUILT_IN_FFS):
ab2c1de8 8355 result = wi::ffs (arg);
70fb4c07 8356 break;
8357
4f35b1fc 8358 CASE_INT_FN (BUILT_IN_CLZ):
1cee90ad 8359 if (wi::ne_p (arg, 0))
8360 result = wi::clz (arg);
8361 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8362 result = TYPE_PRECISION (type);
70fb4c07 8363 break;
8364
4f35b1fc 8365 CASE_INT_FN (BUILT_IN_CTZ):
1cee90ad 8366 if (wi::ne_p (arg, 0))
8367 result = wi::ctz (arg);
8368 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8369 result = TYPE_PRECISION (type);
70fb4c07 8370 break;
8371
6aaa1f9e 8372 CASE_INT_FN (BUILT_IN_CLRSB):
ab2c1de8 8373 result = wi::clrsb (arg);
6aaa1f9e 8374 break;
8375
4f35b1fc 8376 CASE_INT_FN (BUILT_IN_POPCOUNT):
ab2c1de8 8377 result = wi::popcount (arg);
70fb4c07 8378 break;
8379
4f35b1fc 8380 CASE_INT_FN (BUILT_IN_PARITY):
ab2c1de8 8381 result = wi::parity (arg);
70fb4c07 8382 break;
8383
8384 default:
64db345d 8385 gcc_unreachable ();
70fb4c07 8386 }
8387
796b6678 8388 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
70fb4c07 8389 }
8390
8391 return NULL_TREE;
8392}
8393
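/* Editorial sketch, not part of the original builtins.c: the constant
   folds fold_builtin_bitop performs, written out with the builtins
   themselves.  Standalone program; assumes a GCC-compatible compiler for
   the __builtin_* names.  Note that clz/ctz of zero are left alone above
   unless the target defines a value for them.  */
#include <assert.h>

int
main (void)
{
  assert (__builtin_ffs (0) == 0);                        /* no bit set */
  assert (__builtin_ffs (0x18) == 4);                     /* lowest set bit, 1-based */
  assert (__builtin_clz (1u) == (int) (8 * sizeof (unsigned)) - 1);
  assert (__builtin_ctz (0x18u) == 3);
  assert (__builtin_popcount (0xF0u) == 4);
  assert (__builtin_parity (0x7u) == 1);                  /* popcount mod 2 */
  return 0;
}
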
74bdbe96 8394/* Fold a call to builtin bswap16, bswap32 or bswap64 with argument ARG.
42791117 8395 Return NULL_TREE if no simplification can be made. */
8396static tree
c2f47e15 8397fold_builtin_bswap (tree fndecl, tree arg)
42791117 8398{
c2f47e15 8399 if (! validate_arg (arg, INTEGER_TYPE))
8400 return NULL_TREE;
42791117 8401
8402 /* Optimize constant value. */
f96bd2bf 8403 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
42791117 8404 {
74bdbe96 8405 tree type = TREE_TYPE (TREE_TYPE (fndecl));
42791117 8406
42791117 8407 switch (DECL_FUNCTION_CODE (fndecl))
8408 {
74bdbe96 8409 case BUILT_IN_BSWAP16:
42791117 8410 case BUILT_IN_BSWAP32:
8411 case BUILT_IN_BSWAP64:
8412 {
e913b5cd 8413 signop sgn = TYPE_SIGN (type);
ddb1be65 8414 tree result =
796b6678 8415 wide_int_to_tree (type,
8416 wide_int::from (arg, TYPE_PRECISION (type),
8417 sgn).bswap ());
e913b5cd 8418 return result;
42791117 8419 }
42791117 8420 default:
8421 gcc_unreachable ();
8422 }
42791117 8423 }
8424
8425 return NULL_TREE;
8426}
c2f47e15 8427
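/* Editorial sketch, not part of the original builtins.c: the byte-swap
   constant folding above, shown with the builtins it handles.
   Standalone program; assumes a GCC-compatible compiler.  */
#include <assert.h>

int
main (void)
{
  assert (__builtin_bswap16 (0x1234) == 0x3412);
  assert (__builtin_bswap32 (0x12345678u) == 0x78563412u);
  assert (__builtin_bswap64 (0x0102030405060708ull) == 0x0807060504030201ull);
  return 0;
}
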
f0c477f2 8428/* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8429 NULL_TREE if no simplification can be made. */
8430
8431static tree
389dd41b 8432fold_builtin_hypot (location_t loc, tree fndecl,
8433 tree arg0, tree arg1, tree type)
f0c477f2 8434{
e6ab33d8 8435 tree res, narg0, narg1;
f0c477f2 8436
c2f47e15 8437 if (!validate_arg (arg0, REAL_TYPE)
8438 || !validate_arg (arg1, REAL_TYPE))
f0c477f2 8439 return NULL_TREE;
8440
8441 /* Calculate the result when the argument is a constant. */
8442 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8443 return res;
48e1416a 8444
6c95f21c 8445 /* If either argument to hypot has a negate or abs, strip that off.
8446 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
e6ab33d8 8447 narg0 = fold_strip_sign_ops (arg0);
8448 narg1 = fold_strip_sign_ops (arg1);
8449 if (narg0 || narg1)
8450 {
48e1416a 8451 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
c2f47e15 8452 narg1 ? narg1 : arg1);
6c95f21c 8453 }
48e1416a 8454
f0c477f2 8455 /* If either argument is zero, hypot is fabs of the other. */
8456 if (real_zerop (arg0))
389dd41b 8457 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
f0c477f2 8458 else if (real_zerop (arg1))
389dd41b 8459 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
48e1416a 8460
6c95f21c 8461 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8462 if (flag_unsafe_math_optimizations
8463 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
f0c477f2 8464 {
2e7ca27b 8465 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 8466 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
389dd41b 8467 return fold_build2_loc (loc, MULT_EXPR, type,
8468 fold_build1_loc (loc, ABS_EXPR, type, arg0),
2e7ca27b 8469 build_real (type, sqrt2_trunc));
f0c477f2 8470 }
8471
f0c477f2 8472 return NULL_TREE;
8473}
8474
8475
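/* Editorial sketch, not part of the original builtins.c: the hypot
   simplifications above, checked numerically in a standalone C99
   program.  The sqrt(2) form is the -funsafe-math-optimizations case.  */
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = -3.0, y = 4.0;
  printf ("%g %g\n", hypot (-x, fabs (y)), hypot (x, y));   /* sign ops stripped */
  printf ("%g %g\n", hypot (0.0, y), fabs (y));             /* one argument zero */
  printf ("%g %g\n", hypot (x, x), fabs (x) * sqrt (2.0));  /* equal arguments */
  return 0;
}
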
e6e27594 8476/* Fold a builtin function call to pow, powf, or powl. Return
8477 NULL_TREE if no simplification can be made. */
8478static tree
389dd41b 8479fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
e6e27594 8480{
f0c477f2 8481 tree res;
e6e27594 8482
c2f47e15 8483 if (!validate_arg (arg0, REAL_TYPE)
8484 || !validate_arg (arg1, REAL_TYPE))
e6e27594 8485 return NULL_TREE;
8486
f0c477f2 8487 /* Calculate the result when the argument is a constant. */
8488 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8489 return res;
8490
e6e27594 8491 /* Optimize pow(1.0,y) = 1.0. */
8492 if (real_onep (arg0))
389dd41b 8493 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
e6e27594 8494
8495 if (TREE_CODE (arg1) == REAL_CST
f96bd2bf 8496 && !TREE_OVERFLOW (arg1))
e6e27594 8497 {
198d9bbe 8498 REAL_VALUE_TYPE cint;
e6e27594 8499 REAL_VALUE_TYPE c;
198d9bbe 8500 HOST_WIDE_INT n;
8501
e6e27594 8502 c = TREE_REAL_CST (arg1);
8503
8504 /* Optimize pow(x,0.0) = 1.0. */
8505 if (REAL_VALUES_EQUAL (c, dconst0))
389dd41b 8506 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
e6e27594 8507 arg0);
8508
8509 /* Optimize pow(x,1.0) = x. */
8510 if (REAL_VALUES_EQUAL (c, dconst1))
8511 return arg0;
8512
8513 /* Optimize pow(x,-1.0) = 1.0/x. */
8514 if (REAL_VALUES_EQUAL (c, dconstm1))
389dd41b 8515 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8516 build_real (type, dconst1), arg0);
e6e27594 8517
8518 /* Optimize pow(x,0.5) = sqrt(x). */
8519 if (flag_unsafe_math_optimizations
8520 && REAL_VALUES_EQUAL (c, dconsthalf))
8521 {
8522 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8523
8524 if (sqrtfn != NULL_TREE)
389dd41b 8525 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
e6e27594 8526 }
8527
feb5b3eb 8528 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8529 if (flag_unsafe_math_optimizations)
8530 {
8531 const REAL_VALUE_TYPE dconstroot
7910b2fb 8532 = real_value_truncate (TYPE_MODE (type), dconst_third ());
feb5b3eb 8533
8534 if (REAL_VALUES_EQUAL (c, dconstroot))
8535 {
8536 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8537 if (cbrtfn != NULL_TREE)
389dd41b 8538 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
feb5b3eb 8539 }
8540 }
8541
198d9bbe 8542 /* Check for an integer exponent. */
8543 n = real_to_integer (&c);
e913b5cd 8544 real_from_integer (&cint, VOIDmode, n, SIGNED);
198d9bbe 8545 if (real_identical (&c, &cint))
e6e27594 8546 {
a2b30b48 8547 /* Attempt to evaluate pow at compile-time, unless this should
8548 raise an exception. */
198d9bbe 8549 if (TREE_CODE (arg0) == REAL_CST
a2b30b48 8550 && !TREE_OVERFLOW (arg0)
8551 && (n > 0
8552 || (!flag_trapping_math && !flag_errno_math)
8553 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
e6e27594 8554 {
8555 REAL_VALUE_TYPE x;
8556 bool inexact;
8557
8558 x = TREE_REAL_CST (arg0);
8559 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8560 if (flag_unsafe_math_optimizations || !inexact)
8561 return build_real (type, x);
8562 }
198d9bbe 8563
8564 /* Strip sign ops from even integer powers. */
8565 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8566 {
8567 tree narg0 = fold_strip_sign_ops (arg0);
8568 if (narg0)
389dd41b 8569 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
198d9bbe 8570 }
e6e27594 8571 }
8572 }
8573
cdfeb715 8574 if (flag_unsafe_math_optimizations)
e6e27594 8575 {
cdfeb715 8576 const enum built_in_function fcode = builtin_mathfn_code (arg0);
e6e27594 8577
cdfeb715 8578 /* Optimize pow(expN(x),y) = expN(x*y). */
8579 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 8580 {
c2f47e15 8581 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8582 tree arg = CALL_EXPR_ARG (arg0, 0);
389dd41b 8583 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8584 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 8585 }
e6e27594 8586
cdfeb715 8587 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8588 if (BUILTIN_SQRT_P (fcode))
a0c938f0 8589 {
c2f47e15 8590 tree narg0 = CALL_EXPR_ARG (arg0, 0);
389dd41b 8591 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8592 build_real (type, dconsthalf));
389dd41b 8593 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
cdfeb715 8594 }
8595
8596 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8597 if (BUILTIN_CBRT_P (fcode))
a0c938f0 8598 {
c2f47e15 8599 tree arg = CALL_EXPR_ARG (arg0, 0);
cdfeb715 8600 if (tree_expr_nonnegative_p (arg))
8601 {
8602 const REAL_VALUE_TYPE dconstroot
7910b2fb 8603 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 8604 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8605 build_real (type, dconstroot));
389dd41b 8606 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
cdfeb715 8607 }
8608 }
a0c938f0 8609
49e436b5 8610 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
c2f47e15 8611 if (fcode == BUILT_IN_POW
8612 || fcode == BUILT_IN_POWF
8613 || fcode == BUILT_IN_POWL)
a0c938f0 8614 {
c2f47e15 8615 tree arg00 = CALL_EXPR_ARG (arg0, 0);
49e436b5 8616 if (tree_expr_nonnegative_p (arg00))
8617 {
8618 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8619 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8620 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8621 }
cdfeb715 8622 }
e6e27594 8623 }
cdfeb715 8624
e6e27594 8625 return NULL_TREE;
8626}
8627
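/* Editorial sketch, not part of the original builtins.c: the pow special
   cases folded above, as a standalone C99 program.  The rewrites through
   sqrt/cbrt and the pow-of-pow contraction are guarded in the code by
   -funsafe-math-optimizations and the nonnegativity checks shown there.  */
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double x = 6.25, y = 1.5;
  printf ("%g\n", pow (1.0, y));                            /* 1 */
  printf ("%g %g\n", pow (x, 0.0), pow (x, 1.0));           /* 1 and x */
  printf ("%g %g\n", pow (x, -1.0), 1.0 / x);               /* reciprocal */
  printf ("%g %g\n", pow (x, 0.5), sqrt (x));               /* 2.5 2.5 */
  printf ("%g %g\n", pow (pow (x, y), 2.0), pow (x, y * 2.0));
  return 0;
}
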
c2f47e15 8628/* Fold a builtin function call to powi, powif, or powil with argument ARG.
8629 Return NULL_TREE if no simplification can be made. */
b4d0c20c 8630static tree
389dd41b 8631fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
c2f47e15 8632 tree arg0, tree arg1, tree type)
b4d0c20c 8633{
c2f47e15 8634 if (!validate_arg (arg0, REAL_TYPE)
8635 || !validate_arg (arg1, INTEGER_TYPE))
b4d0c20c 8636 return NULL_TREE;
8637
8638 /* Optimize pow(1.0,y) = 1.0. */
8639 if (real_onep (arg0))
389dd41b 8640 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
b4d0c20c 8641
e913b5cd 8642 if (tree_fits_shwi_p (arg1))
b4d0c20c 8643 {
e913b5cd 8644 HOST_WIDE_INT c = tree_to_shwi (arg1);
b4d0c20c 8645
8646 /* Evaluate powi at compile-time. */
8647 if (TREE_CODE (arg0) == REAL_CST
f96bd2bf 8648 && !TREE_OVERFLOW (arg0))
b4d0c20c 8649 {
8650 REAL_VALUE_TYPE x;
8651 x = TREE_REAL_CST (arg0);
8652 real_powi (&x, TYPE_MODE (type), &x, c);
8653 return build_real (type, x);
8654 }
8655
8656 /* Optimize pow(x,0) = 1.0. */
8657 if (c == 0)
389dd41b 8658 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
b4d0c20c 8659 arg0);
8660
8661 /* Optimize pow(x,1) = x. */
8662 if (c == 1)
8663 return arg0;
8664
8665 /* Optimize pow(x,-1) = 1.0/x. */
8666 if (c == -1)
389dd41b 8667 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8668 build_real (type, dconst1), arg0);
b4d0c20c 8669 }
8670
8671 return NULL_TREE;
8672}
8673
8918c507 8674/* A subroutine of fold_builtin to fold the various exponent
c2f47e15 8675 functions. Return NULL_TREE if no simplification can be made.
debf9994 8676 FUNC is the corresponding MPFR exponent function. */
8918c507 8677
8678static tree
389dd41b 8679fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
debf9994 8680 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8918c507 8681{
c2f47e15 8682 if (validate_arg (arg, REAL_TYPE))
8918c507 8683 {
8918c507 8684 tree type = TREE_TYPE (TREE_TYPE (fndecl));
29f4cd78 8685 tree res;
48e1416a 8686
debf9994 8687 /* Calculate the result when the argument is a constant. */
728bac60 8688 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
debf9994 8689 return res;
8918c507 8690
8691 /* Optimize expN(logN(x)) = x. */
8692 if (flag_unsafe_math_optimizations)
a0c938f0 8693 {
8918c507 8694 const enum built_in_function fcode = builtin_mathfn_code (arg);
8695
debf9994 8696 if ((func == mpfr_exp
8918c507 8697 && (fcode == BUILT_IN_LOG
8698 || fcode == BUILT_IN_LOGF
8699 || fcode == BUILT_IN_LOGL))
debf9994 8700 || (func == mpfr_exp2
8918c507 8701 && (fcode == BUILT_IN_LOG2
8702 || fcode == BUILT_IN_LOG2F
8703 || fcode == BUILT_IN_LOG2L))
debf9994 8704 || (func == mpfr_exp10
8918c507 8705 && (fcode == BUILT_IN_LOG10
8706 || fcode == BUILT_IN_LOG10F
8707 || fcode == BUILT_IN_LOG10L)))
389dd41b 8708 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8918c507 8709 }
8710 }
8711
c2f47e15 8712 return NULL_TREE;
8918c507 8713}
8714
7959b13b 8715/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8716 arguments to the call, and TYPE is its return type.
8717 Return NULL_TREE if no simplification can be made. */
8718
8719static tree
389dd41b 8720fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7959b13b 8721{
8722 if (!validate_arg (arg1, POINTER_TYPE)
8723 || !validate_arg (arg2, INTEGER_TYPE)
8724 || !validate_arg (len, INTEGER_TYPE))
8725 return NULL_TREE;
8726 else
8727 {
8728 const char *p1;
8729
8730 if (TREE_CODE (arg2) != INTEGER_CST
e913b5cd 8731 || !tree_fits_uhwi_p (len))
7959b13b 8732 return NULL_TREE;
8733
8734 p1 = c_getstr (arg1);
8735 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8736 {
8737 char c;
8738 const char *r;
8739 tree tem;
8740
8741 if (target_char_cast (arg2, &c))
8742 return NULL_TREE;
8743
e913b5cd 8744 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7959b13b 8745
8746 if (r == NULL)
8747 return build_int_cst (TREE_TYPE (arg1), 0);
8748
2cc66f2a 8749 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
389dd41b 8750 return fold_convert_loc (loc, type, tem);
7959b13b 8751 }
8752 return NULL_TREE;
8753 }
8754}
8755
c2f47e15 8756/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8757 Return NULL_TREE if no simplification can be made. */
9c8a1629 8758
8759static tree
389dd41b 8760fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8761{
c4fef134 8762 const char *p1, *p2;
9c8a1629 8763
c2f47e15 8764 if (!validate_arg (arg1, POINTER_TYPE)
8765 || !validate_arg (arg2, POINTER_TYPE)
8766 || !validate_arg (len, INTEGER_TYPE))
8767 return NULL_TREE;
9c8a1629 8768
8769 /* If the LEN parameter is zero, return zero. */
8770 if (integer_zerop (len))
389dd41b 8771 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8772 arg1, arg2);
9c8a1629 8773
8774 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8775 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8776 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 8777
8778 p1 = c_getstr (arg1);
8779 p2 = c_getstr (arg2);
8780
8781 /* If all arguments are constant, and the value of len is not greater
8782 than the lengths of arg1 and arg2, evaluate at compile-time. */
e913b5cd 8783 if (tree_fits_uhwi_p (len) && p1 && p2
c4fef134 8784 && compare_tree_int (len, strlen (p1) + 1) <= 0
8785 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8786 {
e913b5cd 8787 const int r = memcmp (p1, p2, tree_to_uhwi (len));
c4fef134 8788
8789 if (r > 0)
8790 return integer_one_node;
8791 else if (r < 0)
8792 return integer_minus_one_node;
8793 else
8794 return integer_zero_node;
8795 }
8796
8797 /* If the len parameter is one, return an expression corresponding to
8798 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
e913b5cd 8799 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8800 {
8801 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8802 tree cst_uchar_ptr_node
8803 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8804
389dd41b 8805 tree ind1
8806 = fold_convert_loc (loc, integer_type_node,
8807 build1 (INDIRECT_REF, cst_uchar_node,
8808 fold_convert_loc (loc,
8809 cst_uchar_ptr_node,
c4fef134 8810 arg1)));
389dd41b 8811 tree ind2
8812 = fold_convert_loc (loc, integer_type_node,
8813 build1 (INDIRECT_REF, cst_uchar_node,
8814 fold_convert_loc (loc,
8815 cst_uchar_ptr_node,
c4fef134 8816 arg2)));
389dd41b 8817 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 8818 }
9c8a1629 8819
c2f47e15 8820 return NULL_TREE;
9c8a1629 8821}
8822
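/* Editorial sketch, not part of the original builtins.c: the length-one
   memcmp rewrite above is just the difference of the first bytes read as
   unsigned char.  Standalone program, arbitrary byte values.  */
#include <assert.h>
#include <string.h>

int
main (void)
{
  const char a[] = "\x90", b[] = "\x10";
  assert (memcmp (a, b, 1) > 0);   /* bytes compare as unsigned char */
  assert (*(const unsigned char *) a - *(const unsigned char *) b == 0x80);
  return 0;
}
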
c2f47e15 8823/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8824 Return NULL_TREE if no simplification can be made. */
9c8a1629 8825
8826static tree
389dd41b 8827fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9c8a1629 8828{
9c8a1629 8829 const char *p1, *p2;
8830
c2f47e15 8831 if (!validate_arg (arg1, POINTER_TYPE)
8832 || !validate_arg (arg2, POINTER_TYPE))
8833 return NULL_TREE;
9c8a1629 8834
8835 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8836 if (operand_equal_p (arg1, arg2, 0))
c4fef134 8837 return integer_zero_node;
9c8a1629 8838
8839 p1 = c_getstr (arg1);
8840 p2 = c_getstr (arg2);
8841
8842 if (p1 && p2)
8843 {
9c8a1629 8844 const int i = strcmp (p1, p2);
8845 if (i < 0)
c4fef134 8846 return integer_minus_one_node;
9c8a1629 8847 else if (i > 0)
c4fef134 8848 return integer_one_node;
9c8a1629 8849 else
c4fef134 8850 return integer_zero_node;
8851 }
8852
8853 /* If the second arg is "", return *(const unsigned char*)arg1. */
8854 if (p2 && *p2 == '\0')
8855 {
8856 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8857 tree cst_uchar_ptr_node
8858 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8859
389dd41b 8860 return fold_convert_loc (loc, integer_type_node,
8861 build1 (INDIRECT_REF, cst_uchar_node,
8862 fold_convert_loc (loc,
8863 cst_uchar_ptr_node,
8864 arg1)));
c4fef134 8865 }
8866
8867 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8868 if (p1 && *p1 == '\0')
8869 {
8870 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8871 tree cst_uchar_ptr_node
8872 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8873
389dd41b 8874 tree temp
8875 = fold_convert_loc (loc, integer_type_node,
8876 build1 (INDIRECT_REF, cst_uchar_node,
8877 fold_convert_loc (loc,
8878 cst_uchar_ptr_node,
c4fef134 8879 arg2)));
389dd41b 8880 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9c8a1629 8881 }
8882
c2f47e15 8883 return NULL_TREE;
9c8a1629 8884}
8885
c2f47e15 8886/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8887 Return NULL_TREE if no simplification can be made. */
9c8a1629 8888
8889static tree
389dd41b 8890fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8891{
9c8a1629 8892 const char *p1, *p2;
8893
c2f47e15 8894 if (!validate_arg (arg1, POINTER_TYPE)
8895 || !validate_arg (arg2, POINTER_TYPE)
8896 || !validate_arg (len, INTEGER_TYPE))
8897 return NULL_TREE;
9c8a1629 8898
8899 /* If the LEN parameter is zero, return zero. */
8900 if (integer_zerop (len))
389dd41b 8901 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8902 arg1, arg2);
9c8a1629 8903
8904 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8905 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8906 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9c8a1629 8907
8908 p1 = c_getstr (arg1);
8909 p2 = c_getstr (arg2);
8910
e913b5cd 8911 if (tree_fits_uhwi_p (len) && p1 && p2)
9c8a1629 8912 {
e913b5cd 8913 const int i = strncmp (p1, p2, tree_to_uhwi (len));
c4fef134 8914 if (i > 0)
8915 return integer_one_node;
8916 else if (i < 0)
8917 return integer_minus_one_node;
9c8a1629 8918 else
c4fef134 8919 return integer_zero_node;
8920 }
8921
8922 /* If the second arg is "", and the length is greater than zero,
8923 return *(const unsigned char*)arg1. */
8924 if (p2 && *p2 == '\0'
8925 && TREE_CODE (len) == INTEGER_CST
8926 && tree_int_cst_sgn (len) == 1)
8927 {
8928 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8929 tree cst_uchar_ptr_node
8930 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8931
389dd41b 8932 return fold_convert_loc (loc, integer_type_node,
8933 build1 (INDIRECT_REF, cst_uchar_node,
8934 fold_convert_loc (loc,
8935 cst_uchar_ptr_node,
8936 arg1)));
c4fef134 8937 }
8938
8939 /* If the first arg is "", and the length is greater than zero,
8940 return -*(const unsigned char*)arg2. */
8941 if (p1 && *p1 == '\0'
8942 && TREE_CODE (len) == INTEGER_CST
8943 && tree_int_cst_sgn (len) == 1)
8944 {
8945 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8946 tree cst_uchar_ptr_node
8947 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8948
389dd41b 8949 tree temp = fold_convert_loc (loc, integer_type_node,
8950 build1 (INDIRECT_REF, cst_uchar_node,
8951 fold_convert_loc (loc,
8952 cst_uchar_ptr_node,
8953 arg2)));
8954 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
c4fef134 8955 }
8956
8957 /* If the len parameter is one, return an expression corresponding to
8958 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
e913b5cd 8959 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8960 {
8961 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8962 tree cst_uchar_ptr_node
8963 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8964
389dd41b 8965 tree ind1 = fold_convert_loc (loc, integer_type_node,
8966 build1 (INDIRECT_REF, cst_uchar_node,
8967 fold_convert_loc (loc,
8968 cst_uchar_ptr_node,
8969 arg1)));
8970 tree ind2 = fold_convert_loc (loc, integer_type_node,
8971 build1 (INDIRECT_REF, cst_uchar_node,
8972 fold_convert_loc (loc,
8973 cst_uchar_ptr_node,
8974 arg2)));
8975 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9c8a1629 8976 }
8977
c2f47e15 8978 return NULL_TREE;
9c8a1629 8979}
8980
c2f47e15 8981/* Fold function call to builtin signbit, signbitf or signbitl with argument
8982 ARG. Return NULL_TREE if no simplification can be made. */
27f261ef 8983
8984static tree
389dd41b 8985fold_builtin_signbit (location_t loc, tree arg, tree type)
27f261ef 8986{
c2f47e15 8987 if (!validate_arg (arg, REAL_TYPE))
27f261ef 8988 return NULL_TREE;
8989
27f261ef 8990 /* If ARG is a compile-time constant, determine the result. */
8991 if (TREE_CODE (arg) == REAL_CST
f96bd2bf 8992 && !TREE_OVERFLOW (arg))
27f261ef 8993 {
8994 REAL_VALUE_TYPE c;
8995
8996 c = TREE_REAL_CST (arg);
385f3f36 8997 return (REAL_VALUE_NEGATIVE (c)
8998 ? build_one_cst (type)
8999 : build_zero_cst (type));
27f261ef 9000 }
9001
9002 /* If ARG is non-negative, the result is always zero. */
9003 if (tree_expr_nonnegative_p (arg))
389dd41b 9004 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
27f261ef 9005
9006 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
fe994837 9007 if (!HONOR_SIGNED_ZEROS (arg))
de67cbb8 9008 return fold_convert (type,
9009 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9010 build_real (TREE_TYPE (arg), dconst0)));
27f261ef 9011
9012 return NULL_TREE;
9013}
9014
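/* Editorial sketch, not part of the original builtins.c: why the
   "arg < 0.0" rewrite above is guarded by !HONOR_SIGNED_ZEROS.  The two
   forms disagree exactly on -0.0.  Standalone C99 program.  */
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double vals[] = { -1.5, 0.0, -0.0, 2.0 };
  for (int i = 0; i < 4; i++)
    printf ("%+g: signbit=%d (x<0)=%d\n",
            vals[i], signbit (vals[i]) != 0, vals[i] < 0.0);
  return 0;
}
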
c2f47e15 9015/* Fold function call to builtin copysign, copysignf or copysignl with
9016 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9017 be made. */
467214fd 9018
9019static tree
389dd41b 9020fold_builtin_copysign (location_t loc, tree fndecl,
9021 tree arg1, tree arg2, tree type)
467214fd 9022{
c2f47e15 9023 tree tem;
467214fd 9024
c2f47e15 9025 if (!validate_arg (arg1, REAL_TYPE)
9026 || !validate_arg (arg2, REAL_TYPE))
467214fd 9027 return NULL_TREE;
9028
467214fd 9029 /* copysign(X,X) is X. */
9030 if (operand_equal_p (arg1, arg2, 0))
389dd41b 9031 return fold_convert_loc (loc, type, arg1);
467214fd 9032
9033 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9034 if (TREE_CODE (arg1) == REAL_CST
9035 && TREE_CODE (arg2) == REAL_CST
f96bd2bf 9036 && !TREE_OVERFLOW (arg1)
9037 && !TREE_OVERFLOW (arg2))
467214fd 9038 {
9039 REAL_VALUE_TYPE c1, c2;
9040
9041 c1 = TREE_REAL_CST (arg1);
9042 c2 = TREE_REAL_CST (arg2);
749680e2 9043 /* c1.sign := c2.sign. */
467214fd 9044 real_copysign (&c1, &c2);
9045 return build_real (type, c1);
467214fd 9046 }
9047
9048 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9049 Remember to evaluate Y for side-effects. */
9050 if (tree_expr_nonnegative_p (arg2))
389dd41b 9051 return omit_one_operand_loc (loc, type,
9052 fold_build1_loc (loc, ABS_EXPR, type, arg1),
467214fd 9053 arg2);
9054
198d9bbe 9055 /* Strip sign changing operations for the first argument. */
9056 tem = fold_strip_sign_ops (arg1);
9057 if (tem)
389dd41b 9058 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
198d9bbe 9059
467214fd 9060 return NULL_TREE;
9061}
9062
c2f47e15 9063/* Fold a call to builtin isascii with argument ARG. */
d49367d4 9064
9065static tree
389dd41b 9066fold_builtin_isascii (location_t loc, tree arg)
d49367d4 9067{
c2f47e15 9068 if (!validate_arg (arg, INTEGER_TYPE))
9069 return NULL_TREE;
d49367d4 9070 else
9071 {
9072 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 9073 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9074 build_int_cst (integer_type_node,
c90b5d40 9075 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 9076 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 9077 arg, integer_zero_node);
d49367d4 9078 }
9079}
9080
c2f47e15 9081/* Fold a call to builtin toascii with argument ARG. */
d49367d4 9082
9083static tree
389dd41b 9084fold_builtin_toascii (location_t loc, tree arg)
d49367d4 9085{
c2f47e15 9086 if (!validate_arg (arg, INTEGER_TYPE))
9087 return NULL_TREE;
48e1416a 9088
c2f47e15 9089 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 9090 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9091 build_int_cst (integer_type_node, 0x7f));
d49367d4 9092}
9093
c2f47e15 9094/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 9095
9096static tree
389dd41b 9097fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 9098{
c2f47e15 9099 if (!validate_arg (arg, INTEGER_TYPE))
9100 return NULL_TREE;
df1cf42e 9101 else
9102 {
9103 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 9104 /* According to the C standard, isdigit is unaffected by locale.
9105 However, it definitely is affected by the target character set. */
624d37a6 9106 unsigned HOST_WIDE_INT target_digit0
9107 = lang_hooks.to_target_charset ('0');
9108
9109 if (target_digit0 == 0)
9110 return NULL_TREE;
9111
389dd41b 9112 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 9113 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9114 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 9115 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 9116 build_int_cst (unsigned_type_node, 9));
df1cf42e 9117 }
9118}
27f261ef 9119
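/* Editorial sketch, not part of the original builtins.c: the isdigit
   rewrite above, (unsigned) c - '0' <= 9, done by hand for an
   ASCII-style target character set.  Standalone program.  */
#include <assert.h>
#include <ctype.h>

int
main (void)
{
  for (int c = 0; c < 128; c++)
    assert ((isdigit (c) != 0) == ((unsigned) c - '0' <= 9));
  return 0;
}
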
c2f47e15 9120/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 9121
9122static tree
389dd41b 9123fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 9124{
c2f47e15 9125 if (!validate_arg (arg, REAL_TYPE))
9126 return NULL_TREE;
d1aade50 9127
389dd41b 9128 arg = fold_convert_loc (loc, type, arg);
d1aade50 9129 if (TREE_CODE (arg) == REAL_CST)
9130 return fold_abs_const (arg, type);
389dd41b 9131 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9132}
9133
c2f47e15 9134/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 9135
9136static tree
389dd41b 9137fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 9138{
c2f47e15 9139 if (!validate_arg (arg, INTEGER_TYPE))
9140 return NULL_TREE;
d1aade50 9141
389dd41b 9142 arg = fold_convert_loc (loc, type, arg);
d1aade50 9143 if (TREE_CODE (arg) == INTEGER_CST)
9144 return fold_abs_const (arg, type);
389dd41b 9145 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9146}
9147
b9be572e 9148/* Fold a fma operation with arguments ARG[012]. */
9149
9150tree
9151fold_fma (location_t loc ATTRIBUTE_UNUSED,
9152 tree type, tree arg0, tree arg1, tree arg2)
9153{
9154 if (TREE_CODE (arg0) == REAL_CST
9155 && TREE_CODE (arg1) == REAL_CST
9156 && TREE_CODE (arg2) == REAL_CST)
9157 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9158
9159 return NULL_TREE;
9160}
9161
9162/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9163
9164static tree
9165fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9166{
9167 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9168 && validate_arg (arg1, REAL_TYPE)
9169 && validate_arg (arg2, REAL_TYPE))
b9be572e 9170 {
9171 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9172 if (tem)
9173 return tem;
9174
9175 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9176 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9177 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9178 }
9179 return NULL_TREE;
9180}
9181
d4a43a03 9182/* Fold a call to builtin fmin or fmax. */
9183
9184static tree
389dd41b 9185fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9186 tree type, bool max)
d4a43a03 9187{
c2f47e15 9188 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
d4a43a03 9189 {
d4a43a03 9190 /* Calculate the result when the argument is a constant. */
9191 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9192
9193 if (res)
9194 return res;
9195
61fe3555 9196 /* If either argument is NaN, return the other one. Avoid the
9197 transformation if we get (and honor) a signalling NaN. Using
9198 omit_one_operand() ensures we create a non-lvalue. */
9199 if (TREE_CODE (arg0) == REAL_CST
9200 && real_isnan (&TREE_REAL_CST (arg0))
fe994837 9201 && (! HONOR_SNANS (arg0)
61fe3555 9202 || ! TREE_REAL_CST (arg0).signalling))
389dd41b 9203 return omit_one_operand_loc (loc, type, arg1, arg0);
61fe3555 9204 if (TREE_CODE (arg1) == REAL_CST
9205 && real_isnan (&TREE_REAL_CST (arg1))
fe994837 9206 && (! HONOR_SNANS (arg1)
61fe3555 9207 || ! TREE_REAL_CST (arg1).signalling))
389dd41b 9208 return omit_one_operand_loc (loc, type, arg0, arg1);
61fe3555 9209
d4a43a03 9210 /* Transform fmin/fmax(x,x) -> x. */
9211 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
389dd41b 9212 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9213
d4a43a03 9214 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9215 functions to return the numeric arg if the other one is NaN.
9216 These tree codes don't honor that, so only transform if
9217 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9218 handled, so we don't have to worry about it either. */
9219 if (flag_finite_math_only)
389dd41b 9220 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9221 fold_convert_loc (loc, type, arg0),
9222 fold_convert_loc (loc, type, arg1));
d4a43a03 9223 }
9224 return NULL_TREE;
9225}
9226
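/* Editorial sketch, not part of the original builtins.c: the C99 NaN
   behaviour fold_builtin_fmin_fmax has to preserve, and which plain
   MIN_EXPR/MAX_EXPR would not, hence the -ffinite-math-only guard on
   that transformation.  Standalone C99 program.  */
#include <stdio.h>
#include <math.h>

int
main (void)
{
  double q = nan ("");
  printf ("%g %g\n", fmax (q, 1.5), fmin (q, 1.5));   /* 1.5 1.5 */
  printf ("%g\n", fmax (2.0, 2.0));                   /* fmax (x, x) is x */
  return 0;
}
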
abe4dcf6 9227/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9228
9229static tree
389dd41b 9230fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 9231{
239d491a 9232 if (validate_arg (arg, COMPLEX_TYPE)
9233 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 9234 {
9235 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 9236
abe4dcf6 9237 if (atan2_fn)
9238 {
c2f47e15 9239 tree new_arg = builtin_save_expr (arg);
389dd41b 9240 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9241 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9242 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 9243 }
9244 }
48e1416a 9245
abe4dcf6 9246 return NULL_TREE;
9247}
9248
cb2b9385 9249/* Fold a call to builtin logb/ilogb. */
9250
9251static tree
389dd41b 9252fold_builtin_logb (location_t loc, tree arg, tree rettype)
cb2b9385 9253{
9254 if (! validate_arg (arg, REAL_TYPE))
9255 return NULL_TREE;
48e1416a 9256
cb2b9385 9257 STRIP_NOPS (arg);
48e1416a 9258
cb2b9385 9259 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9260 {
9261 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9262
cb2b9385 9263 switch (value->cl)
9264 {
9265 case rvc_nan:
9266 case rvc_inf:
9267 /* If arg is Inf or NaN and we're logb, return it. */
9268 if (TREE_CODE (rettype) == REAL_TYPE)
7695fea9 9269 {
9270 /* For logb(-Inf) we have to return +Inf. */
9271 if (real_isinf (value) && real_isneg (value))
9272 {
9273 REAL_VALUE_TYPE tem;
9274 real_inf (&tem);
9275 return build_real (rettype, tem);
9276 }
9277 return fold_convert_loc (loc, rettype, arg);
9278 }
cb2b9385 9279 /* Fall through... */
9280 case rvc_zero:
9281 /* Zero may set errno and/or raise an exception for logb; for
9282 ilogb we don't know FP_ILOGB0. */
9283 return NULL_TREE;
9284 case rvc_normal:
9285 /* For normal numbers, proceed iff radix == 2. In GCC,
9286 normalized significands are in the range [0.5, 1.0). We
9287 want the exponent as if they were [1.0, 2.0) so get the
9288 exponent and subtract 1. */
9289 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
389dd41b 9290 return fold_convert_loc (loc, rettype,
7002a1c8 9291 build_int_cst (integer_type_node,
389dd41b 9292 REAL_EXP (value)-1));
cb2b9385 9293 break;
9294 }
9295 }
48e1416a 9296
cb2b9385 9297 return NULL_TREE;
9298}
9299
9300/* Fold a call to builtin significand, if radix == 2. */
9301
9302static tree
389dd41b 9303fold_builtin_significand (location_t loc, tree arg, tree rettype)
cb2b9385 9304{
9305 if (! validate_arg (arg, REAL_TYPE))
9306 return NULL_TREE;
48e1416a 9307
cb2b9385 9308 STRIP_NOPS (arg);
48e1416a 9309
cb2b9385 9310 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9311 {
9312 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9313
cb2b9385 9314 switch (value->cl)
9315 {
9316 case rvc_zero:
9317 case rvc_nan:
9318 case rvc_inf:
9319 /* If arg is +-0, +-Inf or +-NaN, then return it. */
389dd41b 9320 return fold_convert_loc (loc, rettype, arg);
cb2b9385 9321 case rvc_normal:
9322 /* For normal numbers, proceed iff radix == 2. */
9323 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9324 {
9325 REAL_VALUE_TYPE result = *value;
9326 /* In GCC, normalized significands are in the range [0.5,
9327 1.0). We want them to be [1.0, 2.0) so set the
9328 exponent to 1. */
9329 SET_REAL_EXP (&result, 1);
9330 return build_real (rettype, result);
9331 }
9332 break;
9333 }
9334 }
48e1416a 9335
cb2b9385 9336 return NULL_TREE;
9337}
9338
3838b9ae 9339/* Fold a call to builtin frexp; we can assume the base is 2. */
9340
9341static tree
389dd41b 9342fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 9343{
9344 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9345 return NULL_TREE;
48e1416a 9346
3838b9ae 9347 STRIP_NOPS (arg0);
48e1416a 9348
3838b9ae 9349 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9350 return NULL_TREE;
48e1416a 9351
389dd41b 9352 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 9353
9354 /* Proceed if a valid pointer type was passed in. */
9355 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9356 {
9357 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9358 tree frac, exp;
48e1416a 9359
3838b9ae 9360 switch (value->cl)
9361 {
9362 case rvc_zero:
9363 /* For +-0, return (*exp = 0, +-0). */
9364 exp = integer_zero_node;
9365 frac = arg0;
9366 break;
9367 case rvc_nan:
9368 case rvc_inf:
9369 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 9370 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 9371 case rvc_normal:
9372 {
9373 /* Since the frexp function always expects base 2, and in
9374 GCC normalized significands are already in the range
9375 [0.5, 1.0), we have exactly what frexp wants. */
9376 REAL_VALUE_TYPE frac_rvt = *value;
9377 SET_REAL_EXP (&frac_rvt, 0);
9378 frac = build_real (rettype, frac_rvt);
7002a1c8 9379 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 9380 }
9381 break;
9382 default:
9383 gcc_unreachable ();
9384 }
48e1416a 9385
3838b9ae 9386      /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
389dd41b 9387 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 9388 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9389 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 9390 }
9391
9392 return NULL_TREE;
9393}
9394
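/* A standalone sketch of the frexp decomposition folded above: for a
   normal constant the result pair is a significand in [0.5, 1.0) plus the
   binary exponent stored through the pointer argument.

   #include <math.h>
   #include <stdio.h>

   int
   main (void)
   {
     int e;
     double m = frexp (8.0, &e);     // 8.0 == 0.5 * 2^4
     printf ("%g %d\n", m, e);       // prints "0.5 4"
     return 0;
   }
*/
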
7587301b 9395/* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9396 then we can assume the base is two. If it's false, then we have to
9397 check the mode of the TYPE parameter in certain cases. */
9398
9399static tree
389dd41b 9400fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9401 tree type, bool ldexp)
7587301b 9402{
9403 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9404 {
9405 STRIP_NOPS (arg0);
9406 STRIP_NOPS (arg1);
9407
9408 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9409 if (real_zerop (arg0) || integer_zerop (arg1)
9410 || (TREE_CODE (arg0) == REAL_CST
776a7bab 9411 && !real_isfinite (&TREE_REAL_CST (arg0))))
389dd41b 9412 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9413
7587301b 9414 /* If both arguments are constant, then try to evaluate it. */
9415 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9416 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
e913b5cd 9417 && tree_fits_shwi_p (arg1))
7587301b 9418 {
9419 /* Bound the maximum adjustment to twice the range of the
9420 mode's valid exponents. Use abs to ensure the range is
9421 positive as a sanity check. */
48e1416a 9422 const long max_exp_adj = 2 *
7587301b 9423 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9424 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9425
9426 /* Get the user-requested adjustment. */
e913b5cd 9427 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
48e1416a 9428
7587301b 9429 /* The requested adjustment must be inside this range. This
9430 is a preliminary cap to avoid things like overflow, we
9431 may still fail to compute the result for other reasons. */
9432 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9433 {
9434 REAL_VALUE_TYPE initial_result;
48e1416a 9435
7587301b 9436 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9437
9438 /* Ensure we didn't overflow. */
9439 if (! real_isinf (&initial_result))
9440 {
9441 const REAL_VALUE_TYPE trunc_result
9442 = real_value_truncate (TYPE_MODE (type), initial_result);
48e1416a 9443
7587301b 9444 /* Only proceed if the target mode can hold the
9445 resulting value. */
9446 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9447 return build_real (type, trunc_result);
9448 }
9449 }
9450 }
9451 }
9452
9453 return NULL_TREE;
9454}
9455
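/* A standalone sketch of the value the ldexp/scalbn fold computes for
   constant operands: x * 2^n, kept only when it is still finite and
   exactly representable in the result type (and, for scalbn, only when
   the radix is 2).

   #include <math.h>
   #include <stdio.h>

   int
   main (void)
   {
     printf ("%g\n", ldexp (1.5, 4));      // 1.5 * 2^4 == 24
     printf ("%g\n", scalbn (24.0, -4));   // 1.5 again on a radix-2 target
     return 0;
   }
*/
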
ebf8b4f5 9456/* Fold a call to builtin modf. */
9457
9458static tree
389dd41b 9459fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 9460{
9461 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9462 return NULL_TREE;
48e1416a 9463
ebf8b4f5 9464 STRIP_NOPS (arg0);
48e1416a 9465
ebf8b4f5 9466 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9467 return NULL_TREE;
48e1416a 9468
389dd41b 9469 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 9470
9471 /* Proceed if a valid pointer type was passed in. */
9472 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9473 {
9474 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9475 REAL_VALUE_TYPE trunc, frac;
9476
9477 switch (value->cl)
9478 {
9479 case rvc_nan:
9480 case rvc_zero:
9481 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9482 trunc = frac = *value;
9483 break;
9484 case rvc_inf:
9485 /* For +-Inf, return (*arg1 = arg0, +-0). */
9486 frac = dconst0;
9487 frac.sign = value->sign;
9488 trunc = *value;
9489 break;
9490 case rvc_normal:
9491 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9492 real_trunc (&trunc, VOIDmode, value);
9493 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9494 /* If the original number was negative and already
9495 integral, then the fractional part is -0.0. */
9496 if (value->sign && frac.cl == rvc_zero)
9497 frac.sign = value->sign;
9498 break;
9499 }
48e1416a 9500
ebf8b4f5 9501 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 9502 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 9503 build_real (rettype, trunc));
9504 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9505 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 9506 build_real (rettype, frac));
9507 }
48e1416a 9508
ebf8b4f5 9509 return NULL_TREE;
9510}
9511
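/* A standalone sketch of the modf fold, including the -0.0 fractional
   part noted above for negative integral inputs.

   #include <math.h>
   #include <stdio.h>

   int
   main (void)
   {
     double i;
     printf ("%g %g\n", modf (3.25, &i), i);   // prints "0.25 3"
     printf ("%g %g\n", modf (-2.0, &i), i);   // prints "-0 -2"
     return 0;
   }
*/
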
a65c4d64 9512/* Given a location LOC, an interclass builtin function decl FNDECL
 9513   and its single argument ARG, return a folded expression computing
9514 the same, or NULL_TREE if we either couldn't or didn't want to fold
 9515   (the latter happens if there's an RTL instruction available).  */
9516
9517static tree
9518fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9519{
3754d046 9520 machine_mode mode;
a65c4d64 9521
9522 if (!validate_arg (arg, REAL_TYPE))
9523 return NULL_TREE;
9524
9525 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9526 return NULL_TREE;
9527
9528 mode = TYPE_MODE (TREE_TYPE (arg));
9529
9530 /* If there is no optab, try generic code. */
9531 switch (DECL_FUNCTION_CODE (fndecl))
9532 {
9533 tree result;
9534
9535 CASE_FLT_FN (BUILT_IN_ISINF):
9536 {
9537 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
b9a16870 9538 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
a65c4d64 9539 tree const type = TREE_TYPE (arg);
9540 REAL_VALUE_TYPE r;
9541 char buf[128];
9542
9543 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9544 real_from_string (&r, buf);
9545 result = build_call_expr (isgr_fn, 2,
9546 fold_build1_loc (loc, ABS_EXPR, type, arg),
9547 build_real (type, r));
9548 return result;
9549 }
9550 CASE_FLT_FN (BUILT_IN_FINITE):
9551 case BUILT_IN_ISFINITE:
9552 {
9553 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
b9a16870 9554 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
a65c4d64 9555 tree const type = TREE_TYPE (arg);
9556 REAL_VALUE_TYPE r;
9557 char buf[128];
9558
9559 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9560 real_from_string (&r, buf);
9561 result = build_call_expr (isle_fn, 2,
9562 fold_build1_loc (loc, ABS_EXPR, type, arg),
9563 build_real (type, r));
9564 /*result = fold_build2_loc (loc, UNGT_EXPR,
9565 TREE_TYPE (TREE_TYPE (fndecl)),
9566 fold_build1_loc (loc, ABS_EXPR, type, arg),
9567 build_real (type, r));
9568 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9569 TREE_TYPE (TREE_TYPE (fndecl)),
9570 result);*/
9571 return result;
9572 }
9573 case BUILT_IN_ISNORMAL:
9574 {
9575 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9576 islessequal(fabs(x),DBL_MAX). */
b9a16870 9577 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9578 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
a65c4d64 9579 tree const type = TREE_TYPE (arg);
9580 REAL_VALUE_TYPE rmax, rmin;
9581 char buf[128];
9582
9583 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9584 real_from_string (&rmax, buf);
9585 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9586 real_from_string (&rmin, buf);
9587 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9588 result = build_call_expr (isle_fn, 2, arg,
9589 build_real (type, rmax));
9590 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9591 build_call_expr (isge_fn, 2, arg,
9592 build_real (type, rmin)));
9593 return result;
9594 }
9595 default:
9596 break;
9597 }
9598
9599 return NULL_TREE;
9600}
9601
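/* A rough source-level picture of the generic expansions above, assuming
   a binary64 double so that DBL_MAX and DBL_MIN stand in for the
   get_max_float and "0x1p%d" constants; a sketch, not the emitted tree.

   #include <float.h>
   #include <math.h>
   #include <stdio.h>

   static int my_isinf (double x)    { return isgreater (fabs (x), DBL_MAX); }
   static int my_isfinite (double x) { return islessequal (fabs (x), DBL_MAX); }
   static int my_isnormal (double x)
   {
     double ax = fabs (x);
     return islessequal (ax, DBL_MAX) & isgreaterequal (ax, DBL_MIN);
   }

   int
   main (void)
   {
     double v[] = { 1.0, 0.0, 5e-324, INFINITY, NAN };
     for (int i = 0; i < 5; i++)
       printf ("%d %d %d\n", my_isinf (v[i]), my_isfinite (v[i]),
               my_isnormal (v[i]));
     return 0;
   }
*/
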
726069ba 9602/* Fold a call to __builtin_isnan(), __builtin_isinf() or __builtin_finite().
c2f47e15 9603 ARG is the argument for the call. */
726069ba 9604
9605static tree
389dd41b 9606fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 9607{
726069ba 9608 tree type = TREE_TYPE (TREE_TYPE (fndecl));
726069ba 9609 REAL_VALUE_TYPE r;
9610
c2f47e15 9611 if (!validate_arg (arg, REAL_TYPE))
d43cee80 9612 return NULL_TREE;
726069ba 9613
726069ba 9614 switch (builtin_index)
9615 {
9616 case BUILT_IN_ISINF:
fe994837 9617 if (!HONOR_INFINITIES (arg))
389dd41b 9618 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9619
9620 if (TREE_CODE (arg) == REAL_CST)
9621 {
9622 r = TREE_REAL_CST (arg);
9623 if (real_isinf (&r))
9624 return real_compare (GT_EXPR, &r, &dconst0)
9625 ? integer_one_node : integer_minus_one_node;
9626 else
9627 return integer_zero_node;
9628 }
9629
9630 return NULL_TREE;
9631
c319d56a 9632 case BUILT_IN_ISINF_SIGN:
9633 {
9634 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9635 /* In a boolean context, GCC will fold the inner COND_EXPR to
9636 1. So e.g. "if (isinf_sign(x))" would be folded to just
9637 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9638 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
b9a16870 9639 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 9640 tree tmp = NULL_TREE;
9641
9642 arg = builtin_save_expr (arg);
9643
9644 if (signbit_fn && isinf_fn)
9645 {
389dd41b 9646 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9647 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 9648
389dd41b 9649 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9650 signbit_call, integer_zero_node);
389dd41b 9651 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9652 isinf_call, integer_zero_node);
48e1416a 9653
389dd41b 9654 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 9655 integer_minus_one_node, integer_one_node);
389dd41b 9656 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9657 isinf_call, tmp,
c319d56a 9658 integer_zero_node);
9659 }
9660
9661 return tmp;
9662 }
9663
cde061c1 9664 case BUILT_IN_ISFINITE:
93633022 9665 if (!HONOR_NANS (arg)
fe994837 9666 && !HONOR_INFINITIES (arg))
389dd41b 9667 return omit_one_operand_loc (loc, type, integer_one_node, arg);
726069ba 9668
9669 if (TREE_CODE (arg) == REAL_CST)
9670 {
9671 r = TREE_REAL_CST (arg);
776a7bab 9672 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
726069ba 9673 }
9674
9675 return NULL_TREE;
9676
9677 case BUILT_IN_ISNAN:
93633022 9678 if (!HONOR_NANS (arg))
389dd41b 9679 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9680
9681 if (TREE_CODE (arg) == REAL_CST)
9682 {
9683 r = TREE_REAL_CST (arg);
9684 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9685 }
9686
9687 arg = builtin_save_expr (arg);
389dd41b 9688 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
726069ba 9689
9690 default:
64db345d 9691 gcc_unreachable ();
726069ba 9692 }
9693}
9694
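/* A standalone sketch of the isinf_sign rewrite spelled out in the
   comment above: isinf (x) ? (signbit (x) ? -1 : 1) : 0.

   #include <math.h>
   #include <stdio.h>

   static int my_isinf_sign (double x)
   {
     return isinf (x) ? (signbit (x) ? -1 : 1) : 0;
   }

   int
   main (void)
   {
     printf ("%d %d %d\n", my_isinf_sign (-INFINITY), my_isinf_sign (42.0),
             my_isinf_sign (INFINITY));           // prints "-1 0 1"
     return 0;
   }
*/
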
19fbe3a4 9695/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9696 This builtin will generate code to return the appropriate floating
9697 point classification depending on the value of the floating point
9698 number passed in. The possible return values must be supplied as
921b27c0 9699 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
19fbe3a4 9700   FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
 9701   one floating-point argument, which is "type generic".  */
9702
9703static tree
9d884767 9704fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
19fbe3a4 9705{
921b27c0 9706 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9707 arg, type, res, tmp;
3754d046 9708 machine_mode mode;
19fbe3a4 9709 REAL_VALUE_TYPE r;
9710 char buf[128];
48e1416a 9711
19fbe3a4 9712 /* Verify the required arguments in the original call. */
9d884767 9713 if (nargs != 6
9714 || !validate_arg (args[0], INTEGER_TYPE)
9715 || !validate_arg (args[1], INTEGER_TYPE)
9716 || !validate_arg (args[2], INTEGER_TYPE)
9717 || !validate_arg (args[3], INTEGER_TYPE)
9718 || !validate_arg (args[4], INTEGER_TYPE)
9719 || !validate_arg (args[5], REAL_TYPE))
19fbe3a4 9720 return NULL_TREE;
48e1416a 9721
9d884767 9722 fp_nan = args[0];
9723 fp_infinite = args[1];
9724 fp_normal = args[2];
9725 fp_subnormal = args[3];
9726 fp_zero = args[4];
9727 arg = args[5];
19fbe3a4 9728 type = TREE_TYPE (arg);
9729 mode = TYPE_MODE (type);
389dd41b 9730 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
19fbe3a4 9731
48e1416a 9732 /* fpclassify(x) ->
19fbe3a4 9733 isnan(x) ? FP_NAN :
921b27c0 9734 (fabs(x) == Inf ? FP_INFINITE :
19fbe3a4 9735 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9736 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
48e1416a 9737
389dd41b 9738 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9739 build_real (type, dconst0));
389dd41b 9740 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9741 tmp, fp_zero, fp_subnormal);
19fbe3a4 9742
9743 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9744 real_from_string (&r, buf);
389dd41b 9745 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9746 arg, build_real (type, r));
9747 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
48e1416a 9748
19fbe3a4 9749 if (HONOR_INFINITIES (mode))
9750 {
9751 real_inf (&r);
389dd41b 9752 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9753 build_real (type, r));
389dd41b 9754 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9755 fp_infinite, res);
19fbe3a4 9756 }
9757
9758 if (HONOR_NANS (mode))
9759 {
389dd41b 9760 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9761 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
19fbe3a4 9762 }
48e1416a 9763
19fbe3a4 9764 return res;
9765}
9766
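/* The nested COND_EXPR chain built above, written out as plain C for a
   double argument; a sketch of the emitted logic, not the tree itself.

   #include <float.h>
   #include <math.h>
   #include <stdio.h>

   static int my_fpclassify (double x)
   {
     double ax = fabs (x);
     int res = (ax == 0.0) ? FP_ZERO : FP_SUBNORMAL;
     if (ax >= DBL_MIN)
       res = FP_NORMAL;
     if (ax == INFINITY)
       res = FP_INFINITE;
     if (ax != ax)                    // unordered with itself, i.e. NaN
       res = FP_NAN;
     return res;
   }

   int
   main (void)
   {
     double v[] = { 0.0, 5e-324, 1.0, INFINITY, NAN };
     for (int i = 0; i < 5; i++)
       printf ("%d\n", my_fpclassify (v[i]) == fpclassify (v[i]));  // all "1"
     return 0;
   }
*/
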
9bc9f15f 9767/* Fold a call to an unordered comparison function such as
d5019fe8 9768 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 9769 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 9770 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9771 the opposite of the desired result. UNORDERED_CODE is used
9772 for modes that can hold NaNs and ORDERED_CODE is used for
9773 the rest. */
9bc9f15f 9774
9775static tree
389dd41b 9776fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 9777 enum tree_code unordered_code,
9778 enum tree_code ordered_code)
9779{
859f903a 9780 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 9781 enum tree_code code;
6978db0d 9782 tree type0, type1;
9783 enum tree_code code0, code1;
9784 tree cmp_type = NULL_TREE;
9bc9f15f 9785
6978db0d 9786 type0 = TREE_TYPE (arg0);
9787 type1 = TREE_TYPE (arg1);
a0c938f0 9788
6978db0d 9789 code0 = TREE_CODE (type0);
9790 code1 = TREE_CODE (type1);
a0c938f0 9791
6978db0d 9792 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9793 /* Choose the wider of two real types. */
9794 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9795 ? type0 : type1;
9796 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9797 cmp_type = type0;
9798 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9799 cmp_type = type1;
a0c938f0 9800
389dd41b 9801 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9802 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 9803
9804 if (unordered_code == UNORDERED_EXPR)
9805 {
93633022 9806 if (!HONOR_NANS (arg0))
389dd41b 9807 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9808 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 9809 }
9bc9f15f 9810
93633022 9811 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 9812 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9813 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 9814}
9815
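/* A standalone reminder of the behaviour the inversion above preserves:
   each macro folds to the negation of the "unordered or opposite"
   comparison, so a NaN operand simply yields 0.

   #include <math.h>
   #include <stdio.h>

   int
   main (void)
   {
     double n = NAN;
     printf ("%d %d\n", isgreater (n, 1.0), isless (n, 1.0));   // "0 0"
     printf ("%d\n", isunordered (n, 1.0));                     // "1"
     return 0;
   }
*/
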
0c93c8a9 9816/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9817 arithmetics if it can never overflow, or into internal functions that
9818 return both result of arithmetics and overflowed boolean flag in
9819 a complex integer result, or some other check for overflow. */
9820
9821static tree
9822fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9823 tree arg0, tree arg1, tree arg2)
9824{
9825 enum internal_fn ifn = IFN_LAST;
9826 tree type = TREE_TYPE (TREE_TYPE (arg2));
9827 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9828 switch (fcode)
9829 {
9830 case BUILT_IN_ADD_OVERFLOW:
9831 case BUILT_IN_SADD_OVERFLOW:
9832 case BUILT_IN_SADDL_OVERFLOW:
9833 case BUILT_IN_SADDLL_OVERFLOW:
9834 case BUILT_IN_UADD_OVERFLOW:
9835 case BUILT_IN_UADDL_OVERFLOW:
9836 case BUILT_IN_UADDLL_OVERFLOW:
9837 ifn = IFN_ADD_OVERFLOW;
9838 break;
9839 case BUILT_IN_SUB_OVERFLOW:
9840 case BUILT_IN_SSUB_OVERFLOW:
9841 case BUILT_IN_SSUBL_OVERFLOW:
9842 case BUILT_IN_SSUBLL_OVERFLOW:
9843 case BUILT_IN_USUB_OVERFLOW:
9844 case BUILT_IN_USUBL_OVERFLOW:
9845 case BUILT_IN_USUBLL_OVERFLOW:
9846 ifn = IFN_SUB_OVERFLOW;
9847 break;
9848 case BUILT_IN_MUL_OVERFLOW:
9849 case BUILT_IN_SMUL_OVERFLOW:
9850 case BUILT_IN_SMULL_OVERFLOW:
9851 case BUILT_IN_SMULLL_OVERFLOW:
9852 case BUILT_IN_UMUL_OVERFLOW:
9853 case BUILT_IN_UMULL_OVERFLOW:
9854 case BUILT_IN_UMULLL_OVERFLOW:
9855 ifn = IFN_MUL_OVERFLOW;
9856 break;
9857 default:
9858 gcc_unreachable ();
9859 }
9860 tree ctype = build_complex_type (type);
9861 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9862 2, arg0, arg1);
9863 tree tgt = save_expr (call);
9864 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9865 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9866 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9867 tree store
9868 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9869 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9870}
9871
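/* A user-level view of what the fold above implements: the builtin stores
   the wrapped result through its pointer argument and returns the overflow
   flag, which here is the IMAGPART of the internal-fn result.  Requires a
   compiler that provides the builtin (GCC 5 or later).

   #include <limits.h>
   #include <stdio.h>

   int
   main (void)
   {
     int r;
     if (__builtin_add_overflow (INT_MAX, 1, &r))
       printf ("overflow, wrapped value %d\n", r);   // r == INT_MIN
     return 0;
   }
*/
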
c2f47e15 9872/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 9873 This function returns NULL_TREE if no simplification was possible. */
650e4c94 9874
4ee9c684 9875static tree
e80cc485 9876fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 9877{
e9f80ff5 9878 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 9879 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 9880 switch (fcode)
650e4c94 9881 {
c2f47e15 9882 CASE_FLT_FN (BUILT_IN_INF):
9883 case BUILT_IN_INFD32:
9884 case BUILT_IN_INFD64:
9885 case BUILT_IN_INFD128:
389dd41b 9886 return fold_builtin_inf (loc, type, true);
7c2f0500 9887
c2f47e15 9888 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
389dd41b 9889 return fold_builtin_inf (loc, type, false);
7c2f0500 9890
c2f47e15 9891 case BUILT_IN_CLASSIFY_TYPE:
9892 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 9893
c2f47e15 9894 default:
9895 break;
9896 }
9897 return NULL_TREE;
9898}
7c2f0500 9899
c2f47e15 9900/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 9901 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 9902
c2f47e15 9903static tree
e80cc485 9904fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 9905{
9906 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9907 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9908 switch (fcode)
9909 {
650e4c94 9910 case BUILT_IN_CONSTANT_P:
7c2f0500 9911 {
c2f47e15 9912 tree val = fold_builtin_constant_p (arg0);
7c2f0500 9913
7c2f0500 9914 /* Gimplification will pull the CALL_EXPR for the builtin out of
9915 an if condition. When not optimizing, we'll not CSE it back.
 9916	   To avoid link-error types of regressions, return false now.  */
9917 if (!val && !optimize)
9918 val = integer_zero_node;
9919
9920 return val;
9921 }
650e4c94 9922
539a3a92 9923 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 9924 return fold_builtin_classify_type (arg0);
539a3a92 9925
650e4c94 9926 case BUILT_IN_STRLEN:
c7cbde74 9927 return fold_builtin_strlen (loc, type, arg0);
650e4c94 9928
4f35b1fc 9929 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 9930 case BUILT_IN_FABSD32:
9931 case BUILT_IN_FABSD64:
9932 case BUILT_IN_FABSD128:
389dd41b 9933 return fold_builtin_fabs (loc, arg0, type);
d1aade50 9934
9935 case BUILT_IN_ABS:
9936 case BUILT_IN_LABS:
9937 case BUILT_IN_LLABS:
9938 case BUILT_IN_IMAXABS:
389dd41b 9939 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 9940
4f35b1fc 9941 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 9942 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9943 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9944 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 9945 break;
36d3581d 9946
4f35b1fc 9947 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 9948 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9949 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 9950 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 9951 break;
36d3581d 9952
4f35b1fc 9953 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 9954 if (validate_arg (arg0, COMPLEX_TYPE)
9955 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9956 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 9957 break;
36d3581d 9958
503733d5 9959 CASE_FLT_FN (BUILT_IN_CCOS):
9af5ce0c 9960 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
48e1416a 9961
503733d5 9962 CASE_FLT_FN (BUILT_IN_CCOSH):
9af5ce0c 9963 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
48e1416a 9964
c2373fdb 9965 CASE_FLT_FN (BUILT_IN_CPROJ):
9af5ce0c 9966 return fold_builtin_cproj (loc, arg0, type);
c2373fdb 9967
239d491a 9968 CASE_FLT_FN (BUILT_IN_CSIN):
9969 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9970 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9971 return do_mpc_arg1 (arg0, type, mpc_sin);
c2f47e15 9972 break;
48e1416a 9973
239d491a 9974 CASE_FLT_FN (BUILT_IN_CSINH):
9975 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9976 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9977 return do_mpc_arg1 (arg0, type, mpc_sinh);
9978 break;
48e1416a 9979
239d491a 9980 CASE_FLT_FN (BUILT_IN_CTAN):
9981 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9983 return do_mpc_arg1 (arg0, type, mpc_tan);
9984 break;
48e1416a 9985
239d491a 9986 CASE_FLT_FN (BUILT_IN_CTANH):
9987 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9988 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9989 return do_mpc_arg1 (arg0, type, mpc_tanh);
9990 break;
48e1416a 9991
239d491a 9992 CASE_FLT_FN (BUILT_IN_CLOG):
9993 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9994 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9995 return do_mpc_arg1 (arg0, type, mpc_log);
9996 break;
48e1416a 9997
239d491a 9998 CASE_FLT_FN (BUILT_IN_CSQRT):
9999 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10000 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 10001 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10002 break;
48e1416a 10003
0e7e6e7f 10004 CASE_FLT_FN (BUILT_IN_CASIN):
10005 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10006 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10007 return do_mpc_arg1 (arg0, type, mpc_asin);
10008 break;
48e1416a 10009
0e7e6e7f 10010 CASE_FLT_FN (BUILT_IN_CACOS):
10011 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10012 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10013 return do_mpc_arg1 (arg0, type, mpc_acos);
10014 break;
48e1416a 10015
0e7e6e7f 10016 CASE_FLT_FN (BUILT_IN_CATAN):
10017 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10018 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10019 return do_mpc_arg1 (arg0, type, mpc_atan);
10020 break;
48e1416a 10021
0e7e6e7f 10022 CASE_FLT_FN (BUILT_IN_CASINH):
10023 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10024 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10025 return do_mpc_arg1 (arg0, type, mpc_asinh);
10026 break;
48e1416a 10027
0e7e6e7f 10028 CASE_FLT_FN (BUILT_IN_CACOSH):
10029 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10030 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10031 return do_mpc_arg1 (arg0, type, mpc_acosh);
10032 break;
48e1416a 10033
0e7e6e7f 10034 CASE_FLT_FN (BUILT_IN_CATANH):
10035 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10036 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10037 return do_mpc_arg1 (arg0, type, mpc_atanh);
10038 break;
48e1416a 10039
4f35b1fc 10040 CASE_FLT_FN (BUILT_IN_CABS):
389dd41b 10041 return fold_builtin_cabs (loc, arg0, type, fndecl);
c63f4ad3 10042
abe4dcf6 10043 CASE_FLT_FN (BUILT_IN_CARG):
389dd41b 10044 return fold_builtin_carg (loc, arg0, type);
abe4dcf6 10045
4f35b1fc 10046 CASE_FLT_FN (BUILT_IN_SQRT):
389dd41b 10047 return fold_builtin_sqrt (loc, arg0, type);
805e22b2 10048
4f35b1fc 10049 CASE_FLT_FN (BUILT_IN_CBRT):
389dd41b 10050 return fold_builtin_cbrt (loc, arg0, type);
3bc5c41b 10051
728bac60 10052 CASE_FLT_FN (BUILT_IN_ASIN):
c2f47e15 10053 if (validate_arg (arg0, REAL_TYPE))
10054 return do_mpfr_arg1 (arg0, type, mpfr_asin,
728bac60 10055 &dconstm1, &dconst1, true);
10056 break;
10057
10058 CASE_FLT_FN (BUILT_IN_ACOS):
c2f47e15 10059 if (validate_arg (arg0, REAL_TYPE))
10060 return do_mpfr_arg1 (arg0, type, mpfr_acos,
728bac60 10061 &dconstm1, &dconst1, true);
10062 break;
10063
10064 CASE_FLT_FN (BUILT_IN_ATAN):
c2f47e15 10065 if (validate_arg (arg0, REAL_TYPE))
10066 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
728bac60 10067 break;
10068
10069 CASE_FLT_FN (BUILT_IN_ASINH):
c2f47e15 10070 if (validate_arg (arg0, REAL_TYPE))
10071 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
728bac60 10072 break;
10073
10074 CASE_FLT_FN (BUILT_IN_ACOSH):
c2f47e15 10075 if (validate_arg (arg0, REAL_TYPE))
10076 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
728bac60 10077 &dconst1, NULL, true);
10078 break;
10079
10080 CASE_FLT_FN (BUILT_IN_ATANH):
c2f47e15 10081 if (validate_arg (arg0, REAL_TYPE))
10082 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
728bac60 10083 &dconstm1, &dconst1, false);
10084 break;
10085
4f35b1fc 10086 CASE_FLT_FN (BUILT_IN_SIN):
c2f47e15 10087 if (validate_arg (arg0, REAL_TYPE))
10088 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
728bac60 10089 break;
77e89269 10090
4f35b1fc 10091 CASE_FLT_FN (BUILT_IN_COS):
389dd41b 10092 return fold_builtin_cos (loc, arg0, type, fndecl);
77e89269 10093
728bac60 10094 CASE_FLT_FN (BUILT_IN_TAN):
c2f47e15 10095 return fold_builtin_tan (arg0, type);
d735c391 10096
c5bb2c4b 10097 CASE_FLT_FN (BUILT_IN_CEXP):
389dd41b 10098 return fold_builtin_cexp (loc, arg0, type);
c5bb2c4b 10099
d735c391 10100 CASE_FLT_FN (BUILT_IN_CEXPI):
c2f47e15 10101 if (validate_arg (arg0, REAL_TYPE))
10102 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10103 break;
d92f994c 10104
728bac60 10105 CASE_FLT_FN (BUILT_IN_SINH):
c2f47e15 10106 if (validate_arg (arg0, REAL_TYPE))
10107 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
728bac60 10108 break;
10109
10110 CASE_FLT_FN (BUILT_IN_COSH):
389dd41b 10111 return fold_builtin_cosh (loc, arg0, type, fndecl);
728bac60 10112
10113 CASE_FLT_FN (BUILT_IN_TANH):
c2f47e15 10114 if (validate_arg (arg0, REAL_TYPE))
10115 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
728bac60 10116 break;
10117
29f4cd78 10118 CASE_FLT_FN (BUILT_IN_ERF):
c2f47e15 10119 if (validate_arg (arg0, REAL_TYPE))
10120 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
29f4cd78 10121 break;
10122
10123 CASE_FLT_FN (BUILT_IN_ERFC):
c2f47e15 10124 if (validate_arg (arg0, REAL_TYPE))
10125 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
29f4cd78 10126 break;
10127
32dba52b 10128 CASE_FLT_FN (BUILT_IN_TGAMMA):
c2f47e15 10129 if (validate_arg (arg0, REAL_TYPE))
10130 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
32dba52b 10131 break;
48e1416a 10132
4f35b1fc 10133 CASE_FLT_FN (BUILT_IN_EXP):
389dd41b 10134 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
467214fd 10135
4f35b1fc 10136 CASE_FLT_FN (BUILT_IN_EXP2):
389dd41b 10137 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
467214fd 10138
4f35b1fc 10139 CASE_FLT_FN (BUILT_IN_EXP10):
10140 CASE_FLT_FN (BUILT_IN_POW10):
389dd41b 10141 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
467214fd 10142
29f4cd78 10143 CASE_FLT_FN (BUILT_IN_EXPM1):
c2f47e15 10144 if (validate_arg (arg0, REAL_TYPE))
10145 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
f8dad9b4 10146 break;
48e1416a 10147
4f35b1fc 10148 CASE_FLT_FN (BUILT_IN_LOG):
f8dad9b4 10149 if (validate_arg (arg0, REAL_TYPE))
10150 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10151 break;
467214fd 10152
4f35b1fc 10153 CASE_FLT_FN (BUILT_IN_LOG2):
f8dad9b4 10154 if (validate_arg (arg0, REAL_TYPE))
10155 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10156 break;
467214fd 10157
4f35b1fc 10158 CASE_FLT_FN (BUILT_IN_LOG10):
f8dad9b4 10159 if (validate_arg (arg0, REAL_TYPE))
10160 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10161 break;
29f4cd78 10162
10163 CASE_FLT_FN (BUILT_IN_LOG1P):
c2f47e15 10164 if (validate_arg (arg0, REAL_TYPE))
10165 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
29f4cd78 10166 &dconstm1, NULL, false);
10167 break;
805e22b2 10168
65dd1378 10169 CASE_FLT_FN (BUILT_IN_J0):
10170 if (validate_arg (arg0, REAL_TYPE))
10171 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10172 NULL, NULL, 0);
10173 break;
10174
10175 CASE_FLT_FN (BUILT_IN_J1):
10176 if (validate_arg (arg0, REAL_TYPE))
10177 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10178 NULL, NULL, 0);
10179 break;
6ff9eeff 10180
10181 CASE_FLT_FN (BUILT_IN_Y0):
10182 if (validate_arg (arg0, REAL_TYPE))
10183 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10184 &dconst0, NULL, false);
10185 break;
10186
10187 CASE_FLT_FN (BUILT_IN_Y1):
10188 if (validate_arg (arg0, REAL_TYPE))
10189 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10190 &dconst0, NULL, false);
10191 break;
65dd1378 10192
4f35b1fc 10193 CASE_FLT_FN (BUILT_IN_NAN):
c4503c0a 10194 case BUILT_IN_NAND32:
10195 case BUILT_IN_NAND64:
10196 case BUILT_IN_NAND128:
c2f47e15 10197 return fold_builtin_nan (arg0, type, true);
b0db7939 10198
4f35b1fc 10199 CASE_FLT_FN (BUILT_IN_NANS):
c2f47e15 10200 return fold_builtin_nan (arg0, type, false);
b0db7939 10201
4f35b1fc 10202 CASE_FLT_FN (BUILT_IN_FLOOR):
389dd41b 10203 return fold_builtin_floor (loc, fndecl, arg0);
277f8dd2 10204
4f35b1fc 10205 CASE_FLT_FN (BUILT_IN_CEIL):
389dd41b 10206 return fold_builtin_ceil (loc, fndecl, arg0);
277f8dd2 10207
4f35b1fc 10208 CASE_FLT_FN (BUILT_IN_TRUNC):
389dd41b 10209 return fold_builtin_trunc (loc, fndecl, arg0);
277f8dd2 10210
4f35b1fc 10211 CASE_FLT_FN (BUILT_IN_ROUND):
389dd41b 10212 return fold_builtin_round (loc, fndecl, arg0);
89ab3887 10213
4f35b1fc 10214 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10215 CASE_FLT_FN (BUILT_IN_RINT):
389dd41b 10216 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
6528f4f4 10217
80ff6494 10218 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 10219 CASE_FLT_FN (BUILT_IN_LCEIL):
10220 CASE_FLT_FN (BUILT_IN_LLCEIL):
10221 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 10222 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 10223 CASE_FLT_FN (BUILT_IN_LLFLOOR):
80ff6494 10224 CASE_FLT_FN (BUILT_IN_IROUND):
a0c938f0 10225 CASE_FLT_FN (BUILT_IN_LROUND):
4f35b1fc 10226 CASE_FLT_FN (BUILT_IN_LLROUND):
389dd41b 10227 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
34f17811 10228
80ff6494 10229 CASE_FLT_FN (BUILT_IN_IRINT):
4f35b1fc 10230 CASE_FLT_FN (BUILT_IN_LRINT):
10231 CASE_FLT_FN (BUILT_IN_LLRINT):
389dd41b 10232 return fold_fixed_mathfn (loc, fndecl, arg0);
9ed65c7f 10233
74bdbe96 10234 case BUILT_IN_BSWAP16:
42791117 10235 case BUILT_IN_BSWAP32:
10236 case BUILT_IN_BSWAP64:
c2f47e15 10237 return fold_builtin_bswap (fndecl, arg0);
42791117 10238
4f35b1fc 10239 CASE_INT_FN (BUILT_IN_FFS):
10240 CASE_INT_FN (BUILT_IN_CLZ):
10241 CASE_INT_FN (BUILT_IN_CTZ):
6aaa1f9e 10242 CASE_INT_FN (BUILT_IN_CLRSB):
4f35b1fc 10243 CASE_INT_FN (BUILT_IN_POPCOUNT):
10244 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 10245 return fold_builtin_bitop (fndecl, arg0);
9c8a1629 10246
4f35b1fc 10247 CASE_FLT_FN (BUILT_IN_SIGNBIT):
389dd41b 10248 return fold_builtin_signbit (loc, arg0, type);
27f261ef 10249
cb2b9385 10250 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
389dd41b 10251 return fold_builtin_significand (loc, arg0, type);
cb2b9385 10252
10253 CASE_FLT_FN (BUILT_IN_ILOGB):
10254 CASE_FLT_FN (BUILT_IN_LOGB):
389dd41b 10255 return fold_builtin_logb (loc, arg0, type);
cb2b9385 10256
d49367d4 10257 case BUILT_IN_ISASCII:
389dd41b 10258 return fold_builtin_isascii (loc, arg0);
d49367d4 10259
10260 case BUILT_IN_TOASCII:
389dd41b 10261 return fold_builtin_toascii (loc, arg0);
d49367d4 10262
df1cf42e 10263 case BUILT_IN_ISDIGIT:
389dd41b 10264 return fold_builtin_isdigit (loc, arg0);
467214fd 10265
4f35b1fc 10266 CASE_FLT_FN (BUILT_IN_FINITE):
c4503c0a 10267 case BUILT_IN_FINITED32:
10268 case BUILT_IN_FINITED64:
10269 case BUILT_IN_FINITED128:
cde061c1 10270 case BUILT_IN_ISFINITE:
a65c4d64 10271 {
10272 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10273 if (ret)
10274 return ret;
10275 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10276 }
726069ba 10277
4f35b1fc 10278 CASE_FLT_FN (BUILT_IN_ISINF):
c4503c0a 10279 case BUILT_IN_ISINFD32:
10280 case BUILT_IN_ISINFD64:
10281 case BUILT_IN_ISINFD128:
a65c4d64 10282 {
10283 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10284 if (ret)
10285 return ret;
10286 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10287 }
10288
10289 case BUILT_IN_ISNORMAL:
10290 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
726069ba 10291
c319d56a 10292 case BUILT_IN_ISINF_SIGN:
389dd41b 10293 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
c319d56a 10294
4f35b1fc 10295 CASE_FLT_FN (BUILT_IN_ISNAN):
c4503c0a 10296 case BUILT_IN_ISNAND32:
10297 case BUILT_IN_ISNAND64:
10298 case BUILT_IN_ISNAND128:
389dd41b 10299 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
c2f47e15 10300
663870fc 10301 case BUILT_IN_FREE:
10302 if (integer_zerop (arg0))
10303 return build_empty_stmt (loc);
10304 break;
10305
c2f47e15 10306 default:
10307 break;
10308 }
10309
10310 return NULL_TREE;
10311
10312}
10313
10314/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
e80cc485 10315 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 10316
10317static tree
e80cc485 10318fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 10319{
10320 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10321 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10322
10323 switch (fcode)
10324 {
65dd1378 10325 CASE_FLT_FN (BUILT_IN_JN):
10326 if (validate_arg (arg0, INTEGER_TYPE)
10327 && validate_arg (arg1, REAL_TYPE))
10328 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10329 break;
6ff9eeff 10330
10331 CASE_FLT_FN (BUILT_IN_YN):
10332 if (validate_arg (arg0, INTEGER_TYPE)
10333 && validate_arg (arg1, REAL_TYPE))
10334 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10335 &dconst0, false);
10336 break;
e5407ca6 10337
10338 CASE_FLT_FN (BUILT_IN_DREM):
10339 CASE_FLT_FN (BUILT_IN_REMAINDER):
10340 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10341 && validate_arg (arg1, REAL_TYPE))
e5407ca6 10342 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10343 break;
e84da7c1 10344
10345 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10346 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10347 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10348 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 10349 return do_mpfr_lgamma_r (arg0, arg1, type);
10350 break;
c2f47e15 10351
10352 CASE_FLT_FN (BUILT_IN_ATAN2):
10353 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10354 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10355 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10356 break;
10357
10358 CASE_FLT_FN (BUILT_IN_FDIM):
10359 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10360 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10361 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10362 break;
10363
10364 CASE_FLT_FN (BUILT_IN_HYPOT):
389dd41b 10365 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
c2f47e15 10366
c699fab8 10367 CASE_FLT_FN (BUILT_IN_CPOW):
10368 if (validate_arg (arg0, COMPLEX_TYPE)
10369 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10370 && validate_arg (arg1, COMPLEX_TYPE)
48e1416a 10371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
652d9409 10372 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
c699fab8 10373 break;
c699fab8 10374
7587301b 10375 CASE_FLT_FN (BUILT_IN_LDEXP):
389dd41b 10376 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
7587301b 10377 CASE_FLT_FN (BUILT_IN_SCALBN):
10378 CASE_FLT_FN (BUILT_IN_SCALBLN):
389dd41b 10379 return fold_builtin_load_exponent (loc, arg0, arg1,
10380 type, /*ldexp=*/false);
7587301b 10381
3838b9ae 10382 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 10383 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 10384
ebf8b4f5 10385 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 10386 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 10387
c2f47e15 10388 case BUILT_IN_STRSTR:
389dd41b 10389 return fold_builtin_strstr (loc, arg0, arg1, type);
c2f47e15 10390
c2f47e15 10391 case BUILT_IN_STRSPN:
389dd41b 10392 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 10393
10394 case BUILT_IN_STRCSPN:
389dd41b 10395 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 10396
10397 case BUILT_IN_STRCHR:
10398 case BUILT_IN_INDEX:
389dd41b 10399 return fold_builtin_strchr (loc, arg0, arg1, type);
c2f47e15 10400
10401 case BUILT_IN_STRRCHR:
10402 case BUILT_IN_RINDEX:
389dd41b 10403 return fold_builtin_strrchr (loc, arg0, arg1, type);
c2f47e15 10404
c2f47e15 10405 case BUILT_IN_STRCMP:
389dd41b 10406 return fold_builtin_strcmp (loc, arg0, arg1);
c2f47e15 10407
10408 case BUILT_IN_STRPBRK:
389dd41b 10409 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 10410
10411 case BUILT_IN_EXPECT:
c83059be 10412 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 10413
10414 CASE_FLT_FN (BUILT_IN_POW):
389dd41b 10415 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
c2f47e15 10416
10417 CASE_FLT_FN (BUILT_IN_POWI):
389dd41b 10418 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
c2f47e15 10419
10420 CASE_FLT_FN (BUILT_IN_COPYSIGN):
389dd41b 10421 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
c2f47e15 10422
10423 CASE_FLT_FN (BUILT_IN_FMIN):
389dd41b 10424 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
c2f47e15 10425
10426 CASE_FLT_FN (BUILT_IN_FMAX):
389dd41b 10427 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
726069ba 10428
9bc9f15f 10429 case BUILT_IN_ISGREATER:
389dd41b 10430 return fold_builtin_unordered_cmp (loc, fndecl,
10431 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 10432 case BUILT_IN_ISGREATEREQUAL:
389dd41b 10433 return fold_builtin_unordered_cmp (loc, fndecl,
10434 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 10435 case BUILT_IN_ISLESS:
389dd41b 10436 return fold_builtin_unordered_cmp (loc, fndecl,
10437 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 10438 case BUILT_IN_ISLESSEQUAL:
389dd41b 10439 return fold_builtin_unordered_cmp (loc, fndecl,
10440 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 10441 case BUILT_IN_ISLESSGREATER:
389dd41b 10442 return fold_builtin_unordered_cmp (loc, fndecl,
10443 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 10444 case BUILT_IN_ISUNORDERED:
389dd41b 10445 return fold_builtin_unordered_cmp (loc, fndecl,
10446 arg0, arg1, UNORDERED_EXPR,
d5019fe8 10447 NOP_EXPR);
9bc9f15f 10448
7c2f0500 10449 /* We do the folding for va_start in the expander. */
10450 case BUILT_IN_VA_START:
10451 break;
f0613857 10452
0a39fd54 10453 case BUILT_IN_OBJECT_SIZE:
c2f47e15 10454 return fold_builtin_object_size (arg0, arg1);
0a39fd54 10455
1cd6e20d 10456 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10457 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10458
10459 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10460 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10461
c2f47e15 10462 default:
10463 break;
10464 }
10465 return NULL_TREE;
10466}
10467
10468/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 10469 and ARG2.
c2f47e15 10470 This function returns NULL_TREE if no simplification was possible. */
10471
10472static tree
389dd41b 10473fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 10474 tree arg0, tree arg1, tree arg2)
c2f47e15 10475{
10476 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10477 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10478 switch (fcode)
10479 {
10480
10481 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 10482 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 10483
10484 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 10485 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 10486 break;
10487
e5407ca6 10488 CASE_FLT_FN (BUILT_IN_REMQUO):
10489 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10490 && validate_arg (arg1, REAL_TYPE)
10491 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 10492 return do_mpfr_remquo (arg0, arg1, arg2);
10493 break;
e5407ca6 10494
c2f47e15 10495 case BUILT_IN_STRNCMP:
389dd41b 10496 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
c2f47e15 10497
7959b13b 10498 case BUILT_IN_MEMCHR:
389dd41b 10499 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
7959b13b 10500
c2f47e15 10501 case BUILT_IN_BCMP:
10502 case BUILT_IN_MEMCMP:
389dd41b 10503      return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 10504
c83059be 10505 case BUILT_IN_EXPECT:
10506 return fold_builtin_expect (loc, arg0, arg1, arg2);
10507
0c93c8a9 10508 case BUILT_IN_ADD_OVERFLOW:
10509 case BUILT_IN_SUB_OVERFLOW:
10510 case BUILT_IN_MUL_OVERFLOW:
10511 case BUILT_IN_SADD_OVERFLOW:
10512 case BUILT_IN_SADDL_OVERFLOW:
10513 case BUILT_IN_SADDLL_OVERFLOW:
10514 case BUILT_IN_SSUB_OVERFLOW:
10515 case BUILT_IN_SSUBL_OVERFLOW:
10516 case BUILT_IN_SSUBLL_OVERFLOW:
10517 case BUILT_IN_SMUL_OVERFLOW:
10518 case BUILT_IN_SMULL_OVERFLOW:
10519 case BUILT_IN_SMULLL_OVERFLOW:
10520 case BUILT_IN_UADD_OVERFLOW:
10521 case BUILT_IN_UADDL_OVERFLOW:
10522 case BUILT_IN_UADDLL_OVERFLOW:
10523 case BUILT_IN_USUB_OVERFLOW:
10524 case BUILT_IN_USUBL_OVERFLOW:
10525 case BUILT_IN_USUBLL_OVERFLOW:
10526 case BUILT_IN_UMUL_OVERFLOW:
10527 case BUILT_IN_UMULL_OVERFLOW:
10528 case BUILT_IN_UMULLL_OVERFLOW:
10529 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10530
650e4c94 10531 default:
10532 break;
10533 }
c2f47e15 10534 return NULL_TREE;
10535}
650e4c94 10536
c2f47e15 10537/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 10538 arguments. IGNORE is true if the result of the
10539 function call is ignored. This function returns NULL_TREE if no
10540 simplification was possible. */
48e1416a 10541
2165588a 10542tree
e80cc485 10543fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 10544{
10545 tree ret = NULL_TREE;
a7f5bb2d 10546
c2f47e15 10547 switch (nargs)
10548 {
10549 case 0:
e80cc485 10550 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 10551 break;
10552 case 1:
e80cc485 10553 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 10554 break;
10555 case 2:
e80cc485 10556 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 10557 break;
10558 case 3:
e80cc485 10559 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 10560 break;
c2f47e15 10561 default:
e80cc485 10562 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 10563 break;
10564 }
10565 if (ret)
10566 {
75a70cf9 10567 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 10568 SET_EXPR_LOCATION (ret, loc);
c2f47e15 10569 TREE_NO_WARNING (ret) = 1;
10570 return ret;
10571 }
10572 return NULL_TREE;
10573}
10574
0e80b01d 10575/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10576 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10577 of arguments in ARGS to be omitted. OLDNARGS is the number of
10578 elements in ARGS. */
c2f47e15 10579
10580static tree
0e80b01d 10581rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10582 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 10583{
0e80b01d 10584 int nargs = oldnargs - skip + n;
10585 tree *buffer;
c2f47e15 10586
0e80b01d 10587 if (n > 0)
c2f47e15 10588 {
0e80b01d 10589 int i, j;
c2f47e15 10590
0e80b01d 10591 buffer = XALLOCAVEC (tree, nargs);
10592 for (i = 0; i < n; i++)
10593 buffer[i] = va_arg (newargs, tree);
10594 for (j = skip; j < oldnargs; j++, i++)
10595 buffer[i] = args[j];
10596 }
10597 else
10598 buffer = args + skip;
19fbe3a4 10599
0e80b01d 10600 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10601}
c2f47e15 10602
198622c0 10603/* Return true if FNDECL shouldn't be folded right now.
10604 If a built-in function has an inline attribute always_inline
 10605   wrapper, defer folding it until after always_inline functions have
 10606   been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10607 might not be performed. */
10608
51d2c51e 10609bool
198622c0 10610avoid_folding_inline_builtin (tree fndecl)
10611{
10612 return (DECL_DECLARED_INLINE_P (fndecl)
10613 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10614 && cfun
10615 && !cfun->always_inline_functions_inlined
10616 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10617}
10618
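/* The kind of wrapper this check guards: roughly the shape glibc's
   _FORTIFY_SOURCE headers use (a sketch with abbreviated attributes, not
   the real header).  Folding the memcpy call before such a wrapper is
   inlined would bypass the object-size check it introduces.

   #include <stddef.h>

   extern inline __attribute__ ((always_inline, gnu_inline, artificial))
   void *
   memcpy (void *dest, const void *src, size_t n)
   {
     return __builtin___memcpy_chk (dest, src, n,
                                    __builtin_object_size (dest, 0));
   }
*/
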
4ee9c684 10619/* A wrapper function for builtin folding that prevents warnings for
491e04ef 10620 "statement without effect" and the like, caused by removing the
4ee9c684 10621 call node earlier than the warning is generated. */
10622
10623tree
389dd41b 10624fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 10625{
c2f47e15 10626 tree ret = NULL_TREE;
10627 tree fndecl = get_callee_fndecl (exp);
10628 if (fndecl
10629 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 10630 && DECL_BUILT_IN (fndecl)
10631 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10632 yet. Defer folding until we see all the arguments
10633 (after inlining). */
10634 && !CALL_EXPR_VA_ARG_PACK (exp))
10635 {
10636 int nargs = call_expr_nargs (exp);
10637
10638 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10639 instead last argument is __builtin_va_arg_pack (). Defer folding
10640 even in that case, until arguments are finalized. */
10641 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10642 {
10643 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10644 if (fndecl2
10645 && TREE_CODE (fndecl2) == FUNCTION_DECL
10646 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10647 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10648 return NULL_TREE;
10649 }
10650
198622c0 10651 if (avoid_folding_inline_builtin (fndecl))
10652 return NULL_TREE;
10653
c2f47e15 10654 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 10655 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10656 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 10657 else
10658 {
9d884767 10659 tree *args = CALL_EXPR_ARGP (exp);
10660 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 10661 if (ret)
389dd41b 10662 return ret;
c2f47e15 10663 }
4ee9c684 10664 }
c2f47e15 10665 return NULL_TREE;
10666}
48e1416a 10667
9d884767 10668/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10669 N arguments are passed in the array ARGARRAY. Return a folded
10670 expression or NULL_TREE if no simplification was possible. */
805e22b2 10671
10672tree
9d884767 10673fold_builtin_call_array (location_t loc, tree,
d01f58f9 10674 tree fn,
10675 int n,
10676 tree *argarray)
7e15618b 10677{
9d884767 10678 if (TREE_CODE (fn) != ADDR_EXPR)
10679 return NULL_TREE;
c2f47e15 10680
9d884767 10681 tree fndecl = TREE_OPERAND (fn, 0);
10682 if (TREE_CODE (fndecl) == FUNCTION_DECL
10683 && DECL_BUILT_IN (fndecl))
10684 {
10685 /* If last argument is __builtin_va_arg_pack (), arguments to this
10686 function are not finalized yet. Defer folding until they are. */
10687 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10688 {
10689 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10690 if (fndecl2
10691 && TREE_CODE (fndecl2) == FUNCTION_DECL
10692 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10693 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10694 return NULL_TREE;
10695 }
10696 if (avoid_folding_inline_builtin (fndecl))
10697 return NULL_TREE;
10698 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10699 return targetm.fold_builtin (fndecl, n, argarray, false);
10700 else
10701 return fold_builtin_n (loc, fndecl, argarray, n, false);
10702 }
c2f47e15 10703
9d884767 10704 return NULL_TREE;
c2f47e15 10705}
10706
af1409ad 10707/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10708 along with N new arguments specified as the "..." parameters. SKIP
10709 is the number of arguments in EXP to be omitted. This function is used
10710 to do varargs-to-varargs transformations. */
10711
10712static tree
10713rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10714{
10715 va_list ap;
10716 tree t;
10717
10718 va_start (ap, n);
10719 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10720 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10721 va_end (ap);
c2f47e15 10722
af1409ad 10723 return t;
c2f47e15 10724}
10725
10726/* Validate a single argument ARG against a tree code CODE representing
10727 a type. */
48e1416a 10728
c2f47e15 10729static bool
b7bf20db 10730validate_arg (const_tree arg, enum tree_code code)
c2f47e15 10731{
10732 if (!arg)
10733 return false;
10734 else if (code == POINTER_TYPE)
10735 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 10736 else if (code == INTEGER_TYPE)
10737 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 10738 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 10739}
0eb671f7 10740
75a70cf9 10741/* This function validates the types of a function call argument list
10742 against a specified list of tree_codes. If the last specifier is a 0,
10743 that represents an ellipses, otherwise the last specifier must be a
10744 VOID_TYPE.
10745
10746 This is the GIMPLE version of validate_arglist. Eventually we want to
10747 completely convert builtins.c to work from GIMPLEs and the tree based
10748 validate_arglist will then be removed. */
10749
10750bool
1a91d914 10751validate_gimple_arglist (const gcall *call, ...)
75a70cf9 10752{
10753 enum tree_code code;
10754 bool res = 0;
10755 va_list ap;
10756 const_tree arg;
10757 size_t i;
10758
10759 va_start (ap, call);
10760 i = 0;
10761
10762 do
10763 {
d62e827b 10764 code = (enum tree_code) va_arg (ap, int);
75a70cf9 10765 switch (code)
10766 {
10767 case 0:
10768 /* This signifies an ellipses, any further arguments are all ok. */
10769 res = true;
10770 goto end;
10771 case VOID_TYPE:
10772 /* This signifies an endlink, if no arguments remain, return
10773 true, otherwise return false. */
10774 res = (i == gimple_call_num_args (call));
10775 goto end;
10776 default:
10777 /* If no parameters remain or the parameter's code does not
10778 match the specified code, return false. Otherwise continue
10779 checking any remaining arguments. */
10780 arg = gimple_call_arg (call, i++);
10781 if (!validate_arg (arg, code))
10782 goto end;
10783 break;
10784 }
10785 }
10786 while (1);
10787
10788 /* We need gotos here since we can only have one VA_CLOSE in a
10789 function. */
10790 end: ;
10791 va_end (ap);
10792
10793 return res;
10794}
10795
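/* The same sentinel-terminated varargs pattern in miniature, standalone:
   walk va_arg until a terminating 0, just as callers of the function above
   pass a list of tree codes ending in VOID_TYPE or 0.

   #include <stdarg.h>
   #include <stdio.h>

   static int
   matches (const int *vals, int n, ...)
   {
     va_list ap;
     int i = 0, code, ok = 1;
     va_start (ap, n);
     while ((code = va_arg (ap, int)) != 0)
       ok = ok && i < n && vals[i++] == code;
     va_end (ap);
     return ok && i == n;
   }

   int
   main (void)
   {
     int v[] = { 1, 2, 3 };
     printf ("%d\n", matches (v, 3, 1, 2, 3, 0));   // prints "1"
     return 0;
   }
*/
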
fc2a2dcb 10796/* Default target-specific builtin expander that does nothing. */
10797
10798rtx
aecda0d6 10799default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10800 rtx target ATTRIBUTE_UNUSED,
10801 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 10802 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 10803 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 10804{
10805 return NULL_RTX;
10806}
c7926a82 10807
01537105 10808/* Returns true if EXP represents data that would potentially reside
10809 in a readonly section. */
10810
b9ea678c 10811bool
01537105 10812readonly_data_expr (tree exp)
10813{
10814 STRIP_NOPS (exp);
10815
9ff0637e 10816 if (TREE_CODE (exp) != ADDR_EXPR)
10817 return false;
10818
10819 exp = get_base_address (TREE_OPERAND (exp, 0));
10820 if (!exp)
10821 return false;
10822
10823 /* Make sure we call decl_readonly_section only for trees it
10824 can handle (since it returns true for everything it doesn't
10825 understand). */
491e04ef 10826 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10827 || TREE_CODE (exp) == CONSTRUCTOR
10828 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10829 return decl_readonly_section (exp, 0);
01537105 10830 else
10831 return false;
10832}
4ee9c684 10833
c2f47e15 10834/* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10835 to the call, and TYPE is its return type.
4ee9c684 10836
c2f47e15 10837 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10838 simplified form of the call as a tree.
10839
10840 The simplified form may be a constant or other expression which
10841 computes the same value, but in a more efficient manner (including
10842 calls to other builtin functions).
10843
10844 The call may contain arguments which need to be evaluated, but
10845 which are not useful to determine the result of the call. In
10846 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10847 COMPOUND_EXPR will be an argument which must be evaluated.
10848 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10849 COMPOUND_EXPR in the chain will contain the tree for the simplified
10850 form of the builtin function call. */
10851
10852static tree
389dd41b 10853fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10854{
c2f47e15 10855 if (!validate_arg (s1, POINTER_TYPE)
10856 || !validate_arg (s2, POINTER_TYPE))
10857 return NULL_TREE;
4ee9c684 10858 else
10859 {
4ee9c684 10860 tree fn;
10861 const char *p1, *p2;
10862
10863 p2 = c_getstr (s2);
10864 if (p2 == NULL)
c2f47e15 10865 return NULL_TREE;
4ee9c684 10866
10867 p1 = c_getstr (s1);
10868 if (p1 != NULL)
10869 {
10870 const char *r = strstr (p1, p2);
daa1d5f5 10871 tree tem;
4ee9c684 10872
4ee9c684 10873 if (r == NULL)
779b4c41 10874 return build_int_cst (TREE_TYPE (s1), 0);
c0c67e38 10875
10876 /* Return an offset into the constant string argument. */
2cc66f2a 10877 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10878 return fold_convert_loc (loc, type, tem);
4ee9c684 10879 }
10880
7efa231c 10881 /* The argument is const char *, and the result is char *, so we need
10882 a type conversion here to avoid a warning. */
4ee9c684 10883 if (p2[0] == '\0')
389dd41b 10884 return fold_convert_loc (loc, type, s1);
4ee9c684 10885
10886 if (p2[1] != '\0')
c2f47e15 10887 return NULL_TREE;
4ee9c684 10888
b9a16870 10889 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10890 if (!fn)
c2f47e15 10891 return NULL_TREE;
4ee9c684 10892
10893 /* New argument list transforming strstr(s1, s2) to
10894 strchr(s1, s2[0]). */
7002a1c8 10895 return build_call_expr_loc (loc, fn, 2, s1,
10896 build_int_cst (integer_type_node, p2[0]));
4ee9c684 10897 }
10898}
10899
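/* A standalone check of the single-character transformation above:
   strstr (s, "b") and strchr (s, 'b') return the same pointer, and an
   empty pattern yields s itself.

   #include <stdio.h>
   #include <string.h>

   int
   main (void)
   {
     const char *s = "fold the builtins";
     printf ("%d\n", strstr (s, "b") == strchr (s, 'b'));   // prints "1"
     printf ("%d\n", strstr (s, "") == s);                  // prints "1"
     return 0;
   }
*/
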
c2f47e15 10900/* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10901 the call, and TYPE is its return type.
4ee9c684 10902
c2f47e15 10903 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10904 simplified form of the call as a tree.
10905
10906 The simplified form may be a constant or other expression which
10907 computes the same value, but in a more efficient manner (including
10908 calls to other builtin functions).
10909
10910 The call may contain arguments which need to be evaluated, but
10911 which are not useful to determine the result of the call. In
10912 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10913 COMPOUND_EXPR will be an argument which must be evaluated.
10914 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10915 COMPOUND_EXPR in the chain will contain the tree for the simplified
10916 form of the builtin function call. */
10917
10918static tree
389dd41b 10919fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10920{
c2f47e15 10921 if (!validate_arg (s1, POINTER_TYPE)
10922 || !validate_arg (s2, INTEGER_TYPE))
10923 return NULL_TREE;
4ee9c684 10924 else
10925 {
4ee9c684 10926 const char *p1;
10927
10928 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10929 return NULL_TREE;
4ee9c684 10930
10931 p1 = c_getstr (s1);
10932 if (p1 != NULL)
10933 {
10934 char c;
10935 const char *r;
daa1d5f5 10936 tree tem;
4ee9c684 10937
10938 if (target_char_cast (s2, &c))
c2f47e15 10939 return NULL_TREE;
4ee9c684 10940
10941 r = strchr (p1, c);
10942
10943 if (r == NULL)
779b4c41 10944 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10945
10946 /* Return an offset into the constant string argument. */
2cc66f2a 10947 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10948 return fold_convert_loc (loc, type, tem);
4ee9c684 10949 }
c2f47e15 10950 return NULL_TREE;
4ee9c684 10951 }
10952}
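
/* For example, with a literal first argument the search is performed at
   compile time:

     strchr ("hello", 'l')  folds to  "hello" + 2
     strchr ("hello", 'z')  folds to  a null pointer constant

   A call whose first argument is not a known string is left alone.  */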
10953
c2f47e15 10954/* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10955 the call, and TYPE is its return type.
4ee9c684 10956
c2f47e15 10957 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10958 simplified form of the call as a tree.
10959
10960 The simplified form may be a constant or other expression which
10961 computes the same value, but in a more efficient manner (including
10962 calls to other builtin functions).
10963
10964 The call may contain arguments which need to be evaluated, but
10965 which are not useful to determine the result of the call. In
10966 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10967 COMPOUND_EXPR will be an argument which must be evaluated.
10968 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10969 COMPOUND_EXPR in the chain will contain the tree for the simplified
10970 form of the builtin function call. */
10971
10972static tree
389dd41b 10973fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10974{
c2f47e15 10975 if (!validate_arg (s1, POINTER_TYPE)
10976 || !validate_arg (s2, INTEGER_TYPE))
10977 return NULL_TREE;
4ee9c684 10978 else
10979 {
4ee9c684 10980 tree fn;
10981 const char *p1;
10982
10983 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10984 return NULL_TREE;
4ee9c684 10985
10986 p1 = c_getstr (s1);
10987 if (p1 != NULL)
10988 {
10989 char c;
10990 const char *r;
daa1d5f5 10991 tree tem;
4ee9c684 10992
10993 if (target_char_cast (s2, &c))
c2f47e15 10994 return NULL_TREE;
4ee9c684 10995
10996 r = strrchr (p1, c);
10997
10998 if (r == NULL)
779b4c41 10999 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 11000
11001 /* Return an offset into the constant string argument. */
2cc66f2a 11002 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 11003 return fold_convert_loc (loc, type, tem);
4ee9c684 11004 }
11005
11006 if (! integer_zerop (s2))
c2f47e15 11007 return NULL_TREE;
4ee9c684 11008
b9a16870 11009 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 11010 if (!fn)
c2f47e15 11011 return NULL_TREE;
4ee9c684 11012
11013 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
389dd41b 11014 return build_call_expr_loc (loc, fn, 2, s1, s2);
4ee9c684 11015 }
11016}
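
/* For example:

     strrchr ("banana", 'a')  folds to  "banana" + 5
     strrchr (s, '\0')        folds to  strchr (s, '\0')

   The second fold is safe because the terminating NUL occurs exactly
   once, so the cheaper forward search finds the same character.  */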
11017
c2f47e15 11018/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11019 to the call, and TYPE is its return type.
4ee9c684 11020
c2f47e15 11021 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11022 simplified form of the call as a tree.
11023
11024 The simplified form may be a constant or other expression which
11025 computes the same value, but in a more efficient manner (including
11026 calls to other builtin functions).
11027
11028 The call may contain arguments which need to be evaluated, but
11029 which are not useful to determine the result of the call. In
11030 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11031 COMPOUND_EXPR will be an argument which must be evaluated.
11032 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11033 COMPOUND_EXPR in the chain will contain the tree for the simplified
11034 form of the builtin function call. */
11035
11036static tree
389dd41b 11037fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 11038{
c2f47e15 11039 if (!validate_arg (s1, POINTER_TYPE)
11040 || !validate_arg (s2, POINTER_TYPE))
11041 return NULL_TREE;
4ee9c684 11042 else
11043 {
4ee9c684 11044 tree fn;
11045 const char *p1, *p2;
11046
11047 p2 = c_getstr (s2);
11048 if (p2 == NULL)
c2f47e15 11049 return NULL_TREE;
4ee9c684 11050
11051 p1 = c_getstr (s1);
11052 if (p1 != NULL)
11053 {
11054 const char *r = strpbrk (p1, p2);
daa1d5f5 11055 tree tem;
4ee9c684 11056
11057 if (r == NULL)
779b4c41 11058 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 11059
11060 /* Return an offset into the constant string argument. */
2cc66f2a 11061 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 11062 return fold_convert_loc (loc, type, tem);
4ee9c684 11063 }
11064
11065 if (p2[0] == '\0')
05abc81b 11066 /* strpbrk(x, "") == NULL.
11067	   Evaluate and ignore s1 in case it has side-effects.  */

389dd41b 11068 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 11069
11070 if (p2[1] != '\0')
c2f47e15 11071 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 11072
b9a16870 11073 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 11074 if (!fn)
c2f47e15 11075 return NULL_TREE;
4ee9c684 11076
11077 /* New argument list transforming strpbrk(s1, s2) to
11078 strchr(s1, s2[0]). */
7002a1c8 11079 return build_call_expr_loc (loc, fn, 2, s1,
11080 build_int_cst (integer_type_node, p2[0]));
4ee9c684 11081 }
11082}
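
/* For example:

     strpbrk ("abc", "cb")  folds to  "abc" + 1
     strpbrk (s, "")        folds to  a null pointer, still evaluating S
                            for its side effects
     strpbrk (s, "x")       folds to  strchr (s, 'x')

   A search set longer than one character combined with a non-constant
   first argument remains a real call to strpbrk.  */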
11083
c2f47e15 11084/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11085 to the call.
4ee9c684 11086
c2f47e15 11087 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11088 simplified form of the call as a tree.
11089
11090 The simplified form may be a constant or other expression which
11091 computes the same value, but in a more efficient manner (including
11092 calls to other builtin functions).
11093
11094 The call may contain arguments which need to be evaluated, but
11095 which are not useful to determine the result of the call. In
11096 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11097 COMPOUND_EXPR will be an argument which must be evaluated.
11098 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11099 COMPOUND_EXPR in the chain will contain the tree for the simplified
11100 form of the builtin function call. */
11101
11102static tree
389dd41b 11103fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 11104{
c2f47e15 11105 if (!validate_arg (s1, POINTER_TYPE)
11106 || !validate_arg (s2, POINTER_TYPE))
11107 return NULL_TREE;
4ee9c684 11108 else
11109 {
4ee9c684 11110 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11111
11112 /* If both arguments are constants, evaluate at compile-time. */
11113 if (p1 && p2)
11114 {
11115 const size_t r = strspn (p1, p2);
547b938d 11116 return build_int_cst (size_type_node, r);
4ee9c684 11117 }
11118
c2f47e15 11119      /* If either argument is "", the result is zero.  */
4ee9c684 11120 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 11121 /* Evaluate and ignore both arguments in case either one has
11122 side-effects. */
389dd41b 11123 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 11124 s1, s2);
c2f47e15 11125 return NULL_TREE;
4ee9c684 11126 }
11127}
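
/* For example:

     strspn ("aabbcc", "ab")  folds to  (size_t) 4
     strspn (s, "")           folds to  (size_t) 0, evaluating both
                              arguments for their side effects

   Anything else remains a real call to strspn.  */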
11128
c2f47e15 11129/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11130 to the call.
4ee9c684 11131
c2f47e15 11132 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11133 simplified form of the call as a tree.
11134
11135 The simplified form may be a constant or other expression which
11136 computes the same value, but in a more efficient manner (including
11137 calls to other builtin functions).
11138
11139 The call may contain arguments which need to be evaluated, but
11140 which are not useful to determine the result of the call. In
11141 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11142 COMPOUND_EXPR will be an argument which must be evaluated.
11143 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11144 COMPOUND_EXPR in the chain will contain the tree for the simplified
11145 form of the builtin function call. */
11146
11147static tree
389dd41b 11148fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 11149{
c2f47e15 11150 if (!validate_arg (s1, POINTER_TYPE)
11151 || !validate_arg (s2, POINTER_TYPE))
11152 return NULL_TREE;
4ee9c684 11153 else
11154 {
4ee9c684 11155 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11156
11157 /* If both arguments are constants, evaluate at compile-time. */
11158 if (p1 && p2)
11159 {
11160 const size_t r = strcspn (p1, p2);
547b938d 11161 return build_int_cst (size_type_node, r);
4ee9c684 11162 }
11163
c2f47e15 11164      /* If the first argument is "", the result is zero.  */
4ee9c684 11165 if (p1 && *p1 == '\0')
11166 {
11167 /* Evaluate and ignore argument s2 in case it has
11168 side-effects. */
389dd41b 11169 return omit_one_operand_loc (loc, size_type_node,
39761420 11170 size_zero_node, s2);
4ee9c684 11171 }
11172
11173 /* If the second argument is "", return __builtin_strlen(s1). */
11174 if (p2 && *p2 == '\0')
11175 {
b9a16870 11176 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 11177
11178 /* If the replacement _DECL isn't initialized, don't do the
11179 transformation. */
11180 if (!fn)
c2f47e15 11181 return NULL_TREE;
4ee9c684 11182
389dd41b 11183 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 11184 }
c2f47e15 11185 return NULL_TREE;
4ee9c684 11186 }
11187}
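
/* For example:

     strcspn ("abcde", "dx")  folds to  (size_t) 3
     strcspn ("", s)          folds to  (size_t) 0, evaluating S
     strcspn (s, "")          folds to  strlen (s)

   Anything else remains a real call to strcspn.  */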
11188
c2f47e15 11189/* Fold the next_arg or va_start call EXP.  Return true if an error was
743b0c6a 11190   produced, false otherwise.  This is done so that we don't output the error
 11191   or warning twice or three times.  */
75a70cf9 11192
743b0c6a 11193bool
c2f47e15 11194fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 11195{
11196 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 11197 int nargs = call_expr_nargs (exp);
11198 tree arg;
d98fd4a4 11199  /* There is a good chance the current input_location points inside the
 11200     definition of the va_start macro (perhaps on the token for the
 11201     builtin) in a system header, so warnings will not be emitted.
11202 Use the location in real source code. */
11203 source_location current_location =
11204 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11205 NULL);
4ee9c684 11206
257d99c3 11207 if (!stdarg_p (fntype))
743b0c6a 11208 {
11209 error ("%<va_start%> used in function with fixed args");
11210 return true;
11211 }
c2f47e15 11212
11213 if (va_start_p)
79012a9d 11214 {
c2f47e15 11215 if (va_start_p && (nargs != 2))
11216 {
11217 error ("wrong number of arguments to function %<va_start%>");
11218 return true;
11219 }
11220 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 11221 }
11222 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11223 when we checked the arguments and if needed issued a warning. */
c2f47e15 11224 else
4ee9c684 11225 {
c2f47e15 11226 if (nargs == 0)
11227 {
11228 /* Evidently an out of date version of <stdarg.h>; can't validate
11229 va_start's second argument, but can still work as intended. */
d98fd4a4 11230 warning_at (current_location,
7edb1062 11231 OPT_Wvarargs,
11232 "%<__builtin_next_arg%> called without an argument");
c2f47e15 11233 return true;
11234 }
11235 else if (nargs > 1)
a0c938f0 11236 {
c2f47e15 11237 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 11238 return true;
11239 }
c2f47e15 11240 arg = CALL_EXPR_ARG (exp, 0);
11241 }
11242
a8dd994c 11243 if (TREE_CODE (arg) == SSA_NAME)
11244 arg = SSA_NAME_VAR (arg);
11245
c2f47e15 11246 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 11247 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 11248 the arguments and if needed issuing a warning. */
11249 if (!integer_zerop (arg))
11250 {
11251 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 11252
4ee9c684 11253 /* Strip off all nops for the sake of the comparison. This
11254 is not quite the same as STRIP_NOPS. It does more.
11255 We must also strip off INDIRECT_EXPR for C++ reference
11256 parameters. */
72dd6141 11257 while (CONVERT_EXPR_P (arg)
4ee9c684 11258 || TREE_CODE (arg) == INDIRECT_REF)
11259 arg = TREE_OPERAND (arg, 0);
11260 if (arg != last_parm)
a0c938f0 11261 {
b08cf617 11262	  /* FIXME: Sometimes with the tree optimizers we can get something
 11263	     other than the last argument even though the user used the last
11264 argument. We just warn and set the arg to be the last
11265 argument so that we will get wrong-code because of
11266 it. */
d98fd4a4 11267 warning_at (current_location,
7edb1062 11268 OPT_Wvarargs,
d98fd4a4 11269 "second parameter of %<va_start%> not last named argument");
743b0c6a 11270 }
24158ad7 11271
11272 /* Undefined by C99 7.15.1.4p4 (va_start):
11273 "If the parameter parmN is declared with the register storage
11274 class, with a function or array type, or with a type that is
11275 not compatible with the type that results after application of
11276 the default argument promotions, the behavior is undefined."
11277 */
11278 else if (DECL_REGISTER (arg))
d98fd4a4 11279 {
11280 warning_at (current_location,
7edb1062 11281 OPT_Wvarargs,
d98fd4a4 11282 "undefined behaviour when second parameter of "
11283 "%<va_start%> is declared with %<register%> storage");
11284 }
24158ad7 11285
79012a9d 11286 /* We want to verify the second parameter just once before the tree
a0c938f0 11287 optimizers are run and then avoid keeping it in the tree,
11288 as otherwise we could warn even for correct code like:
11289 void foo (int i, ...)
11290 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 11291 if (va_start_p)
11292 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11293 else
11294 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 11295 }
11296 return false;
4ee9c684 11297}
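
/* For example, the checks above accept

     void ok (int n, ...)
     {
       va_list ap;
       va_start (ap, n);   // n is the last named parameter
       va_end (ap);
     }

   and diagnose

     void bad (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);   // warning: second parameter of 'va_start'
       va_end (ap);        //          not last named argument
     }

   After the check the second va_start argument is replaced by 0, so the
   later optimizers neither re-check it nor warn spuriously.  */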
11298
11299
c2f47e15 11300/* Expand a call EXP to __builtin_object_size. */
0a39fd54 11301
f7715905 11302static rtx
0a39fd54 11303expand_builtin_object_size (tree exp)
11304{
11305 tree ost;
11306 int object_size_type;
11307 tree fndecl = get_callee_fndecl (exp);
0a39fd54 11308
c2f47e15 11309 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 11310 {
b8c23db3 11311 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11312 exp, fndecl);
0a39fd54 11313 expand_builtin_trap ();
11314 return const0_rtx;
11315 }
11316
c2f47e15 11317 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 11318 STRIP_NOPS (ost);
11319
11320 if (TREE_CODE (ost) != INTEGER_CST
11321 || tree_int_cst_sgn (ost) < 0
11322 || compare_tree_int (ost, 3) > 0)
11323 {
b8c23db3 11324 error ("%Klast argument of %D is not integer constant between 0 and 3",
11325 exp, fndecl);
0a39fd54 11326 expand_builtin_trap ();
11327 return const0_rtx;
11328 }
11329
e913b5cd 11330 object_size_type = tree_to_shwi (ost);
0a39fd54 11331
11332 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11333}
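
/* For example, when a call survives to RTL expansion without having been
   folded to a constant by the object-size machinery:

     __builtin_object_size (p, 0)   // expands to (size_t) -1
     __builtin_object_size (p, 2)   // expands to (size_t) 0
     __builtin_object_size (p, x)   // non-constant type: error + trap

   i.e. maximum-size queries (types 0 and 1) fall back to "unknown, assume
   unlimited" and minimum-size queries (types 2 and 3) to "unknown, assume
   empty".  */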
11334
11335/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11336 FCODE is the BUILT_IN_* to use.
c2f47e15 11337 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 11338 otherwise try to get the result in TARGET, if convenient (and in
11339 mode MODE if that's convenient). */
11340
11341static rtx
3754d046 11342expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 11343 enum built_in_function fcode)
11344{
0a39fd54 11345 tree dest, src, len, size;
11346
c2f47e15 11347 if (!validate_arglist (exp,
0a39fd54 11348 POINTER_TYPE,
11349 fcode == BUILT_IN_MEMSET_CHK
11350 ? INTEGER_TYPE : POINTER_TYPE,
11351 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 11352 return NULL_RTX;
0a39fd54 11353
c2f47e15 11354 dest = CALL_EXPR_ARG (exp, 0);
11355 src = CALL_EXPR_ARG (exp, 1);
11356 len = CALL_EXPR_ARG (exp, 2);
11357 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11358
e913b5cd 11359 if (! tree_fits_uhwi_p (size))
c2f47e15 11360 return NULL_RTX;
0a39fd54 11361
e913b5cd 11362 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 11363 {
11364 tree fn;
11365
11366 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11367 {
b430e8d9 11368 warning_at (tree_nonartificial_location (exp),
11369 0, "%Kcall to %D will always overflow destination buffer",
11370 exp, get_callee_fndecl (exp));
c2f47e15 11371 return NULL_RTX;
0a39fd54 11372 }
11373
0a39fd54 11374 fn = NULL_TREE;
11375 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11376 mem{cpy,pcpy,move,set} is available. */
11377 switch (fcode)
11378 {
11379 case BUILT_IN_MEMCPY_CHK:
b9a16870 11380 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 11381 break;
11382 case BUILT_IN_MEMPCPY_CHK:
b9a16870 11383 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 11384 break;
11385 case BUILT_IN_MEMMOVE_CHK:
b9a16870 11386 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 11387 break;
11388 case BUILT_IN_MEMSET_CHK:
b9a16870 11389 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 11390 break;
11391 default:
11392 break;
11393 }
11394
11395 if (! fn)
c2f47e15 11396 return NULL_RTX;
0a39fd54 11397
0568e9c1 11398 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 11399 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11400 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11401 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11402 }
11403 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 11404 return NULL_RTX;
0a39fd54 11405 else
11406 {
957d0361 11407 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 11408
11409 /* If DEST is not a pointer type, call the normal function. */
11410 if (dest_align == 0)
c2f47e15 11411 return NULL_RTX;
0a39fd54 11412
11413 /* If SRC and DEST are the same (and not volatile), do nothing. */
11414 if (operand_equal_p (src, dest, 0))
11415 {
11416 tree expr;
11417
11418 if (fcode != BUILT_IN_MEMPCPY_CHK)
11419 {
11420 /* Evaluate and ignore LEN in case it has side-effects. */
11421 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11422 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11423 }
11424
2cc66f2a 11425 expr = fold_build_pointer_plus (dest, len);
0a39fd54 11426 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11427 }
11428
11429 /* __memmove_chk special case. */
11430 if (fcode == BUILT_IN_MEMMOVE_CHK)
11431 {
957d0361 11432 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 11433
11434 if (src_align == 0)
c2f47e15 11435 return NULL_RTX;
0a39fd54 11436
11437 /* If src is categorized for a readonly section we can use
11438 normal __memcpy_chk. */
11439 if (readonly_data_expr (src))
11440 {
b9a16870 11441 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 11442 if (!fn)
c2f47e15 11443 return NULL_RTX;
0568e9c1 11444 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11445 dest, src, len, size);
a65c4d64 11446 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11447 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11448 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11449 }
11450 }
c2f47e15 11451 return NULL_RTX;
0a39fd54 11452 }
11453}
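
/* For example, with a constant length known not to exceed the object
   size the checked call degrades to the plain one:

     char d[16];
     __builtin___memcpy_chk (d, s, 8, __builtin_object_size (d, 0))
       -> memcpy (d, s, 8)

   A constant length larger than the known size keeps the checked call
   and triggers the "will always overflow" warning instead, and a
   __memmove_chk whose source is in read-only data is retried as
   __memcpy_chk.  */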
11454
11455/* Emit warning if a buffer overflow is detected at compile time. */
11456
11457static void
11458maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11459{
c2f47e15 11460 int is_strlen = 0;
0a39fd54 11461 tree len, size;
b430e8d9 11462 location_t loc = tree_nonartificial_location (exp);
0a39fd54 11463
11464 switch (fcode)
11465 {
11466 case BUILT_IN_STRCPY_CHK:
11467 case BUILT_IN_STPCPY_CHK:
11468 /* For __strcat_chk the warning will be emitted only if overflowing
11469 by at least strlen (dest) + 1 bytes. */
11470 case BUILT_IN_STRCAT_CHK:
c2f47e15 11471 len = CALL_EXPR_ARG (exp, 1);
11472 size = CALL_EXPR_ARG (exp, 2);
0a39fd54 11473 is_strlen = 1;
11474 break;
b356dfef 11475 case BUILT_IN_STRNCAT_CHK:
0a39fd54 11476 case BUILT_IN_STRNCPY_CHK:
1063acde 11477 case BUILT_IN_STPNCPY_CHK:
c2f47e15 11478 len = CALL_EXPR_ARG (exp, 2);
11479 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11480 break;
11481 case BUILT_IN_SNPRINTF_CHK:
11482 case BUILT_IN_VSNPRINTF_CHK:
c2f47e15 11483 len = CALL_EXPR_ARG (exp, 1);
11484 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11485 break;
11486 default:
11487 gcc_unreachable ();
11488 }
11489
0a39fd54 11490 if (!len || !size)
11491 return;
11492
e913b5cd 11493 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11494 return;
11495
11496 if (is_strlen)
11497 {
11498 len = c_strlen (len, 1);
e913b5cd 11499 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
0a39fd54 11500 return;
11501 }
b356dfef 11502 else if (fcode == BUILT_IN_STRNCAT_CHK)
11503 {
c2f47e15 11504 tree src = CALL_EXPR_ARG (exp, 1);
e913b5cd 11505 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
b356dfef 11506 return;
11507 src = c_strlen (src, 1);
e913b5cd 11508 if (! src || ! tree_fits_uhwi_p (src))
b356dfef 11509 {
b430e8d9 11510 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11511 exp, get_callee_fndecl (exp));
b356dfef 11512 return;
11513 }
11514 else if (tree_int_cst_lt (src, size))
11515 return;
11516 }
e913b5cd 11517 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
0a39fd54 11518 return;
11519
b430e8d9 11520 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11521 exp, get_callee_fndecl (exp));
0a39fd54 11522}
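
/* For example, when _FORTIFY_SOURCE rewrites string calls into their
   checked forms, code such as

     char buf[4];
     strcpy (buf, "too long");   // __builtin___strcpy_chk (buf,
                                 //   "too long", 4)

   is diagnosed here: the source needs 9 bytes including the NUL while
   the destination object is known to be only 4 bytes, so the call is
   reported as always overflowing the destination buffer.  */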
11523
11524/* Emit warning if a buffer overflow is detected at compile time
11525 in __sprintf_chk/__vsprintf_chk calls. */
11526
11527static void
11528maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11529{
1e4adcfc 11530 tree size, len, fmt;
0a39fd54 11531 const char *fmt_str;
c2f47e15 11532 int nargs = call_expr_nargs (exp);
0a39fd54 11533
11534 /* Verify the required arguments in the original call. */
48e1416a 11535
c2f47e15 11536 if (nargs < 4)
0a39fd54 11537 return;
c2f47e15 11538 size = CALL_EXPR_ARG (exp, 2);
11539 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 11540
e913b5cd 11541 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11542 return;
11543
11544 /* Check whether the format is a literal string constant. */
11545 fmt_str = c_getstr (fmt);
11546 if (fmt_str == NULL)
11547 return;
11548
d4473c84 11549 if (!init_target_chars ())
99eabcc1 11550 return;
11551
0a39fd54 11552 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 11553 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 11554 len = build_int_cstu (size_type_node, strlen (fmt_str));
 11555  /* If the format is "%s" and the first ... argument is a string literal,
11556 we know it too. */
c2f47e15 11557 else if (fcode == BUILT_IN_SPRINTF_CHK
11558 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 11559 {
11560 tree arg;
11561
c2f47e15 11562 if (nargs < 5)
0a39fd54 11563 return;
c2f47e15 11564 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 11565 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11566 return;
11567
11568 len = c_strlen (arg, 1);
e913b5cd 11569 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 11570 return;
11571 }
11572 else
11573 return;
11574
11575 if (! tree_int_cst_lt (len, size))
b430e8d9 11576 warning_at (tree_nonartificial_location (exp),
11577 0, "%Kcall to %D will always overflow destination buffer",
11578 exp, get_callee_fndecl (exp));
0a39fd54 11579}
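
/* For example:

     char buf[4];
     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "%s", "overflow");

   The format is the single %s directive and its argument is a literal of
   length 8, which is not smaller than the object size 4, so the "will
   always overflow" warning is emitted.  A format containing no %
   directives is handled the same way using the format's own length.  */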
11580
2c281b15 11581/* Emit a warning if free is called with the address of a variable.  */
11582
11583static void
11584maybe_emit_free_warning (tree exp)
11585{
11586 tree arg = CALL_EXPR_ARG (exp, 0);
11587
11588 STRIP_NOPS (arg);
11589 if (TREE_CODE (arg) != ADDR_EXPR)
11590 return;
11591
11592 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 11593 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 11594 return;
11595
11596 if (SSA_VAR_P (arg))
f74ea1c2 11597 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11598 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 11599 else
f74ea1c2 11600 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11601 "%Kattempt to free a non-heap object", exp);
2c281b15 11602}
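
/* For example:

     int x;
     free (&x);       // warning: attempt to free a non-heap object 'x'

     struct s { int i; } v;
     free (&v.i);     // base address is the variable 'v': same warning

   Addresses reached through another pointer (MEM_REF or INDIRECT_REF
   bases) are not diagnosed, since they may legitimately point into the
   heap.  */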
11603
c2f47e15 11604/* Fold a call to __builtin_object_size with arguments PTR and OST,
11605 if possible. */
0a39fd54 11606
f7715905 11607static tree
c2f47e15 11608fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 11609{
a6caa15f 11610 unsigned HOST_WIDE_INT bytes;
0a39fd54 11611 int object_size_type;
11612
c2f47e15 11613 if (!validate_arg (ptr, POINTER_TYPE)
11614 || !validate_arg (ost, INTEGER_TYPE))
11615 return NULL_TREE;
0a39fd54 11616
0a39fd54 11617 STRIP_NOPS (ost);
11618
11619 if (TREE_CODE (ost) != INTEGER_CST
11620 || tree_int_cst_sgn (ost) < 0
11621 || compare_tree_int (ost, 3) > 0)
c2f47e15 11622 return NULL_TREE;
0a39fd54 11623
e913b5cd 11624 object_size_type = tree_to_shwi (ost);
0a39fd54 11625
11626 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11627 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11628 and (size_t) 0 for types 2 and 3. */
11629 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 11630 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 11631
11632 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 11633 {
6da74b21 11634 bytes = compute_builtin_object_size (ptr, object_size_type);
11635 if (wi::fits_to_tree_p (bytes, size_type_node))
11636 return build_int_cstu (size_type_node, bytes);
a6caa15f 11637 }
0a39fd54 11638 else if (TREE_CODE (ptr) == SSA_NAME)
11639 {
0a39fd54 11640 /* If object size is not known yet, delay folding until
11641 later. Maybe subsequent passes will help determining
11642 it. */
11643 bytes = compute_builtin_object_size (ptr, object_size_type);
a6caa15f 11644 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
6da74b21 11645 && wi::fits_to_tree_p (bytes, size_type_node))
11646 return build_int_cstu (size_type_node, bytes);
0a39fd54 11647 }
11648
a6caa15f 11649 return NULL_TREE;
0a39fd54 11650}
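
/* For example, at the tree level:

     char buf[64];
     __builtin_object_size (&buf[16], 0)   // folds to 48
     __builtin_object_size (buf, 1)        // folds to 64
     __builtin_object_size (f (), 0)       // side effects: (size_t) -1
     __builtin_object_size (f (), 2)       // side effects: (size_t) 0

   For an SSA name whose pointed-to object is not yet known the folding
   is deferred, so that a later object-size pass can try again.  */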
11651
0e80b01d 11652/* Builtins with folding operations that operate on "..." arguments
11653 need special handling; we need to store the arguments in a convenient
11654 data structure before attempting any folding. Fortunately there are
11655 only a few builtins that fall into this category. FNDECL is the
e80cc485 11656 function, EXP is the CALL_EXPR for the call. */
0e80b01d 11657
11658static tree
e80cc485 11659fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
0e80b01d 11660{
11661 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11662 tree ret = NULL_TREE;
11663
11664 switch (fcode)
11665 {
0e80b01d 11666 case BUILT_IN_FPCLASSIFY:
9d884767 11667 ret = fold_builtin_fpclassify (loc, args, nargs);
0e80b01d 11668 break;
11669
11670 default:
11671 break;
11672 }
11673 if (ret)
11674 {
11675 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11676 SET_EXPR_LOCATION (ret, loc);
11677 TREE_NO_WARNING (ret) = 1;
11678 return ret;
11679 }
11680 return NULL_TREE;
11681}
11682
99eabcc1 11683/* Initialize format string characters in the target charset. */
11684
b9ea678c 11685bool
99eabcc1 11686init_target_chars (void)
11687{
11688 static bool init;
11689 if (!init)
11690 {
11691 target_newline = lang_hooks.to_target_charset ('\n');
11692 target_percent = lang_hooks.to_target_charset ('%');
11693 target_c = lang_hooks.to_target_charset ('c');
11694 target_s = lang_hooks.to_target_charset ('s');
11695 if (target_newline == 0 || target_percent == 0 || target_c == 0
11696 || target_s == 0)
11697 return false;
11698
11699 target_percent_c[0] = target_percent;
11700 target_percent_c[1] = target_c;
11701 target_percent_c[2] = '\0';
11702
11703 target_percent_s[0] = target_percent;
11704 target_percent_s[1] = target_s;
11705 target_percent_s[2] = '\0';
11706
11707 target_percent_s_newline[0] = target_percent;
11708 target_percent_s_newline[1] = target_s;
11709 target_percent_s_newline[2] = target_newline;
11710 target_percent_s_newline[3] = '\0';
a0c938f0 11711
99eabcc1 11712 init = true;
11713 }
11714 return true;
11715}
bffb7645 11716
f0c477f2 11717/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11718 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 11719 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 11720 function assumes that you cleared the MPFR flags and then
11721 calculated M to see if anything subsequently set a flag prior to
11722 entering this function. Return NULL_TREE if any checks fail. */
11723
11724static tree
d4473c84 11725do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 11726{
11727 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11728 overflow/underflow occurred. If -frounding-math, proceed iff the
11729 result of calling FUNC was exact. */
d4473c84 11730 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 11731 && (!flag_rounding_math || !inexact))
11732 {
11733 REAL_VALUE_TYPE rr;
11734
66fa16e6 11735 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 11736 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11737 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11738	 but the mpfr_t is not, then we underflowed in the
11739 conversion. */
776a7bab 11740 if (real_isfinite (&rr)
f0c477f2 11741 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11742 {
11743 REAL_VALUE_TYPE rmode;
11744
11745 real_convert (&rmode, TYPE_MODE (type), &rr);
11746 /* Proceed iff the specified mode can hold the value. */
11747 if (real_identical (&rmode, &rr))
11748 return build_real (type, rmode);
11749 }
11750 }
11751 return NULL_TREE;
11752}
11753
239d491a 11754/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11755 number and no overflow/underflow occurred. INEXACT is true if M
11756 was not exactly calculated. TYPE is the tree type for the result.
11757 This function assumes that you cleared the MPFR flags and then
11758 calculated M to see if anything subsequently set a flag prior to
652d9409 11759 entering this function. Return NULL_TREE if any checks fail, if
11760 FORCE_CONVERT is true, then bypass the checks. */
239d491a 11761
11762static tree
652d9409 11763do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 11764{
11765 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11766 overflow/underflow occurred. If -frounding-math, proceed iff the
11767 result of calling FUNC was exact. */
652d9409 11768 if (force_convert
11769 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11770 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11771 && (!flag_rounding_math || !inexact)))
239d491a 11772 {
11773 REAL_VALUE_TYPE re, im;
11774
b0e7c4d4 11775 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11776 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 11777 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11778 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11779	 but the mpfr_t is not, then we underflowed in the
11780 conversion. */
652d9409 11781 if (force_convert
11782 || (real_isfinite (&re) && real_isfinite (&im)
11783 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11784 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 11785 {
11786 REAL_VALUE_TYPE re_mode, im_mode;
11787
11788 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11789 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11790 /* Proceed iff the specified mode can hold the value. */
652d9409 11791 if (force_convert
11792 || (real_identical (&re_mode, &re)
11793 && real_identical (&im_mode, &im)))
239d491a 11794 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11795 build_real (TREE_TYPE (type), im_mode));
11796 }
11797 }
11798 return NULL_TREE;
11799}
239d491a 11800
bffb7645 11801/* If argument ARG is a REAL_CST, call the one-argument mpfr function
11802 FUNC on it and return the resulting value as a tree with type TYPE.
728bac60 11803 If MIN and/or MAX are not NULL, then the supplied ARG must be
11804 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11805 acceptable values, otherwise they are not. The mpfr precision is
11806 set to the precision of TYPE. We assume that function FUNC returns
11807 zero if the result could be calculated exactly within the requested
11808 precision. */
bffb7645 11809
11810static tree
728bac60 11811do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11812 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11813 bool inclusive)
bffb7645 11814{
11815 tree result = NULL_TREE;
48e1416a 11816
bffb7645 11817 STRIP_NOPS (arg);
11818
bd7d6fa4 11819 /* To proceed, MPFR must exactly represent the target floating point
11820 format, which only happens when the target base equals two. */
11821 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11822 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
bffb7645 11823 {
f0c477f2 11824 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
bffb7645 11825
776a7bab 11826 if (real_isfinite (ra)
f0c477f2 11827 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11828 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
bffb7645 11829 {
e2eb2b7f 11830 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11831 const int prec = fmt->p;
11832 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
debf9994 11833 int inexact;
bffb7645 11834 mpfr_t m;
11835
11836 mpfr_init2 (m, prec);
66fa16e6 11837 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11838 mpfr_clear_flags ();
e2eb2b7f 11839 inexact = func (m, m, rnd);
f0c477f2 11840 result = do_mpfr_ckconv (m, type, inexact);
bffb7645 11841 mpfr_clear (m);
11842 }
11843 }
48e1416a 11844
bffb7645 11845 return result;
11846}
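
/* For instance, a caller folding a one-argument math builtin passes the
   matching MPFR entry point together with any domain restriction, e.g.

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);
     do_mpfr_arg1 (arg, type, mpfr_asin, &dconstm1, &dconst1, true);

   With ARG equal to 0.5 and TYPE double, the first form yields the
   REAL_CST 0.479425538604203..., computed in the 53-bit precision of
   the double format and validated by do_mpfr_ckconv before being
   returned.  */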
f0c477f2 11847
11848/* If argument ARG is a REAL_CST, call the two-argument mpfr function
11849 FUNC on it and return the resulting value as a tree with type TYPE.
11850 The mpfr precision is set to the precision of TYPE. We assume that
11851 function FUNC returns zero if the result could be calculated
11852 exactly within the requested precision. */
11853
11854static tree
11855do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11856 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11857{
11858 tree result = NULL_TREE;
48e1416a 11859
f0c477f2 11860 STRIP_NOPS (arg1);
11861 STRIP_NOPS (arg2);
11862
bd7d6fa4 11863 /* To proceed, MPFR must exactly represent the target floating point
11864 format, which only happens when the target base equals two. */
11865 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11866 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11867 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
f0c477f2 11868 {
11869 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11870 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11871
776a7bab 11872 if (real_isfinite (ra1) && real_isfinite (ra2))
f0c477f2 11873 {
e2eb2b7f 11874 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11875 const int prec = fmt->p;
11876 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
f0c477f2 11877 int inexact;
11878 mpfr_t m1, m2;
11879
11880 mpfr_inits2 (prec, m1, m2, NULL);
66fa16e6 11881 mpfr_from_real (m1, ra1, GMP_RNDN);
11882 mpfr_from_real (m2, ra2, GMP_RNDN);
d4473c84 11883 mpfr_clear_flags ();
e2eb2b7f 11884 inexact = func (m1, m1, m2, rnd);
f0c477f2 11885 result = do_mpfr_ckconv (m1, type, inexact);
11886 mpfr_clears (m1, m2, NULL);
11887 }
11888 }
48e1416a 11889
f0c477f2 11890 return result;
11891}
d92f994c 11892
9917422b 11893/* If argument ARG is a REAL_CST, call the three-argument mpfr function
11894 FUNC on it and return the resulting value as a tree with type TYPE.
11895 The mpfr precision is set to the precision of TYPE. We assume that
11896 function FUNC returns zero if the result could be calculated
11897 exactly within the requested precision. */
11898
11899static tree
11900do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11901 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11902{
11903 tree result = NULL_TREE;
48e1416a 11904
9917422b 11905 STRIP_NOPS (arg1);
11906 STRIP_NOPS (arg2);
11907 STRIP_NOPS (arg3);
11908
bd7d6fa4 11909 /* To proceed, MPFR must exactly represent the target floating point
11910 format, which only happens when the target base equals two. */
11911 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11912 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11913 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11914 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
9917422b 11915 {
11916 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11917 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11918 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11919
776a7bab 11920 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
9917422b 11921 {
e2eb2b7f 11922 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11923 const int prec = fmt->p;
11924 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9917422b 11925 int inexact;
11926 mpfr_t m1, m2, m3;
11927
11928 mpfr_inits2 (prec, m1, m2, m3, NULL);
66fa16e6 11929 mpfr_from_real (m1, ra1, GMP_RNDN);
11930 mpfr_from_real (m2, ra2, GMP_RNDN);
11931 mpfr_from_real (m3, ra3, GMP_RNDN);
d4473c84 11932 mpfr_clear_flags ();
e2eb2b7f 11933 inexact = func (m1, m1, m2, m3, rnd);
9917422b 11934 result = do_mpfr_ckconv (m1, type, inexact);
11935 mpfr_clears (m1, m2, m3, NULL);
11936 }
11937 }
48e1416a 11938
9917422b 11939 return result;
11940}
11941
d92f994c 11942/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11943 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
d735c391 11944 If ARG_SINP and ARG_COSP are NULL then the result is returned
11945 as a complex value.
d92f994c 11946 The type is taken from the type of ARG and is used for setting the
11947 precision of the calculation and results. */
11948
11949static tree
11950do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11951{
bd7d6fa4 11952 tree const type = TREE_TYPE (arg);
d92f994c 11953 tree result = NULL_TREE;
48e1416a 11954
d92f994c 11955 STRIP_NOPS (arg);
48e1416a 11956
bd7d6fa4 11957 /* To proceed, MPFR must exactly represent the target floating point
11958 format, which only happens when the target base equals two. */
11959 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11960 && TREE_CODE (arg) == REAL_CST
11961 && !TREE_OVERFLOW (arg))
d92f994c 11962 {
11963 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11964
776a7bab 11965 if (real_isfinite (ra))
d92f994c 11966 {
e2eb2b7f 11967 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11968 const int prec = fmt->p;
11969 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
d92f994c 11970 tree result_s, result_c;
11971 int inexact;
11972 mpfr_t m, ms, mc;
11973
11974 mpfr_inits2 (prec, m, ms, mc, NULL);
66fa16e6 11975 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11976 mpfr_clear_flags ();
e2eb2b7f 11977 inexact = mpfr_sin_cos (ms, mc, m, rnd);
d92f994c 11978 result_s = do_mpfr_ckconv (ms, type, inexact);
11979 result_c = do_mpfr_ckconv (mc, type, inexact);
11980 mpfr_clears (m, ms, mc, NULL);
11981 if (result_s && result_c)
11982 {
d735c391 11983	      /* If we are to return the result as a complex value, do so.  */
11984 if (!arg_sinp && !arg_cosp)
11985 return build_complex (build_complex_type (type),
11986 result_c, result_s);
11987
d92f994c 11988 /* Dereference the sin/cos pointer arguments. */
11989 arg_sinp = build_fold_indirect_ref (arg_sinp);
11990 arg_cosp = build_fold_indirect_ref (arg_cosp);
 11991	      /* Proceed if valid pointer types were passed in.  */
11992 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11993 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11994 {
11995 /* Set the values. */
41076ef6 11996 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
35cc02b5 11997 result_s);
d92f994c 11998 TREE_SIDE_EFFECTS (result_s) = 1;
41076ef6 11999 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
35cc02b5 12000 result_c);
d92f994c 12001 TREE_SIDE_EFFECTS (result_c) = 1;
12002 /* Combine the assignments into a compound expr. */
12003 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12004 result_s, result_c));
12005 }
12006 }
12007 }
12008 }
12009 return result;
12010}
65dd1378 12011
65dd1378 12012/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
12013 two-argument mpfr order N Bessel function FUNC on them and return
12014 the resulting value as a tree with type TYPE. The mpfr precision
12015 is set to the precision of TYPE. We assume that function FUNC
12016 returns zero if the result could be calculated exactly within the
12017 requested precision. */
12018static tree
12019do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12020 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12021 const REAL_VALUE_TYPE *min, bool inclusive)
12022{
12023 tree result = NULL_TREE;
12024
12025 STRIP_NOPS (arg1);
12026 STRIP_NOPS (arg2);
12027
12028 /* To proceed, MPFR must exactly represent the target floating point
12029 format, which only happens when the target base equals two. */
12030 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
e913b5cd 12031 && tree_fits_shwi_p (arg1)
65dd1378 12032 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12033 {
e913b5cd 12034 const HOST_WIDE_INT n = tree_to_shwi (arg1);
65dd1378 12035 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12036
12037 if (n == (long)n
776a7bab 12038 && real_isfinite (ra)
65dd1378 12039 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12040 {
e2eb2b7f 12041 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12042 const int prec = fmt->p;
12043 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
65dd1378 12044 int inexact;
12045 mpfr_t m;
12046
12047 mpfr_init2 (m, prec);
12048 mpfr_from_real (m, ra, GMP_RNDN);
12049 mpfr_clear_flags ();
e2eb2b7f 12050 inexact = func (m, n, m, rnd);
65dd1378 12051 result = do_mpfr_ckconv (m, type, inexact);
12052 mpfr_clear (m);
12053 }
12054 }
48e1416a 12055
65dd1378 12056 return result;
12057}
e5407ca6 12058
12059/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12060 the pointer *(ARG_QUO) and return the result. The type is taken
12061 from the type of ARG0 and is used for setting the precision of the
12062 calculation and results. */
12063
12064static tree
12065do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12066{
12067 tree const type = TREE_TYPE (arg0);
12068 tree result = NULL_TREE;
48e1416a 12069
e5407ca6 12070 STRIP_NOPS (arg0);
12071 STRIP_NOPS (arg1);
48e1416a 12072
e5407ca6 12073 /* To proceed, MPFR must exactly represent the target floating point
12074 format, which only happens when the target base equals two. */
12075 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12076 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12077 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12078 {
12079 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12080 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12081
776a7bab 12082 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 12083 {
e2eb2b7f 12084 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12085 const int prec = fmt->p;
12086 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e5407ca6 12087 tree result_rem;
12088 long integer_quo;
12089 mpfr_t m0, m1;
12090
12091 mpfr_inits2 (prec, m0, m1, NULL);
12092 mpfr_from_real (m0, ra0, GMP_RNDN);
12093 mpfr_from_real (m1, ra1, GMP_RNDN);
12094 mpfr_clear_flags ();
e2eb2b7f 12095 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 12096 /* Remquo is independent of the rounding mode, so pass
12097 inexact=0 to do_mpfr_ckconv(). */
12098 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12099 mpfr_clears (m0, m1, NULL);
12100 if (result_rem)
12101 {
12102 /* MPFR calculates quo in the host's long so it may
12103 return more bits in quo than the target int can hold
12104 if sizeof(host long) > sizeof(target int). This can
12105 happen even for native compilers in LP64 mode. In
12106 these cases, modulo the quo value with the largest
12107 number that the target int can hold while leaving one
12108 bit for the sign. */
12109 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12110 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12111
12112 /* Dereference the quo pointer argument. */
12113 arg_quo = build_fold_indirect_ref (arg_quo);
12114 /* Proceed iff a valid pointer type was passed in. */
12115 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12116 {
12117 /* Set the value. */
7002a1c8 12118 tree result_quo
12119 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12120 build_int_cst (TREE_TYPE (arg_quo),
12121 integer_quo));
e5407ca6 12122 TREE_SIDE_EFFECTS (result_quo) = 1;
12123 /* Combine the quo assignment with the rem. */
12124 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12125 result_quo, result_rem));
12126 }
12127 }
12128 }
12129 }
12130 return result;
12131}
e84da7c1 12132
12133/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12134 resulting value as a tree with type TYPE. The mpfr precision is
12135 set to the precision of TYPE. We assume that this mpfr function
12136 returns zero if the result could be calculated exactly within the
12137 requested precision. In addition, the integer pointer represented
12138 by ARG_SG will be dereferenced and set to the appropriate signgam
12139 (-1,1) value. */
12140
12141static tree
12142do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12143{
12144 tree result = NULL_TREE;
12145
12146 STRIP_NOPS (arg);
48e1416a 12147
e84da7c1 12148 /* To proceed, MPFR must exactly represent the target floating point
12149 format, which only happens when the target base equals two. Also
12150 verify ARG is a constant and that ARG_SG is an int pointer. */
12151 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12152 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12153 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12154 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12155 {
12156 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12157
12158 /* In addition to NaN and Inf, the argument cannot be zero or a
12159 negative integer. */
776a7bab 12160 if (real_isfinite (ra)
e84da7c1 12161 && ra->cl != rvc_zero
9af5ce0c 12162 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 12163 {
e2eb2b7f 12164 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12165 const int prec = fmt->p;
12166 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e84da7c1 12167 int inexact, sg;
12168 mpfr_t m;
12169 tree result_lg;
12170
12171 mpfr_init2 (m, prec);
12172 mpfr_from_real (m, ra, GMP_RNDN);
12173 mpfr_clear_flags ();
e2eb2b7f 12174 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 12175 result_lg = do_mpfr_ckconv (m, type, inexact);
12176 mpfr_clear (m);
12177 if (result_lg)
12178 {
12179 tree result_sg;
12180
12181 /* Dereference the arg_sg pointer argument. */
12182 arg_sg = build_fold_indirect_ref (arg_sg);
12183 /* Assign the signgam value into *arg_sg. */
12184 result_sg = fold_build2 (MODIFY_EXPR,
12185 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 12186 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 12187 TREE_SIDE_EFFECTS (result_sg) = 1;
12188 /* Combine the signgam assignment with the lgamma result. */
12189 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12190 result_sg, result_lg));
12191 }
12192 }
12193 }
12194
12195 return result;
12196}
75a70cf9 12197
239d491a 12198/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12199 function FUNC on it and return the resulting value as a tree with
12200 type TYPE. The mpfr precision is set to the precision of TYPE. We
12201 assume that function FUNC returns zero if the result could be
12202 calculated exactly within the requested precision. */
12203
12204static tree
12205do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12206{
12207 tree result = NULL_TREE;
48e1416a 12208
239d491a 12209 STRIP_NOPS (arg);
12210
12211 /* To proceed, MPFR must exactly represent the target floating point
12212 format, which only happens when the target base equals two. */
12213 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12214 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12215 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12216 {
12217 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12218 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12219
12220 if (real_isfinite (re) && real_isfinite (im))
12221 {
12222 const struct real_format *const fmt =
12223 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12224 const int prec = fmt->p;
12225 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
44d89feb 12226 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
239d491a 12227 int inexact;
12228 mpc_t m;
48e1416a 12229
239d491a 12230 mpc_init2 (m, prec);
9af5ce0c 12231 mpfr_from_real (mpc_realref (m), re, rnd);
12232 mpfr_from_real (mpc_imagref (m), im, rnd);
239d491a 12233 mpfr_clear_flags ();
44d89feb 12234 inexact = func (m, m, crnd);
652d9409 12235 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
239d491a 12236 mpc_clear (m);
12237 }
12238 }
12239
12240 return result;
12241}
c699fab8 12242
12243/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12244 mpc function FUNC on it and return the resulting value as a tree
12245 with type TYPE. The mpfr precision is set to the precision of
12246 TYPE. We assume that function FUNC returns zero if the result
652d9409 12247 could be calculated exactly within the requested precision. If
12248 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12249 in the arguments and/or results. */
c699fab8 12250
63e89698 12251tree
652d9409 12252do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 12253 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12254{
12255 tree result = NULL_TREE;
48e1416a 12256
c699fab8 12257 STRIP_NOPS (arg0);
12258 STRIP_NOPS (arg1);
12259
12260 /* To proceed, MPFR must exactly represent the target floating point
12261 format, which only happens when the target base equals two. */
12262 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12263 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12264 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12265 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12266 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12267 {
12268 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12269 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12270 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12271 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12272
652d9409 12273 if (do_nonfinite
12274 || (real_isfinite (re0) && real_isfinite (im0)
12275 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 12276 {
12277 const struct real_format *const fmt =
12278 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12279 const int prec = fmt->p;
12280 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12281 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12282 int inexact;
12283 mpc_t m0, m1;
48e1416a 12284
c699fab8 12285 mpc_init2 (m0, prec);
12286 mpc_init2 (m1, prec);
9af5ce0c 12287 mpfr_from_real (mpc_realref (m0), re0, rnd);
12288 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12289 mpfr_from_real (mpc_realref (m1), re1, rnd);
12290 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 12291 mpfr_clear_flags ();
12292 inexact = func (m0, m0, m1, crnd);
652d9409 12293 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 12294 mpc_clear (m0);
12295 mpc_clear (m1);
12296 }
12297 }
12298
12299 return result;
12300}
239d491a 12301
75a70cf9 12302/* A wrapper function for builtin folding that prevents warnings for
12303 "statement without effect" and the like, caused by removing the
12304 call node earlier than the warning is generated. */
12305
12306tree
1a91d914 12307fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 12308{
12309 tree ret = NULL_TREE;
12310 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 12311 location_t loc = gimple_location (stmt);
75a70cf9 12312 if (fndecl
12313 && TREE_CODE (fndecl) == FUNCTION_DECL
12314 && DECL_BUILT_IN (fndecl)
12315 && !gimple_call_va_arg_pack_p (stmt))
12316 {
12317 int nargs = gimple_call_num_args (stmt);
9845fb99 12318 tree *args = (nargs > 0
12319 ? gimple_call_arg_ptr (stmt, 0)
12320 : &error_mark_node);
75a70cf9 12321
198622c0 12322 if (avoid_folding_inline_builtin (fndecl))
12323 return NULL_TREE;
75a70cf9 12324 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12325 {
9845fb99 12326 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 12327 }
12328 else
12329 {
9d884767 12330 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 12331 if (ret)
12332 {
12333 /* Propagate location information from original call to
12334 expansion of builtin. Otherwise things like
12335 maybe_emit_chk_warning, that operate on the expansion
12336 of a builtin, will use the wrong location information. */
12337 if (gimple_has_location (stmt))
12338 {
12339 tree realret = ret;
12340 if (TREE_CODE (ret) == NOP_EXPR)
12341 realret = TREE_OPERAND (ret, 0);
12342 if (CAN_HAVE_LOCATION_P (realret)
12343 && !EXPR_HAS_LOCATION (realret))
389dd41b 12344 SET_EXPR_LOCATION (realret, loc);
75a70cf9 12345 return realret;
12346 }
12347 return ret;
12348 }
12349 }
12350 }
12351 return NULL_TREE;
12352}
7bfefa9d 12353
b9a16870 12354/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 12355 and set ASMSPEC as its user assembler name. DECL must be a
12356 function decl that declares a builtin. */
12357
12358void
12359set_builtin_user_assembler_name (tree decl, const char *asmspec)
12360{
12361 tree builtin;
12362 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12363 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12364 && asmspec != 0);
12365
b9a16870 12366 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 12367 set_user_assembler_name (builtin, asmspec);
7bfefa9d 12368 switch (DECL_FUNCTION_CODE (decl))
12369 {
12370 case BUILT_IN_MEMCPY:
12371 init_block_move_fn (asmspec);
12372 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12373 break;
12374 case BUILT_IN_MEMSET:
12375 init_block_clear_fn (asmspec);
12376 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12377 break;
12378 case BUILT_IN_MEMMOVE:
12379 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12380 break;
12381 case BUILT_IN_MEMCMP:
12382 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12383 break;
12384 case BUILT_IN_ABORT:
12385 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12386 break;
5a80a58b 12387 case BUILT_IN_FFS:
12388 if (INT_TYPE_SIZE < BITS_PER_WORD)
12389 {
12390 set_user_assembler_libfunc ("ffs", asmspec);
12391 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12392 MODE_INT, 0), "ffs");
12393 }
12394 break;
7bfefa9d 12395 default:
12396 break;
12397 }
12398}
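
/* For example, a user redeclaration such as

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("xmemcpy");

   routes both explicit memcpy calls and the block-move library calls the
   compiler emits internally (memcpy_libfunc) to the assembler name
   "xmemcpy".  */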
a6b74a67 12399
12400/* Return true if DECL is a builtin that expands to a constant or similarly
12401 simple code. */
12402bool
12403is_simple_builtin (tree decl)
12404{
12405 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12406 switch (DECL_FUNCTION_CODE (decl))
12407 {
12408 /* Builtins that expand to constants. */
12409 case BUILT_IN_CONSTANT_P:
12410 case BUILT_IN_EXPECT:
12411 case BUILT_IN_OBJECT_SIZE:
12412 case BUILT_IN_UNREACHABLE:
12413 /* Simple register moves or loads from stack. */
fca0886c 12414 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 12415 case BUILT_IN_RETURN_ADDRESS:
12416 case BUILT_IN_EXTRACT_RETURN_ADDR:
12417 case BUILT_IN_FROB_RETURN_ADDR:
12418 case BUILT_IN_RETURN:
12419 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12420 case BUILT_IN_FRAME_ADDRESS:
12421 case BUILT_IN_VA_END:
12422 case BUILT_IN_STACK_SAVE:
12423 case BUILT_IN_STACK_RESTORE:
12424 /* Exception state returns or moves registers around. */
12425 case BUILT_IN_EH_FILTER:
12426 case BUILT_IN_EH_POINTER:
12427 case BUILT_IN_EH_COPY_VALUES:
12428 return true;
12429
12430 default:
12431 return false;
12432 }
12433
12434 return false;
12435}
12436
 12437/* Return true if DECL is a builtin that is not expensive, i.e., it will
 12438   most probably be expanded inline into reasonably simple code.  This is a
12439 superset of is_simple_builtin. */
12440bool
12441is_inexpensive_builtin (tree decl)
12442{
12443 if (!decl)
12444 return false;
12445 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12446 return true;
12447 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12448 switch (DECL_FUNCTION_CODE (decl))
12449 {
12450 case BUILT_IN_ABS:
12451 case BUILT_IN_ALLOCA:
581bf1c2 12452 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 12453 case BUILT_IN_BSWAP16:
a6b74a67 12454 case BUILT_IN_BSWAP32:
12455 case BUILT_IN_BSWAP64:
12456 case BUILT_IN_CLZ:
12457 case BUILT_IN_CLZIMAX:
12458 case BUILT_IN_CLZL:
12459 case BUILT_IN_CLZLL:
12460 case BUILT_IN_CTZ:
12461 case BUILT_IN_CTZIMAX:
12462 case BUILT_IN_CTZL:
12463 case BUILT_IN_CTZLL:
12464 case BUILT_IN_FFS:
12465 case BUILT_IN_FFSIMAX:
12466 case BUILT_IN_FFSL:
12467 case BUILT_IN_FFSLL:
12468 case BUILT_IN_IMAXABS:
12469 case BUILT_IN_FINITE:
12470 case BUILT_IN_FINITEF:
12471 case BUILT_IN_FINITEL:
12472 case BUILT_IN_FINITED32:
12473 case BUILT_IN_FINITED64:
12474 case BUILT_IN_FINITED128:
12475 case BUILT_IN_FPCLASSIFY:
12476 case BUILT_IN_ISFINITE:
12477 case BUILT_IN_ISINF_SIGN:
12478 case BUILT_IN_ISINF:
12479 case BUILT_IN_ISINFF:
12480 case BUILT_IN_ISINFL:
12481 case BUILT_IN_ISINFD32:
12482 case BUILT_IN_ISINFD64:
12483 case BUILT_IN_ISINFD128:
12484 case BUILT_IN_ISNAN:
12485 case BUILT_IN_ISNANF:
12486 case BUILT_IN_ISNANL:
12487 case BUILT_IN_ISNAND32:
12488 case BUILT_IN_ISNAND64:
12489 case BUILT_IN_ISNAND128:
12490 case BUILT_IN_ISNORMAL:
12491 case BUILT_IN_ISGREATER:
12492 case BUILT_IN_ISGREATEREQUAL:
12493 case BUILT_IN_ISLESS:
12494 case BUILT_IN_ISLESSEQUAL:
12495 case BUILT_IN_ISLESSGREATER:
12496 case BUILT_IN_ISUNORDERED:
12497 case BUILT_IN_VA_ARG_PACK:
12498 case BUILT_IN_VA_ARG_PACK_LEN:
12499 case BUILT_IN_VA_COPY:
12500 case BUILT_IN_TRAP:
12501 case BUILT_IN_SAVEREGS:
12502 case BUILT_IN_POPCOUNTL:
12503 case BUILT_IN_POPCOUNTLL:
12504 case BUILT_IN_POPCOUNTIMAX:
12505 case BUILT_IN_POPCOUNT:
12506 case BUILT_IN_PARITYL:
12507 case BUILT_IN_PARITYLL:
12508 case BUILT_IN_PARITYIMAX:
12509 case BUILT_IN_PARITY:
12510 case BUILT_IN_LABS:
12511 case BUILT_IN_LLABS:
12512 case BUILT_IN_PREFETCH:
ca4c3545 12513 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 12514 return true;
12515
12516 default:
12517 return is_simple_builtin (decl);
12518 }
12519
12520 return false;
12521}