53800dbe 1/* Expand builtin functions.
d353bf18 2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
53800dbe 3
f12b58b3 4This file is part of GCC.
53800dbe 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
53800dbe 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
53800dbe 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
53800dbe 19
20#include "config.h"
21#include "system.h"
805e22b2 22#include "coretypes.h"
9ef16211 23#include "backend.h"
d040a5b0 24#include "predict.h"
9ef16211 25#include "tree.h"
26#include "gimple.h"
53800dbe 27#include "rtl.h"
b20a8bb4 28#include "alias.h"
b20a8bb4 29#include "fold-const.h"
9ed99284 30#include "stringpool.h"
31#include "stor-layout.h"
32#include "calls.h"
33#include "varasm.h"
34#include "tree-object-size.h"
dae0b5cb 35#include "realmpfr.h"
94ea8568 36#include "cfgrtl.h"
bc61cadb 37#include "internal-fn.h"
53800dbe 38#include "flags.h"
39#include "regs.h"
53800dbe 40#include "except.h"
53800dbe 41#include "insn-config.h"
d53441c8 42#include "expmed.h"
43#include "dojump.h"
44#include "explow.h"
45#include "emit-rtl.h"
46#include "stmt.h"
53800dbe 47#include "expr.h"
34517c64 48#include "insn-codes.h"
d8fc4d0b 49#include "optabs.h"
50#include "libfuncs.h"
53800dbe 51#include "recog.h"
52#include "output.h"
53#include "typeclass.h"
1dd6c958 54#include "tm_p.h"
fc2a2dcb 55#include "target.h"
63c62881 56#include "langhooks.h"
073c1fd5 57#include "tree-ssanames.h"
58#include "tree-dfa.h"
162719b3 59#include "value-prof.h"
852f689e 60#include "diagnostic-core.h"
3b9c3a16 61#include "builtins.h"
f9acf11a 62#include "asan.h"
d037099f 63#include "cilk.h"
058a1b7a 64#include "cgraph.h"
65#include "tree-chkp.h"
66#include "rtl-chkp.h"
ca4c3545 67#include "gomp-constants.h"
53800dbe 68
5383fb56 69
239d491a 70static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));
726e2588 71
3b9c3a16 72struct target_builtins default_target_builtins;
73#if SWITCHABLE_TARGET
74struct target_builtins *this_target_builtins = &default_target_builtins;
75#endif
76
ab7943b9 77/* Define the names of the builtin function types and codes. */
96423453 78const char *const built_in_class_names[BUILT_IN_LAST]
ab7943b9 79 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
80
9cfddb70 81#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
0dfc45b5 82const char * built_in_names[(int) END_BUILTINS] =
4e9d90c7 83{
84#include "builtins.def"
85};
86#undef DEF_BUILTIN
ab7943b9 87
cffdfb3d 88/* Set up an array of builtin_info_type, making sure each element's decl is
df94cd3b 89 initialized to NULL_TREE. */
cffdfb3d 90builtin_info_type builtin_info[(int)END_BUILTINS];
df94cd3b 91
0b049e15 92/* Non-zero if __builtin_constant_p should be folded right away. */
93bool force_folding_builtin_constant_p;
94
3754d046 95static rtx c_readstr (const char *, machine_mode);
aecda0d6 96static int target_char_cast (tree, char *);
d8ae1baa 97static rtx get_memory_rtx (tree, tree);
aecda0d6 98static int apply_args_size (void);
99static int apply_result_size (void);
aecda0d6 100static rtx result_vector (int, rtx);
aecda0d6 101static void expand_builtin_prefetch (tree);
102static rtx expand_builtin_apply_args (void);
103static rtx expand_builtin_apply_args_1 (void);
104static rtx expand_builtin_apply (rtx, rtx, rtx);
105static void expand_builtin_return (rtx);
106static enum type_class type_to_class (tree);
107static rtx expand_builtin_classify_type (tree);
108static void expand_errno_check (tree, rtx);
109static rtx expand_builtin_mathfn (tree, rtx, rtx);
110static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
6b43bae4 111static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
7e0713b1 112static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
f97eea22 113static rtx expand_builtin_interclass_mathfn (tree, rtx);
c3147c1a 114static rtx expand_builtin_sincos (tree);
f97eea22 115static rtx expand_builtin_cexpi (tree, rtx);
ff1b14e4 116static rtx expand_builtin_int_roundingfn (tree, rtx);
117static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
79012a9d 118static rtx expand_builtin_next_arg (void);
aecda0d6 119static rtx expand_builtin_va_start (tree);
120static rtx expand_builtin_va_end (tree);
121static rtx expand_builtin_va_copy (tree);
3754d046 122static rtx expand_builtin_memcmp (tree, rtx, machine_mode);
a65c4d64 123static rtx expand_builtin_strcmp (tree, rtx);
3754d046 124static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
a65c4d64 126static rtx expand_builtin_memcpy (tree, rtx);
f21337ef 127static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
128static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
3754d046 129static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
f21337ef 130static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
48e1416a 131static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
f21337ef 132 machine_mode, int, tree);
a65c4d64 133static rtx expand_builtin_strcpy (tree, rtx);
134static rtx expand_builtin_strcpy_args (tree, tree, rtx);
3754d046 135static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
a65c4d64 136static rtx expand_builtin_strncpy (tree, rtx);
3754d046 137static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
138static rtx expand_builtin_memset (tree, rtx, machine_mode);
f21337ef 139static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
3754d046 140static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
aecda0d6 141static rtx expand_builtin_bzero (tree);
3754d046 142static rtx expand_builtin_strlen (tree, rtx, machine_mode);
5be42b39 143static rtx expand_builtin_alloca (tree, bool);
3754d046 144static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
aecda0d6 145static rtx expand_builtin_frame_address (tree, tree);
389dd41b 146static tree stabilize_va_list_loc (location_t, tree, int);
aecda0d6 147static rtx expand_builtin_expect (tree, rtx);
148static tree fold_builtin_constant_p (tree);
149static tree fold_builtin_classify_type (tree);
c7cbde74 150static tree fold_builtin_strlen (location_t, tree, tree);
389dd41b 151static tree fold_builtin_inf (location_t, tree, int);
aecda0d6 152static tree fold_builtin_nan (tree, tree, int);
389dd41b 153static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
b7bf20db 154static bool validate_arg (const_tree, enum tree_code code);
277f8dd2 155static bool integer_valued_real_p (tree);
389dd41b 156static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
aecda0d6 157static rtx expand_builtin_fabs (tree, rtx, rtx);
27f261ef 158static rtx expand_builtin_signbit (tree, rtx);
389dd41b 159static tree fold_builtin_sqrt (location_t, tree, tree);
160static tree fold_builtin_cbrt (location_t, tree, tree);
161static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
162static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
163static tree fold_builtin_cos (location_t, tree, tree, tree);
164static tree fold_builtin_cosh (location_t, tree, tree, tree);
bffb7645 165static tree fold_builtin_tan (tree, tree);
389dd41b 166static tree fold_builtin_trunc (location_t, tree, tree);
167static tree fold_builtin_floor (location_t, tree, tree);
168static tree fold_builtin_ceil (location_t, tree, tree);
169static tree fold_builtin_round (location_t, tree, tree);
170static tree fold_builtin_int_roundingfn (location_t, tree, tree);
10b9666f 171static tree fold_builtin_bitop (tree, tree);
389dd41b 172static tree fold_builtin_strchr (location_t, tree, tree, tree);
173static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
174static tree fold_builtin_memcmp (location_t, tree, tree, tree);
175static tree fold_builtin_strcmp (location_t, tree, tree);
176static tree fold_builtin_strncmp (location_t, tree, tree, tree);
177static tree fold_builtin_signbit (location_t, tree, tree);
178static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
179static tree fold_builtin_isascii (location_t, tree);
180static tree fold_builtin_toascii (location_t, tree);
181static tree fold_builtin_isdigit (location_t, tree);
182static tree fold_builtin_fabs (location_t, tree, tree);
183static tree fold_builtin_abs (location_t, tree, tree);
184static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
d5019fe8 185 enum tree_code);
e80cc485 186static tree fold_builtin_0 (location_t, tree);
187static tree fold_builtin_1 (location_t, tree, tree);
188static tree fold_builtin_2 (location_t, tree, tree, tree);
189static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
190static tree fold_builtin_varargs (location_t, tree, tree*, int);
389dd41b 191
192static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
193static tree fold_builtin_strstr (location_t, tree, tree, tree);
194static tree fold_builtin_strrchr (location_t, tree, tree, tree);
389dd41b 195static tree fold_builtin_strspn (location_t, tree, tree);
196static tree fold_builtin_strcspn (location_t, tree, tree);
4ee9c684 197
0a39fd54 198static rtx expand_builtin_object_size (tree);
3754d046 199static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
0a39fd54 200 enum built_in_function);
201static void maybe_emit_chk_warning (tree, enum built_in_function);
202static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
2c281b15 203static void maybe_emit_free_warning (tree);
c2f47e15 204static tree fold_builtin_object_size (tree, tree);
99eabcc1 205
e788f202 206unsigned HOST_WIDE_INT target_newline;
b9ea678c 207unsigned HOST_WIDE_INT target_percent;
99eabcc1 208static unsigned HOST_WIDE_INT target_c;
209static unsigned HOST_WIDE_INT target_s;
aea88c77 210char target_percent_c[3];
b9ea678c 211char target_percent_s[3];
e788f202 212char target_percent_s_newline[4];
728bac60 213static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
214 const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
f0c477f2 215static tree do_mpfr_arg2 (tree, tree, tree,
216 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
9917422b 217static tree do_mpfr_arg3 (tree, tree, tree, tree,
218 int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
d92f994c 219static tree do_mpfr_sincos (tree, tree, tree);
65dd1378 220static tree do_mpfr_bessel_n (tree, tree, tree,
221 int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
222 const REAL_VALUE_TYPE *, bool);
e5407ca6 223static tree do_mpfr_remquo (tree, tree, tree);
e84da7c1 224static tree do_mpfr_lgamma_r (tree, tree, tree);
1cd6e20d 225static void expand_builtin_sync_synchronize (void);
0a39fd54 226
7bfefa9d 227/* Return true if NAME starts with __builtin_, __sync_ or __atomic_, or is a Cilk Plus runtime entry point recognized when -fcilkplus is enabled. */
228
b29139ad 229static bool
1c47b3e8 230is_builtin_name (const char *name)
b6a5fc45 231{
b6a5fc45 232 if (strncmp (name, "__builtin_", 10) == 0)
233 return true;
234 if (strncmp (name, "__sync_", 7) == 0)
235 return true;
1cd6e20d 236 if (strncmp (name, "__atomic_", 9) == 0)
237 return true;
a89e6c15 238 if (flag_cilkplus
d037099f 239 && (!strcmp (name, "__cilkrts_detach")
240 || !strcmp (name, "__cilkrts_pop_frame")))
241 return true;
b6a5fc45 242 return false;
243}
4ee9c684 244
7bfefa9d 245
246/* Return true if DECL is a function symbol representing a built-in. */
247
248bool
249is_builtin_fn (tree decl)
250{
251 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
252}
253
1c47b3e8 254/* Return true if NODE should be considered for inline expansion regardless
255 of the optimization level. This means whenever a function is invoked with
256 its "internal" name, which normally contains the prefix "__builtin". */
257
258static bool
259called_as_built_in (tree node)
260{
261 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
262 we want the name used to call the function, not the name it
263 will have. */
264 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
265 return is_builtin_name (name);
266}
267
ceea063b 268/* Compute values M and N such that M divides (address of EXP - N) and such
 269 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
 270 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
 271 *ALIGNP and any bit-offset to *BITPOSP.
0d8f7716 272
273 Note that the address (and thus the alignment) computed here is based
274 on the address to which a symbol resolves, whereas DECL_ALIGN is based
275 on the address at which an object is actually located. These two
276 addresses are not always the same. For example, on ARM targets,
277 the address &foo of a Thumb function foo() has the lowest bit set,
3482bf13 278 whereas foo() itself starts on an even address.
698537d1 279
3482bf13 280 If ADDR_P is true we are taking the address of the memory reference EXP
281 and thus cannot rely on the access taking place. */
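/* Illustrative example (not part of the original source): if the address of
   EXP is known to be of the form 16*k + 4 bytes for some k, this function
   would report *ALIGNP = 128 (16 bytes expressed in bits) and *BITPOSP = 32
   (4 bytes in bits); i.e. M divides (&EXP - N) with N < M, both measured
   in bits.  */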
282
283static bool
284get_object_alignment_2 (tree exp, unsigned int *alignp,
285 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
698537d1 286{
98ab9e8f 287 HOST_WIDE_INT bitsize, bitpos;
288 tree offset;
3754d046 289 machine_mode mode;
98ab9e8f 290 int unsignedp, volatilep;
c8a2b4ff 291 unsigned int align = BITS_PER_UNIT;
ceea063b 292 bool known_alignment = false;
698537d1 293
98ab9e8f 294 /* Get the innermost object and the constant (bitpos) and possibly
295 variable (offset) offset of the access. */
296 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
297 &mode, &unsignedp, &volatilep, true);
298
299 /* Extract alignment information from the innermost object and
300 possibly adjust bitpos and offset. */
3482bf13 301 if (TREE_CODE (exp) == FUNCTION_DECL)
0d8f7716 302 {
3482bf13 303 /* Function addresses can encode extra information besides their
304 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
305 allows the low bit to be used as a virtual bit, we know
306 that the address itself must be at least 2-byte aligned. */
307 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
308 align = 2 * BITS_PER_UNIT;
0d8f7716 309 }
3482bf13 310 else if (TREE_CODE (exp) == LABEL_DECL)
311 ;
312 else if (TREE_CODE (exp) == CONST_DECL)
98ab9e8f 313 {
3482bf13 314 /* The alignment of a CONST_DECL is determined by its initializer. */
315 exp = DECL_INITIAL (exp);
98ab9e8f 316 align = TYPE_ALIGN (TREE_TYPE (exp));
317#ifdef CONSTANT_ALIGNMENT
3482bf13 318 if (CONSTANT_CLASS_P (exp))
319 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
698537d1 320#endif
3482bf13 321 known_alignment = true;
98ab9e8f 322 }
3482bf13 323 else if (DECL_P (exp))
ceea063b 324 {
3482bf13 325 align = DECL_ALIGN (exp);
ceea063b 326 known_alignment = true;
ceea063b 327 }
3482bf13 328 else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
ceea063b 329 {
ceea063b 330 align = TYPE_ALIGN (TREE_TYPE (exp));
331 }
3482bf13 332 else if (TREE_CODE (exp) == INDIRECT_REF
333 || TREE_CODE (exp) == MEM_REF
334 || TREE_CODE (exp) == TARGET_MEM_REF)
98ab9e8f 335 {
336 tree addr = TREE_OPERAND (exp, 0);
ceea063b 337 unsigned ptr_align;
338 unsigned HOST_WIDE_INT ptr_bitpos;
ab1e78e5 339 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
ceea063b 340
ab1e78e5 341 /* If the address is explicitly aligned, handle that. */
98ab9e8f 342 if (TREE_CODE (addr) == BIT_AND_EXPR
343 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
344 {
ab1e78e5 345 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
346 ptr_bitmask *= BITS_PER_UNIT;
347 align = ptr_bitmask & -ptr_bitmask;
98ab9e8f 348 addr = TREE_OPERAND (addr, 0);
349 }
ceea063b 350
3482bf13 351 known_alignment
352 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
3482bf13 353 align = MAX (ptr_align, align);
354
ab1e78e5 355 /* Re-apply explicit alignment to the bitpos. */
356 ptr_bitpos &= ptr_bitmask;
357
4083990a 358 /* The alignment of the pointer operand in a TARGET_MEM_REF
359 has to take the variable offset parts into account. */
3482bf13 360 if (TREE_CODE (exp) == TARGET_MEM_REF)
153c3b50 361 {
3482bf13 362 if (TMR_INDEX (exp))
363 {
364 unsigned HOST_WIDE_INT step = 1;
365 if (TMR_STEP (exp))
f9ae6f95 366 step = TREE_INT_CST_LOW (TMR_STEP (exp));
3482bf13 367 align = MIN (align, (step & -step) * BITS_PER_UNIT);
368 }
369 if (TMR_INDEX2 (exp))
370 align = BITS_PER_UNIT;
371 known_alignment = false;
153c3b50 372 }
ceea063b 373
3482bf13 374 /* When EXP is an actual memory reference then we can use
375 TYPE_ALIGN of a pointer indirection to derive alignment.
376 Do so only if get_pointer_alignment_1 did not reveal absolute
4083990a 377 alignment knowledge and if using that alignment would
378 improve the situation. */
379 if (!addr_p && !known_alignment
380 && TYPE_ALIGN (TREE_TYPE (exp)) > align)
381 align = TYPE_ALIGN (TREE_TYPE (exp));
382 else
383 {
384 /* Else adjust bitpos accordingly. */
385 bitpos += ptr_bitpos;
386 if (TREE_CODE (exp) == MEM_REF
387 || TREE_CODE (exp) == TARGET_MEM_REF)
e913b5cd 388 bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
4083990a 389 }
98ab9e8f 390 }
3482bf13 391 else if (TREE_CODE (exp) == STRING_CST)
153c3b50 392 {
3482bf13 393 /* STRING_CST are the only constant objects we allow to be not
394 wrapped inside a CONST_DECL. */
395 align = TYPE_ALIGN (TREE_TYPE (exp));
396#ifdef CONSTANT_ALIGNMENT
397 if (CONSTANT_CLASS_P (exp))
398 align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
399#endif
400 known_alignment = true;
98ab9e8f 401 }
98ab9e8f 402
403 /* If there is a non-constant offset part extract the maximum
404 alignment that can prevail. */
c8a2b4ff 405 if (offset)
98ab9e8f 406 {
ad464c56 407 unsigned int trailing_zeros = tree_ctz (offset);
c8a2b4ff 408 if (trailing_zeros < HOST_BITS_PER_INT)
98ab9e8f 409 {
c8a2b4ff 410 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
411 if (inner)
412 align = MIN (align, inner);
98ab9e8f 413 }
98ab9e8f 414 }
415
3482bf13 416 *alignp = align;
417 *bitposp = bitpos & (*alignp - 1);
ceea063b 418 return known_alignment;
0c883ef3 419}
420
3482bf13 421/* For a memory reference expression EXP compute values M and N such that M
422 divides (&EXP - N) and such that N < M. If these numbers can be determined,
 423 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
 424 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
425
426bool
427get_object_alignment_1 (tree exp, unsigned int *alignp,
428 unsigned HOST_WIDE_INT *bitposp)
429{
430 return get_object_alignment_2 (exp, alignp, bitposp, false);
431}
432
957d0361 433/* Return the alignment in bits of EXP, an object. */
0c883ef3 434
435unsigned int
957d0361 436get_object_alignment (tree exp)
0c883ef3 437{
438 unsigned HOST_WIDE_INT bitpos = 0;
439 unsigned int align;
440
ceea063b 441 get_object_alignment_1 (exp, &align, &bitpos);
0c883ef3 442
98ab9e8f 443 /* align and bitpos now specify known low bits of the pointer.
444 ptr & (align - 1) == bitpos. */
445
446 if (bitpos != 0)
447 align = (bitpos & -bitpos);
957d0361 448 return align;
698537d1 449}
450
ceea063b 451/* For a pointer valued expression EXP compute values M and N such that M
452 divides (EXP - N) and such that N < M. If these numbers can be determined,
3482bf13 453 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
454 the results are just a conservative approximation.
53800dbe 455
ceea063b 456 If EXP is not a pointer, false is returned too. */
53800dbe 457
ceea063b 458bool
459get_pointer_alignment_1 (tree exp, unsigned int *alignp,
460 unsigned HOST_WIDE_INT *bitposp)
53800dbe 461{
153c3b50 462 STRIP_NOPS (exp);
535e2026 463
153c3b50 464 if (TREE_CODE (exp) == ADDR_EXPR)
3482bf13 465 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
466 alignp, bitposp, true);
906a9403 467 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
468 {
469 unsigned int align;
470 unsigned HOST_WIDE_INT bitpos;
471 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
472 &align, &bitpos);
473 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
474 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
475 else
476 {
477 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
478 if (trailing_zeros < HOST_BITS_PER_INT)
479 {
480 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
481 if (inner)
482 align = MIN (align, inner);
483 }
484 }
485 *alignp = align;
486 *bitposp = bitpos & (align - 1);
487 return res;
488 }
153c3b50 489 else if (TREE_CODE (exp) == SSA_NAME
490 && POINTER_TYPE_P (TREE_TYPE (exp)))
53800dbe 491 {
ceea063b 492 unsigned int ptr_align, ptr_misalign;
153c3b50 493 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
ceea063b 494
495 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
496 {
497 *bitposp = ptr_misalign * BITS_PER_UNIT;
498 *alignp = ptr_align * BITS_PER_UNIT;
3482bf13 499 /* We cannot really tell whether this result is an approximation. */
ceea063b 500 return true;
501 }
502 else
69fbc3aa 503 {
504 *bitposp = 0;
ceea063b 505 *alignp = BITS_PER_UNIT;
506 return false;
69fbc3aa 507 }
53800dbe 508 }
0bb8b39a 509 else if (TREE_CODE (exp) == INTEGER_CST)
510 {
511 *alignp = BIGGEST_ALIGNMENT;
f9ae6f95 512 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
0bb8b39a 513 & (BIGGEST_ALIGNMENT - 1));
514 return true;
515 }
153c3b50 516
69fbc3aa 517 *bitposp = 0;
ceea063b 518 *alignp = BITS_PER_UNIT;
519 return false;
53800dbe 520}
521
69fbc3aa 522/* Return the alignment in bits of EXP, a pointer valued expression.
523 The alignment returned is, by default, the alignment of the thing that
524 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
525
526 Otherwise, look at the expression to see if we can do better, i.e., if the
527 expression is actually pointing at an object whose alignment is tighter. */
528
529unsigned int
530get_pointer_alignment (tree exp)
531{
532 unsigned HOST_WIDE_INT bitpos = 0;
533 unsigned int align;
ceea063b 534
535 get_pointer_alignment_1 (exp, &align, &bitpos);
69fbc3aa 536
537 /* align and bitpos now specify known low bits of the pointer.
538 ptr & (align - 1) == bitpos. */
539
540 if (bitpos != 0)
541 align = (bitpos & -bitpos);
542
543 return align;
544}
545
53800dbe 546/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
547 way, because it could contain a zero byte in the middle.
548 TREE_STRING_LENGTH is the size of the character array, not the string.
549
4172d65e 550 ONLY_VALUE should be nonzero if the result is not going to be emitted
c09841f6 551 into the instruction stream and zero if it is going to be expanded.
4172d65e 552 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
681fab1e 553 is returned, otherwise NULL, since
554 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
555 evaluate the side-effects.
556
6bda159e 557 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
558 accesses. Note that this implies the result is not going to be emitted
559 into the instruction stream.
560
902de8ed 561 The value returned is of type `ssizetype'.
562
53800dbe 563 Unfortunately, string_constant can't access the values of const char
564 arrays with initializers, so neither can we do so here. */
565
4ee9c684 566tree
681fab1e 567c_strlen (tree src, int only_value)
53800dbe 568{
569 tree offset_node;
27d0c333 570 HOST_WIDE_INT offset;
571 int max;
44acf429 572 const char *ptr;
da136652 573 location_t loc;
53800dbe 574
681fab1e 575 STRIP_NOPS (src);
576 if (TREE_CODE (src) == COND_EXPR
577 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
578 {
579 tree len1, len2;
580
581 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
582 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
0862b7e9 583 if (tree_int_cst_equal (len1, len2))
681fab1e 584 return len1;
585 }
586
587 if (TREE_CODE (src) == COMPOUND_EXPR
588 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
589 return c_strlen (TREE_OPERAND (src, 1), only_value);
590
3df42822 591 loc = EXPR_LOC_OR_LOC (src, input_location);
da136652 592
53800dbe 593 src = string_constant (src, &offset_node);
594 if (src == 0)
c2f47e15 595 return NULL_TREE;
902de8ed 596
83d79705 597 max = TREE_STRING_LENGTH (src) - 1;
53800dbe 598 ptr = TREE_STRING_POINTER (src);
902de8ed 599
53800dbe 600 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
601 {
602 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
603 compute the offset to the following null if we don't know where to
604 start searching for it. */
605 int i;
902de8ed 606
53800dbe 607 for (i = 0; i < max; i++)
608 if (ptr[i] == 0)
c2f47e15 609 return NULL_TREE;
902de8ed 610
53800dbe 611 /* We don't know the starting offset, but we do know that the string
612 has no internal zero bytes. We can assume that the offset falls
613 within the bounds of the string; otherwise, the programmer deserves
614 what he gets. Subtract the offset from the length of the string,
902de8ed 615 and return that. This would perhaps not be valid if we were dealing
616 with named arrays in addition to literal string constants. */
617
da136652 618 return size_diffop_loc (loc, size_int (max), offset_node);
53800dbe 619 }
620
621 /* We have a known offset into the string. Start searching there for
27d0c333 622 a null character if we can represent it as a single HOST_WIDE_INT. */
dabc4084 623 if (offset_node == 0)
53800dbe 624 offset = 0;
35ec552a 625 else if (! tree_fits_shwi_p (offset_node))
dabc4084 626 offset = -1;
53800dbe 627 else
e913b5cd 628 offset = tree_to_shwi (offset_node);
902de8ed 629
1f63a7d6 630 /* If the offset is known to be out of bounds, warn, and call strlen at
631 runtime. */
2f1c4f17 632 if (offset < 0 || offset > max)
53800dbe 633 {
1f63a7d6 634 /* Suppress multiple warnings for propagated constant strings. */
2f1c4f17 635 if (only_value != 2
636 && !TREE_NO_WARNING (src))
1f63a7d6 637 {
da136652 638 warning_at (loc, 0, "offset outside bounds of constant string");
1f63a7d6 639 TREE_NO_WARNING (src) = 1;
640 }
c2f47e15 641 return NULL_TREE;
53800dbe 642 }
902de8ed 643
53800dbe 644 /* Use strlen to search for the first zero byte. Since any strings
645 constructed with build_string will have nulls appended, we win even
646 if we get handed something like (char[4])"abcd".
647
648 Since OFFSET is our starting index into the string, no further
649 calculation is needed. */
902de8ed 650 return ssize_int (strlen (ptr + offset));
53800dbe 651}
652
83d79705 653/* Return a char pointer for a C string if it is a string constant
654 or sum of string constant and integer constant. */
655
b9ea678c 656const char *
aecda0d6 657c_getstr (tree src)
83d79705 658{
659 tree offset_node;
83d79705 660
661 src = string_constant (src, &offset_node);
662 if (src == 0)
663 return 0;
664
8c85fcb7 665 if (offset_node == 0)
666 return TREE_STRING_POINTER (src);
e913b5cd 667 else if (!tree_fits_uhwi_p (offset_node)
8c85fcb7 668 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
83d79705 669 return 0;
83d79705 670
e913b5cd 671 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
83d79705 672}
673
e913b5cd 674/* Return a constant integer corresponding to the target reading
8c85fcb7 675 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
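/* For instance (illustrative, assuming a target where both bytes and words
   are little-endian): c_readstr ("abcd", SImode) would produce the constant
   0x64636261, i.e. the bytes of the string laid out exactly as a 4-byte load
   from memory would see them.  */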
ecc318ff 676
6840589f 677static rtx
3754d046 678c_readstr (const char *str, machine_mode mode)
6840589f 679{
6840589f 680 HOST_WIDE_INT ch;
681 unsigned int i, j;
e913b5cd 682 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
0407eaee 683
684 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
e913b5cd 685 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
686 / HOST_BITS_PER_WIDE_INT;
687
a12aa4cc 688 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
e913b5cd 689 for (i = 0; i < len; i++)
690 tmp[i] = 0;
6840589f 691
6840589f 692 ch = 1;
693 for (i = 0; i < GET_MODE_SIZE (mode); i++)
694 {
695 j = i;
696 if (WORDS_BIG_ENDIAN)
697 j = GET_MODE_SIZE (mode) - i - 1;
698 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
ad8f8e52 699 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
6840589f 700 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
701 j *= BITS_PER_UNIT;
7d3f6cc7 702
6840589f 703 if (ch)
704 ch = (unsigned char) str[i];
e913b5cd 705 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
6840589f 706 }
ddb1be65 707
ab2c1de8 708 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
e913b5cd 709 return immed_wide_int_const (c, mode);
6840589f 710}
711
ecc318ff 712/* Cast a target constant CST to target CHAR and, if that value fits into
5206b159 713 the host char type, return zero and put that value into the variable
ecc318ff 714 pointed to by P. */
715
716static int
aecda0d6 717target_char_cast (tree cst, char *p)
ecc318ff 718{
719 unsigned HOST_WIDE_INT val, hostval;
720
c19686c5 721 if (TREE_CODE (cst) != INTEGER_CST
ecc318ff 722 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
723 return 1;
724
e913b5cd 725 /* We do not care here whether the value fits. */
f9ae6f95 726 val = TREE_INT_CST_LOW (cst);
e913b5cd 727
ecc318ff 728 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
729 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
730
731 hostval = val;
732 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
733 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
734
735 if (val != hostval)
736 return 1;
737
738 *p = hostval;
739 return 0;
740}
741
4ee9c684 742/* Similar to save_expr, but assumes that arbitrary code is not executed
743 in between the multiple evaluations. In particular, we assume that a
744 non-addressable local variable will not be modified. */
745
746static tree
747builtin_save_expr (tree exp)
748{
f6c35aa4 749 if (TREE_CODE (exp) == SSA_NAME
750 || (TREE_ADDRESSABLE (exp) == 0
751 && (TREE_CODE (exp) == PARM_DECL
752 || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
4ee9c684 753 return exp;
754
755 return save_expr (exp);
756}
757
53800dbe 758/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
759 times to get the address of either a higher stack frame, or a return
760 address located within it (depending on FNDECL_CODE). */
902de8ed 761
c626df3d 762static rtx
869d0ef0 763expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
53800dbe 764{
765 int i;
766
869d0ef0 767#ifdef INITIAL_FRAME_ADDRESS_RTX
768 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
769#else
e3e15c50 770 rtx tem;
771
1b74fde7 772 /* For a zero count with __builtin_return_address, we don't care what
773 frame address we return, because target-specific definitions will
774 override us. Therefore frame pointer elimination is OK, and using
775 the soft frame pointer is OK.
776
fa7637bd 777 For a nonzero count, or a zero count with __builtin_frame_address,
1b74fde7 778 we require a stable offset from the current frame pointer to the
779 previous one, so we must use the hard frame pointer, and
e3e15c50 780 we must disable frame pointer elimination. */
1b74fde7 781 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
e3e15c50 782 tem = frame_pointer_rtx;
a0c938f0 783 else
e3e15c50 784 {
785 tem = hard_frame_pointer_rtx;
786
787 /* Tell reload not to eliminate the frame pointer. */
18d50ae6 788 crtl->accesses_prior_frames = 1;
e3e15c50 789 }
869d0ef0 790#endif
791
53800dbe 792 /* Some machines need special handling before we can access
3a69c60c 793 arbitrary frames. For example, on the SPARC, we must first flush
53800dbe 794 all register windows to the stack. */
795#ifdef SETUP_FRAME_ADDRESSES
796 if (count > 0)
797 SETUP_FRAME_ADDRESSES ();
798#endif
799
3a69c60c 800 /* On the SPARC, the return address is not in the frame, it is in a
53800dbe 801 register. There is no way to access it off of the current frame
802 pointer, but it can be accessed off the previous frame pointer by
803 reading the value from the register window save area. */
a26d6c60 804 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
53800dbe 805 count--;
53800dbe 806
807 /* Scan back COUNT frames to the specified frame. */
808 for (i = 0; i < count; i++)
809 {
810 /* Assume the dynamic chain pointer is in the word that the
811 frame address points to, unless otherwise specified. */
812#ifdef DYNAMIC_CHAIN_ADDRESS
813 tem = DYNAMIC_CHAIN_ADDRESS (tem);
814#endif
815 tem = memory_address (Pmode, tem);
00060fc2 816 tem = gen_frame_mem (Pmode, tem);
83fc1478 817 tem = copy_to_reg (tem);
53800dbe 818 }
819
3a69c60c 820 /* For __builtin_frame_address, return what we've got. But, on
821 the SPARC for example, we may have to add a bias. */
53800dbe 822 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
3a69c60c 823#ifdef FRAME_ADDR_RTX
824 return FRAME_ADDR_RTX (tem);
825#else
53800dbe 826 return tem;
3a69c60c 827#endif
53800dbe 828
3a69c60c 829 /* For __builtin_return_address, get the return address from that frame. */
53800dbe 830#ifdef RETURN_ADDR_RTX
831 tem = RETURN_ADDR_RTX (count, tem);
832#else
833 tem = memory_address (Pmode,
29c05e22 834 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
00060fc2 835 tem = gen_frame_mem (Pmode, tem);
53800dbe 836#endif
837 return tem;
838}
839
f7c44134 840/* Alias set used for setjmp buffer. */
32c2fdea 841static alias_set_type setjmp_alias_set = -1;
f7c44134 842
6b7f6858 843/* Construct the leading half of a __builtin_setjmp call. Control will
2c8a1497 844 return to RECEIVER_LABEL. This is also called directly by the SJLJ
845 exception handling code. */
53800dbe 846
6b7f6858 847void
aecda0d6 848expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
53800dbe 849{
3754d046 850 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 851 rtx stack_save;
f7c44134 852 rtx mem;
53800dbe 853
f7c44134 854 if (setjmp_alias_set == -1)
855 setjmp_alias_set = new_alias_set ();
856
85d654dd 857 buf_addr = convert_memory_address (Pmode, buf_addr);
53800dbe 858
37ae8504 859 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
53800dbe 860
6b7f6858 861 /* We store the frame pointer and the address of receiver_label in
862 the buffer and use the rest of it for the stack save area, which
863 is machine-dependent. */
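  /* Roughly, a sketch of the layout implied by the code below:
       buf[0]           frame pointer value (Pmode)
       buf[1]           address of RECEIVER_LABEL (Pmode)
       buf[2] onwards   stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */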
53800dbe 864
f7c44134 865 mem = gen_rtx_MEM (Pmode, buf_addr);
ab6ab77e 866 set_mem_alias_set (mem, setjmp_alias_set);
e3e026e8 867 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
f7c44134 868
29c05e22 869 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
870 GET_MODE_SIZE (Pmode))),
ab6ab77e 871 set_mem_alias_set (mem, setjmp_alias_set);
f7c44134 872
873 emit_move_insn (validize_mem (mem),
6b7f6858 874 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
53800dbe 875
876 stack_save = gen_rtx_MEM (sa_mode,
29c05e22 877 plus_constant (Pmode, buf_addr,
53800dbe 878 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 879 set_mem_alias_set (stack_save, setjmp_alias_set);
e9c97615 880 emit_stack_save (SAVE_NONLOCAL, &stack_save);
53800dbe 881
882 /* If there is further processing to do, do it. */
a3c81e61 883 if (targetm.have_builtin_setjmp_setup ())
884 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
53800dbe 885
29f09705 886 /* We have a nonlocal label. */
18d50ae6 887 cfun->has_nonlocal_label = 1;
6b7f6858 888}
53800dbe 889
2c8a1497 890/* Construct the trailing part of a __builtin_setjmp call. This is
4598ade9 891 also called directly by the SJLJ exception handling code.
892 If RECEIVER_LABEL is NULL, instead contruct a nonlocal goto handler. */
6b7f6858 893
894void
a3c81e61 895expand_builtin_setjmp_receiver (rtx receiver_label)
6b7f6858 896{
82c7907c 897 rtx chain;
898
4598ade9 899 /* Mark the FP as used when we get here, so we have to make sure it's
53800dbe 900 marked as used by this function. */
18b42941 901 emit_use (hard_frame_pointer_rtx);
53800dbe 902
903 /* Mark the static chain as clobbered here so life information
904 doesn't get messed up for it. */
82c7907c 905 chain = targetm.calls.static_chain (current_function_decl, true);
906 if (chain && REG_P (chain))
907 emit_clobber (chain);
53800dbe 908
909 /* Now put in the code to restore the frame pointer, and argument
491e04ef 910 pointer, if needed. */
a3c81e61 911 if (! targetm.have_nonlocal_goto ())
62dcb5c8 912 {
913 /* First adjust our frame pointer to its actual value. It was
914 previously set to the start of the virtual area corresponding to
915 the stacked variables when we branched here and now needs to be
916 adjusted to the actual hardware fp value.
917
918 Assignments to virtual registers are converted by
919 instantiate_virtual_regs into the corresponding assignment
920 to the underlying register (fp in this case) that makes
921 the original assignment true.
922 So the following insn will actually be decrementing fp by
923 STARTING_FRAME_OFFSET. */
924 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
925
926 /* Restoring the frame pointer also modifies the hard frame pointer.
927 Mark it used (so that the previous assignment remains live once
928 the frame pointer is eliminated) and clobbered (to represent the
929 implicit update from the assignment). */
930 emit_use (hard_frame_pointer_rtx);
931 emit_clobber (hard_frame_pointer_rtx);
932 }
53800dbe 933
5ae82d58 934#if !HARD_FRAME_POINTER_IS_ARG_POINTER
53800dbe 935 if (fixed_regs[ARG_POINTER_REGNUM])
936 {
937#ifdef ELIMINABLE_REGS
4598ade9 938 /* If the argument pointer can be eliminated in favor of the
939 frame pointer, we don't need to restore it. We assume here
940 that if such an elimination is present, it can always be used.
941 This is the case on all known machines; if we don't make this
942 assumption, we do unnecessary saving on many machines. */
53800dbe 943 size_t i;
e99c3a1d 944 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
53800dbe 945
3098b2d3 946 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
53800dbe 947 if (elim_regs[i].from == ARG_POINTER_REGNUM
948 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
949 break;
950
3098b2d3 951 if (i == ARRAY_SIZE (elim_regs))
53800dbe 952#endif
953 {
954 /* Now restore our arg pointer from the address at which it
05927e40 955 was saved in our stack frame. */
27a7a23a 956 emit_move_insn (crtl->args.internal_arg_pointer,
b079a207 957 copy_to_reg (get_arg_pointer_save_area ()));
53800dbe 958 }
959 }
960#endif
961
a3c81e61 962 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
963 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
964 else if (targetm.have_nonlocal_goto_receiver ())
965 emit_insn (targetm.gen_nonlocal_goto_receiver ());
53800dbe 966 else
a3c81e61 967 { /* Nothing */ }
57f6bb94 968
3072d30e 969 /* We must not allow the code we just generated to be reordered by
970 scheduling. Specifically, the update of the frame pointer must
62dcb5c8 971 happen immediately, not later. */
3072d30e 972 emit_insn (gen_blockage ());
6b7f6858 973}
53800dbe 974
53800dbe 975/* __builtin_longjmp is passed a pointer to an array of five words (not
976 all will be used on all machines). It operates similarly to the C
977 library function of the same name, but is more efficient. Much of
2c8a1497 978 the code below is copied from the handling of non-local gotos. */
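/* A sketch of what the generic expansion below reads back from the buffer
   (mirroring expand_builtin_setjmp_setup): word 0 holds the saved frame
   pointer, word 1 the address of the receiver label, and the remaining words
   the saved stack pointer area.  */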
53800dbe 979
c626df3d 980static void
aecda0d6 981expand_builtin_longjmp (rtx buf_addr, rtx value)
53800dbe 982{
1e0c0b35 983 rtx fp, lab, stack;
984 rtx_insn *insn, *last;
3754d046 985 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
53800dbe 986
48e1416a 987 /* DRAP is needed for stack realign if longjmp is expanded to current
27a7a23a 988 function */
989 if (SUPPORTS_STACK_ALIGNMENT)
990 crtl->need_drap = true;
991
f7c44134 992 if (setjmp_alias_set == -1)
993 setjmp_alias_set = new_alias_set ();
994
85d654dd 995 buf_addr = convert_memory_address (Pmode, buf_addr);
479e4d5e 996
53800dbe 997 buf_addr = force_reg (Pmode, buf_addr);
998
82c7907c 999 /* We require that the user must pass a second argument of 1, because
1000 that is what builtin_setjmp will return. */
64db345d 1001 gcc_assert (value == const1_rtx);
53800dbe 1002
4712c7d6 1003 last = get_last_insn ();
a3c81e61 1004 if (targetm.have_builtin_longjmp ())
1005 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
53800dbe 1006 else
53800dbe 1007 {
1008 fp = gen_rtx_MEM (Pmode, buf_addr);
29c05e22 1009 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
53800dbe 1010 GET_MODE_SIZE (Pmode)));
1011
29c05e22 1012 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
53800dbe 1013 2 * GET_MODE_SIZE (Pmode)));
ab6ab77e 1014 set_mem_alias_set (fp, setjmp_alias_set);
1015 set_mem_alias_set (lab, setjmp_alias_set);
1016 set_mem_alias_set (stack, setjmp_alias_set);
53800dbe 1017
1018 /* Pick up FP, label, and SP from the block and jump. This code is
1019 from expand_goto in stmt.c; see there for detailed comments. */
a3c81e61 1020 if (targetm.have_nonlocal_goto ())
53800dbe 1021 /* We have to pass a value to the nonlocal_goto pattern that will
1022 get copied into the static_chain pointer, but it does not matter
1023 what that value is, because builtin_setjmp does not use it. */
a3c81e61 1024 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
53800dbe 1025 else
53800dbe 1026 {
1027 lab = copy_to_reg (lab);
1028
18b42941 1029 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1030 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
2a871ad1 1031
53800dbe 1032 emit_move_insn (hard_frame_pointer_rtx, fp);
e9c97615 1033 emit_stack_restore (SAVE_NONLOCAL, stack);
53800dbe 1034
18b42941 1035 emit_use (hard_frame_pointer_rtx);
1036 emit_use (stack_pointer_rtx);
53800dbe 1037 emit_indirect_jump (lab);
1038 }
1039 }
615166bb 1040
1041 /* Search backwards and mark the jump insn as a non-local goto.
1042 Note that this precludes the use of __builtin_longjmp to a
1043 __builtin_setjmp target in the same function. However, we've
1044 already cautioned the user that these functions are for
1045 internal exception handling use only. */
449c0509 1046 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1047 {
64db345d 1048 gcc_assert (insn != last);
7d3f6cc7 1049
6d7dc5b9 1050 if (JUMP_P (insn))
449c0509 1051 {
a1ddb869 1052 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
449c0509 1053 break;
1054 }
6d7dc5b9 1055 else if (CALL_P (insn))
9342ee68 1056 break;
449c0509 1057 }
53800dbe 1058}
1059
0e80b01d 1060static inline bool
1061more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1062{
1063 return (iter->i < iter->n);
1064}
1065
1066/* This function validates the types of a function call argument list
1067 against a specified list of tree_codes. If the last specifier is a 0,
 1068 that represents an ellipsis, otherwise the last specifier must be a
1069 VOID_TYPE. */
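/* For example (taken from uses later in this file), a call such as
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   returns true only if EXP has exactly two arguments, both of pointer type;
   a trailing 0 instead of VOID_TYPE would accept any further arguments.  */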
1070
1071static bool
1072validate_arglist (const_tree callexpr, ...)
1073{
1074 enum tree_code code;
1075 bool res = 0;
1076 va_list ap;
1077 const_call_expr_arg_iterator iter;
1078 const_tree arg;
1079
1080 va_start (ap, callexpr);
1081 init_const_call_expr_arg_iterator (callexpr, &iter);
1082
1083 do
1084 {
1085 code = (enum tree_code) va_arg (ap, int);
1086 switch (code)
1087 {
1088 case 0:
 1089 /* This signifies an ellipsis; any further arguments are all OK. */
1090 res = true;
1091 goto end;
1092 case VOID_TYPE:
1093 /* This signifies an endlink, if no arguments remain, return
1094 true, otherwise return false. */
1095 res = !more_const_call_expr_args_p (&iter);
1096 goto end;
1097 default:
1098 /* If no parameters remain or the parameter's code does not
1099 match the specified code, return false. Otherwise continue
1100 checking any remaining arguments. */
1101 arg = next_const_call_expr_arg (&iter);
1102 if (!validate_arg (arg, code))
1103 goto end;
1104 break;
1105 }
1106 }
1107 while (1);
1108
1109 /* We need gotos here since we can only have one VA_CLOSE in a
1110 function. */
1111 end: ;
1112 va_end (ap);
1113
1114 return res;
1115}
1116
4ee9c684 1117/* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1118 and the address of the save area. */
1119
1120static rtx
c2f47e15 1121expand_builtin_nonlocal_goto (tree exp)
4ee9c684 1122{
1123 tree t_label, t_save_area;
1e0c0b35 1124 rtx r_label, r_save_area, r_fp, r_sp;
1125 rtx_insn *insn;
4ee9c684 1126
c2f47e15 1127 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4ee9c684 1128 return NULL_RTX;
1129
c2f47e15 1130 t_label = CALL_EXPR_ARG (exp, 0);
1131 t_save_area = CALL_EXPR_ARG (exp, 1);
4ee9c684 1132
8ec3c5c2 1133 r_label = expand_normal (t_label);
3dce56cc 1134 r_label = convert_memory_address (Pmode, r_label);
8ec3c5c2 1135 r_save_area = expand_normal (t_save_area);
3dce56cc 1136 r_save_area = convert_memory_address (Pmode, r_save_area);
d1ff492e 1137 /* Copy the address of the save location to a register just in case it was
1138 based on the frame pointer. */
51adbc8a 1139 r_save_area = copy_to_reg (r_save_area);
4ee9c684 1140 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1141 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
29c05e22 1142 plus_constant (Pmode, r_save_area,
1143 GET_MODE_SIZE (Pmode)));
4ee9c684 1144
18d50ae6 1145 crtl->has_nonlocal_goto = 1;
4ee9c684 1146
4ee9c684 1147 /* ??? We no longer need to pass the static chain value, afaik. */
a3c81e61 1148 if (targetm.have_nonlocal_goto ())
1149 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
4ee9c684 1150 else
4ee9c684 1151 {
1152 r_label = copy_to_reg (r_label);
1153
18b42941 1154 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1155 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
491e04ef 1156
d1ff492e 1157 /* Restore frame pointer for containing function. */
4ee9c684 1158 emit_move_insn (hard_frame_pointer_rtx, r_fp);
e9c97615 1159 emit_stack_restore (SAVE_NONLOCAL, r_sp);
491e04ef 1160
4ee9c684 1161 /* USE of hard_frame_pointer_rtx added for consistency;
1162 not clear if really needed. */
18b42941 1163 emit_use (hard_frame_pointer_rtx);
1164 emit_use (stack_pointer_rtx);
ad0d0af8 1165
1166 /* If the architecture is using a GP register, we must
1167 conservatively assume that the target function makes use of it.
1168 The prologue of functions with nonlocal gotos must therefore
1169 initialize the GP register to the appropriate value, and we
1170 must then make sure that this value is live at the point
1171 of the jump. (Note that this doesn't necessarily apply
1172 to targets with a nonlocal_goto pattern; they are free
1173 to implement it in their own way. Note also that this is
1174 a no-op if the GP register is a global invariant.) */
1175 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1176 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
18b42941 1177 emit_use (pic_offset_table_rtx);
ad0d0af8 1178
4ee9c684 1179 emit_indirect_jump (r_label);
1180 }
491e04ef 1181
4ee9c684 1182 /* Search backwards to the jump insn and mark it as a
1183 non-local goto. */
1184 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1185 {
6d7dc5b9 1186 if (JUMP_P (insn))
4ee9c684 1187 {
a1ddb869 1188 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
4ee9c684 1189 break;
1190 }
6d7dc5b9 1191 else if (CALL_P (insn))
4ee9c684 1192 break;
1193 }
1194
1195 return const0_rtx;
1196}
1197
843d08a9 1198/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1199 (not all will be used on all machines) that was passed to __builtin_setjmp.
97354ae4 1200 It updates the stack pointer in that block to the current value. This is
1201 also called directly by the SJLJ exception handling code. */
843d08a9 1202
97354ae4 1203void
843d08a9 1204expand_builtin_update_setjmp_buf (rtx buf_addr)
1205{
3754d046 1206 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
d1ff492e 1207 rtx stack_save
843d08a9 1208 = gen_rtx_MEM (sa_mode,
1209 memory_address
1210 (sa_mode,
29c05e22 1211 plus_constant (Pmode, buf_addr,
1212 2 * GET_MODE_SIZE (Pmode))));
843d08a9 1213
e9c97615 1214 emit_stack_save (SAVE_NONLOCAL, &stack_save);
843d08a9 1215}
1216
5e3608d8 1217/* Expand a call to __builtin_prefetch. For a target that does not support
1218 data prefetch, evaluate the memory address argument in case it has side
1219 effects. */
1220
1221static void
c2f47e15 1222expand_builtin_prefetch (tree exp)
5e3608d8 1223{
1224 tree arg0, arg1, arg2;
c2f47e15 1225 int nargs;
5e3608d8 1226 rtx op0, op1, op2;
1227
c2f47e15 1228 if (!validate_arglist (exp, POINTER_TYPE, 0))
26a5cadb 1229 return;
1230
c2f47e15 1231 arg0 = CALL_EXPR_ARG (exp, 0);
1232
26a5cadb 1233 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1234 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1235 locality). */
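  /* So, for example, __builtin_prefetch (p) behaves like
     __builtin_prefetch (p, 0, 3).  */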
c2f47e15 1236 nargs = call_expr_nargs (exp);
1237 if (nargs > 1)
1238 arg1 = CALL_EXPR_ARG (exp, 1);
26a5cadb 1239 else
c2f47e15 1240 arg1 = integer_zero_node;
1241 if (nargs > 2)
1242 arg2 = CALL_EXPR_ARG (exp, 2);
1243 else
2512209b 1244 arg2 = integer_three_node;
5e3608d8 1245
1246 /* Argument 0 is an address. */
1247 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1248
1249 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1250 if (TREE_CODE (arg1) != INTEGER_CST)
1251 {
07e3a3d2 1252 error ("second argument to %<__builtin_prefetch%> must be a constant");
9342ee68 1253 arg1 = integer_zero_node;
5e3608d8 1254 }
8ec3c5c2 1255 op1 = expand_normal (arg1);
5e3608d8 1256 /* Argument 1 must be either zero or one. */
1257 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1258 {
c3ceba8e 1259 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
07e3a3d2 1260 " using zero");
5e3608d8 1261 op1 = const0_rtx;
1262 }
1263
1264 /* Argument 2 (locality) must be a compile-time constant int. */
1265 if (TREE_CODE (arg2) != INTEGER_CST)
1266 {
07e3a3d2 1267 error ("third argument to %<__builtin_prefetch%> must be a constant");
5e3608d8 1268 arg2 = integer_zero_node;
1269 }
8ec3c5c2 1270 op2 = expand_normal (arg2);
5e3608d8 1271 /* Argument 2 must be 0, 1, 2, or 3. */
1272 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1273 {
c3ceba8e 1274 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
5e3608d8 1275 op2 = const0_rtx;
1276 }
1277
1d375a79 1278 if (targetm.have_prefetch ())
5e3608d8 1279 {
8786db1e 1280 struct expand_operand ops[3];
1281
1282 create_address_operand (&ops[0], op0);
1283 create_integer_operand (&ops[1], INTVAL (op1));
1284 create_integer_operand (&ops[2], INTVAL (op2));
1d375a79 1285 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
8786db1e 1286 return;
5e3608d8 1287 }
0a534ba7 1288
f0ce3b1f 1289 /* Don't do anything with direct references to volatile memory, but
1290 generate code to handle other side effects. */
e16ceb8e 1291 if (!MEM_P (op0) && side_effects_p (op0))
f0ce3b1f 1292 emit_insn (op0);
5e3608d8 1293}
1294
f7c44134 1295/* Get a MEM rtx for expression EXP which is the address of an operand
d8ae1baa 1296 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1297 the maximum length of the block of memory that might be accessed or
1298 NULL if unknown. */
f7c44134 1299
53800dbe 1300static rtx
d8ae1baa 1301get_memory_rtx (tree exp, tree len)
53800dbe 1302{
ad0a178f 1303 tree orig_exp = exp;
1304 rtx addr, mem;
ad0a178f 1305
1306 /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1307 from its expression, for expr->a.b only <variable>.a.b is recorded. */
1308 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1309 exp = TREE_OPERAND (exp, 0);
1310
1311 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1312 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2a631e19 1313
f7c44134 1314 /* Get an expression we can use to find the attributes to assign to MEM.
5dd3f78f 1315 First remove any nops. */
72dd6141 1316 while (CONVERT_EXPR_P (exp)
f7c44134 1317 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1318 exp = TREE_OPERAND (exp, 0);
1319
5dd3f78f 1320 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1321 (as builtin stringops may alias with anything). */
1322 exp = fold_build2 (MEM_REF,
1323 build_array_type (char_type_node,
1324 build_range_type (sizetype,
1325 size_one_node, len)),
1326 exp, build_int_cst (ptr_type_node, 0));
1327
1328 /* If the MEM_REF has no acceptable address, try to get the base object
1329 from the original address we got, and build an all-aliasing
1330 unknown-sized access to that one. */
1331 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1332 set_mem_attributes (mem, exp, 0);
1333 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1334 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1335 0))))
eec8e941 1336 {
5dd3f78f 1337 exp = build_fold_addr_expr (exp);
1338 exp = fold_build2 (MEM_REF,
1339 build_array_type (char_type_node,
1340 build_range_type (sizetype,
1341 size_zero_node,
1342 NULL)),
1343 exp, build_int_cst (ptr_type_node, 0));
a1a25d19 1344 set_mem_attributes (mem, exp, 0);
eec8e941 1345 }
5dd3f78f 1346 set_mem_alias_set (mem, 0);
53800dbe 1347 return mem;
1348}
1349\f
1350/* Built-in functions to perform an untyped call and return. */
1351
3b9c3a16 1352#define apply_args_mode \
1353 (this_target_builtins->x_apply_args_mode)
1354#define apply_result_mode \
1355 (this_target_builtins->x_apply_result_mode)
53800dbe 1356
53800dbe 1357/* Return the size required for the block returned by __builtin_apply_args,
1358 and initialize apply_args_mode. */
1359
1360static int
aecda0d6 1361apply_args_size (void)
53800dbe 1362{
1363 static int size = -1;
58e9ce8f 1364 int align;
1365 unsigned int regno;
3754d046 1366 machine_mode mode;
53800dbe 1367
1368 /* The values computed by this function never change. */
1369 if (size < 0)
1370 {
1371 /* The first value is the incoming arg-pointer. */
1372 size = GET_MODE_SIZE (Pmode);
1373
1374 /* The second value is the structure value address unless this is
1375 passed as an "invisible" first argument. */
6812c89e 1376 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1377 size += GET_MODE_SIZE (Pmode);
1378
1379 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1380 if (FUNCTION_ARG_REGNO_P (regno))
1381 {
4bac51c9 1382 mode = targetm.calls.get_raw_arg_mode (regno);
0862b7e9 1383
64db345d 1384 gcc_assert (mode != VOIDmode);
53800dbe 1385
1386 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1387 if (size % align != 0)
1388 size = CEIL (size, align) * align;
53800dbe 1389 size += GET_MODE_SIZE (mode);
1390 apply_args_mode[regno] = mode;
1391 }
1392 else
1393 {
1394 apply_args_mode[regno] = VOIDmode;
53800dbe 1395 }
1396 }
1397 return size;
1398}
1399
1400/* Return the size required for the block returned by __builtin_apply,
1401 and initialize apply_result_mode. */
1402
1403static int
aecda0d6 1404apply_result_size (void)
53800dbe 1405{
1406 static int size = -1;
1407 int align, regno;
3754d046 1408 machine_mode mode;
53800dbe 1409
1410 /* The values computed by this function never change. */
1411 if (size < 0)
1412 {
1413 size = 0;
1414
1415 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
e1ce1485 1416 if (targetm.calls.function_value_regno_p (regno))
53800dbe 1417 {
4bac51c9 1418 mode = targetm.calls.get_raw_result_mode (regno);
0862b7e9 1419
64db345d 1420 gcc_assert (mode != VOIDmode);
53800dbe 1421
1422 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1423 if (size % align != 0)
1424 size = CEIL (size, align) * align;
1425 size += GET_MODE_SIZE (mode);
1426 apply_result_mode[regno] = mode;
1427 }
1428 else
1429 apply_result_mode[regno] = VOIDmode;
1430
1431 /* Allow targets that use untyped_call and untyped_return to override
1432 the size so that machine-specific information can be stored here. */
1433#ifdef APPLY_RESULT_SIZE
1434 size = APPLY_RESULT_SIZE;
1435#endif
1436 }
1437 return size;
1438}
1439
53800dbe 1440/* Create a vector describing the result block RESULT. If SAVEP is true,
1441 the result block is used to save the values; otherwise it is used to
1442 restore the values. */
1443
1444static rtx
aecda0d6 1445result_vector (int savep, rtx result)
53800dbe 1446{
1447 int regno, size, align, nelts;
3754d046 1448 machine_mode mode;
53800dbe 1449 rtx reg, mem;
364c0c59 1450 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
bf8e3599 1451
53800dbe 1452 size = nelts = 0;
1453 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1454 if ((mode = apply_result_mode[regno]) != VOIDmode)
1455 {
1456 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1457 if (size % align != 0)
1458 size = CEIL (size, align) * align;
1459 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
e513d163 1460 mem = adjust_address (result, mode, size);
53800dbe 1461 savevec[nelts++] = (savep
d1f9b275 1462 ? gen_rtx_SET (mem, reg)
1463 : gen_rtx_SET (reg, mem));
53800dbe 1464 size += GET_MODE_SIZE (mode);
1465 }
1466 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1467}
53800dbe 1468
1469/* Save the state required to perform an untyped call with the same
1470 arguments as were passed to the current function. */
1471
1472static rtx
aecda0d6 1473expand_builtin_apply_args_1 (void)
53800dbe 1474{
1c7e61a7 1475 rtx registers, tem;
53800dbe 1476 int size, align, regno;
3754d046 1477 machine_mode mode;
6812c89e 1478 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
53800dbe 1479
1480 /* Create a block where the arg-pointer, structure value address,
1481 and argument registers can be saved. */
1482 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1483
1484 /* Walk past the arg-pointer and structure value address. */
1485 size = GET_MODE_SIZE (Pmode);
6812c89e 1486 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
53800dbe 1487 size += GET_MODE_SIZE (Pmode);
1488
1489 /* Save each register used in calling a function to the block. */
1490 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1491 if ((mode = apply_args_mode[regno]) != VOIDmode)
1492 {
53800dbe 1493 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1494 if (size % align != 0)
1495 size = CEIL (size, align) * align;
1496
1497 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1498
e513d163 1499 emit_move_insn (adjust_address (registers, mode, size), tem);
53800dbe 1500 size += GET_MODE_SIZE (mode);
1501 }
1502
1503 /* Save the arg pointer to the block. */
27a7a23a 1504 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1c7e61a7 1505 /* We need the arg pointer just as the caller actually passed the arguments
9d4b544c 1506 to us, not as we might have pretended they were passed. Make sure it's a
1507 valid operand, as emit_move_insn isn't expected to handle a PLUS. */
3764c94e 1508 if (STACK_GROWS_DOWNWARD)
1509 tem
1510 = force_operand (plus_constant (Pmode, tem,
1511 crtl->args.pretend_args_size),
1512 NULL_RTX);
1c7e61a7 1513 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
0862b7e9 1514
53800dbe 1515 size = GET_MODE_SIZE (Pmode);
1516
1517 /* Save the structure value address unless this is passed as an
1518 "invisible" first argument. */
45550790 1519 if (struct_incoming_value)
53800dbe 1520 {
e513d163 1521 emit_move_insn (adjust_address (registers, Pmode, size),
45550790 1522 copy_to_reg (struct_incoming_value));
53800dbe 1523 size += GET_MODE_SIZE (Pmode);
1524 }
1525
1526 /* Return the address of the block. */
1527 return copy_addr_to_reg (XEXP (registers, 0));
1528}
1529
1530/* __builtin_apply_args returns a block of memory allocated on
1531 the stack into which is stored the arg pointer, structure
1532 value address, static chain, and all the registers that might
1533 possibly be used in performing a function call. The code is
1534 moved to the start of the function so the incoming values are
1535 saved. */
27d0c333 1536
53800dbe 1537static rtx
aecda0d6 1538expand_builtin_apply_args (void)
53800dbe 1539{
1540 /* Don't do __builtin_apply_args more than once in a function.
1541 Save the result of the first call and reuse it. */
1542 if (apply_args_value != 0)
1543 return apply_args_value;
1544 {
1545 /* When this function is called, it means that registers must be
1546 saved on entry to this function. So we migrate the
1547 call to the first insn of this function. */
1548 rtx temp;
53800dbe 1549
1550 start_sequence ();
1551 temp = expand_builtin_apply_args_1 ();
9ed997be 1552 rtx_insn *seq = get_insns ();
53800dbe 1553 end_sequence ();
1554
1555 apply_args_value = temp;
1556
31d3e01c 1557 /* Put the insns after the NOTE that starts the function.
1558 If this is inside a start_sequence, make the outer-level insn
53800dbe 1559 chain current, so the code is placed at the start of the
0ef1a651 1560 function. If internal_arg_pointer is a non-virtual pseudo,
1561 it needs to be placed after the function that initializes
1562 that pseudo. */
53800dbe 1563 push_topmost_sequence ();
0ef1a651 1564 if (REG_P (crtl->args.internal_arg_pointer)
1565 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1566 emit_insn_before (seq, parm_birth_insn);
1567 else
1568 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
53800dbe 1569 pop_topmost_sequence ();
1570 return temp;
1571 }
1572}
1573
1574/* Perform an untyped call and save the state required to perform an
1575 untyped return of whatever value was returned by the given function. */
1576
1577static rtx
aecda0d6 1578expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
53800dbe 1579{
1580 int size, align, regno;
3754d046 1581 machine_mode mode;
1e0c0b35 1582 rtx incoming_args, result, reg, dest, src;
1583 rtx_call_insn *call_insn;
53800dbe 1584 rtx old_stack_level = 0;
1585 rtx call_fusage = 0;
6812c89e 1586 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
53800dbe 1587
85d654dd 1588 arguments = convert_memory_address (Pmode, arguments);
726ec87c 1589
53800dbe 1590 /* Create a block where the return registers can be saved. */
1591 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1592
53800dbe 1593 /* Fetch the arg pointer from the ARGUMENTS block. */
1594 incoming_args = gen_reg_rtx (Pmode);
726ec87c 1595 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
3764c94e 1596 if (!STACK_GROWS_DOWNWARD)
1597 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1598 incoming_args, 0, OPTAB_LIB_WIDEN);
53800dbe 1599
04a46d40 1600 /* Push a new argument block and copy the arguments. Do not allow
1601 the (potential) memcpy call below to interfere with our stack
1602 manipulations. */
53800dbe 1603 do_pending_stack_adjust ();
04a46d40 1604 NO_DEFER_POP;
53800dbe 1605
2358393e 1606 /* Save the stack with nonlocal if available. */
71512c05 1607 if (targetm.have_save_stack_nonlocal ())
e9c97615 1608 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
53800dbe 1609 else
e9c97615 1610 emit_stack_save (SAVE_BLOCK, &old_stack_level);
53800dbe 1611
59647703 1612 /* Allocate a block of memory onto the stack and copy the memory
990495a7 1613 arguments to the outgoing arguments address. We can pass TRUE
1614 as the 4th argument because we just saved the stack pointer
1615 and will restore it right after the call. */
5be42b39 1616 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
27a7a23a 1617
1618 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1619 may have already set current_function_calls_alloca to true.
1620 current_function_calls_alloca won't be set if argsize is zero,
1621 so we have to guarantee need_drap is true here. */
1622 if (SUPPORTS_STACK_ALIGNMENT)
1623 crtl->need_drap = true;
1624
59647703 1625 dest = virtual_outgoing_args_rtx;
3764c94e 1626 if (!STACK_GROWS_DOWNWARD)
1627 {
1628 if (CONST_INT_P (argsize))
1629 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1630 else
1631 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1632 }
2a631e19 1633 dest = gen_rtx_MEM (BLKmode, dest);
1634 set_mem_align (dest, PARM_BOUNDARY);
1635 src = gen_rtx_MEM (BLKmode, incoming_args);
1636 set_mem_align (src, PARM_BOUNDARY);
0378dbdc 1637 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
53800dbe 1638
1639 /* Refer to the argument block. */
1640 apply_args_size ();
1641 arguments = gen_rtx_MEM (BLKmode, arguments);
2a631e19 1642 set_mem_align (arguments, PARM_BOUNDARY);
53800dbe 1643
1644 /* Walk past the arg-pointer and structure value address. */
1645 size = GET_MODE_SIZE (Pmode);
45550790 1646 if (struct_value)
53800dbe 1647 size += GET_MODE_SIZE (Pmode);
1648
1649 /* Restore each of the registers previously saved. Make USE insns
1650 for each of these registers for use in making the call. */
1651 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1652 if ((mode = apply_args_mode[regno]) != VOIDmode)
1653 {
1654 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1655 if (size % align != 0)
1656 size = CEIL (size, align) * align;
1657 reg = gen_rtx_REG (mode, regno);
e513d163 1658 emit_move_insn (reg, adjust_address (arguments, mode, size));
53800dbe 1659 use_reg (&call_fusage, reg);
1660 size += GET_MODE_SIZE (mode);
1661 }
1662
1663 /* Restore the structure value address unless this is passed as an
1664 "invisible" first argument. */
1665 size = GET_MODE_SIZE (Pmode);
45550790 1666 if (struct_value)
53800dbe 1667 {
1668 rtx value = gen_reg_rtx (Pmode);
e513d163 1669 emit_move_insn (value, adjust_address (arguments, Pmode, size));
45550790 1670 emit_move_insn (struct_value, value);
8ad4c111 1671 if (REG_P (struct_value))
45550790 1672 use_reg (&call_fusage, struct_value);
53800dbe 1673 size += GET_MODE_SIZE (Pmode);
1674 }
1675
1676 /* All arguments and registers used for the call are set up by now! */
82c7907c 1677 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
53800dbe 1678
1679 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1680 and we don't want to load it into a register as an optimization,
1681 because prepare_call_address already did it if it should be done. */
1682 if (GET_CODE (function) != SYMBOL_REF)
1683 function = memory_address (FUNCTION_MODE, function);
1684
1685 /* Generate the actual call instruction and save the return value. */
1d99ab0a 1686 if (targetm.have_untyped_call ())
1687 {
1688 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1689 emit_call_insn (targetm.gen_untyped_call (mem, result,
1690 result_vector (1, result)));
1691 }
53800dbe 1692 else
53800dbe 1693#ifdef HAVE_call_value
1694 if (HAVE_call_value)
1695 {
1696 rtx valreg = 0;
1697
1698 /* Locate the unique return register. It is not possible to
1699 express a call that sets more than one return register using
1700 call_value; use untyped_call for that. In fact, untyped_call
1701 only needs to save the return registers in the given block. */
1702 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1703 if ((mode = apply_result_mode[regno]) != VOIDmode)
1704 {
64db345d 1705 gcc_assert (!valreg); /* HAVE_untyped_call required. */
7d3f6cc7 1706
53800dbe 1707 valreg = gen_rtx_REG (mode, regno);
1708 }
1709
2ed6c343 1710 emit_call_insn (GEN_CALL_VALUE (valreg,
53800dbe 1711 gen_rtx_MEM (FUNCTION_MODE, function),
1712 const0_rtx, NULL_RTX, const0_rtx));
1713
e513d163 1714 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
53800dbe 1715 }
1716 else
1717#endif
64db345d 1718 gcc_unreachable ();
53800dbe 1719
d5f9786f 1720 /* Find the CALL insn we just emitted, and attach the register usage
1721 information. */
1722 call_insn = last_call_insn ();
1723 add_function_usage_to (call_insn, call_fusage);
53800dbe 1724
1725 /* Restore the stack. */
71512c05 1726 if (targetm.have_save_stack_nonlocal ())
e9c97615 1727 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
53800dbe 1728 else
e9c97615 1729 emit_stack_restore (SAVE_BLOCK, old_stack_level);
9af5ce0c 1730 fixup_args_size_notes (call_insn, get_last_insn (), 0);
53800dbe 1731
04a46d40 1732 OK_DEFER_POP;
1733
53800dbe 1734 /* Return the address of the result block. */
85d654dd 1735 result = copy_addr_to_reg (XEXP (result, 0));
1736 return convert_memory_address (ptr_mode, result);
53800dbe 1737}
1738
1739/* Perform an untyped return. */
1740
1741static void
aecda0d6 1742expand_builtin_return (rtx result)
53800dbe 1743{
1744 int size, align, regno;
3754d046 1745 machine_mode mode;
53800dbe 1746 rtx reg;
57c26b3a 1747 rtx_insn *call_fusage = 0;
53800dbe 1748
85d654dd 1749 result = convert_memory_address (Pmode, result);
726ec87c 1750
53800dbe 1751 apply_result_size ();
1752 result = gen_rtx_MEM (BLKmode, result);
1753
1d99ab0a 1754 if (targetm.have_untyped_return ())
53800dbe 1755 {
1d99ab0a 1756 rtx vector = result_vector (0, result);
1757 emit_jump_insn (targetm.gen_untyped_return (result, vector));
53800dbe 1758 emit_barrier ();
1759 return;
1760 }
53800dbe 1761
1762 /* Restore the return value and note that each value is used. */
1763 size = 0;
1764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1765 if ((mode = apply_result_mode[regno]) != VOIDmode)
1766 {
1767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1768 if (size % align != 0)
1769 size = CEIL (size, align) * align;
1770 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
e513d163 1771 emit_move_insn (reg, adjust_address (result, mode, size));
53800dbe 1772
1773 push_to_sequence (call_fusage);
18b42941 1774 emit_use (reg);
53800dbe 1775 call_fusage = get_insns ();
1776 end_sequence ();
1777 size += GET_MODE_SIZE (mode);
1778 }
1779
1780 /* Put the USE insns before the return. */
31d3e01c 1781 emit_insn (call_fusage);
53800dbe 1782
1783 /* Return whatever value was restored by jumping directly to the end
1784 of the function. */
62380d2d 1785 expand_naked_return ();
53800dbe 1786}
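
/* Illustrative sketch, not part of the original file: how the untyped call
   machinery expanded above is reached from user code via the documented GCC
   extension (__builtin_apply_args, __builtin_apply, __builtin_return).  The
   function names and the 64-byte argument-block size are hypothetical.  */
#if 0
double target_fn (double, double);

double
forwarder (double a, double b)
{
  /* Capture the incoming arg pointer and registers (expand_builtin_apply_args).  */
  void *args = __builtin_apply_args ();
  /* Re-issue the call with the same arguments (expand_builtin_apply).  */
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
  /* Return whatever target_fn returned (expand_builtin_return).  */
  __builtin_return (result);
}
#endif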
1787
539a3a92 1788/* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
27d0c333 1789
539a3a92 1790static enum type_class
aecda0d6 1791type_to_class (tree type)
539a3a92 1792{
1793 switch (TREE_CODE (type))
1794 {
1795 case VOID_TYPE: return void_type_class;
1796 case INTEGER_TYPE: return integer_type_class;
539a3a92 1797 case ENUMERAL_TYPE: return enumeral_type_class;
1798 case BOOLEAN_TYPE: return boolean_type_class;
1799 case POINTER_TYPE: return pointer_type_class;
1800 case REFERENCE_TYPE: return reference_type_class;
1801 case OFFSET_TYPE: return offset_type_class;
1802 case REAL_TYPE: return real_type_class;
1803 case COMPLEX_TYPE: return complex_type_class;
1804 case FUNCTION_TYPE: return function_type_class;
1805 case METHOD_TYPE: return method_type_class;
1806 case RECORD_TYPE: return record_type_class;
1807 case UNION_TYPE:
1808 case QUAL_UNION_TYPE: return union_type_class;
1809 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1810 ? string_type_class : array_type_class);
539a3a92 1811 case LANG_TYPE: return lang_type_class;
1812 default: return no_type_class;
1813 }
1814}
bf8e3599 1815
c2f47e15 1816/* Expand a call EXP to __builtin_classify_type. */
27d0c333 1817
53800dbe 1818static rtx
c2f47e15 1819expand_builtin_classify_type (tree exp)
53800dbe 1820{
c2f47e15 1821 if (call_expr_nargs (exp))
1822 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
53800dbe 1823 return GEN_INT (no_type_class);
1824}
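
/* Illustrative sketch, not part of the original file: what the expansion
   above produces for a few user-level calls.  The results are the
   type_class enumerators from typeclass.h as computed by type_to_class.  */
#if 0
static void
example_classify (void)
{
  int ci = __builtin_classify_type (0);          /* integer_type_class */
  int cp = __builtin_classify_type ((void *) 0); /* pointer_type_class */
  int cd = __builtin_classify_type (0.0);        /* real_type_class */
  (void) ci; (void) cp; (void) cd;
}
#endif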
1825
07976da7 1826/* This helper macro, meant to be used in mathfn_built_in below,
1827 determines which among a set of three builtin math functions is
1828 appropriate for a given type mode. The `F' and `L' cases are
1829 automatically generated from the `double' case. */
1830#define CASE_MATHFN(BUILT_IN_MATHFN) \
1831 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1832 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1833 fcodel = BUILT_IN_MATHFN##L ; break;
cd2656b0 1834/* Similar to above, but appends _R after any F/L suffix. */
1835#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1836 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1837 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1838 fcodel = BUILT_IN_MATHFN##L_R ; break;
07976da7 1839
b9a16870 1840/* Return the mathematical function equivalent to FN but operating directly on TYPE,
1841 if available. If IMPLICIT is true use the implicit builtin declaration,
1842 otherwise use the explicit declaration. If we can't do the conversion,
1843 return zero. */
c319d56a 1844
1845static tree
b9a16870 1846mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
0a68165a 1847{
b9a16870 1848 enum built_in_function fcode, fcodef, fcodel, fcode2;
07976da7 1849
1850 switch (fn)
1851 {
746114e8 1852 CASE_MATHFN (BUILT_IN_ACOS)
1853 CASE_MATHFN (BUILT_IN_ACOSH)
1854 CASE_MATHFN (BUILT_IN_ASIN)
1855 CASE_MATHFN (BUILT_IN_ASINH)
07976da7 1856 CASE_MATHFN (BUILT_IN_ATAN)
746114e8 1857 CASE_MATHFN (BUILT_IN_ATAN2)
1858 CASE_MATHFN (BUILT_IN_ATANH)
1859 CASE_MATHFN (BUILT_IN_CBRT)
07976da7 1860 CASE_MATHFN (BUILT_IN_CEIL)
d735c391 1861 CASE_MATHFN (BUILT_IN_CEXPI)
746114e8 1862 CASE_MATHFN (BUILT_IN_COPYSIGN)
07976da7 1863 CASE_MATHFN (BUILT_IN_COS)
746114e8 1864 CASE_MATHFN (BUILT_IN_COSH)
1865 CASE_MATHFN (BUILT_IN_DREM)
1866 CASE_MATHFN (BUILT_IN_ERF)
1867 CASE_MATHFN (BUILT_IN_ERFC)
07976da7 1868 CASE_MATHFN (BUILT_IN_EXP)
746114e8 1869 CASE_MATHFN (BUILT_IN_EXP10)
1870 CASE_MATHFN (BUILT_IN_EXP2)
1871 CASE_MATHFN (BUILT_IN_EXPM1)
1872 CASE_MATHFN (BUILT_IN_FABS)
1873 CASE_MATHFN (BUILT_IN_FDIM)
07976da7 1874 CASE_MATHFN (BUILT_IN_FLOOR)
746114e8 1875 CASE_MATHFN (BUILT_IN_FMA)
1876 CASE_MATHFN (BUILT_IN_FMAX)
1877 CASE_MATHFN (BUILT_IN_FMIN)
1878 CASE_MATHFN (BUILT_IN_FMOD)
1879 CASE_MATHFN (BUILT_IN_FREXP)
1880 CASE_MATHFN (BUILT_IN_GAMMA)
cd2656b0 1881 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
746114e8 1882 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1883 CASE_MATHFN (BUILT_IN_HYPOT)
1884 CASE_MATHFN (BUILT_IN_ILOGB)
80ff6494 1885 CASE_MATHFN (BUILT_IN_ICEIL)
1886 CASE_MATHFN (BUILT_IN_IFLOOR)
746114e8 1887 CASE_MATHFN (BUILT_IN_INF)
80ff6494 1888 CASE_MATHFN (BUILT_IN_IRINT)
1889 CASE_MATHFN (BUILT_IN_IROUND)
69b779ea 1890 CASE_MATHFN (BUILT_IN_ISINF)
746114e8 1891 CASE_MATHFN (BUILT_IN_J0)
1892 CASE_MATHFN (BUILT_IN_J1)
1893 CASE_MATHFN (BUILT_IN_JN)
ac148751 1894 CASE_MATHFN (BUILT_IN_LCEIL)
746114e8 1895 CASE_MATHFN (BUILT_IN_LDEXP)
ad52b9b7 1896 CASE_MATHFN (BUILT_IN_LFLOOR)
746114e8 1897 CASE_MATHFN (BUILT_IN_LGAMMA)
cd2656b0 1898 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
ac148751 1899 CASE_MATHFN (BUILT_IN_LLCEIL)
ad52b9b7 1900 CASE_MATHFN (BUILT_IN_LLFLOOR)
746114e8 1901 CASE_MATHFN (BUILT_IN_LLRINT)
1902 CASE_MATHFN (BUILT_IN_LLROUND)
07976da7 1903 CASE_MATHFN (BUILT_IN_LOG)
746114e8 1904 CASE_MATHFN (BUILT_IN_LOG10)
1905 CASE_MATHFN (BUILT_IN_LOG1P)
1906 CASE_MATHFN (BUILT_IN_LOG2)
1907 CASE_MATHFN (BUILT_IN_LOGB)
1908 CASE_MATHFN (BUILT_IN_LRINT)
1909 CASE_MATHFN (BUILT_IN_LROUND)
1910 CASE_MATHFN (BUILT_IN_MODF)
1911 CASE_MATHFN (BUILT_IN_NAN)
1912 CASE_MATHFN (BUILT_IN_NANS)
07976da7 1913 CASE_MATHFN (BUILT_IN_NEARBYINT)
746114e8 1914 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1915 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1916 CASE_MATHFN (BUILT_IN_POW)
757c219d 1917 CASE_MATHFN (BUILT_IN_POWI)
746114e8 1918 CASE_MATHFN (BUILT_IN_POW10)
1919 CASE_MATHFN (BUILT_IN_REMAINDER)
1920 CASE_MATHFN (BUILT_IN_REMQUO)
1921 CASE_MATHFN (BUILT_IN_RINT)
07976da7 1922 CASE_MATHFN (BUILT_IN_ROUND)
746114e8 1923 CASE_MATHFN (BUILT_IN_SCALB)
1924 CASE_MATHFN (BUILT_IN_SCALBLN)
1925 CASE_MATHFN (BUILT_IN_SCALBN)
c319d56a 1926 CASE_MATHFN (BUILT_IN_SIGNBIT)
746114e8 1927 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
07976da7 1928 CASE_MATHFN (BUILT_IN_SIN)
746114e8 1929 CASE_MATHFN (BUILT_IN_SINCOS)
1930 CASE_MATHFN (BUILT_IN_SINH)
07976da7 1931 CASE_MATHFN (BUILT_IN_SQRT)
1932 CASE_MATHFN (BUILT_IN_TAN)
746114e8 1933 CASE_MATHFN (BUILT_IN_TANH)
1934 CASE_MATHFN (BUILT_IN_TGAMMA)
07976da7 1935 CASE_MATHFN (BUILT_IN_TRUNC)
746114e8 1936 CASE_MATHFN (BUILT_IN_Y0)
1937 CASE_MATHFN (BUILT_IN_Y1)
1938 CASE_MATHFN (BUILT_IN_YN)
07976da7 1939
0a68165a 1940 default:
c2f47e15 1941 return NULL_TREE;
0a68165a 1942 }
07976da7 1943
96b9f485 1944 if (TYPE_MAIN_VARIANT (type) == double_type_node)
b9a16870 1945 fcode2 = fcode;
96b9f485 1946 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
b9a16870 1947 fcode2 = fcodef;
96b9f485 1948 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
b9a16870 1949 fcode2 = fcodel;
07976da7 1950 else
c2f47e15 1951 return NULL_TREE;
b9a16870 1952
1953 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1954 return NULL_TREE;
1955
1956 return builtin_decl_explicit (fcode2);
0a68165a 1957}
1958
c319d56a 1959/* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1960
1961tree
1962mathfn_built_in (tree type, enum built_in_function fn)
1963{
1964 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1965}
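
/* Illustrative sketch, not part of the original file: a hypothetical use of
   mathfn_built_in from elsewhere in the compiler.  For float_type_node and
   BUILT_IN_SQRT this yields the decl for sqrtf, provided the target's
   implicit builtin declaration for it is available; otherwise NULL_TREE.  */
#if 0
static tree
example_sqrtf_decl (void)
{
  return mathfn_built_in (float_type_node, BUILT_IN_SQRT);
}
#endif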
1966
0fd605a5 1967/* If errno must be maintained, expand the RTL to check if the result,
1968 TARGET, of a built-in function call, EXP, is NaN, and if so set
1969 errno to EDOM. */
1970
1971static void
aecda0d6 1972expand_errno_check (tree exp, rtx target)
0fd605a5 1973{
1e0c0b35 1974 rtx_code_label *lab = gen_label_rtx ();
0fd605a5 1975
7f05340e 1976 /* Test the result; if it is NaN, set errno=EDOM because
1977 the argument was not in the domain. */
3fcf767f 1978 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
f9a00e9e 1979 NULL_RTX, NULL, lab,
79ab74cc 1980 /* The jump is very likely. */
1981 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
0fd605a5 1982
1983#ifdef TARGET_EDOM
7f05340e 1984 /* If this built-in doesn't throw an exception, set errno directly. */
c2f47e15 1985 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7f05340e 1986 {
0fd605a5 1987#ifdef GEN_ERRNO_RTX
7f05340e 1988 rtx errno_rtx = GEN_ERRNO_RTX;
0fd605a5 1989#else
7f05340e 1990 rtx errno_rtx
0fd605a5 1991 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1992#endif
d11aedc7 1993 emit_move_insn (errno_rtx,
1994 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
0fd605a5 1995 emit_label (lab);
7f05340e 1996 return;
0fd605a5 1997 }
7f05340e 1998#endif
1999
08491912 2000 /* Make sure the library call isn't expanded as a tail call. */
2001 CALL_EXPR_TAILCALL (exp) = 0;
2002
7f05340e 2003 /* We can't set errno=EDOM directly; let the library call do it.
2004 Pop the arguments right away in case the call gets deleted. */
2005 NO_DEFER_POP;
2006 expand_call (exp, target, 0);
2007 OK_DEFER_POP;
2008 emit_label (lab);
0fd605a5 2009}
2010
6b43bae4 2011/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
c2f47e15 2012 Return NULL_RTX if a normal call should be emitted rather than expanding
2013 the function in-line. EXP is the expression that is a call to the builtin
53800dbe 2014 function; if convenient, the result should be placed in TARGET.
2015 SUBTARGET may be used as the target for computing one of EXP's operands. */
27d0c333 2016
53800dbe 2017static rtx
aecda0d6 2018expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
53800dbe 2019{
bf8e3599 2020 optab builtin_optab;
1e0c0b35 2021 rtx op0;
2022 rtx_insn *insns;
c6e6ecb1 2023 tree fndecl = get_callee_fndecl (exp);
3754d046 2024 machine_mode mode;
528ee710 2025 bool errno_set = false;
d6a0a4b0 2026 bool try_widening = false;
abfea505 2027 tree arg;
53800dbe 2028
c2f47e15 2029 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2030 return NULL_RTX;
53800dbe 2031
c2f47e15 2032 arg = CALL_EXPR_ARG (exp, 0);
53800dbe 2033
2034 switch (DECL_FUNCTION_CODE (fndecl))
2035 {
4f35b1fc 2036 CASE_FLT_FN (BUILT_IN_SQRT):
7f05340e 2037 errno_set = ! tree_expr_nonnegative_p (arg);
d6a0a4b0 2038 try_widening = true;
7f05340e 2039 builtin_optab = sqrt_optab;
2040 break;
4f35b1fc 2041 CASE_FLT_FN (BUILT_IN_EXP):
528ee710 2042 errno_set = true; builtin_optab = exp_optab; break;
4f35b1fc 2043 CASE_FLT_FN (BUILT_IN_EXP10):
2044 CASE_FLT_FN (BUILT_IN_POW10):
750ef9f5 2045 errno_set = true; builtin_optab = exp10_optab; break;
4f35b1fc 2046 CASE_FLT_FN (BUILT_IN_EXP2):
750ef9f5 2047 errno_set = true; builtin_optab = exp2_optab; break;
4f35b1fc 2048 CASE_FLT_FN (BUILT_IN_EXPM1):
a6b4eed2 2049 errno_set = true; builtin_optab = expm1_optab; break;
4f35b1fc 2050 CASE_FLT_FN (BUILT_IN_LOGB):
4efbc641 2051 errno_set = true; builtin_optab = logb_optab; break;
4f35b1fc 2052 CASE_FLT_FN (BUILT_IN_LOG):
528ee710 2053 errno_set = true; builtin_optab = log_optab; break;
4f35b1fc 2054 CASE_FLT_FN (BUILT_IN_LOG10):
d3cd9bde 2055 errno_set = true; builtin_optab = log10_optab; break;
4f35b1fc 2056 CASE_FLT_FN (BUILT_IN_LOG2):
d3cd9bde 2057 errno_set = true; builtin_optab = log2_optab; break;
4f35b1fc 2058 CASE_FLT_FN (BUILT_IN_LOG1P):
f474cd93 2059 errno_set = true; builtin_optab = log1p_optab; break;
4f35b1fc 2060 CASE_FLT_FN (BUILT_IN_ASIN):
8de2f465 2061 builtin_optab = asin_optab; break;
4f35b1fc 2062 CASE_FLT_FN (BUILT_IN_ACOS):
8de2f465 2063 builtin_optab = acos_optab; break;
4f35b1fc 2064 CASE_FLT_FN (BUILT_IN_TAN):
528ee710 2065 builtin_optab = tan_optab; break;
4f35b1fc 2066 CASE_FLT_FN (BUILT_IN_ATAN):
528ee710 2067 builtin_optab = atan_optab; break;
4f35b1fc 2068 CASE_FLT_FN (BUILT_IN_FLOOR):
528ee710 2069 builtin_optab = floor_optab; break;
4f35b1fc 2070 CASE_FLT_FN (BUILT_IN_CEIL):
528ee710 2071 builtin_optab = ceil_optab; break;
4f35b1fc 2072 CASE_FLT_FN (BUILT_IN_TRUNC):
a7cc195f 2073 builtin_optab = btrunc_optab; break;
4f35b1fc 2074 CASE_FLT_FN (BUILT_IN_ROUND):
528ee710 2075 builtin_optab = round_optab; break;
4f35b1fc 2076 CASE_FLT_FN (BUILT_IN_NEARBYINT):
0ddf4ad9 2077 builtin_optab = nearbyint_optab;
2078 if (flag_trapping_math)
2079 break;
2080 /* Else fall through and expand as rint. */
4f35b1fc 2081 CASE_FLT_FN (BUILT_IN_RINT):
aef94a0f 2082 builtin_optab = rint_optab; break;
b3154a1f 2083 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2084 builtin_optab = significand_optab; break;
42721db0 2085 default:
64db345d 2086 gcc_unreachable ();
53800dbe 2087 }
2088
7f05340e 2089 /* Make a suitable register to place result in. */
2090 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2091
7f05340e 2092 if (! flag_errno_math || ! HONOR_NANS (mode))
2093 errno_set = false;
2094
d6a0a4b0 2095 /* Before working hard, check whether the instruction is available, but try
2096 to widen the mode for specific operations. */
2097 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2098 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
f2aca212 2099 && (!errno_set || !optimize_insn_for_size_p ()))
68e6cb9d 2100 {
de2e453e 2101 rtx result = gen_reg_rtx (mode);
7f05340e 2102
bd421108 2103 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2104 need to expand the argument again. This way, we will not perform
2105 side-effects more than once. */
abfea505 2106 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7f05340e 2107
1db6d067 2108 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7f05340e 2109
bd421108 2110 start_sequence ();
53800dbe 2111
de2e453e 2112 /* Compute into RESULT.
2113 Set RESULT to wherever the result comes back. */
2114 result = expand_unop (mode, builtin_optab, op0, result, 0);
bd421108 2115
de2e453e 2116 if (result != 0)
bd421108 2117 {
2118 if (errno_set)
de2e453e 2119 expand_errno_check (exp, result);
bd421108 2120
2121 /* Output the entire sequence. */
2122 insns = get_insns ();
2123 end_sequence ();
2124 emit_insn (insns);
de2e453e 2125 return result;
bd421108 2126 }
2127
2128 /* If we were unable to expand via the builtin, stop the sequence
2129 (without outputting the insns) and call to the library function
2130 with the stabilized argument list. */
53800dbe 2131 end_sequence ();
53800dbe 2132 }
2133
1e5b92fa 2134 return expand_call (exp, target, target == const0_rtx);
0fd605a5 2135}
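
/* Illustrative sketch, not part of the original file: a user-level call that
   can reach the unary expansion above.  Whether it becomes a sqrt<mode>2
   instruction or a library call depends on the target's optab support and,
   because of the errno check, on -fno-math-errno or the argument being
   provably nonnegative.  The function name is hypothetical.  */
#if 0
double
example_sqrt (double x)
{
  return __builtin_sqrt (x);
}
#endif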
2136
2137/* Expand a call to the builtin binary math functions (pow and atan2).
c2f47e15 2138 Return NULL_RTX if a normal call should be emitted rather than expanding the
0fd605a5 2139 function in-line. EXP is the expression that is a call to the builtin
2140 function; if convenient, the result should be placed in TARGET.
2141 SUBTARGET may be used as the target for computing one of EXP's
2142 operands. */
2143
2144static rtx
aecda0d6 2145expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
0fd605a5 2146{
2147 optab builtin_optab;
1e0c0b35 2148 rtx op0, op1, result;
2149 rtx_insn *insns;
4737caf2 2150 int op1_type = REAL_TYPE;
c6e6ecb1 2151 tree fndecl = get_callee_fndecl (exp);
abfea505 2152 tree arg0, arg1;
3754d046 2153 machine_mode mode;
0fd605a5 2154 bool errno_set = true;
0fd605a5 2155
73a954a1 2156 switch (DECL_FUNCTION_CODE (fndecl))
2157 {
2158 CASE_FLT_FN (BUILT_IN_SCALBN):
2159 CASE_FLT_FN (BUILT_IN_SCALBLN):
2160 CASE_FLT_FN (BUILT_IN_LDEXP):
2161 op1_type = INTEGER_TYPE;
2162 default:
2163 break;
2164 }
4737caf2 2165
c2f47e15 2166 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2167 return NULL_RTX;
0fd605a5 2168
c2f47e15 2169 arg0 = CALL_EXPR_ARG (exp, 0);
2170 arg1 = CALL_EXPR_ARG (exp, 1);
0fd605a5 2171
0fd605a5 2172 switch (DECL_FUNCTION_CODE (fndecl))
2173 {
4f35b1fc 2174 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 2175 builtin_optab = pow_optab; break;
4f35b1fc 2176 CASE_FLT_FN (BUILT_IN_ATAN2):
0fd605a5 2177 builtin_optab = atan2_optab; break;
73a954a1 2178 CASE_FLT_FN (BUILT_IN_SCALB):
2179 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2180 return 0;
2181 builtin_optab = scalb_optab; break;
2182 CASE_FLT_FN (BUILT_IN_SCALBN):
2183 CASE_FLT_FN (BUILT_IN_SCALBLN):
2184 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2185 return 0;
2186 /* Fall through... */
4f35b1fc 2187 CASE_FLT_FN (BUILT_IN_LDEXP):
4737caf2 2188 builtin_optab = ldexp_optab; break;
4f35b1fc 2189 CASE_FLT_FN (BUILT_IN_FMOD):
80ed5c06 2190 builtin_optab = fmod_optab; break;
ef722005 2191 CASE_FLT_FN (BUILT_IN_REMAINDER):
4f35b1fc 2192 CASE_FLT_FN (BUILT_IN_DREM):
ef722005 2193 builtin_optab = remainder_optab; break;
0fd605a5 2194 default:
64db345d 2195 gcc_unreachable ();
0fd605a5 2196 }
2197
7f05340e 2198 /* Make a suitable register to place result in. */
2199 mode = TYPE_MODE (TREE_TYPE (exp));
fc4eef90 2200
2201 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2202 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
c2f47e15 2203 return NULL_RTX;
fc4eef90 2204
de2e453e 2205 result = gen_reg_rtx (mode);
7f05340e 2206
2207 if (! flag_errno_math || ! HONOR_NANS (mode))
2208 errno_set = false;
2209
f2aca212 2210 if (errno_set && optimize_insn_for_size_p ())
2211 return 0;
2212
4ee9c684 2213 /* Always stabilize the argument list. */
abfea505 2214 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2215 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
7f05340e 2216
8ec3c5c2 2217 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2218 op1 = expand_normal (arg1);
7f05340e 2219
7f05340e 2220 start_sequence ();
2221
de2e453e 2222 /* Compute into RESULT.
2223 Set RESULT to wherever the result comes back. */
2224 result = expand_binop (mode, builtin_optab, op0, op1,
2225 result, 0, OPTAB_DIRECT);
53800dbe 2226
68e6cb9d 2227 /* If we were unable to expand via the builtin, stop the sequence
2228 (without outputting the insns) and call to the library function
2229 with the stabilized argument list. */
de2e453e 2230 if (result == 0)
0fd605a5 2231 {
2232 end_sequence ();
68e6cb9d 2233 return expand_call (exp, target, target == const0_rtx);
53800dbe 2234 }
2235
a4356fb9 2236 if (errno_set)
de2e453e 2237 expand_errno_check (exp, result);
0fd605a5 2238
53800dbe 2239 /* Output the entire sequence. */
2240 insns = get_insns ();
2241 end_sequence ();
31d3e01c 2242 emit_insn (insns);
bf8e3599 2243
de2e453e 2244 return result;
53800dbe 2245}
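
/* Illustrative sketch, not part of the original file: binary math builtins
   such as __builtin_fmod go through the expansion above via fmod_optab when
   the target provides a matching pattern; otherwise the libm function is
   called.  The function name and constant are hypothetical.  */
#if 0
double
example_fmod (double x)
{
  return __builtin_fmod (x, 2.0);
}
#endif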
2246
7e0713b1 2247/* Expand a call to the builtin ternary math functions (fma).
2248 Return NULL_RTX if a normal call should be emitted rather than expanding the
2249 function in-line. EXP is the expression that is a call to the builtin
2250 function; if convenient, the result should be placed in TARGET.
2251 SUBTARGET may be used as the target for computing one of EXP's
2252 operands. */
2253
2254static rtx
2255expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2256{
2257 optab builtin_optab;
1e0c0b35 2258 rtx op0, op1, op2, result;
2259 rtx_insn *insns;
7e0713b1 2260 tree fndecl = get_callee_fndecl (exp);
2261 tree arg0, arg1, arg2;
3754d046 2262 machine_mode mode;
7e0713b1 2263
2264 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2265 return NULL_RTX;
2266
2267 arg0 = CALL_EXPR_ARG (exp, 0);
2268 arg1 = CALL_EXPR_ARG (exp, 1);
2269 arg2 = CALL_EXPR_ARG (exp, 2);
2270
2271 switch (DECL_FUNCTION_CODE (fndecl))
2272 {
2273 CASE_FLT_FN (BUILT_IN_FMA):
2274 builtin_optab = fma_optab; break;
2275 default:
2276 gcc_unreachable ();
2277 }
2278
2279 /* Make a suitable register to place result in. */
2280 mode = TYPE_MODE (TREE_TYPE (exp));
2281
2282 /* Before working hard, check whether the instruction is available. */
2283 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2284 return NULL_RTX;
2285
de2e453e 2286 result = gen_reg_rtx (mode);
7e0713b1 2287
2288 /* Always stabilize the argument list. */
2289 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2290 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2291 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2292
2293 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2294 op1 = expand_normal (arg1);
2295 op2 = expand_normal (arg2);
2296
2297 start_sequence ();
2298
de2e453e 2299 /* Compute into RESULT.
2300 Set RESULT to wherever the result comes back. */
2301 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2302 result, 0);
7e0713b1 2303
2304 /* If we were unable to expand via the builtin, stop the sequence
2305 (without outputting the insns) and call to the library function
2306 with the stabilized argument list. */
de2e453e 2307 if (result == 0)
7e0713b1 2308 {
2309 end_sequence ();
2310 return expand_call (exp, target, target == const0_rtx);
2311 }
2312
2313 /* Output the entire sequence. */
2314 insns = get_insns ();
2315 end_sequence ();
2316 emit_insn (insns);
2317
de2e453e 2318 return result;
7e0713b1 2319}
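
/* Illustrative sketch, not part of the original file: __builtin_fma maps to
   fma_optab in the ternary expansion above, so on targets with a fused
   multiply-add pattern it becomes a single instruction; otherwise the libm
   fma function is called.  The function name is hypothetical.  */
#if 0
double
example_fma (double a, double x, double y)
{
  return __builtin_fma (a, x, y);
}
#endif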
2320
6b43bae4 2321/* Expand a call to the builtin sin and cos math functions.
c2f47e15 2322 Return NULL_RTX if a normal call should be emitted rather than expanding the
6b43bae4 2323 function in-line. EXP is the expression that is a call to the builtin
2324 function; if convenient, the result should be placed in TARGET.
2325 SUBTARGET may be used as the target for computing one of EXP's
2326 operands. */
2327
2328static rtx
2329expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2330{
2331 optab builtin_optab;
1e0c0b35 2332 rtx op0;
2333 rtx_insn *insns;
6b43bae4 2334 tree fndecl = get_callee_fndecl (exp);
3754d046 2335 machine_mode mode;
abfea505 2336 tree arg;
6b43bae4 2337
c2f47e15 2338 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2339 return NULL_RTX;
6b43bae4 2340
c2f47e15 2341 arg = CALL_EXPR_ARG (exp, 0);
6b43bae4 2342
2343 switch (DECL_FUNCTION_CODE (fndecl))
2344 {
4f35b1fc 2345 CASE_FLT_FN (BUILT_IN_SIN):
2346 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2347 builtin_optab = sincos_optab; break;
2348 default:
64db345d 2349 gcc_unreachable ();
6b43bae4 2350 }
2351
2352 /* Make a suitable register to place result in. */
2353 mode = TYPE_MODE (TREE_TYPE (exp));
2354
6b43bae4 2355 /* Check if sincos insn is available, otherwise fall back
0bed3869 2356 to sin or cos insn. */
d6bf3b14 2357 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
6b43bae4 2358 switch (DECL_FUNCTION_CODE (fndecl))
2359 {
4f35b1fc 2360 CASE_FLT_FN (BUILT_IN_SIN):
6b43bae4 2361 builtin_optab = sin_optab; break;
4f35b1fc 2362 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 2363 builtin_optab = cos_optab; break;
2364 default:
64db345d 2365 gcc_unreachable ();
6b43bae4 2366 }
6b43bae4 2367
2368 /* Before working hard, check whether the instruction is available. */
d6bf3b14 2369 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
6b43bae4 2370 {
de2e453e 2371 rtx result = gen_reg_rtx (mode);
6b43bae4 2372
2373 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2374 need to expand the argument again. This way, we will not perform
2375 side-effects more than once. */
abfea505 2376 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6b43bae4 2377
1db6d067 2378 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6b43bae4 2379
6b43bae4 2380 start_sequence ();
2381
de2e453e 2382 /* Compute into RESULT.
2383 Set RESULT to wherever the result comes back. */
6b43bae4 2384 if (builtin_optab == sincos_optab)
2385 {
de2e453e 2386 int ok;
7d3f6cc7 2387
6b43bae4 2388 switch (DECL_FUNCTION_CODE (fndecl))
2389 {
4f35b1fc 2390 CASE_FLT_FN (BUILT_IN_SIN):
de2e453e 2391 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
6b43bae4 2392 break;
4f35b1fc 2393 CASE_FLT_FN (BUILT_IN_COS):
de2e453e 2394 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
6b43bae4 2395 break;
2396 default:
64db345d 2397 gcc_unreachable ();
6b43bae4 2398 }
de2e453e 2399 gcc_assert (ok);
6b43bae4 2400 }
2401 else
de2e453e 2402 result = expand_unop (mode, builtin_optab, op0, result, 0);
6b43bae4 2403
de2e453e 2404 if (result != 0)
6b43bae4 2405 {
6b43bae4 2406 /* Output the entire sequence. */
2407 insns = get_insns ();
2408 end_sequence ();
2409 emit_insn (insns);
de2e453e 2410 return result;
6b43bae4 2411 }
2412
2413 /* If we were unable to expand via the builtin, stop the sequence
2414 (without outputting the insns) and call to the library function
2415 with the stabilized argument list. */
2416 end_sequence ();
2417 }
2418
de2e453e 2419 return expand_call (exp, target, target == const0_rtx);
6b43bae4 2420}
2421
a65c4d64 2422/* Given an interclass math builtin decl FNDECL and its argument ARG
2423 return an RTL instruction code that implements the functionality.
2424 If that isn't possible or available return CODE_FOR_nothing. */
a67a90e5 2425
a65c4d64 2426static enum insn_code
2427interclass_mathfn_icode (tree arg, tree fndecl)
a67a90e5 2428{
a65c4d64 2429 bool errno_set = false;
6cdd383a 2430 optab builtin_optab = unknown_optab;
3754d046 2431 machine_mode mode;
a67a90e5 2432
2433 switch (DECL_FUNCTION_CODE (fndecl))
2434 {
2435 CASE_FLT_FN (BUILT_IN_ILOGB):
2436 errno_set = true; builtin_optab = ilogb_optab; break;
69b779ea 2437 CASE_FLT_FN (BUILT_IN_ISINF):
2438 builtin_optab = isinf_optab; break;
8a1a9cb7 2439 case BUILT_IN_ISNORMAL:
cde061c1 2440 case BUILT_IN_ISFINITE:
2441 CASE_FLT_FN (BUILT_IN_FINITE):
a65c4d64 2442 case BUILT_IN_FINITED32:
2443 case BUILT_IN_FINITED64:
2444 case BUILT_IN_FINITED128:
2445 case BUILT_IN_ISINFD32:
2446 case BUILT_IN_ISINFD64:
2447 case BUILT_IN_ISINFD128:
cde061c1 2448 /* These builtins have no optabs (yet). */
2449 break;
a67a90e5 2450 default:
2451 gcc_unreachable ();
2452 }
2453
2454 /* There's no easy way to detect the case we need to set EDOM. */
2455 if (flag_errno_math && errno_set)
a65c4d64 2456 return CODE_FOR_nothing;
a67a90e5 2457
2458 /* Optab mode depends on the mode of the input argument. */
2459 mode = TYPE_MODE (TREE_TYPE (arg));
2460
cde061c1 2461 if (builtin_optab)
d6bf3b14 2462 return optab_handler (builtin_optab, mode);
a65c4d64 2463 return CODE_FOR_nothing;
2464}
2465
2466/* Expand a call to one of the builtin math functions that operate on
2467 a floating point argument and output an integer result (ilogb, isinf,
2468 isnan, etc).
2469 Return 0 if a normal call should be emitted rather than expanding the
2470 function in-line. EXP is the expression that is a call to the builtin
f97eea22 2471 function; if convenient, the result should be placed in TARGET. */
a65c4d64 2472
2473static rtx
f97eea22 2474expand_builtin_interclass_mathfn (tree exp, rtx target)
a65c4d64 2475{
2476 enum insn_code icode = CODE_FOR_nothing;
2477 rtx op0;
2478 tree fndecl = get_callee_fndecl (exp);
3754d046 2479 machine_mode mode;
a65c4d64 2480 tree arg;
2481
2482 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2483 return NULL_RTX;
2484
2485 arg = CALL_EXPR_ARG (exp, 0);
2486 icode = interclass_mathfn_icode (arg, fndecl);
2487 mode = TYPE_MODE (TREE_TYPE (arg));
2488
a67a90e5 2489 if (icode != CODE_FOR_nothing)
2490 {
8786db1e 2491 struct expand_operand ops[1];
1e0c0b35 2492 rtx_insn *last = get_last_insn ();
4e2a2fb4 2493 tree orig_arg = arg;
a67a90e5 2494
2495 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2496 need to expand the argument again. This way, we will not perform
2497 side-effects more than once. */
abfea505 2498 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
a67a90e5 2499
f97eea22 2500 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
a67a90e5 2501
2502 if (mode != GET_MODE (op0))
2503 op0 = convert_to_mode (mode, op0, 0);
2504
8786db1e 2505 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2506 if (maybe_legitimize_operands (icode, 0, 1, ops)
2507 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2508 return ops[0].value;
2509
4e2a2fb4 2510 delete_insns_since (last);
2511 CALL_EXPR_ARG (exp, 0) = orig_arg;
a67a90e5 2512 }
2513
a65c4d64 2514 return NULL_RTX;
a67a90e5 2515}
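
/* Illustrative sketch, not part of the original file: interclass builtins
   take a floating-point argument and produce an integer.  __builtin_isinf,
   for example, is expanded above through isinf_optab when the target
   provides it; otherwise this expander returns NULL_RTX and the generic
   call/folding path is used.  The function name is hypothetical.  */
#if 0
int
example_isinf (double x)
{
  return __builtin_isinf (x);
}
#endif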
2516
c3147c1a 2517/* Expand a call to the builtin sincos math function.
c2f47e15 2518 Return NULL_RTX if a normal call should be emitted rather than expanding the
c3147c1a 2519 function in-line. EXP is the expression that is a call to the builtin
2520 function. */
2521
2522static rtx
2523expand_builtin_sincos (tree exp)
2524{
2525 rtx op0, op1, op2, target1, target2;
3754d046 2526 machine_mode mode;
c3147c1a 2527 tree arg, sinp, cosp;
2528 int result;
389dd41b 2529 location_t loc = EXPR_LOCATION (exp);
be5575b2 2530 tree alias_type, alias_off;
c3147c1a 2531
c2f47e15 2532 if (!validate_arglist (exp, REAL_TYPE,
2533 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2534 return NULL_RTX;
c3147c1a 2535
c2f47e15 2536 arg = CALL_EXPR_ARG (exp, 0);
2537 sinp = CALL_EXPR_ARG (exp, 1);
2538 cosp = CALL_EXPR_ARG (exp, 2);
c3147c1a 2539
2540 /* Make a suitable register to place result in. */
2541 mode = TYPE_MODE (TREE_TYPE (arg));
2542
2543 /* Check if sincos insn is available, otherwise emit the call. */
d6bf3b14 2544 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
c3147c1a 2545 return NULL_RTX;
2546
2547 target1 = gen_reg_rtx (mode);
2548 target2 = gen_reg_rtx (mode);
2549
8ec3c5c2 2550 op0 = expand_normal (arg);
be5575b2 2551 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2552 alias_off = build_int_cst (alias_type, 0);
2553 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2554 sinp, alias_off));
2555 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2556 cosp, alias_off));
c3147c1a 2557
2558 /* Compute into target1 and target2.
2559 Set TARGET to wherever the result comes back. */
2560 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2561 gcc_assert (result);
2562
2563 /* Move target1 and target2 to the memory locations indicated
2564 by op1 and op2. */
2565 emit_move_insn (op1, target1);
2566 emit_move_insn (op2, target2);
2567
2568 return const0_rtx;
2569}
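
/* Illustrative sketch, not part of the original file: on targets with a
   sincos<mode>3 pattern the call below is expanded by the code above into a
   single operation computing both results; without the pattern the expander
   returns NULL_RTX and the libc sincos is called.  The function name is
   hypothetical.  */
#if 0
void
example_polar (double phi, double *s, double *c)
{
  __builtin_sincos (phi, s, c);
}
#endif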
2570
d735c391 2571/* Expand a call to the internal cexpi builtin to the sincos math function.
2572 EXP is the expression that is a call to the builtin function; if convenient,
f97eea22 2573 the result should be placed in TARGET. */
d735c391 2574
2575static rtx
f97eea22 2576expand_builtin_cexpi (tree exp, rtx target)
d735c391 2577{
2578 tree fndecl = get_callee_fndecl (exp);
d735c391 2579 tree arg, type;
3754d046 2580 machine_mode mode;
d735c391 2581 rtx op0, op1, op2;
389dd41b 2582 location_t loc = EXPR_LOCATION (exp);
d735c391 2583
c2f47e15 2584 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2585 return NULL_RTX;
d735c391 2586
c2f47e15 2587 arg = CALL_EXPR_ARG (exp, 0);
d735c391 2588 type = TREE_TYPE (arg);
2589 mode = TYPE_MODE (TREE_TYPE (arg));
2590
2591 /* Try expanding via a sincos optab, fall back to emitting a libcall
18b8d8ae 2592 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2593 is only generated from sincos or cexp, or when either of them is available. */
d6bf3b14 2594 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
d735c391 2595 {
2596 op1 = gen_reg_rtx (mode);
2597 op2 = gen_reg_rtx (mode);
2598
f97eea22 2599 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
d735c391 2600
2601 /* Compute into op1 and op2. */
2602 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2603 }
30f690e0 2604 else if (targetm.libc_has_function (function_sincos))
d735c391 2605 {
c2f47e15 2606 tree call, fn = NULL_TREE;
d735c391 2607 tree top1, top2;
2608 rtx op1a, op2a;
2609
2610 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2611 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
d735c391 2612 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2613 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
d735c391 2614 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2615 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
c2f47e15 2616 else
2617 gcc_unreachable ();
48e1416a 2618
0ab48139 2619 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2620 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
99182918 2621 op1a = copy_addr_to_reg (XEXP (op1, 0));
2622 op2a = copy_addr_to_reg (XEXP (op2, 0));
d735c391 2623 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2624 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2625
d735c391 2626 /* Make sure not to fold the sincos call again. */
2627 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
c2f47e15 2628 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2629 call, 3, arg, top1, top2));
d735c391 2630 }
18b8d8ae 2631 else
2632 {
0ecbc158 2633 tree call, fn = NULL_TREE, narg;
18b8d8ae 2634 tree ctype = build_complex_type (type);
2635
0ecbc158 2636 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
b9a16870 2637 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
0ecbc158 2638 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
b9a16870 2639 fn = builtin_decl_explicit (BUILT_IN_CEXP);
0ecbc158 2640 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
b9a16870 2641 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
c2f47e15 2642 else
2643 gcc_unreachable ();
fc0dfa6e 2644
2645 /* If we don't have a decl for cexp create one. This is the
2646 friendliest fallback if the user calls __builtin_cexpi
2647 without full target C99 function support. */
2648 if (fn == NULL_TREE)
2649 {
2650 tree fntype;
2651 const char *name = NULL;
2652
2653 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2654 name = "cexpf";
2655 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2656 name = "cexp";
2657 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2658 name = "cexpl";
2659
2660 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2661 fn = build_fn_decl (name, fntype);
2662 }
2663
389dd41b 2664 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
18b8d8ae 2665 build_real (type, dconst0), arg);
2666
2667 /* Make sure not to fold the cexp call again. */
2668 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
48e1416a 2669 return expand_expr (build_call_nary (ctype, call, 1, narg),
1db6d067 2670 target, VOIDmode, EXPAND_NORMAL);
18b8d8ae 2671 }
d735c391 2672
2673 /* Now build the proper return type. */
2674 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2675 make_tree (TREE_TYPE (arg), op2),
2676 make_tree (TREE_TYPE (arg), op1)),
1db6d067 2677 target, VOIDmode, EXPAND_NORMAL);
d735c391 2678}
2679
a65c4d64 2680/* Conveniently construct a function call expression. FNDECL names the
2681 function to be called, N is the number of arguments, and the "..."
2682 parameters are the argument expressions. Unlike build_call_expr
2683 this doesn't fold the call, hence it will always return a CALL_EXPR. */
2684
2685static tree
2686build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2687{
2688 va_list ap;
2689 tree fntype = TREE_TYPE (fndecl);
2690 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2691
2692 va_start (ap, n);
2693 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2694 va_end (ap);
2695 SET_EXPR_LOCATION (fn, loc);
2696 return fn;
2697}
a65c4d64 2698
7d3afc77 2699/* Expand a call to one of the builtin rounding functions gcc defines
2700 as an extension (lfloor and lceil). As these are gcc extensions we
2701 do not need to worry about setting errno to EDOM.
ad52b9b7 2702 If expanding via optab fails, lower expression to (int)(floor(x)).
2703 EXP is the expression that is a call to the builtin function;
ff1b14e4 2704 if convenient, the result should be placed in TARGET. */
ad52b9b7 2705
2706static rtx
ff1b14e4 2707expand_builtin_int_roundingfn (tree exp, rtx target)
ad52b9b7 2708{
9c42dd28 2709 convert_optab builtin_optab;
1e0c0b35 2710 rtx op0, tmp;
2711 rtx_insn *insns;
ad52b9b7 2712 tree fndecl = get_callee_fndecl (exp);
ad52b9b7 2713 enum built_in_function fallback_fn;
2714 tree fallback_fndecl;
3754d046 2715 machine_mode mode;
4de0924f 2716 tree arg;
ad52b9b7 2717
c2f47e15 2718 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
ad52b9b7 2719 gcc_unreachable ();
2720
c2f47e15 2721 arg = CALL_EXPR_ARG (exp, 0);
ad52b9b7 2722
2723 switch (DECL_FUNCTION_CODE (fndecl))
2724 {
80ff6494 2725 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 2726 CASE_FLT_FN (BUILT_IN_LCEIL):
2727 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 2728 builtin_optab = lceil_optab;
2729 fallback_fn = BUILT_IN_CEIL;
2730 break;
2731
80ff6494 2732 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 2733 CASE_FLT_FN (BUILT_IN_LFLOOR):
2734 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 2735 builtin_optab = lfloor_optab;
2736 fallback_fn = BUILT_IN_FLOOR;
2737 break;
2738
2739 default:
2740 gcc_unreachable ();
2741 }
2742
2743 /* Make a suitable register to place result in. */
2744 mode = TYPE_MODE (TREE_TYPE (exp));
2745
9c42dd28 2746 target = gen_reg_rtx (mode);
ad52b9b7 2747
9c42dd28 2748 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2749 need to expand the argument again. This way, we will not perform
2750 side-effects more than once. */
abfea505 2751 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
ad52b9b7 2752
ff1b14e4 2753 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
ad52b9b7 2754
9c42dd28 2755 start_sequence ();
ad52b9b7 2756
9c42dd28 2757 /* Compute into TARGET. */
2758 if (expand_sfix_optab (target, op0, builtin_optab))
2759 {
2760 /* Output the entire sequence. */
2761 insns = get_insns ();
ad52b9b7 2762 end_sequence ();
9c42dd28 2763 emit_insn (insns);
2764 return target;
ad52b9b7 2765 }
2766
9c42dd28 2767 /* If we were unable to expand via the builtin, stop the sequence
2768 (without outputting the insns). */
2769 end_sequence ();
2770
ad52b9b7 2771 /* Fall back to floating point rounding optab. */
2772 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
fc0dfa6e 2773
2774 /* For non-C99 targets we may end up without a fallback fndecl here
2775 if the user called __builtin_lfloor directly. In this case emit
2776 a call to the floor/ceil variants nevertheless. This should result
2777 in the best user experience for targets without full C99 support. */
2778 if (fallback_fndecl == NULL_TREE)
2779 {
2780 tree fntype;
2781 const char *name = NULL;
2782
2783 switch (DECL_FUNCTION_CODE (fndecl))
2784 {
80ff6494 2785 case BUILT_IN_ICEIL:
fc0dfa6e 2786 case BUILT_IN_LCEIL:
2787 case BUILT_IN_LLCEIL:
2788 name = "ceil";
2789 break;
80ff6494 2790 case BUILT_IN_ICEILF:
fc0dfa6e 2791 case BUILT_IN_LCEILF:
2792 case BUILT_IN_LLCEILF:
2793 name = "ceilf";
2794 break;
80ff6494 2795 case BUILT_IN_ICEILL:
fc0dfa6e 2796 case BUILT_IN_LCEILL:
2797 case BUILT_IN_LLCEILL:
2798 name = "ceill";
2799 break;
80ff6494 2800 case BUILT_IN_IFLOOR:
fc0dfa6e 2801 case BUILT_IN_LFLOOR:
2802 case BUILT_IN_LLFLOOR:
2803 name = "floor";
2804 break;
80ff6494 2805 case BUILT_IN_IFLOORF:
fc0dfa6e 2806 case BUILT_IN_LFLOORF:
2807 case BUILT_IN_LLFLOORF:
2808 name = "floorf";
2809 break;
80ff6494 2810 case BUILT_IN_IFLOORL:
fc0dfa6e 2811 case BUILT_IN_LFLOORL:
2812 case BUILT_IN_LLFLOORL:
2813 name = "floorl";
2814 break;
2815 default:
2816 gcc_unreachable ();
2817 }
2818
2819 fntype = build_function_type_list (TREE_TYPE (arg),
2820 TREE_TYPE (arg), NULL_TREE);
2821 fallback_fndecl = build_fn_decl (name, fntype);
2822 }
2823
0568e9c1 2824 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
ad52b9b7 2825
d4c690af 2826 tmp = expand_normal (exp);
933eb13a 2827 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
ad52b9b7 2828
2829 /* Truncate the result of floating point optab to integer
2830 via expand_fix (). */
2831 target = gen_reg_rtx (mode);
2832 expand_fix (target, tmp, 0);
2833
2834 return target;
2835}
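
/* Illustrative sketch, not part of the original file: __builtin_lfloor and
   __builtin_lceil are the GCC extensions handled above.  With an
   lfloor/lceil optab they expand to a single instruction; otherwise the
   fallback path calls floor/ceil and converts the result with expand_fix.
   The function name is hypothetical.  */
#if 0
long
example_lfloor (double x)
{
  return __builtin_lfloor (x);
}
#endif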
2836
7d3afc77 2837/* Expand a call to one of the builtin math functions doing integer
2838 conversion (lrint).
2839 Return 0 if a normal call should be emitted rather than expanding the
2840 function in-line. EXP is the expression that is a call to the builtin
ff1b14e4 2841 function; if convenient, the result should be placed in TARGET. */
7d3afc77 2842
2843static rtx
ff1b14e4 2844expand_builtin_int_roundingfn_2 (tree exp, rtx target)
7d3afc77 2845{
5f51ee59 2846 convert_optab builtin_optab;
1e0c0b35 2847 rtx op0;
2848 rtx_insn *insns;
7d3afc77 2849 tree fndecl = get_callee_fndecl (exp);
4de0924f 2850 tree arg;
3754d046 2851 machine_mode mode;
e951f9a4 2852 enum built_in_function fallback_fn = BUILT_IN_NONE;
7d3afc77 2853
c2f47e15 2854 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2855 gcc_unreachable ();
48e1416a 2856
c2f47e15 2857 arg = CALL_EXPR_ARG (exp, 0);
7d3afc77 2858
2859 switch (DECL_FUNCTION_CODE (fndecl))
2860 {
80ff6494 2861 CASE_FLT_FN (BUILT_IN_IRINT):
e951f9a4 2862 fallback_fn = BUILT_IN_LRINT;
2863 /* FALLTHRU */
7d3afc77 2864 CASE_FLT_FN (BUILT_IN_LRINT):
2865 CASE_FLT_FN (BUILT_IN_LLRINT):
e951f9a4 2866 builtin_optab = lrint_optab;
2867 break;
80ff6494 2868
2869 CASE_FLT_FN (BUILT_IN_IROUND):
e951f9a4 2870 fallback_fn = BUILT_IN_LROUND;
2871 /* FALLTHRU */
ef2f1a10 2872 CASE_FLT_FN (BUILT_IN_LROUND):
2873 CASE_FLT_FN (BUILT_IN_LLROUND):
e951f9a4 2874 builtin_optab = lround_optab;
2875 break;
80ff6494 2876
7d3afc77 2877 default:
2878 gcc_unreachable ();
2879 }
2880
e951f9a4 2881 /* There's no easy way to detect the case we need to set EDOM. */
2882 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2883 return NULL_RTX;
2884
7d3afc77 2885 /* Make a suitable register to place result in. */
2886 mode = TYPE_MODE (TREE_TYPE (exp));
2887
e951f9a4 2888 /* Expand inline only when we do not have to worry about setting errno. */
2889 if (!flag_errno_math)
2890 {
de2e453e 2891 rtx result = gen_reg_rtx (mode);
7d3afc77 2892
e951f9a4 2893 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2894 need to expand the argument again. This way, we will not perform
2895 side-effects more than once. */
2896 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7d3afc77 2897
e951f9a4 2898 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
7d3afc77 2899
e951f9a4 2900 start_sequence ();
7d3afc77 2901
de2e453e 2902 if (expand_sfix_optab (result, op0, builtin_optab))
e951f9a4 2903 {
2904 /* Output the entire sequence. */
2905 insns = get_insns ();
2906 end_sequence ();
2907 emit_insn (insns);
de2e453e 2908 return result;
e951f9a4 2909 }
2910
2911 /* If we were unable to expand via the builtin, stop the sequence
2912 (without outputting the insns) and call to the library function
2913 with the stabilized argument list. */
7d3afc77 2914 end_sequence ();
2915 }
2916
e951f9a4 2917 if (fallback_fn != BUILT_IN_NONE)
2918 {
2919 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2920 targets, (int) round (x) should never be transformed into
2921 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2922 a call to lround in the hope that the target provides at least some
2923 C99 functions. This should result in the best user experience for
2924 targets without full C99 support. */
2925 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2926 fallback_fn, 0);
2927
2928 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2929 fallback_fndecl, 1, arg);
2930
2931 target = expand_call (exp, NULL_RTX, target == const0_rtx);
933eb13a 2932 target = maybe_emit_group_store (target, TREE_TYPE (exp));
e951f9a4 2933 return convert_to_mode (mode, target, 0);
2934 }
5f51ee59 2935
de2e453e 2936 return expand_call (exp, target, target == const0_rtx);
7d3afc77 2937}
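
/* Illustrative sketch, not part of the original file: __builtin_lrint goes
   through lrint_optab above when errno handling permits, while
   __builtin_iround reuses lround_optab and, as described above, falls back
   to a call to lround when inline expansion is not possible.  The function
   name is hypothetical.  */
#if 0
long
example_lrint (double x)
{
  return __builtin_lrint (x);
}
#endif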
2938
c2f47e15 2939/* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
757c219d 2940 a normal call should be emitted rather than expanding the function
2941 in-line. EXP is the expression that is a call to the builtin
2942 function; if convenient, the result should be placed in TARGET. */
2943
2944static rtx
f97eea22 2945expand_builtin_powi (tree exp, rtx target)
757c219d 2946{
757c219d 2947 tree arg0, arg1;
2948 rtx op0, op1;
3754d046 2949 machine_mode mode;
2950 machine_mode mode2;
757c219d 2951
c2f47e15 2952 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2953 return NULL_RTX;
757c219d 2954
c2f47e15 2955 arg0 = CALL_EXPR_ARG (exp, 0);
2956 arg1 = CALL_EXPR_ARG (exp, 1);
757c219d 2957 mode = TYPE_MODE (TREE_TYPE (exp));
2958
757c219d 2959 /* Emit a libcall to libgcc. */
2960
c2f47e15 2961 /* Mode of the 2nd argument must match that of an int. */
d0405f40 2962 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2963
757c219d 2964 if (target == NULL_RTX)
2965 target = gen_reg_rtx (mode);
2966
f97eea22 2967 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
757c219d 2968 if (GET_MODE (op0) != mode)
2969 op0 = convert_to_mode (mode, op0, 0);
1db6d067 2970 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
d0405f40 2971 if (GET_MODE (op1) != mode2)
2972 op1 = convert_to_mode (mode2, op1, 0);
757c219d 2973
f36b9f69 2974 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2dd6f9ed 2975 target, LCT_CONST, mode, 2,
d0405f40 2976 op0, mode, op1, mode2);
757c219d 2977
2978 return target;
2979}
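/* Added illustrative note: the expansion above never open-codes powi; it
   always emits the libgcc helper obtained from optab_libfunc (powi_optab).
   For example (assuming the usual libgcc names, e.g. __powidf2 for
   double),

       double y = __builtin_powi (x, n);

   becomes essentially y = __powidf2 (x, (int) n), with the exponent
   first converted to the mode of an int.  */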
2980
48e1416a 2981/* Expand expression EXP which is a call to the strlen builtin. Return
c2f47e15 2982 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise,
aed0bd19 2983 try to get the result in TARGET, if convenient. */
f7c44134 2984
53800dbe 2985static rtx
c2f47e15 2986expand_builtin_strlen (tree exp, rtx target,
3754d046 2987 machine_mode target_mode)
53800dbe 2988{
c2f47e15 2989 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2990 return NULL_RTX;
53800dbe 2991 else
2992 {
8786db1e 2993 struct expand_operand ops[4];
911c0150 2994 rtx pat;
c2f47e15 2995 tree len;
2996 tree src = CALL_EXPR_ARG (exp, 0);
1e0c0b35 2997 rtx src_reg;
2998 rtx_insn *before_strlen;
3754d046 2999 machine_mode insn_mode = target_mode;
ef2c4a29 3000 enum insn_code icode = CODE_FOR_nothing;
153c3b50 3001 unsigned int align;
6248e345 3002
3003 /* If the length can be computed at compile-time, return it. */
681fab1e 3004 len = c_strlen (src, 0);
6248e345 3005 if (len)
80cd7a5e 3006 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
6248e345 3007
681fab1e 3008 /* If the length can be computed at compile-time and is a constant
3009 integer, but there are side-effects in src, evaluate
3010 src for side-effects, then return len.
3011 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3012 can be optimized into: i++; x = 3; */
3013 len = c_strlen (src, 1);
3014 if (len && TREE_CODE (len) == INTEGER_CST)
3015 {
3016 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3017 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3018 }
3019
957d0361 3020 align = get_pointer_alignment (src) / BITS_PER_UNIT;
53800dbe 3021
53800dbe 3022 /* If SRC is not a pointer type, don't do this operation inline. */
3023 if (align == 0)
c2f47e15 3024 return NULL_RTX;
53800dbe 3025
911c0150 3026 /* Bail out if we can't compute strlen in the right mode. */
53800dbe 3027 while (insn_mode != VOIDmode)
3028 {
d6bf3b14 3029 icode = optab_handler (strlen_optab, insn_mode);
53800dbe 3030 if (icode != CODE_FOR_nothing)
c28ae87f 3031 break;
53800dbe 3032
3033 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3034 }
3035 if (insn_mode == VOIDmode)
c2f47e15 3036 return NULL_RTX;
53800dbe 3037
911c0150 3038 /* Make a place to hold the source address. We will not expand
3039 the actual source until we are sure that the expansion will
3040 not fail -- there are trees that cannot be expanded twice. */
3041 src_reg = gen_reg_rtx (Pmode);
53800dbe 3042
911c0150 3043 /* Mark the beginning of the strlen sequence so we can emit the
3044 source operand later. */
f0ce3b1f 3045 before_strlen = get_last_insn ();
53800dbe 3046
8786db1e 3047 create_output_operand (&ops[0], target, insn_mode);
3048 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3049 create_integer_operand (&ops[2], 0);
3050 create_integer_operand (&ops[3], align);
3051 if (!maybe_expand_insn (icode, 4, ops))
c2f47e15 3052 return NULL_RTX;
911c0150 3053
3054 /* Now that we are assured of success, expand the source. */
3055 start_sequence ();
499eee58 3056 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
911c0150 3057 if (pat != src_reg)
499eee58 3058 {
3059#ifdef POINTERS_EXTEND_UNSIGNED
3060 if (GET_MODE (pat) != Pmode)
3061 pat = convert_to_mode (Pmode, pat,
3062 POINTERS_EXTEND_UNSIGNED);
3063#endif
3064 emit_move_insn (src_reg, pat);
3065 }
31d3e01c 3066 pat = get_insns ();
911c0150 3067 end_sequence ();
bceb0d1f 3068
3069 if (before_strlen)
3070 emit_insn_after (pat, before_strlen);
3071 else
3072 emit_insn_before (pat, get_insns ());
53800dbe 3073
3074 /* Return the value in the proper mode for this function. */
8786db1e 3075 if (GET_MODE (ops[0].value) == target_mode)
3076 target = ops[0].value;
53800dbe 3077 else if (target != 0)
8786db1e 3078 convert_move (target, ops[0].value, 0);
53800dbe 3079 else
8786db1e 3080 target = convert_to_mode (target_mode, ops[0].value, 0);
911c0150 3081
3082 return target;
53800dbe 3083 }
3084}
3085
6840589f 3086/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3087 bytes from constant string DATA + OFFSET and return it as target
3088 constant. */
3089
3090static rtx
aecda0d6 3091builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3092 machine_mode mode)
6840589f 3093{
3094 const char *str = (const char *) data;
3095
64db345d 3096 gcc_assert (offset >= 0
3097 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3098 <= strlen (str) + 1));
6840589f 3099
3100 return c_readstr (str + offset, mode);
3101}
3102
36d63243 3103/* LEN specify length of the block of memcpy/memset operation.
9db0f34d 3104 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3105 In some cases we can make very likely guess on max size, then we
3106 set it into PROBABLE_MAX_SIZE. */
36d63243 3107
3108static void
3109determine_block_size (tree len, rtx len_rtx,
3110 unsigned HOST_WIDE_INT *min_size,
9db0f34d 3111 unsigned HOST_WIDE_INT *max_size,
3112 unsigned HOST_WIDE_INT *probable_max_size)
36d63243 3113{
3114 if (CONST_INT_P (len_rtx))
3115 {
4e140a5c 3116 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
36d63243 3117 return;
3118 }
3119 else
3120 {
9c1be15e 3121 wide_int min, max;
9db0f34d 3122 enum value_range_type range_type = VR_UNDEFINED;
3123
3124 /* Determine bounds from the type. */
3125 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3126 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3127 else
3128 *min_size = 0;
3129 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
4e140a5c 3130 *probable_max_size = *max_size
3131 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
9db0f34d 3132 else
3133 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3134
3135 if (TREE_CODE (len) == SSA_NAME)
3136 range_type = get_range_info (len, &min, &max);
3137 if (range_type == VR_RANGE)
36d63243 3138 {
fe5ad926 3139 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
36d63243 3140 *min_size = min.to_uhwi ();
fe5ad926 3141 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
9db0f34d 3142 *probable_max_size = *max_size = max.to_uhwi ();
36d63243 3143 }
9db0f34d 3144 else if (range_type == VR_ANTI_RANGE)
36d63243 3145 {
4a474a5a 3146 /* An anti range 0...N lets us determine that the minimal size is N+1.  */
fe5ad926 3147 if (min == 0)
9db0f34d 3148 {
9c1be15e 3149 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3150 *min_size = max.to_uhwi () + 1;
9db0f34d 3151 }
3152 /* Code like
3153
3154 int n;
3155 if (n < 100)
4a474a5a 3156 memcpy (a, b, n)
9db0f34d 3157
 3158 produces an anti range allowing negative values of N.  We can still
 3159 use the information to guess that N is not negative.
3160 */
fe5ad926 3161 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3162 *probable_max_size = min.to_uhwi () - 1;
36d63243 3163 }
3164 }
3165 gcc_checking_assert (*max_size <=
3166 (unsigned HOST_WIDE_INT)
3167 GET_MODE_MASK (GET_MODE (len_rtx)));
3168}
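/* Added worked example (hypothetical code, for illustration only):

       void f (unsigned int n, char *a, const char *b)
       {
         if (n < 100)
           memcpy (a, b, n);
       }

   Inside the branch the SSA range of N is the VR_RANGE [0, 99], so this
   function would report *MIN_SIZE == 0 and
   *MAX_SIZE == *PROBABLE_MAX_SIZE == 99, which lets the block-move
   expander pick a short inline sequence instead of a libcall.  */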
3169
f21337ef 3170/* Helper function to do the actual work for expand_builtin_memcpy. */
3171
3172static rtx
3173expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3174{
3175 const char *src_str;
3176 unsigned int src_align = get_pointer_alignment (src);
3177 unsigned int dest_align = get_pointer_alignment (dest);
3178 rtx dest_mem, src_mem, dest_addr, len_rtx;
3179 HOST_WIDE_INT expected_size = -1;
3180 unsigned int expected_align = 0;
3181 unsigned HOST_WIDE_INT min_size;
3182 unsigned HOST_WIDE_INT max_size;
3183 unsigned HOST_WIDE_INT probable_max_size;
3184
3185 /* If DEST is not a pointer type, call the normal function. */
3186 if (dest_align == 0)
3187 return NULL_RTX;
3188
 3189 /* If SRC is not a pointer type, don't do this
3190 operation in-line. */
3191 if (src_align == 0)
3192 return NULL_RTX;
3193
3194 if (currently_expanding_gimple_stmt)
3195 stringop_block_profile (currently_expanding_gimple_stmt,
3196 &expected_align, &expected_size);
3197
3198 if (expected_align < dest_align)
3199 expected_align = dest_align;
3200 dest_mem = get_memory_rtx (dest, len);
3201 set_mem_align (dest_mem, dest_align);
3202 len_rtx = expand_normal (len);
3203 determine_block_size (len, len_rtx, &min_size, &max_size,
3204 &probable_max_size);
3205 src_str = c_getstr (src);
3206
3207 /* If SRC is a string constant and block move would be done
3208 by pieces, we can avoid loading the string from memory
 3209 and only store the computed constants.  */
3210 if (src_str
3211 && CONST_INT_P (len_rtx)
3212 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3213 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3214 CONST_CAST (char *, src_str),
3215 dest_align, false))
3216 {
3217 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3218 builtin_memcpy_read_str,
3219 CONST_CAST (char *, src_str),
3220 dest_align, false, 0);
3221 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3222 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3223 return dest_mem;
3224 }
3225
3226 src_mem = get_memory_rtx (src, len);
3227 set_mem_align (src_mem, src_align);
3228
3229 /* Copy word part most expediently. */
3230 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3231 CALL_EXPR_TAILCALL (exp)
3232 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3233 expected_align, expected_size,
3234 min_size, max_size, probable_max_size);
3235
3236 if (dest_addr == 0)
3237 {
3238 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3239 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3240 }
3241
3242 return dest_addr;
3243}
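/* Added illustrative example: for a call such as

       char buf[8];
       memcpy (buf, "abc", 4);

   SRC is a string constant and LEN_RTX a small CONST_INT, so the
   store_by_pieces path above can emit the four source bytes (including
   the terminating NUL) as immediate stores rather than a block move or
   a library call.  */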
3244
c2f47e15 3245/* Expand a call EXP to the memcpy builtin.
 3246 Return NULL_RTX if we failed; the caller should emit a normal call.
3b824fa6 3247 Otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3248 mode MODE if that's convenient). */
c2f47e15 3249
53800dbe 3250static rtx
a65c4d64 3251expand_builtin_memcpy (tree exp, rtx target)
53800dbe 3252{
c2f47e15 3253 if (!validate_arglist (exp,
3254 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3255 return NULL_RTX;
53800dbe 3256 else
3257 {
c2f47e15 3258 tree dest = CALL_EXPR_ARG (exp, 0);
3259 tree src = CALL_EXPR_ARG (exp, 1);
3260 tree len = CALL_EXPR_ARG (exp, 2);
f21337ef 3261 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3262 }
3263}
6840589f 3264
f21337ef 3265/* Expand an instrumented call EXP to the memcpy builtin.
 3266 Return NULL_RTX if we failed; the caller should emit a normal call.
 3267 Otherwise try to get the result in TARGET, if convenient (and in
3268 mode MODE if that's convenient). */
53800dbe 3269
f21337ef 3270static rtx
3271expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3272{
3273 if (!validate_arglist (exp,
3274 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3275 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3276 INTEGER_TYPE, VOID_TYPE))
3277 return NULL_RTX;
3278 else
3279 {
3280 tree dest = CALL_EXPR_ARG (exp, 0);
3281 tree src = CALL_EXPR_ARG (exp, 2);
3282 tree len = CALL_EXPR_ARG (exp, 4);
3283 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
53800dbe 3284
f21337ef 3285 /* Return src bounds with the result. */
3286 if (res)
e5716f7e 3287 {
17d388d8 3288 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3289 expand_normal (CALL_EXPR_ARG (exp, 1)));
3290 res = chkp_join_splitted_slot (res, bnd);
e5716f7e 3291 }
f21337ef 3292 return res;
53800dbe 3293 }
3294}
3295
c2f47e15 3296/* Expand a call EXP to the mempcpy builtin.
3297 Return NULL_RTX if we failed; the caller should emit a normal call,
647661c6 3298 otherwise try to get the result in TARGET, if convenient (and in
9fe0e1b8 3299 mode MODE if that's convenient). If ENDP is 0 return the
3300 destination pointer, if ENDP is 1 return the end pointer ala
3301 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3302 stpcpy. */
647661c6 3303
3304static rtx
3754d046 3305expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
647661c6 3306{
c2f47e15 3307 if (!validate_arglist (exp,
3308 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3309 return NULL_RTX;
3310 else
3311 {
3312 tree dest = CALL_EXPR_ARG (exp, 0);
3313 tree src = CALL_EXPR_ARG (exp, 1);
3314 tree len = CALL_EXPR_ARG (exp, 2);
3315 return expand_builtin_mempcpy_args (dest, src, len,
f21337ef 3316 target, mode, /*endp=*/ 1,
3317 exp);
3318 }
3319}
3320
3321/* Expand an instrumented call EXP to the mempcpy builtin.
 3322 Return NULL_RTX if we failed; the caller should emit a normal call.
 3323 Otherwise try to get the result in TARGET, if convenient (and in
3324 mode MODE if that's convenient). */
3325
3326static rtx
3327expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3328{
3329 if (!validate_arglist (exp,
3330 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3331 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3332 INTEGER_TYPE, VOID_TYPE))
3333 return NULL_RTX;
3334 else
3335 {
3336 tree dest = CALL_EXPR_ARG (exp, 0);
3337 tree src = CALL_EXPR_ARG (exp, 2);
3338 tree len = CALL_EXPR_ARG (exp, 4);
3339 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3340 mode, 1, exp);
3341
3342 /* Return src bounds with the result. */
3343 if (res)
3344 {
17d388d8 3345 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3346 expand_normal (CALL_EXPR_ARG (exp, 1)));
3347 res = chkp_join_splitted_slot (res, bnd);
3348 }
3349 return res;
c2f47e15 3350 }
3351}
3352
3353/* Helper function to do the actual work for expand_builtin_mempcpy. The
3354 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3355 so that this can also be called without constructing an actual CALL_EXPR.
a65c4d64 3356 The other arguments and return value are the same as for
3357 expand_builtin_mempcpy. */
c2f47e15 3358
3359static rtx
a65c4d64 3360expand_builtin_mempcpy_args (tree dest, tree src, tree len,
f21337ef 3361 rtx target, machine_mode mode, int endp,
3362 tree orig_exp)
c2f47e15 3363{
f21337ef 3364 tree fndecl = get_callee_fndecl (orig_exp);
3365
c2f47e15 3366 /* If the return value is ignored, transform mempcpy into memcpy.  */
f21337ef 3367 if (target == const0_rtx
3368 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3369 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3370 {
3371 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3372 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3373 dest, src, len);
3374 return expand_expr (result, target, mode, EXPAND_NORMAL);
3375 }
3376 else if (target == const0_rtx
3377 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
9fe0e1b8 3378 {
b9a16870 3379 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
0568e9c1 3380 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3381 dest, src, len);
c8b17b2e 3382 return expand_expr (result, target, mode, EXPAND_NORMAL);
9fe0e1b8 3383 }
647661c6 3384 else
3385 {
9fe0e1b8 3386 const char *src_str;
957d0361 3387 unsigned int src_align = get_pointer_alignment (src);
3388 unsigned int dest_align = get_pointer_alignment (dest);
9fe0e1b8 3389 rtx dest_mem, src_mem, len_rtx;
a0c938f0 3390
7da1412b 3391 /* If either SRC or DEST is not a pointer type, don't do this
a0c938f0 3392 operation in-line. */
7da1412b 3393 if (dest_align == 0 || src_align == 0)
c2f47e15 3394 return NULL_RTX;
9fe0e1b8 3395
6217c238 3396 /* If LEN is not constant, call the normal function. */
e913b5cd 3397 if (! tree_fits_uhwi_p (len))
c2f47e15 3398 return NULL_RTX;
0862b7e9 3399
8ec3c5c2 3400 len_rtx = expand_normal (len);
9fe0e1b8 3401 src_str = c_getstr (src);
647661c6 3402
9fe0e1b8 3403 /* If SRC is a string constant and block move would be done
3404 by pieces, we can avoid loading the string from memory
3405 and only stored the computed constants. */
3406 if (src_str
971ba038 3407 && CONST_INT_P (len_rtx)
9fe0e1b8 3408 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3409 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
364c0c59 3410 CONST_CAST (char *, src_str),
3411 dest_align, false))
9fe0e1b8 3412 {
d8ae1baa 3413 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3414 set_mem_align (dest_mem, dest_align);
3415 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3416 builtin_memcpy_read_str,
364c0c59 3417 CONST_CAST (char *, src_str),
3418 dest_align, false, endp);
9fe0e1b8 3419 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3420 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3421 return dest_mem;
647661c6 3422 }
3423
971ba038 3424 if (CONST_INT_P (len_rtx)
9fe0e1b8 3425 && can_move_by_pieces (INTVAL (len_rtx),
3426 MIN (dest_align, src_align)))
3427 {
d8ae1baa 3428 dest_mem = get_memory_rtx (dest, len);
9fe0e1b8 3429 set_mem_align (dest_mem, dest_align);
d8ae1baa 3430 src_mem = get_memory_rtx (src, len);
9fe0e1b8 3431 set_mem_align (src_mem, src_align);
3432 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3433 MIN (dest_align, src_align), endp);
3434 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
85d654dd 3435 dest_mem = convert_memory_address (ptr_mode, dest_mem);
9fe0e1b8 3436 return dest_mem;
3437 }
3438
c2f47e15 3439 return NULL_RTX;
647661c6 3440 }
3441}
3442
727c62dd 3443#ifndef HAVE_movstr
3444# define HAVE_movstr 0
3445# define CODE_FOR_movstr CODE_FOR_nothing
3446#endif
3447
c2f47e15 3448/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
727c62dd 3449 we failed; the caller should emit a normal call.  Otherwise try to
3450 get the result in TARGET, if convenient. If ENDP is 0 return the
3451 destination pointer, if ENDP is 1 return the end pointer ala
3452 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3453 stpcpy. */
3454
3455static rtx
3456expand_movstr (tree dest, tree src, rtx target, int endp)
3457{
8786db1e 3458 struct expand_operand ops[3];
727c62dd 3459 rtx dest_mem;
3460 rtx src_mem;
727c62dd 3461
3462 if (!HAVE_movstr)
c2f47e15 3463 return NULL_RTX;
727c62dd 3464
d8ae1baa 3465 dest_mem = get_memory_rtx (dest, NULL);
3466 src_mem = get_memory_rtx (src, NULL);
727c62dd 3467 if (!endp)
3468 {
3469 target = force_reg (Pmode, XEXP (dest_mem, 0));
3470 dest_mem = replace_equiv_address (dest_mem, target);
727c62dd 3471 }
3472
8786db1e 3473 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3474 create_fixed_operand (&ops[1], dest_mem);
3475 create_fixed_operand (&ops[2], src_mem);
1e1d5623 3476 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3477 return NULL_RTX;
727c62dd 3478
8786db1e 3479 if (endp && target != const0_rtx)
c5aba89c 3480 {
8786db1e 3481 target = ops[0].value;
3482 /* movstr is supposed to set end to the address of the NUL
3483 terminator. If the caller requested a mempcpy-like return value,
3484 adjust it. */
3485 if (endp == 1)
3486 {
29c05e22 3487 rtx tem = plus_constant (GET_MODE (target),
3488 gen_lowpart (GET_MODE (target), target), 1);
8786db1e 3489 emit_move_insn (target, force_operand (tem, NULL_RTX));
3490 }
c5aba89c 3491 }
727c62dd 3492 return target;
3493}
3494
48e1416a 3495/* Expand expression EXP, which is a call to the strcpy builtin. Return
 3496 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise,
c2f47e15 3497 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3498 convenient). */
902de8ed 3499
53800dbe 3500static rtx
a65c4d64 3501expand_builtin_strcpy (tree exp, rtx target)
53800dbe 3502{
c2f47e15 3503 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3504 {
3505 tree dest = CALL_EXPR_ARG (exp, 0);
3506 tree src = CALL_EXPR_ARG (exp, 1);
a65c4d64 3507 return expand_builtin_strcpy_args (dest, src, target);
c2f47e15 3508 }
3509 return NULL_RTX;
3510}
3511
3512/* Helper function to do the actual work for expand_builtin_strcpy. The
3513 arguments to the builtin_strcpy call DEST and SRC are broken out
3514 so that this can also be called without constructing an actual CALL_EXPR.
3515 The other arguments and return value are the same as for
3516 expand_builtin_strcpy. */
3517
3518static rtx
a65c4d64 3519expand_builtin_strcpy_args (tree dest, tree src, rtx target)
c2f47e15 3520{
c2f47e15 3521 return expand_movstr (dest, src, target, /*endp=*/0);
53800dbe 3522}
3523
c2f47e15 3524/* Expand a call EXP to the stpcpy builtin.
 3525 Return NULL_RTX if we failed; the caller should emit a normal call.
3b824fa6 3526 Otherwise try to get the result in TARGET, if convenient (and in
3527 mode MODE if that's convenient). */
3528
3529static rtx
3754d046 3530expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3b824fa6 3531{
c2f47e15 3532 tree dst, src;
389dd41b 3533 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3534
3535 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3536 return NULL_RTX;
3537
3538 dst = CALL_EXPR_ARG (exp, 0);
3539 src = CALL_EXPR_ARG (exp, 1);
3540
727c62dd 3541 /* If the return value is ignored, transform stpcpy into strcpy.  */
b9a16870 3542 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
978836e5 3543 {
b9a16870 3544 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
0568e9c1 3545 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
c8b17b2e 3546 return expand_expr (result, target, mode, EXPAND_NORMAL);
978836e5 3547 }
3b824fa6 3548 else
3549 {
c2f47e15 3550 tree len, lenp1;
727c62dd 3551 rtx ret;
647661c6 3552
9fe0e1b8 3553 /* Ensure we get an actual string whose length can be evaluated at
a0c938f0 3554 compile-time, not an expression containing a string. This is
3555 because the latter will potentially produce pessimized code
3556 when used to produce the return value. */
681fab1e 3557 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
c2f47e15 3558 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3559
389dd41b 3560 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
a65c4d64 3561 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
f21337ef 3562 target, mode, /*endp=*/2,
3563 exp);
727c62dd 3564
3565 if (ret)
3566 return ret;
3567
3568 if (TREE_CODE (len) == INTEGER_CST)
3569 {
8ec3c5c2 3570 rtx len_rtx = expand_normal (len);
727c62dd 3571
971ba038 3572 if (CONST_INT_P (len_rtx))
727c62dd 3573 {
a65c4d64 3574 ret = expand_builtin_strcpy_args (dst, src, target);
727c62dd 3575
3576 if (ret)
3577 {
3578 if (! target)
7ac87324 3579 {
3580 if (mode != VOIDmode)
3581 target = gen_reg_rtx (mode);
3582 else
3583 target = gen_reg_rtx (GET_MODE (ret));
3584 }
727c62dd 3585 if (GET_MODE (target) != GET_MODE (ret))
3586 ret = gen_lowpart (GET_MODE (target), ret);
3587
29c05e22 3588 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
c5aba89c 3589 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
64db345d 3590 gcc_assert (ret);
727c62dd 3591
3592 return target;
3593 }
3594 }
3595 }
3596
c2f47e15 3597 return expand_movstr (dst, src, target, /*endp=*/2);
3b824fa6 3598 }
3599}
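/* Added sketch of the transformation above (illustrative only): when the
   source length is known at compile time,

       char *p = stpcpy (d, "abcd");

   is expanded via expand_builtin_mempcpy_args with length 5 and
   ENDP == 2, so the returned pointer is the end address minus one,
   i.e. D + 4, which points at the copied NUL terminator exactly as
   stpcpy requires.  */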
3600
6840589f 3601/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3602 bytes from constant string DATA + OFFSET and return it as target
3603 constant. */
3604
09879952 3605rtx
aecda0d6 3606builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3754d046 3607 machine_mode mode)
6840589f 3608{
3609 const char *str = (const char *) data;
3610
3611 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3612 return const0_rtx;
3613
3614 return c_readstr (str + offset, mode);
3615}
3616
48e1416a 3617/* Expand expression EXP, which is a call to the strncpy builtin. Return
c2f47e15 3618 NULL_RTX if we failed; the caller should emit a normal call.  */
ed09096d 3619
3620static rtx
a65c4d64 3621expand_builtin_strncpy (tree exp, rtx target)
ed09096d 3622{
389dd41b 3623 location_t loc = EXPR_LOCATION (exp);
c2f47e15 3624
3625 if (validate_arglist (exp,
3626 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
ed09096d 3627 {
c2f47e15 3628 tree dest = CALL_EXPR_ARG (exp, 0);
3629 tree src = CALL_EXPR_ARG (exp, 1);
3630 tree len = CALL_EXPR_ARG (exp, 2);
3631 tree slen = c_strlen (src, 1);
6840589f 3632
8ff6a5cd 3633 /* We must be passed a constant len and src parameter. */
e913b5cd 3634 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
c2f47e15 3635 return NULL_RTX;
ed09096d 3636
389dd41b 3637 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
ed09096d 3638
3639 /* We're required to pad with trailing zeros if the requested
a0c938f0 3640 len is greater than strlen(s2)+1. In that case try to
6840589f 3641 use store_by_pieces; if that fails, punt.  */
ed09096d 3642 if (tree_int_cst_lt (slen, len))
6840589f 3643 {
957d0361 3644 unsigned int dest_align = get_pointer_alignment (dest);
c2f47e15 3645 const char *p = c_getstr (src);
6840589f 3646 rtx dest_mem;
3647
e913b5cd 3648 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3649 || !can_store_by_pieces (tree_to_uhwi (len),
6840589f 3650 builtin_strncpy_read_str,
364c0c59 3651 CONST_CAST (char *, p),
3652 dest_align, false))
c2f47e15 3653 return NULL_RTX;
6840589f 3654
d8ae1baa 3655 dest_mem = get_memory_rtx (dest, len);
e913b5cd 3656 store_by_pieces (dest_mem, tree_to_uhwi (len),
6840589f 3657 builtin_strncpy_read_str,
364c0c59 3658 CONST_CAST (char *, p), dest_align, false, 0);
a65c4d64 3659 dest_mem = force_operand (XEXP (dest_mem, 0), target);
85d654dd 3660 dest_mem = convert_memory_address (ptr_mode, dest_mem);
e5716f7e 3661 return dest_mem;
6840589f 3662 }
ed09096d 3663 }
c2f47e15 3664 return NULL_RTX;
ed09096d 3665}
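/* Added illustrative note: the zero-padding requirement handled above
   means that

       strncpy (d, "ab", 5);

   must store all five bytes: 'a', 'b' and three NULs.  The zero padding
   falls out of builtin_strncpy_read_str, which returns const0_rtx for
   any offset beyond the end of the source string.  */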
3666
ecc318ff 3667/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3668 bytes from constant string DATA + OFFSET and return it as target
3669 constant. */
3670
f656b751 3671rtx
aecda0d6 3672builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3673 machine_mode mode)
ecc318ff 3674{
3675 const char *c = (const char *) data;
364c0c59 3676 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ecc318ff 3677
3678 memset (p, *c, GET_MODE_SIZE (mode));
3679
3680 return c_readstr (p, mode);
3681}
3682
a7ec6974 3683/* Callback routine for store_by_pieces. Return the RTL of a register
3684 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3685 char value given in the RTL register data. For example, if mode is
3686 4 bytes wide, return the RTL for 0x01010101*data. */
3687
3688static rtx
aecda0d6 3689builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3754d046 3690 machine_mode mode)
a7ec6974 3691{
3692 rtx target, coeff;
3693 size_t size;
3694 char *p;
3695
3696 size = GET_MODE_SIZE (mode);
f0ce3b1f 3697 if (size == 1)
3698 return (rtx) data;
a7ec6974 3699
364c0c59 3700 p = XALLOCAVEC (char, size);
a7ec6974 3701 memset (p, 1, size);
3702 coeff = c_readstr (p, mode);
3703
f0ce3b1f 3704 target = convert_to_mode (mode, (rtx) data, 1);
a7ec6974 3705 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3706 return force_reg (mode, target);
3707}
3708
48e1416a 3709/* Expand expression EXP, which is a call to the memset builtin. Return
 3710 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise,
c2f47e15 3711 try to get the result in TARGET, if convenient (and in mode MODE if that's
6f428e8b 3712 convenient). */
902de8ed 3713
53800dbe 3714static rtx
3754d046 3715expand_builtin_memset (tree exp, rtx target, machine_mode mode)
53800dbe 3716{
c2f47e15 3717 if (!validate_arglist (exp,
3718 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3719 return NULL_RTX;
53800dbe 3720 else
3721 {
c2f47e15 3722 tree dest = CALL_EXPR_ARG (exp, 0);
3723 tree val = CALL_EXPR_ARG (exp, 1);
3724 tree len = CALL_EXPR_ARG (exp, 2);
3725 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3726 }
3727}
53800dbe 3728
f21337ef 3729/* Expand expression EXP, which is an instrumented call to the memset builtin.
 3730 Return NULL_RTX if we failed; the caller should emit a normal call.  Otherwise,
3731 try to get the result in TARGET, if convenient (and in mode MODE if that's
3732 convenient). */
3733
3734static rtx
3735expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3736{
3737 if (!validate_arglist (exp,
3738 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3739 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3740 return NULL_RTX;
3741 else
3742 {
3743 tree dest = CALL_EXPR_ARG (exp, 0);
3744 tree val = CALL_EXPR_ARG (exp, 2);
3745 tree len = CALL_EXPR_ARG (exp, 3);
3746 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3747
3748 /* Return src bounds with the result. */
3749 if (res)
3750 {
17d388d8 3751 rtx bnd = force_reg (targetm.chkp_bound_mode (),
f21337ef 3752 expand_normal (CALL_EXPR_ARG (exp, 1)));
3753 res = chkp_join_splitted_slot (res, bnd);
3754 }
3755 return res;
3756 }
3757}
3758
c2f47e15 3759/* Helper function to do the actual work for expand_builtin_memset. The
3760 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3761 so that this can also be called without constructing an actual CALL_EXPR.
3762 The other arguments and return value are the same as for
3763 expand_builtin_memset. */
6b961939 3764
c2f47e15 3765static rtx
3766expand_builtin_memset_args (tree dest, tree val, tree len,
3754d046 3767 rtx target, machine_mode mode, tree orig_exp)
c2f47e15 3768{
3769 tree fndecl, fn;
3770 enum built_in_function fcode;
3754d046 3771 machine_mode val_mode;
c2f47e15 3772 char c;
3773 unsigned int dest_align;
3774 rtx dest_mem, dest_addr, len_rtx;
3775 HOST_WIDE_INT expected_size = -1;
3776 unsigned int expected_align = 0;
36d63243 3777 unsigned HOST_WIDE_INT min_size;
3778 unsigned HOST_WIDE_INT max_size;
9db0f34d 3779 unsigned HOST_WIDE_INT probable_max_size;
53800dbe 3780
957d0361 3781 dest_align = get_pointer_alignment (dest);
162719b3 3782
c2f47e15 3783 /* If DEST is not a pointer type, don't do this operation in-line. */
3784 if (dest_align == 0)
3785 return NULL_RTX;
6f428e8b 3786
8cee8dc0 3787 if (currently_expanding_gimple_stmt)
3788 stringop_block_profile (currently_expanding_gimple_stmt,
3789 &expected_align, &expected_size);
75a70cf9 3790
c2f47e15 3791 if (expected_align < dest_align)
3792 expected_align = dest_align;
6b961939 3793
c2f47e15 3794 /* If the LEN parameter is zero, return DEST. */
3795 if (integer_zerop (len))
3796 {
3797 /* Evaluate and ignore VAL in case it has side-effects. */
3798 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3799 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3800 }
7a3e5564 3801
c2f47e15 3802 /* Stabilize the arguments in case we fail. */
3803 dest = builtin_save_expr (dest);
3804 val = builtin_save_expr (val);
3805 len = builtin_save_expr (len);
a7ec6974 3806
c2f47e15 3807 len_rtx = expand_normal (len);
9db0f34d 3808 determine_block_size (len, len_rtx, &min_size, &max_size,
3809 &probable_max_size);
c2f47e15 3810 dest_mem = get_memory_rtx (dest, len);
03a5dda9 3811 val_mode = TYPE_MODE (unsigned_char_type_node);
a7ec6974 3812
c2f47e15 3813 if (TREE_CODE (val) != INTEGER_CST)
3814 {
3815 rtx val_rtx;
a7ec6974 3816
c2f47e15 3817 val_rtx = expand_normal (val);
03a5dda9 3818 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
53800dbe 3819
c2f47e15 3820 /* Assume that we can memset by pieces if we can store
 3821 the coefficients by pieces (in the required modes).
 3822 We can't pass builtin_memset_gen_str as that emits RTL.  */
3823 c = 1;
e913b5cd 3824 if (tree_fits_uhwi_p (len)
3825 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3826 builtin_memset_read_str, &c, dest_align,
3827 true))
c2f47e15 3828 {
03a5dda9 3829 val_rtx = force_reg (val_mode, val_rtx);
e913b5cd 3830 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3831 builtin_memset_gen_str, val_rtx, dest_align,
3832 true, 0);
c2f47e15 3833 }
3834 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3835 dest_align, expected_align,
9db0f34d 3836 expected_size, min_size, max_size,
3837 probable_max_size))
6b961939 3838 goto do_libcall;
48e1416a 3839
c2f47e15 3840 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3841 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3842 return dest_mem;
3843 }
53800dbe 3844
c2f47e15 3845 if (target_char_cast (val, &c))
3846 goto do_libcall;
ecc318ff 3847
c2f47e15 3848 if (c)
3849 {
e913b5cd 3850 if (tree_fits_uhwi_p (len)
3851 && can_store_by_pieces (tree_to_uhwi (len),
4b297e2e 3852 builtin_memset_read_str, &c, dest_align,
3853 true))
e913b5cd 3854 store_by_pieces (dest_mem, tree_to_uhwi (len),
4b297e2e 3855 builtin_memset_read_str, &c, dest_align, true, 0);
03a5dda9 3856 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3857 gen_int_mode (c, val_mode),
c2f47e15 3858 dest_align, expected_align,
9db0f34d 3859 expected_size, min_size, max_size,
3860 probable_max_size))
c2f47e15 3861 goto do_libcall;
48e1416a 3862
c2f47e15 3863 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3864 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3865 return dest_mem;
3866 }
ecc318ff 3867
c2f47e15 3868 set_mem_align (dest_mem, dest_align);
3869 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3870 CALL_EXPR_TAILCALL (orig_exp)
3871 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
36d63243 3872 expected_align, expected_size,
9db0f34d 3873 min_size, max_size,
3874 probable_max_size);
53800dbe 3875
c2f47e15 3876 if (dest_addr == 0)
3877 {
3878 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3879 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3880 }
53800dbe 3881
c2f47e15 3882 return dest_addr;
6b961939 3883
c2f47e15 3884 do_libcall:
3885 fndecl = get_callee_fndecl (orig_exp);
3886 fcode = DECL_FUNCTION_CODE (fndecl);
f21337ef 3887 if (fcode == BUILT_IN_MEMSET
3888 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
0568e9c1 3889 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3890 dest, val, len);
c2f47e15 3891 else if (fcode == BUILT_IN_BZERO)
0568e9c1 3892 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3893 dest, len);
c2f47e15 3894 else
3895 gcc_unreachable ();
a65c4d64 3896 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3897 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
c2f47e15 3898 return expand_call (fn, target, target == const0_rtx);
53800dbe 3899}
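/* Added rough summary of the expansion strategy above (illustration,
   not normative):

       memset (p, 0, 32);   constant byte and length: store_by_pieces
                            with wide zero stores where profitable;
       memset (p, c, n);    variable byte: replicate C across a word via
                            builtin_memset_gen_str (0x01010101 * c for a
                            4-byte mode), try set_storage_via_setmem, and
                            otherwise fall back to the memset/bzero
                            libcall.  */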
3900
48e1416a 3901/* Expand expression EXP, which is a call to the bzero builtin. Return
c2f47e15 3902 NULL_RTX if we failed; the caller should emit a normal call.  */
27d0c333 3903
ffc83088 3904static rtx
0b25db21 3905expand_builtin_bzero (tree exp)
ffc83088 3906{
c2f47e15 3907 tree dest, size;
389dd41b 3908 location_t loc = EXPR_LOCATION (exp);
ffc83088 3909
c2f47e15 3910 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7369e7ba 3911 return NULL_RTX;
ffc83088 3912
c2f47e15 3913 dest = CALL_EXPR_ARG (exp, 0);
3914 size = CALL_EXPR_ARG (exp, 1);
bf8e3599 3915
7369e7ba 3916 /* New argument list transforming bzero(ptr x, int y) to
6f428e8b 3917 memset(ptr x, int 0, size_t y). This is done this way
 3918 so that if it isn't expanded inline, we fall back to
3919 calling bzero instead of memset. */
bf8e3599 3920
c2f47e15 3921 return expand_builtin_memset_args (dest, integer_zero_node,
a0553bff 3922 fold_convert_loc (loc,
3923 size_type_node, size),
c2f47e15 3924 const0_rtx, VOIDmode, exp);
ffc83088 3925}
3926
7a3f89b5 3927/* Expand expression EXP, which is a call to the memcmp built-in function.
bd021c1c 3928 Return NULL_RTX if we failed; the caller should emit a normal call.
 3929 Otherwise try to get the result in TARGET, if convenient (and in mode
3930 MODE, if that's convenient). */
27d0c333 3931
53800dbe 3932static rtx
a65c4d64 3933expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 3934 ATTRIBUTE_UNUSED machine_mode mode)
53800dbe 3935{
a65c4d64 3936 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 3937
c2f47e15 3938 if (!validate_arglist (exp,
3939 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3940 return NULL_RTX;
6f428e8b 3941
bd021c1c 3942 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3943 implementing memcmp because it will stop if it encounters two
3944 zero bytes. */
3945#if defined HAVE_cmpmemsi
53800dbe 3946 {
0cd832f0 3947 rtx arg1_rtx, arg2_rtx, arg3_rtx;
53800dbe 3948 rtx result;
0cd832f0 3949 rtx insn;
c2f47e15 3950 tree arg1 = CALL_EXPR_ARG (exp, 0);
3951 tree arg2 = CALL_EXPR_ARG (exp, 1);
3952 tree len = CALL_EXPR_ARG (exp, 2);
53800dbe 3953
957d0361 3954 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3955 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3754d046 3956 machine_mode insn_mode;
b428c0a5 3957
b428c0a5 3958 if (HAVE_cmpmemsi)
3959 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3960 else
c2f47e15 3961 return NULL_RTX;
53800dbe 3962
3963 /* If we don't have POINTER_TYPE, call the function. */
3964 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 3965 return NULL_RTX;
53800dbe 3966
3967 /* Make a place to write the result of the instruction. */
3968 result = target;
3969 if (! (result != 0
8ad4c111 3970 && REG_P (result) && GET_MODE (result) == insn_mode
53800dbe 3971 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3972 result = gen_reg_rtx (insn_mode);
3973
d8ae1baa 3974 arg1_rtx = get_memory_rtx (arg1, len);
3975 arg2_rtx = get_memory_rtx (arg2, len);
389dd41b 3976 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
83f88f8e 3977
3978 /* Set MEM_SIZE as appropriate. */
971ba038 3979 if (CONST_INT_P (arg3_rtx))
83f88f8e 3980 {
5b2a69fa 3981 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3982 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
83f88f8e 3983 }
3984
b428c0a5 3985 if (HAVE_cmpmemsi)
3986 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3987 GEN_INT (MIN (arg1_align, arg2_align)));
0cd832f0 3988 else
64db345d 3989 gcc_unreachable ();
0cd832f0 3990
3991 if (insn)
3992 emit_insn (insn);
3993 else
2dd6f9ed 3994 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
0cd832f0 3995 TYPE_MODE (integer_type_node), 3,
3996 XEXP (arg1_rtx, 0), Pmode,
3997 XEXP (arg2_rtx, 0), Pmode,
3998 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
78a8ed03 3999 TYPE_UNSIGNED (sizetype)),
0cd832f0 4000 TYPE_MODE (sizetype));
53800dbe 4001
4002 /* Return the value in the proper mode for this function. */
4003 mode = TYPE_MODE (TREE_TYPE (exp));
4004 if (GET_MODE (result) == mode)
4005 return result;
4006 else if (target != 0)
4007 {
4008 convert_move (target, result, 0);
4009 return target;
4010 }
4011 else
4012 return convert_to_mode (mode, result, 0);
4013 }
bd021c1c 4014#endif /* HAVE_cmpmemsi. */
53800dbe 4015
c2f47e15 4016 return NULL_RTX;
6f428e8b 4017}
4018
c2f47e15 4019/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
53800dbe 4020 if we failed; the caller should emit a normal call.  Otherwise try to get
4021 the result in TARGET, if convenient. */
902de8ed 4022
53800dbe 4023static rtx
a65c4d64 4024expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
53800dbe 4025{
c2f47e15 4026 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4027 return NULL_RTX;
bf8e3599 4028
6ac5504b 4029#if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
6b531606 4030 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
4031 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
6ac5504b 4032 {
4033 rtx arg1_rtx, arg2_rtx;
4034 rtx result, insn = NULL_RTX;
4035 tree fndecl, fn;
c2f47e15 4036 tree arg1 = CALL_EXPR_ARG (exp, 0);
4037 tree arg2 = CALL_EXPR_ARG (exp, 1);
a0c938f0 4038
957d0361 4039 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4040 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
6ac5504b 4041
4042 /* If we don't have POINTER_TYPE, call the function. */
4043 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4044 return NULL_RTX;
7a3f89b5 4045
6ac5504b 4046 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4047 arg1 = builtin_save_expr (arg1);
4048 arg2 = builtin_save_expr (arg2);
7a3f89b5 4049
d8ae1baa 4050 arg1_rtx = get_memory_rtx (arg1, NULL);
4051 arg2_rtx = get_memory_rtx (arg2, NULL);
53800dbe 4052
6ac5504b 4053#ifdef HAVE_cmpstrsi
4054 /* Try to call cmpstrsi. */
4055 if (HAVE_cmpstrsi)
4056 {
3754d046 4057 machine_mode insn_mode
6ac5504b 4058 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
4059
4060 /* Make a place to write the result of the instruction. */
4061 result = target;
4062 if (! (result != 0
4063 && REG_P (result) && GET_MODE (result) == insn_mode
4064 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4065 result = gen_reg_rtx (insn_mode);
4066
4067 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
4068 GEN_INT (MIN (arg1_align, arg2_align)));
4069 }
4070#endif
03fd9d2c 4071#ifdef HAVE_cmpstrnsi
6ac5504b 4072 /* Try to determine at least one length and call cmpstrnsi. */
a0c938f0 4073 if (!insn && HAVE_cmpstrnsi)
6ac5504b 4074 {
4075 tree len;
4076 rtx arg3_rtx;
4077
3754d046 4078 machine_mode insn_mode
6ac5504b 4079 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4080 tree len1 = c_strlen (arg1, 1);
4081 tree len2 = c_strlen (arg2, 1);
4082
4083 if (len1)
4084 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4085 if (len2)
4086 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4087
4088 /* If we don't have a constant length for the first, use the length
4089 of the second, if we know it. We don't require a constant for
4090 this case; some cost analysis could be done if both are available
4091 but neither is constant. For now, assume they're equally cheap,
4092 unless one has side effects. If both strings have constant lengths,
4093 use the smaller. */
4094
4095 if (!len1)
4096 len = len2;
4097 else if (!len2)
4098 len = len1;
4099 else if (TREE_SIDE_EFFECTS (len1))
4100 len = len2;
4101 else if (TREE_SIDE_EFFECTS (len2))
4102 len = len1;
4103 else if (TREE_CODE (len1) != INTEGER_CST)
4104 len = len2;
4105 else if (TREE_CODE (len2) != INTEGER_CST)
4106 len = len1;
4107 else if (tree_int_cst_lt (len1, len2))
4108 len = len1;
4109 else
4110 len = len2;
4111
4112 /* If both arguments have side effects, we cannot optimize. */
4113 if (!len || TREE_SIDE_EFFECTS (len))
6b961939 4114 goto do_libcall;
53800dbe 4115
8ec3c5c2 4116 arg3_rtx = expand_normal (len);
902de8ed 4117
6ac5504b 4118 /* Make a place to write the result of the instruction. */
4119 result = target;
4120 if (! (result != 0
4121 && REG_P (result) && GET_MODE (result) == insn_mode
4122 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4123 result = gen_reg_rtx (insn_mode);
53800dbe 4124
6ac5504b 4125 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4126 GEN_INT (MIN (arg1_align, arg2_align)));
4127 }
4128#endif
3f8aefe2 4129
6ac5504b 4130 if (insn)
4131 {
3754d046 4132 machine_mode mode;
6ac5504b 4133 emit_insn (insn);
3f8aefe2 4134
6ac5504b 4135 /* Return the value in the proper mode for this function. */
4136 mode = TYPE_MODE (TREE_TYPE (exp));
4137 if (GET_MODE (result) == mode)
4138 return result;
4139 if (target == 0)
4140 return convert_to_mode (mode, result, 0);
4141 convert_move (target, result, 0);
4142 return target;
4143 }
902de8ed 4144
6ac5504b 4145 /* Expand the library call ourselves using a stabilized argument
4146 list to avoid re-evaluating the function's arguments twice. */
2694880e 4147#ifdef HAVE_cmpstrnsi
6b961939 4148 do_libcall:
2694880e 4149#endif
6ac5504b 4150 fndecl = get_callee_fndecl (exp);
0568e9c1 4151 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
a65c4d64 4152 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4153 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
6ac5504b 4154 return expand_call (fn, target, target == const0_rtx);
4155 }
7a3f89b5 4156#endif
c2f47e15 4157 return NULL_RTX;
83d79705 4158}
53800dbe 4159
48e1416a 4160/* Expand expression EXP, which is a call to the strncmp builtin. Return
c2f47e15 4161 NULL_RTX if we failed; the caller should emit a normal call.  Otherwise try to get
ed09096d 4162 the result in TARGET, if convenient. */
27d0c333 4163
ed09096d 4164static rtx
a65c4d64 4165expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3754d046 4166 ATTRIBUTE_UNUSED machine_mode mode)
ed09096d 4167{
a65c4d64 4168 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
389dd41b 4169
c2f47e15 4170 if (!validate_arglist (exp,
4171 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4172 return NULL_RTX;
ed09096d 4173
6e34e617 4174 /* If c_strlen can determine an expression for one of the string
6ac5504b 4175 lengths, and it doesn't have side effects, then emit cmpstrnsi
7a3f89b5 4176 using length MIN(strlen(string)+1, arg3). */
6ac5504b 4177#ifdef HAVE_cmpstrnsi
4178 if (HAVE_cmpstrnsi)
7a3f89b5 4179 {
4180 tree len, len1, len2;
4181 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4182 rtx result, insn;
0b25db21 4183 tree fndecl, fn;
c2f47e15 4184 tree arg1 = CALL_EXPR_ARG (exp, 0);
4185 tree arg2 = CALL_EXPR_ARG (exp, 1);
4186 tree arg3 = CALL_EXPR_ARG (exp, 2);
6f428e8b 4187
957d0361 4188 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4189 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3754d046 4190 machine_mode insn_mode
6ac5504b 4191 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
bf8e3599 4192
681fab1e 4193 len1 = c_strlen (arg1, 1);
4194 len2 = c_strlen (arg2, 1);
7a3f89b5 4195
4196 if (len1)
389dd41b 4197 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7a3f89b5 4198 if (len2)
389dd41b 4199 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7a3f89b5 4200
4201 /* If we don't have a constant length for the first, use the length
4202 of the second, if we know it. We don't require a constant for
4203 this case; some cost analysis could be done if both are available
4204 but neither is constant. For now, assume they're equally cheap,
4205 unless one has side effects. If both strings have constant lengths,
4206 use the smaller. */
4207
4208 if (!len1)
4209 len = len2;
4210 else if (!len2)
4211 len = len1;
4212 else if (TREE_SIDE_EFFECTS (len1))
4213 len = len2;
4214 else if (TREE_SIDE_EFFECTS (len2))
4215 len = len1;
4216 else if (TREE_CODE (len1) != INTEGER_CST)
4217 len = len2;
4218 else if (TREE_CODE (len2) != INTEGER_CST)
4219 len = len1;
4220 else if (tree_int_cst_lt (len1, len2))
4221 len = len1;
4222 else
4223 len = len2;
6e34e617 4224
7a3f89b5 4225 /* If both arguments have side effects, we cannot optimize. */
4226 if (!len || TREE_SIDE_EFFECTS (len))
c2f47e15 4227 return NULL_RTX;
bf8e3599 4228
7a3f89b5 4229 /* The actual new length parameter is MIN(len,arg3). */
389dd41b 4230 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4231 fold_convert_loc (loc, TREE_TYPE (len), arg3));
7a3f89b5 4232
4233 /* If we don't have POINTER_TYPE, call the function. */
4234 if (arg1_align == 0 || arg2_align == 0)
c2f47e15 4235 return NULL_RTX;
7a3f89b5 4236
4237 /* Make a place to write the result of the instruction. */
4238 result = target;
4239 if (! (result != 0
8ad4c111 4240 && REG_P (result) && GET_MODE (result) == insn_mode
7a3f89b5 4241 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4242 result = gen_reg_rtx (insn_mode);
4243
a65c4d64 4244 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4245 arg1 = builtin_save_expr (arg1);
4246 arg2 = builtin_save_expr (arg2);
4247 len = builtin_save_expr (len);
27d0c333 4248
a65c4d64 4249 arg1_rtx = get_memory_rtx (arg1, len);
4250 arg2_rtx = get_memory_rtx (arg2, len);
4251 arg3_rtx = expand_normal (len);
4252 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4253 GEN_INT (MIN (arg1_align, arg2_align)));
4254 if (insn)
4255 {
4256 emit_insn (insn);
49f0327b 4257
a65c4d64 4258 /* Return the value in the proper mode for this function. */
4259 mode = TYPE_MODE (TREE_TYPE (exp));
4260 if (GET_MODE (result) == mode)
4261 return result;
4262 if (target == 0)
4263 return convert_to_mode (mode, result, 0);
4264 convert_move (target, result, 0);
4265 return target;
4266 }
27d0c333 4267
a65c4d64 4268 /* Expand the library call ourselves using a stabilized argument
4269 list to avoid re-evaluating the function's arguments twice. */
4270 fndecl = get_callee_fndecl (exp);
0568e9c1 4271 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4272 arg1, arg2, len);
a65c4d64 4273 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4274 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4275 return expand_call (fn, target, target == const0_rtx);
4276 }
4277#endif
c2f47e15 4278 return NULL_RTX;
49f0327b 4279}
4280
a66c9326 4281/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4282 if that's convenient. */
902de8ed 4283
a66c9326 4284rtx
aecda0d6 4285expand_builtin_saveregs (void)
53800dbe 4286{
1e0c0b35 4287 rtx val;
4288 rtx_insn *seq;
53800dbe 4289
4290 /* Don't do __builtin_saveregs more than once in a function.
4291 Save the result of the first call and reuse it. */
4292 if (saveregs_value != 0)
4293 return saveregs_value;
53800dbe 4294
a66c9326 4295 /* When this function is called, it means that registers must be
4296 saved on entry to this function. So we migrate the call to the
4297 first insn of this function. */
4298
4299 start_sequence ();
53800dbe 4300
a66c9326 4301 /* Do whatever the machine needs done in this case. */
45550790 4302 val = targetm.calls.expand_builtin_saveregs ();
53800dbe 4303
a66c9326 4304 seq = get_insns ();
4305 end_sequence ();
53800dbe 4306
a66c9326 4307 saveregs_value = val;
53800dbe 4308
31d3e01c 4309 /* Put the insns after the NOTE that starts the function. If this
4310 is inside a start_sequence, make the outer-level insn chain current, so
a66c9326 4311 the code is placed at the start of the function. */
4312 push_topmost_sequence ();
0ec80471 4313 emit_insn_after (seq, entry_of_function ());
a66c9326 4314 pop_topmost_sequence ();
4315
4316 return val;
53800dbe 4317}
4318
79012a9d 4319/* Expand a call to __builtin_next_arg. */
27d0c333 4320
53800dbe 4321static rtx
79012a9d 4322expand_builtin_next_arg (void)
53800dbe 4323{
79012a9d 4324 /* Checking arguments is already done in fold_builtin_next_arg
4325 that must be called before this function. */
940ddc5c 4326 return expand_binop (ptr_mode, add_optab,
abe32cce 4327 crtl->args.internal_arg_pointer,
4328 crtl->args.arg_offset_rtx,
53800dbe 4329 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4330}
4331
a66c9326 4332/* Make it easier for the backends by protecting the valist argument
4333 from multiple evaluations. */
4334
4335static tree
389dd41b 4336stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
a66c9326 4337{
5f57a8b1 4338 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4339
182cf5a9 4340 /* The current way of determining the type of valist is completely
4341 bogus. We should have the information on the va builtin instead. */
4342 if (!vatype)
4343 vatype = targetm.fn_abi_va_list (cfun->decl);
5f57a8b1 4344
4345 if (TREE_CODE (vatype) == ARRAY_TYPE)
a66c9326 4346 {
2d47cc32 4347 if (TREE_SIDE_EFFECTS (valist))
4348 valist = save_expr (valist);
11a61dea 4349
2d47cc32 4350 /* For this case, the backends will be expecting a pointer to
5f57a8b1 4351 vatype, but it's possible we've actually been given an array
4352 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
2d47cc32 4353 So fix it. */
4354 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
8a15c04a 4355 {
5f57a8b1 4356 tree p1 = build_pointer_type (TREE_TYPE (vatype));
389dd41b 4357 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
8a15c04a 4358 }
a66c9326 4359 }
11a61dea 4360 else
a66c9326 4361 {
182cf5a9 4362 tree pt = build_pointer_type (vatype);
11a61dea 4363
2d47cc32 4364 if (! needs_lvalue)
4365 {
11a61dea 4366 if (! TREE_SIDE_EFFECTS (valist))
4367 return valist;
bf8e3599 4368
389dd41b 4369 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
a66c9326 4370 TREE_SIDE_EFFECTS (valist) = 1;
a66c9326 4371 }
2d47cc32 4372
11a61dea 4373 if (TREE_SIDE_EFFECTS (valist))
2d47cc32 4374 valist = save_expr (valist);
182cf5a9 4375 valist = fold_build2_loc (loc, MEM_REF,
4376 vatype, valist, build_int_cst (pt, 0));
a66c9326 4377 }
4378
4379 return valist;
4380}
4381
2e15d750 4382/* The "standard" definition of va_list is void*. */
4383
4384tree
4385std_build_builtin_va_list (void)
4386{
4387 return ptr_type_node;
4388}
4389
5f57a8b1 4390/* The "standard" abi va_list is va_list_type_node. */
4391
4392tree
4393std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4394{
4395 return va_list_type_node;
4396}
4397
4398/* The "standard" type of va_list is va_list_type_node. */
4399
4400tree
4401std_canonical_va_list_type (tree type)
4402{
4403 tree wtype, htype;
4404
4405 if (INDIRECT_REF_P (type))
4406 type = TREE_TYPE (type);
9af5ce0c 4407 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
5f57a8b1 4408 type = TREE_TYPE (type);
5f57a8b1 4409 wtype = va_list_type_node;
4410 htype = type;
7b36f9ab 4411 /* Treat structure va_list types. */
4412 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4413 htype = TREE_TYPE (htype);
4414 else if (TREE_CODE (wtype) == ARRAY_TYPE)
5f57a8b1 4415 {
4416 /* If va_list is an array type, the argument may have decayed
4417 to a pointer type, e.g. by being passed to another function.
4418 In that case, unwrap both types so that we can compare the
4419 underlying records. */
4420 if (TREE_CODE (htype) == ARRAY_TYPE
4421 || POINTER_TYPE_P (htype))
4422 {
4423 wtype = TREE_TYPE (wtype);
4424 htype = TREE_TYPE (htype);
4425 }
4426 }
4427 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4428 return va_list_type_node;
4429
4430 return NULL_TREE;
4431}
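/* Added illustrative note: on targets whose va_list is an array type
   (for example x86-64, where __builtin_va_list is a one-element array
   of struct), a va_list parameter decays to a pointer when it is passed
   on to another function, e.g.

       void g (va_list ap);

   The unwrapping above strips both the array and the decayed pointer
   type so that AP is still recognized as the canonical va_list.  */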
4432
a66c9326 4433/* The "standard" implementation of va_start: just assign `nextarg' to
4434 the variable. */
27d0c333 4435
a66c9326 4436void
aecda0d6 4437std_expand_builtin_va_start (tree valist, rtx nextarg)
a66c9326 4438{
f03c17bc 4439 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4440 convert_move (va_r, nextarg, 0);
058a1b7a 4441
4442 /* We do not have any valid bounds for the pointer, so
4443 just store zero bounds for it. */
4444 if (chkp_function_instrumented_p (current_function_decl))
4445 chkp_expand_bounds_reset_for_mem (valist,
4446 make_tree (TREE_TYPE (valist),
4447 nextarg));
a66c9326 4448}
4449
c2f47e15 4450/* Expand EXP, a call to __builtin_va_start. */
27d0c333 4451
a66c9326 4452static rtx
c2f47e15 4453expand_builtin_va_start (tree exp)
a66c9326 4454{
4455 rtx nextarg;
c2f47e15 4456 tree valist;
389dd41b 4457 location_t loc = EXPR_LOCATION (exp);
a66c9326 4458
c2f47e15 4459 if (call_expr_nargs (exp) < 2)
cb166087 4460 {
389dd41b 4461 error_at (loc, "too few arguments to function %<va_start%>");
cb166087 4462 return const0_rtx;
4463 }
a66c9326 4464
c2f47e15 4465 if (fold_builtin_next_arg (exp, true))
79012a9d 4466 return const0_rtx;
7c2f0500 4467
79012a9d 4468 nextarg = expand_builtin_next_arg ();
389dd41b 4469 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
a66c9326 4470
8a58ed0a 4471 if (targetm.expand_builtin_va_start)
4472 targetm.expand_builtin_va_start (valist, nextarg);
4473 else
4474 std_expand_builtin_va_start (valist, nextarg);
a66c9326 4475
4476 return const0_rtx;
4477}
4478
c2f47e15 4479/* Expand EXP, a call to __builtin_va_end. */
f7c44134 4480
a66c9326 4481static rtx
c2f47e15 4482expand_builtin_va_end (tree exp)
a66c9326 4483{
c2f47e15 4484 tree valist = CALL_EXPR_ARG (exp, 0);
8a15c04a 4485
8a15c04a 4486 /* Evaluate for side effects, if needed. I hate macros that don't
4487 do that. */
4488 if (TREE_SIDE_EFFECTS (valist))
4489 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
a66c9326 4490
4491 return const0_rtx;
4492}
4493
c2f47e15 4494/* Expand EXP, a call to __builtin_va_copy. We do this as a
a66c9326 4495 builtin rather than just as an assignment in stdarg.h because of the
4496 nastiness of array-type va_list types. */
f7c44134 4497
a66c9326 4498static rtx
c2f47e15 4499expand_builtin_va_copy (tree exp)
a66c9326 4500{
4501 tree dst, src, t;
389dd41b 4502 location_t loc = EXPR_LOCATION (exp);
a66c9326 4503
c2f47e15 4504 dst = CALL_EXPR_ARG (exp, 0);
4505 src = CALL_EXPR_ARG (exp, 1);
a66c9326 4506
389dd41b 4507 dst = stabilize_va_list_loc (loc, dst, 1);
4508 src = stabilize_va_list_loc (loc, src, 0);
a66c9326 4509
5f57a8b1 4510 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4511
4512 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
a66c9326 4513 {
5f57a8b1 4514 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
a66c9326 4515 TREE_SIDE_EFFECTS (t) = 1;
4516 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4517 }
4518 else
4519 {
11a61dea 4520 rtx dstb, srcb, size;
4521
4522 /* Evaluate to pointers. */
4523 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4524 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5f57a8b1 4525 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4526 NULL_RTX, VOIDmode, EXPAND_NORMAL);
11a61dea 4527
85d654dd 4528 dstb = convert_memory_address (Pmode, dstb);
4529 srcb = convert_memory_address (Pmode, srcb);
726ec87c 4530
11a61dea 4531 /* "Dereference" to BLKmode memories. */
4532 dstb = gen_rtx_MEM (BLKmode, dstb);
ab6ab77e 4533 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5f57a8b1 4534 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4535 srcb = gen_rtx_MEM (BLKmode, srcb);
ab6ab77e 4536 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5f57a8b1 4537 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
11a61dea 4538
4539 /* Copy. */
0378dbdc 4540 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
a66c9326 4541 }
4542
4543 return const0_rtx;
4544}
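/* A minimal usage sketch of va_copy, which is what the expander above
   handles; on targets whose va_list is an array type the block-move path
   is used.

     #include <stdarg.h>

     static int
     sum_twice (int n, ...)
     {
       va_list ap, ap2;
       int i, s = 0;
       va_start (ap, n);
       va_copy (ap2, ap);         // expand_builtin_va_copy
       for (i = 0; i < n; i++)
         s += va_arg (ap, int);
       for (i = 0; i < n; i++)
         s += va_arg (ap2, int);
       va_end (ap2);
       va_end (ap);
       return s;
     }
*/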
4545
53800dbe 4546/* Expand a call to one of the builtin functions __builtin_frame_address or
4547 __builtin_return_address. */
27d0c333 4548
53800dbe 4549static rtx
c2f47e15 4550expand_builtin_frame_address (tree fndecl, tree exp)
53800dbe 4551{
53800dbe 4552 /* The argument must be a nonnegative integer constant.
4553 It counts the number of frames to scan up the stack.
5b252e95 4554 The value is either the frame pointer value or the return
4555 address saved in that frame. */
c2f47e15 4556 if (call_expr_nargs (exp) == 0)
53800dbe 4557 /* Warning about missing arg was already issued. */
4558 return const0_rtx;
e913b5cd 4559 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
53800dbe 4560 {
5b252e95 4561 error ("invalid argument to %qD", fndecl);
53800dbe 4562 return const0_rtx;
4563 }
4564 else
4565 {
5b252e95 4566 /* Number of frames to scan up the stack. */
4567 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4568
4569 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
53800dbe 4570
4571 /* Some ports cannot access arbitrary stack frames. */
4572 if (tem == NULL)
4573 {
5b252e95 4574 warning (0, "unsupported argument to %qD", fndecl);
53800dbe 4575 return const0_rtx;
4576 }
4577
5b252e95 4578 if (count)
4579 {
4580 /* Warn since no effort is made to ensure that any frame
4581 beyond the current one exists or can be safely reached. */
4582 warning (OPT_Wframe_address, "calling %qD with "
4583 "a nonzero argument is unsafe", fndecl);
4584 }
4585
53800dbe 4586 /* For __builtin_frame_address, return what we've got. */
4587 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4588 return tem;
4589
8ad4c111 4590 if (!REG_P (tem)
53800dbe 4591 && ! CONSTANT_P (tem))
99182918 4592 tem = copy_addr_to_reg (tem);
53800dbe 4593 return tem;
4594 }
4595}
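/* A minimal usage sketch of the two builtins handled above; a nonzero
   frame count triggers the -Wframe-address warning emitted in the
   expander, since outer frames cannot be reached safely on all targets.

     void *
     current_return_address (void)
     {
       return __builtin_return_address (0);
     }

     void *
     current_frame (void)
     {
       return __builtin_frame_address (0);
     }
*/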
4596
990495a7 4597/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5be42b39 4598 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4599 is the same as for allocate_dynamic_stack_space. */
15c6cf6b 4600
53800dbe 4601static rtx
5be42b39 4602expand_builtin_alloca (tree exp, bool cannot_accumulate)
53800dbe 4603{
4604 rtx op0;
15c6cf6b 4605 rtx result;
581bf1c2 4606 bool valid_arglist;
4607 unsigned int align;
4608 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4609 == BUILT_IN_ALLOCA_WITH_ALIGN);
53800dbe 4610
581bf1c2 4611 valid_arglist
4612 = (alloca_with_align
4613 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4614 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4615
4616 if (!valid_arglist)
c2f47e15 4617 return NULL_RTX;
53800dbe 4618
4619 /* Compute the argument. */
c2f47e15 4620 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
53800dbe 4621
581bf1c2 4622 /* Compute the alignment. */
4623 align = (alloca_with_align
f9ae6f95 4624 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
581bf1c2 4625 : BIGGEST_ALIGNMENT);
4626
53800dbe 4627 /* Allocate the desired space. */
581bf1c2 4628 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
85d654dd 4629 result = convert_memory_address (ptr_mode, result);
15c6cf6b 4630
4631 return result;
53800dbe 4632}
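/* A minimal usage sketch of the two forms accepted by the expander above.
   The alignment argument of __builtin_alloca_with_align is given in bits
   and must be a constant power of two.

     void
     scratch (unsigned int n)
     {
       char *buf = __builtin_alloca (n);
       char *vec = __builtin_alloca_with_align (n, 256);   // 32-byte aligned
       __builtin_memset (buf, 0, n);
       __builtin_memset (vec, 0, n);
     }
*/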
4633
74bdbe96 4634/* Expand a call to bswap builtin in EXP.
4635 Return NULL_RTX if a normal call should be emitted rather than expanding the
4636 function in-line. If convenient, the result should be placed in TARGET.
4637 SUBTARGET may be used as the target for computing one of EXP's operands. */
42791117 4638
4639static rtx
3754d046 4640expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
74bdbe96 4641 rtx subtarget)
42791117 4642{
42791117 4643 tree arg;
4644 rtx op0;
4645
c2f47e15 4646 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4647 return NULL_RTX;
42791117 4648
c2f47e15 4649 arg = CALL_EXPR_ARG (exp, 0);
74bdbe96 4650 op0 = expand_expr (arg,
4651 subtarget && GET_MODE (subtarget) == target_mode
4652 ? subtarget : NULL_RTX,
4653 target_mode, EXPAND_NORMAL);
4654 if (GET_MODE (op0) != target_mode)
4655 op0 = convert_to_mode (target_mode, op0, 1);
42791117 4656
74bdbe96 4657 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
42791117 4658
4659 gcc_assert (target);
4660
74bdbe96 4661 return convert_to_mode (target_mode, target, 1);
42791117 4662}
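/* A minimal usage sketch: the byte-swap builtins expanded above reverse
   the byte order of their integer argument.

     unsigned int
     swap32 (unsigned int x)
     {
       return __builtin_bswap32 (x);   // 0x12345678 becomes 0x78563412
     }
*/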
4663
c2f47e15 4664/* Expand a call to a unary builtin in EXP.
4665 Return NULL_RTX if a normal call should be emitted rather than expanding the
53800dbe 4666 function in-line. If convenient, the result should be placed in TARGET.
4667 SUBTARGET may be used as the target for computing one of EXP's operands. */
15c6cf6b 4668
53800dbe 4669static rtx
3754d046 4670expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
aecda0d6 4671 rtx subtarget, optab op_optab)
53800dbe 4672{
4673 rtx op0;
c2f47e15 4674
4675 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4676 return NULL_RTX;
53800dbe 4677
4678 /* Compute the argument. */
f97eea22 4679 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4680 (subtarget
4681 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4682 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
1db6d067 4683 VOIDmode, EXPAND_NORMAL);
6a08d0ab 4684 /* Compute op, into TARGET if possible.
53800dbe 4685 Set TARGET to wherever the result comes back. */
c2f47e15 4686 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
6aaa1f9e 4687 op_optab, op0, target, op_optab != clrsb_optab);
64db345d 4688 gcc_assert (target);
7d3f6cc7 4689
efb070c8 4690 return convert_to_mode (target_mode, target, 0);
53800dbe 4691}
89cfe6e5 4692
48e1416a 4693/* Expand a call to __builtin_expect. We just return our argument
5a74f77e 4694   as the builtin_expect semantics should have already been applied by the
 4695   tree branch prediction pass. */
89cfe6e5 4696
4697static rtx
c2f47e15 4698expand_builtin_expect (tree exp, rtx target)
89cfe6e5 4699{
1e4adcfc 4700 tree arg;
89cfe6e5 4701
c2f47e15 4702 if (call_expr_nargs (exp) < 2)
89cfe6e5 4703 return const0_rtx;
c2f47e15 4704 arg = CALL_EXPR_ARG (exp, 0);
89cfe6e5 4705
c2f47e15 4706 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5a74f77e 4707 /* When guessing was done, the hints should be already stripped away. */
07311427 4708 gcc_assert (!flag_guess_branch_prob
852f689e 4709 || optimize == 0 || seen_error ());
89cfe6e5 4710 return target;
4711}
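/* A minimal usage sketch: by the time this expander runs, the probability
   hint has already been consumed by the tree-level branch predictor, so
   only the first argument survives.

     int
     safe_load (const int *p)
     {
       if (__builtin_expect (p == 0, 0))   // p is unlikely to be null
         return -1;
       return *p;
     }
*/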
689df48e 4712
fca0886c 4713/* Expand a call to __builtin_assume_aligned. We just return our first
 4714   argument as the builtin_assume_aligned semantics should have already been
 4715   applied by CCP. */
4716
4717static rtx
4718expand_builtin_assume_aligned (tree exp, rtx target)
4719{
4720 if (call_expr_nargs (exp) < 2)
4721 return const0_rtx;
4722 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4723 EXPAND_NORMAL);
4724 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4725 && (call_expr_nargs (exp) < 3
4726 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4727 return target;
4728}
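/* A minimal usage sketch: the alignment promise is used by earlier passes
   (CCP); at expansion time only the pointer argument is forwarded.

     double
     first_element (const double *p)
     {
       const double *q = __builtin_assume_aligned (p, 32);
       return q[0];
     }
*/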
4729
c22de3f0 4730void
aecda0d6 4731expand_builtin_trap (void)
a0ef1725 4732{
4db8dd0c 4733 if (targetm.have_trap ())
f73960eb 4734 {
4db8dd0c 4735 rtx_insn *insn = emit_insn (targetm.gen_trap ());
f73960eb 4736 /* For trap insns when not accumulating outgoing args force
4737 REG_ARGS_SIZE note to prevent crossjumping of calls with
4738 different args sizes. */
4739 if (!ACCUMULATE_OUTGOING_ARGS)
4740 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4741 }
a0ef1725 4742 else
a0ef1725 4743 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4744 emit_barrier ();
4745}
78a74442 4746
d2b48f0c 4747/* Expand a call to __builtin_unreachable. We do nothing except emit
4748 a barrier saying that control flow will not pass here.
4749
4750 It is the responsibility of the program being compiled to ensure
 4751   that control flow never reaches __builtin_unreachable. */
4752static void
4753expand_builtin_unreachable (void)
4754{
4755 emit_barrier ();
4756}
4757
c2f47e15 4758/* Expand EXP, a call to fabs, fabsf or fabsl.
4759 Return NULL_RTX if a normal call should be emitted rather than expanding
78a74442 4760 the function inline. If convenient, the result should be placed
4761 in TARGET. SUBTARGET may be used as the target for computing
4762 the operand. */
4763
4764static rtx
c2f47e15 4765expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
78a74442 4766{
3754d046 4767 machine_mode mode;
78a74442 4768 tree arg;
4769 rtx op0;
4770
c2f47e15 4771 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4772 return NULL_RTX;
78a74442 4773
c2f47e15 4774 arg = CALL_EXPR_ARG (exp, 0);
c7f617c2 4775 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
78a74442 4776 mode = TYPE_MODE (TREE_TYPE (arg));
1db6d067 4777 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
78a74442 4778 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4779}
4780
c2f47e15 4781/* Expand EXP, a call to copysign, copysignf, or copysignl.
270436f3 4782   Return NULL_RTX if a normal call should be emitted rather than expanding the
4783 function inline. If convenient, the result should be placed in TARGET.
4784 SUBTARGET may be used as the target for computing the operand. */
4785
4786static rtx
c2f47e15 4787expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
270436f3 4788{
4789 rtx op0, op1;
4790 tree arg;
4791
c2f47e15 4792 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4793 return NULL_RTX;
270436f3 4794
c2f47e15 4795 arg = CALL_EXPR_ARG (exp, 0);
8ec3c5c2 4796 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
270436f3 4797
c2f47e15 4798 arg = CALL_EXPR_ARG (exp, 1);
8ec3c5c2 4799 op1 = expand_normal (arg);
270436f3 4800
4801 return expand_copysign (op0, op1, target);
4802}
4803
ac8fb6db 4804/* Expand a call to __builtin___clear_cache. */
4805
4806static rtx
32e17df0 4807expand_builtin___clear_cache (tree exp)
ac8fb6db 4808{
32e17df0 4809 if (!targetm.code_for_clear_cache)
4810 {
ac8fb6db 4811#ifdef CLEAR_INSN_CACHE
32e17df0 4812 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4813 does something. Just do the default expansion to a call to
4814 __clear_cache(). */
4815 return NULL_RTX;
ac8fb6db 4816#else
32e17df0 4817 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4818 does nothing. There is no need to call it. Do nothing. */
4819 return const0_rtx;
ac8fb6db 4820#endif /* CLEAR_INSN_CACHE */
32e17df0 4821 }
4822
ac8fb6db 4823 /* We have a "clear_cache" insn, and it will handle everything. */
4824 tree begin, end;
4825 rtx begin_rtx, end_rtx;
ac8fb6db 4826
4827 /* We must not expand to a library call. If we did, any
4828 fallback library function in libgcc that might contain a call to
4829 __builtin___clear_cache() would recurse infinitely. */
4830 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4831 {
4832 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4833 return const0_rtx;
4834 }
4835
32e17df0 4836 if (targetm.have_clear_cache ())
ac8fb6db 4837 {
8786db1e 4838 struct expand_operand ops[2];
ac8fb6db 4839
4840 begin = CALL_EXPR_ARG (exp, 0);
4841 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4842
4843 end = CALL_EXPR_ARG (exp, 1);
4844 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
ac8fb6db 4845
8786db1e 4846 create_address_operand (&ops[0], begin_rtx);
4847 create_address_operand (&ops[1], end_rtx);
32e17df0 4848 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8786db1e 4849 return const0_rtx;
ac8fb6db 4850 }
4851 return const0_rtx;
ac8fb6db 4852}
4853
4ee9c684 4854/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4855
4856static rtx
4857round_trampoline_addr (rtx tramp)
4858{
4859 rtx temp, addend, mask;
4860
4861 /* If we don't need too much alignment, we'll have been guaranteed
4862 proper alignment by get_trampoline_type. */
4863 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4864 return tramp;
4865
4866 /* Round address up to desired boundary. */
4867 temp = gen_reg_rtx (Pmode);
0359f9f5 4868 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4869 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4ee9c684 4870
4871 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4872 temp, 0, OPTAB_LIB_WIDEN);
4873 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4874 temp, 0, OPTAB_LIB_WIDEN);
4875
4876 return tramp;
4877}
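/* Worked example of the rounding above, assuming TRAMPOLINE_ALIGNMENT is
   64 bits: ADDEND is 7 and MASK is -8, so a trampoline address of 0x1003
   becomes (0x1003 + 7) & -8 == 0x1008.  */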
4878
4879static rtx
c307f106 4880expand_builtin_init_trampoline (tree exp, bool onstack)
4ee9c684 4881{
4882 tree t_tramp, t_func, t_chain;
82c7907c 4883 rtx m_tramp, r_tramp, r_chain, tmp;
4ee9c684 4884
c2f47e15 4885 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4ee9c684 4886 POINTER_TYPE, VOID_TYPE))
4887 return NULL_RTX;
4888
c2f47e15 4889 t_tramp = CALL_EXPR_ARG (exp, 0);
4890 t_func = CALL_EXPR_ARG (exp, 1);
4891 t_chain = CALL_EXPR_ARG (exp, 2);
4ee9c684 4892
8ec3c5c2 4893 r_tramp = expand_normal (t_tramp);
82c7907c 4894 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4895 MEM_NOTRAP_P (m_tramp) = 1;
4896
c307f106 4897 /* If ONSTACK, the TRAMP argument should be the address of a field
4898 within the local function's FRAME decl. Either way, let's see if
4899 we can fill in the MEM_ATTRs for this memory. */
82c7907c 4900 if (TREE_CODE (t_tramp) == ADDR_EXPR)
f4146cb8 4901 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
82c7907c 4902
c307f106 4903 /* Creator of a heap trampoline is responsible for making sure the
4904 address is aligned to at least STACK_BOUNDARY. Normally malloc
4905 will ensure this anyhow. */
82c7907c 4906 tmp = round_trampoline_addr (r_tramp);
4907 if (tmp != r_tramp)
4908 {
4909 m_tramp = change_address (m_tramp, BLKmode, tmp);
4910 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5b2a69fa 4911 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
82c7907c 4912 }
4913
4914 /* The FUNC argument should be the address of the nested function.
4915 Extract the actual function decl to pass to the hook. */
4916 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4917 t_func = TREE_OPERAND (t_func, 0);
4918 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4919
8ec3c5c2 4920 r_chain = expand_normal (t_chain);
4ee9c684 4921
4922 /* Generate insns to initialize the trampoline. */
82c7907c 4923 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4ee9c684 4924
c307f106 4925 if (onstack)
4926 {
4927 trampolines_created = 1;
8bc8a8f4 4928
c307f106 4929 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4930 "trampoline generated for nested function %qD", t_func);
4931 }
8bc8a8f4 4932
4ee9c684 4933 return const0_rtx;
4934}
4935
4936static rtx
c2f47e15 4937expand_builtin_adjust_trampoline (tree exp)
4ee9c684 4938{
4939 rtx tramp;
4940
c2f47e15 4941 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4ee9c684 4942 return NULL_RTX;
4943
c2f47e15 4944 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4ee9c684 4945 tramp = round_trampoline_addr (tramp);
82c7907c 4946 if (targetm.calls.trampoline_adjust_address)
4947 tramp = targetm.calls.trampoline_adjust_address (tramp);
4ee9c684 4948
4949 return tramp;
4950}
4951
93f564d6 4952/* Expand the call EXP to the built-in signbit, signbitf or signbitl
4953 function. The function first checks whether the back end provides
4954 an insn to implement signbit for the respective mode. If not, it
4955 checks whether the floating point format of the value is such that
4956 the sign bit can be extracted. If that is not the case, the
4957 function returns NULL_RTX to indicate that a normal call should be
4958 emitted rather than expanding the function in-line. EXP is the
4959 expression that is a call to the builtin function; if convenient,
4960 the result should be placed in TARGET. */
27f261ef 4961static rtx
4962expand_builtin_signbit (tree exp, rtx target)
4963{
4964 const struct real_format *fmt;
3754d046 4965 machine_mode fmode, imode, rmode;
c2f47e15 4966 tree arg;
ca4f1f5b 4967 int word, bitpos;
27eda240 4968 enum insn_code icode;
27f261ef 4969 rtx temp;
389dd41b 4970 location_t loc = EXPR_LOCATION (exp);
27f261ef 4971
c2f47e15 4972 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4973 return NULL_RTX;
27f261ef 4974
c2f47e15 4975 arg = CALL_EXPR_ARG (exp, 0);
27f261ef 4976 fmode = TYPE_MODE (TREE_TYPE (arg));
4977 rmode = TYPE_MODE (TREE_TYPE (exp));
4978 fmt = REAL_MODE_FORMAT (fmode);
4979
93f564d6 4980 arg = builtin_save_expr (arg);
4981
4982 /* Expand the argument yielding a RTX expression. */
4983 temp = expand_normal (arg);
4984
4985 /* Check if the back end provides an insn that handles signbit for the
4986 argument's mode. */
d6bf3b14 4987 icode = optab_handler (signbit_optab, fmode);
27eda240 4988 if (icode != CODE_FOR_nothing)
93f564d6 4989 {
1e0c0b35 4990 rtx_insn *last = get_last_insn ();
93f564d6 4991 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4e2a2fb4 4992 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4993 return target;
4994 delete_insns_since (last);
93f564d6 4995 }
4996
27f261ef 4997 /* For floating point formats without a sign bit, implement signbit
4998 as "ARG < 0.0". */
8d564692 4999 bitpos = fmt->signbit_ro;
ca4f1f5b 5000 if (bitpos < 0)
27f261ef 5001 {
5002 /* But we can't do this if the format supports signed zero. */
5003 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
c2f47e15 5004 return NULL_RTX;
27f261ef 5005
389dd41b 5006 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
49d00087 5007 build_real (TREE_TYPE (arg), dconst0));
27f261ef 5008 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5009 }
5010
ca4f1f5b 5011 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
27f261ef 5012 {
ca4f1f5b 5013 imode = int_mode_for_mode (fmode);
5014 if (imode == BLKmode)
c2f47e15 5015 return NULL_RTX;
ca4f1f5b 5016 temp = gen_lowpart (imode, temp);
24fd4260 5017 }
5018 else
5019 {
ca4f1f5b 5020 imode = word_mode;
5021 /* Handle targets with different FP word orders. */
5022 if (FLOAT_WORDS_BIG_ENDIAN)
a0c938f0 5023 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
ca4f1f5b 5024 else
a0c938f0 5025 word = bitpos / BITS_PER_WORD;
ca4f1f5b 5026 temp = operand_subword_force (temp, word, fmode);
5027 bitpos = bitpos % BITS_PER_WORD;
5028 }
5029
44b0f1d0 5030 /* Force the intermediate word_mode (or narrower) result into a
5031 register. This avoids attempting to create paradoxical SUBREGs
5032 of floating point modes below. */
5033 temp = force_reg (imode, temp);
5034
ca4f1f5b 5035 /* If the bitpos is within the "result mode" lowpart, the operation
 5036   can be implemented with a single bitwise AND. Otherwise, we need
5037 a right shift and an AND. */
5038
5039 if (bitpos < GET_MODE_BITSIZE (rmode))
5040 {
796b6678 5041 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
27f261ef 5042
4a46f016 5043 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
ca4f1f5b 5044 temp = gen_lowpart (rmode, temp);
24fd4260 5045 temp = expand_binop (rmode, and_optab, temp,
e913b5cd 5046 immed_wide_int_const (mask, rmode),
ca4f1f5b 5047 NULL_RTX, 1, OPTAB_LIB_WIDEN);
27f261ef 5048 }
ca4f1f5b 5049 else
5050 {
5051 /* Perform a logical right shift to place the signbit in the least
a0c938f0 5052 significant bit, then truncate the result to the desired mode
ca4f1f5b 5053 and mask just this bit. */
f5ff0b21 5054 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
ca4f1f5b 5055 temp = gen_lowpart (rmode, temp);
5056 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5057 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5058 }
5059
27f261ef 5060 return temp;
5061}
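/* Worked example of the fallback paths above, assuming IEEE formats on a
   64-bit target where signbit returns a 32-bit int (rmode == SImode):
   for float (SFmode) the sign is bit 31, which fits in the result mode,
   so the value is reinterpreted as an integer and masked with 0x80000000;
   for double (DFmode) the sign is bit 63, so the 64-bit image is shifted
   right by 63 and masked with 1.  */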
73673831 5062
5063/* Expand fork or exec calls. TARGET is the desired target of the
c2f47e15 5064 call. EXP is the call. FN is the
73673831 5065   decl of the actual function. IGNORE is nonzero if the
5066 value is to be ignored. */
5067
5068static rtx
c2f47e15 5069expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
73673831 5070{
5071 tree id, decl;
5072 tree call;
5073
5074 /* If we are not profiling, just call the function. */
5075 if (!profile_arc_flag)
5076 return NULL_RTX;
5077
5078 /* Otherwise call the wrapper. This should be equivalent for the rest of
5079 compiler, so the code does not diverge, and the wrapper may run the
9c9bad97 5080 code necessary for keeping the profiling sane. */
73673831 5081
5082 switch (DECL_FUNCTION_CODE (fn))
5083 {
5084 case BUILT_IN_FORK:
5085 id = get_identifier ("__gcov_fork");
5086 break;
5087
5088 case BUILT_IN_EXECL:
5089 id = get_identifier ("__gcov_execl");
5090 break;
5091
5092 case BUILT_IN_EXECV:
5093 id = get_identifier ("__gcov_execv");
5094 break;
5095
5096 case BUILT_IN_EXECLP:
5097 id = get_identifier ("__gcov_execlp");
5098 break;
5099
5100 case BUILT_IN_EXECLE:
5101 id = get_identifier ("__gcov_execle");
5102 break;
5103
5104 case BUILT_IN_EXECVP:
5105 id = get_identifier ("__gcov_execvp");
5106 break;
5107
5108 case BUILT_IN_EXECVE:
5109 id = get_identifier ("__gcov_execve");
5110 break;
5111
5112 default:
64db345d 5113 gcc_unreachable ();
73673831 5114 }
5115
e60a6f7b 5116 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5117 FUNCTION_DECL, id, TREE_TYPE (fn));
73673831 5118 DECL_EXTERNAL (decl) = 1;
5119 TREE_PUBLIC (decl) = 1;
5120 DECL_ARTIFICIAL (decl) = 1;
5121 TREE_NOTHROW (decl) = 1;
e82d310b 5122 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5123 DECL_VISIBILITY_SPECIFIED (decl) = 1;
389dd41b 5124 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
73673831 5125 return expand_call (call, target, ignore);
c2f47e15 5126 }
48e1416a 5127
b6a5fc45 5128
5129\f
3e272de8 5130/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5131 the pointer in these functions is void*, the tree optimizers may remove
5132 casts. The mode computed in expand_builtin isn't reliable either, due
5133 to __sync_bool_compare_and_swap.
5134
5135 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5136 group of builtins. This gives us log2 of the mode size. */
5137
3754d046 5138static inline machine_mode
3e272de8 5139get_builtin_sync_mode (int fcode_diff)
5140{
ad3a13b5 5141 /* The size is not negotiable, so ask not to get BLKmode in return
5142 if the target indicates that a smaller size would be better. */
5143 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
3e272de8 5144}
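/* For example, FCODE_DIFF is 2 for BUILT_IN_SYNC_FETCH_AND_ADD_4 relative
   to BUILT_IN_SYNC_FETCH_AND_ADD_1, and with 8-bit units
   BITS_PER_UNIT << 2 == 32, so the 32-bit integer mode is returned.  */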
5145
041e0215 5146/* Expand the memory expression LOC and return the appropriate memory operand
5147 for the builtin_sync operations. */
5148
5149static rtx
3754d046 5150get_builtin_sync_mem (tree loc, machine_mode mode)
041e0215 5151{
5152 rtx addr, mem;
5153
7f4d56ad 5154 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5155 addr = convert_memory_address (Pmode, addr);
041e0215 5156
5157 /* Note that we explicitly do not want any alias information for this
5158 memory, so that we kill all other live memories. Otherwise we don't
5159 satisfy the full barrier semantics of the intrinsic. */
5160 mem = validize_mem (gen_rtx_MEM (mode, addr));
5161
153c3b50 5162   /* The alignment needs to be at least that of the mode. */
5163 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
957d0361 5164 get_pointer_alignment (loc)));
c94cfd1c 5165 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
041e0215 5166 MEM_VOLATILE_P (mem) = 1;
5167
5168 return mem;
5169}
5170
1cd6e20d 5171/* Make sure an argument is in the right mode.
5172 EXP is the tree argument.
5173 MODE is the mode it should be in. */
5174
5175static rtx
3754d046 5176expand_expr_force_mode (tree exp, machine_mode mode)
1cd6e20d 5177{
5178 rtx val;
3754d046 5179 machine_mode old_mode;
1cd6e20d 5180
5181 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5182 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5183 of CONST_INTs, where we know the old_mode only from the call argument. */
5184
5185 old_mode = GET_MODE (val);
5186 if (old_mode == VOIDmode)
5187 old_mode = TYPE_MODE (TREE_TYPE (exp));
5188 val = convert_modes (mode, old_mode, val, 1);
5189 return val;
5190}
5191
5192
b6a5fc45 5193/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
c2f47e15 5194 EXP is the CALL_EXPR. CODE is the rtx code
b6a5fc45 5195 that corresponds to the arithmetic or logical operation from the name;
5196 an exception here is that NOT actually means NAND. TARGET is an optional
5197 place for us to store the results; AFTER is true if this is the
1cd6e20d 5198 fetch_and_xxx form. */
b6a5fc45 5199
5200static rtx
3754d046 5201expand_builtin_sync_operation (machine_mode mode, tree exp,
3e272de8 5202 enum rtx_code code, bool after,
1cd6e20d 5203 rtx target)
b6a5fc45 5204{
041e0215 5205 rtx val, mem;
e60a6f7b 5206 location_t loc = EXPR_LOCATION (exp);
b6a5fc45 5207
cf73e559 5208 if (code == NOT && warn_sync_nand)
5209 {
5210 tree fndecl = get_callee_fndecl (exp);
5211 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5212
5213 static bool warned_f_a_n, warned_n_a_f;
5214
5215 switch (fcode)
5216 {
2797f13a 5217 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5218 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5219 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5220 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5221 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
cf73e559 5222 if (warned_f_a_n)
5223 break;
5224
b9a16870 5225 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
e60a6f7b 5226 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5227 warned_f_a_n = true;
5228 break;
5229
2797f13a 5230 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5231 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5232 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5233 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5234 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
cf73e559 5235 if (warned_n_a_f)
5236 break;
5237
b9a16870 5238 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
e60a6f7b 5239 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
cf73e559 5240 warned_n_a_f = true;
5241 break;
5242
5243 default:
5244 gcc_unreachable ();
5245 }
5246 }
5247
b6a5fc45 5248 /* Expand the operands. */
c2f47e15 5249 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5250 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
b6a5fc45 5251
a372f7ca 5252 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
1cd6e20d 5253 after);
b6a5fc45 5254}
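/* A minimal usage sketch of the calls handled above:

     int
     fetch_then_add (int *counter)
     {
       return __sync_fetch_and_add (counter, 1);    // returns the old value
     }

     int
     add_then_fetch (int *counter)
     {
       return __sync_add_and_fetch (counter, 1);    // returns the new value
     }
*/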
5255
5256/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
c2f47e15 5257 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
b6a5fc45 5258 true if this is the boolean form. TARGET is a place for us to store the
5259 results; this is NOT optional if IS_BOOL is true. */
5260
5261static rtx
3754d046 5262expand_builtin_compare_and_swap (machine_mode mode, tree exp,
3e272de8 5263 bool is_bool, rtx target)
b6a5fc45 5264{
041e0215 5265 rtx old_val, new_val, mem;
ba885f6a 5266 rtx *pbool, *poval;
b6a5fc45 5267
5268 /* Expand the operands. */
c2f47e15 5269 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5270 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5271 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
b6a5fc45 5272
ba885f6a 5273 pbool = poval = NULL;
5274 if (target != const0_rtx)
5275 {
5276 if (is_bool)
5277 pbool = &target;
5278 else
5279 poval = &target;
5280 }
5281 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
a372f7ca 5282 false, MEMMODEL_SYNC_SEQ_CST,
5283 MEMMODEL_SYNC_SEQ_CST))
1cd6e20d 5284 return NULL_RTX;
c2f47e15 5285
1cd6e20d 5286 return target;
b6a5fc45 5287}
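/* A minimal usage sketch of the two compare-and-swap forms handled above:

     int
     try_lock (int *l)
     {
       return __sync_bool_compare_and_swap (l, 0, 1);    // boolean form
     }

     int
     exchange_if_equal (int *p, int expected, int desired)
     {
       return __sync_val_compare_and_swap (p, expected, desired);
     }
*/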
5288
5289/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5290 general form is actually an atomic exchange, and some targets only
5291 support a reduced form with the second argument being a constant 1.
48e1416a 5292 EXP is the CALL_EXPR; TARGET is an optional place for us to store
c2f47e15 5293 the results. */
b6a5fc45 5294
5295static rtx
3754d046 5296expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
1cd6e20d 5297 rtx target)
b6a5fc45 5298{
041e0215 5299 rtx val, mem;
b6a5fc45 5300
5301 /* Expand the operands. */
c2f47e15 5302 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
1cd6e20d 5303 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5304
7821cde1 5305 return expand_sync_lock_test_and_set (target, mem, val);
1cd6e20d 5306}
5307
5308/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5309
5310static void
3754d046 5311expand_builtin_sync_lock_release (machine_mode mode, tree exp)
1cd6e20d 5312{
5313 rtx mem;
5314
5315 /* Expand the operands. */
5316 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5317
a372f7ca 5318 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
1cd6e20d 5319}
5320
5321/* Given an integer representing an ``enum memmodel'', verify its
5322 correctness and return the memory model enum. */
5323
5324static enum memmodel
5325get_memmodel (tree exp)
5326{
5327 rtx op;
7f738025 5328 unsigned HOST_WIDE_INT val;
1cd6e20d 5329
5330 /* If the parameter is not a constant, it's a run time value so we'll just
5331 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5332 if (TREE_CODE (exp) != INTEGER_CST)
5333 return MEMMODEL_SEQ_CST;
5334
5335 op = expand_normal (exp);
7f738025 5336
5337 val = INTVAL (op);
5338 if (targetm.memmodel_check)
5339 val = targetm.memmodel_check (val);
5340 else if (val & ~MEMMODEL_MASK)
5341 {
5342 warning (OPT_Winvalid_memory_model,
5343 "Unknown architecture specifier in memory model to builtin.");
5344 return MEMMODEL_SEQ_CST;
5345 }
5346
a372f7ca 5347   /* Should never see an explicit user SYNC memory model, so >= LAST works. */
5348 if (memmodel_base (val) >= MEMMODEL_LAST)
1cd6e20d 5349 {
5350 warning (OPT_Winvalid_memory_model,
5351 "invalid memory model argument to builtin");
5352 return MEMMODEL_SEQ_CST;
5353 }
7f738025 5354
3070f133 5355 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5356 be conservative and promote consume to acquire. */
5357 if (val == MEMMODEL_CONSUME)
5358 val = MEMMODEL_ACQUIRE;
5359
7f738025 5360 return (enum memmodel) val;
1cd6e20d 5361}
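/* The model argument normally comes from the __ATOMIC_* macros, i.e.
   __ATOMIC_RELAXED (0), __ATOMIC_CONSUME (1), __ATOMIC_ACQUIRE (2),
   __ATOMIC_RELEASE (3), __ATOMIC_ACQ_REL (4) and __ATOMIC_SEQ_CST (5);
   as described above, a non-constant argument is treated as seq-cst and
   consume is promoted to acquire.  */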
5362
5363/* Expand the __atomic_exchange intrinsic:
5364 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5365 EXP is the CALL_EXPR.
5366 TARGET is an optional place for us to store the results. */
5367
5368static rtx
3754d046 5369expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
1cd6e20d 5370{
5371 rtx val, mem;
5372 enum memmodel model;
5373
5374 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
1cd6e20d 5375
5376 if (!flag_inline_atomics)
5377 return NULL_RTX;
5378
5379 /* Expand the operands. */
5380 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5381 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5382
7821cde1 5383 return expand_atomic_exchange (target, mem, val, model);
1cd6e20d 5384}
5385
5386/* Expand the __atomic_compare_exchange intrinsic:
5387 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5388 TYPE desired, BOOL weak,
5389 enum memmodel success,
5390 enum memmodel failure)
5391 EXP is the CALL_EXPR.
5392 TARGET is an optional place for us to store the results. */
5393
5394static rtx
3754d046 5395expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
1cd6e20d 5396 rtx target)
5397{
1e0c0b35 5398 rtx expect, desired, mem, oldval;
5399 rtx_code_label *label;
1cd6e20d 5400 enum memmodel success, failure;
5401 tree weak;
5402 bool is_weak;
5403
5404 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5405 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5406
086f4e33 5407 if (failure > success)
5408 {
5409 warning (OPT_Winvalid_memory_model,
5410 "failure memory model cannot be stronger than success memory "
5411 "model for %<__atomic_compare_exchange%>");
5412 success = MEMMODEL_SEQ_CST;
5413 }
5414
a372f7ca 5415 if (is_mm_release (failure) || is_mm_acq_rel (failure))
1cd6e20d 5416 {
086f4e33 5417 warning (OPT_Winvalid_memory_model,
5418 "invalid failure memory model for "
5419 "%<__atomic_compare_exchange%>");
5420 failure = MEMMODEL_SEQ_CST;
5421 success = MEMMODEL_SEQ_CST;
1cd6e20d 5422 }
5423
086f4e33 5424
1cd6e20d 5425 if (!flag_inline_atomics)
5426 return NULL_RTX;
5427
5428 /* Expand the operands. */
5429 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5430
5431 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5432 expect = convert_memory_address (Pmode, expect);
c401b131 5433 expect = gen_rtx_MEM (mode, expect);
1cd6e20d 5434 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5435
5436 weak = CALL_EXPR_ARG (exp, 3);
5437 is_weak = false;
e913b5cd 5438 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
1cd6e20d 5439 is_weak = true;
5440
d86e3752 5441 if (target == const0_rtx)
5442 target = NULL;
d86e3752 5443
3c29a9ea 5444   /* Lest the rtl backend create a race condition with an improper store
5445 to memory, always create a new pseudo for OLDVAL. */
5446 oldval = NULL;
5447
5448 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
ba885f6a 5449 is_weak, success, failure))
1cd6e20d 5450 return NULL_RTX;
5451
d86e3752 5452 /* Conditionally store back to EXPECT, lest we create a race condition
5453 with an improper store to memory. */
5454 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5455 the normal case where EXPECT is totally private, i.e. a register. At
5456 which point the store can be unconditional. */
5457 label = gen_label_rtx ();
62589f76 5458 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5459 GET_MODE (target), 1, label);
d86e3752 5460 emit_move_insn (expect, oldval);
5461 emit_label (label);
c401b131 5462
1cd6e20d 5463 return target;
5464}
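/* A minimal usage sketch of the source-level form that reaches the
   expander above:

     _Bool
     cas_strong (int *p, int *expected, int desired)
     {
       return __atomic_compare_exchange_n (p, expected, desired, 0,
                                           __ATOMIC_SEQ_CST,
                                           __ATOMIC_SEQ_CST);
     }

   On failure the current value is written back through *EXPECTED, which
   is why OLDVAL is stored back conditionally above.  */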
5465
5466/* Expand the __atomic_load intrinsic:
5467 TYPE __atomic_load (TYPE *object, enum memmodel)
5468 EXP is the CALL_EXPR.
5469 TARGET is an optional place for us to store the results. */
5470
5471static rtx
3754d046 5472expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
1cd6e20d 5473{
5474 rtx mem;
5475 enum memmodel model;
5476
5477 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
a372f7ca 5478 if (is_mm_release (model) || is_mm_acq_rel (model))
1cd6e20d 5479 {
086f4e33 5480 warning (OPT_Winvalid_memory_model,
5481 "invalid memory model for %<__atomic_load%>");
5482 model = MEMMODEL_SEQ_CST;
1cd6e20d 5483 }
5484
5485 if (!flag_inline_atomics)
5486 return NULL_RTX;
5487
5488 /* Expand the operand. */
5489 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5490
5491 return expand_atomic_load (target, mem, model);
5492}
5493
5494
5495/* Expand the __atomic_store intrinsic:
5496 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5497 EXP is the CALL_EXPR.
5498 TARGET is an optional place for us to store the results. */
5499
5500static rtx
3754d046 5501expand_builtin_atomic_store (machine_mode mode, tree exp)
1cd6e20d 5502{
5503 rtx mem, val;
5504 enum memmodel model;
5505
5506 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
a372f7ca 5507 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5508 || is_mm_release (model)))
1cd6e20d 5509 {
086f4e33 5510 warning (OPT_Winvalid_memory_model,
5511 "invalid memory model for %<__atomic_store%>");
5512 model = MEMMODEL_SEQ_CST;
1cd6e20d 5513 }
5514
5515 if (!flag_inline_atomics)
5516 return NULL_RTX;
5517
5518 /* Expand the operands. */
5519 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5520 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5521
8808bf16 5522 return expand_atomic_store (mem, val, model, false);
1cd6e20d 5523}
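/* A minimal usage sketch of the load and store forms expanded above:

     int
     load_acquire (const int *p)
     {
       return __atomic_load_n (p, __ATOMIC_ACQUIRE);
     }

     void
     store_release (int *p, int v)
     {
       __atomic_store_n (p, v, __ATOMIC_RELEASE);
     }
*/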
5524
5525/* Expand the __atomic_fetch_XXX intrinsic:
5526 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5527 EXP is the CALL_EXPR.
5528 TARGET is an optional place for us to store the results.
5529 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5530 FETCH_AFTER is true if returning the result of the operation.
5531 FETCH_AFTER is false if returning the value before the operation.
5532 IGNORE is true if the result is not used.
5533 EXT_CALL is the correct builtin for an external call if this cannot be
5534 resolved to an instruction sequence. */
5535
5536static rtx
3754d046 5537expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
1cd6e20d 5538 enum rtx_code code, bool fetch_after,
5539 bool ignore, enum built_in_function ext_call)
5540{
5541 rtx val, mem, ret;
5542 enum memmodel model;
5543 tree fndecl;
5544 tree addr;
5545
5546 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5547
5548 /* Expand the operands. */
5549 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5550 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5551
5552 /* Only try generating instructions if inlining is turned on. */
5553 if (flag_inline_atomics)
5554 {
5555 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5556 if (ret)
5557 return ret;
5558 }
5559
5560 /* Return if a different routine isn't needed for the library call. */
5561 if (ext_call == BUILT_IN_NONE)
5562 return NULL_RTX;
5563
5564 /* Change the call to the specified function. */
5565 fndecl = get_callee_fndecl (exp);
5566 addr = CALL_EXPR_FN (exp);
5567 STRIP_NOPS (addr);
5568
5569 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9af5ce0c 5570 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
1cd6e20d 5571
5572 /* Expand the call here so we can emit trailing code. */
5573 ret = expand_call (exp, target, ignore);
5574
5575 /* Replace the original function just in case it matters. */
5576 TREE_OPERAND (addr, 0) = fndecl;
5577
5578 /* Then issue the arithmetic correction to return the right result. */
5579 if (!ignore)
c449f851 5580 {
5581 if (code == NOT)
5582 {
5583 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5584 OPTAB_LIB_WIDEN);
5585 ret = expand_simple_unop (mode, NOT, ret, target, true);
5586 }
5587 else
5588 ret = expand_simple_binop (mode, code, ret, val, target, true,
5589 OPTAB_LIB_WIDEN);
5590 }
1cd6e20d 5591 return ret;
5592}
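/* Worked example of the correction above: if __atomic_nand_fetch cannot
   be inlined, the corresponding fetch-before library routine is called
   instead; it returns the old value, so the "fetch after" result is
   recomputed here as ~(old & val), which is the NAND that the NOT code
   stands for.  */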
5593
10b744a3 5594/* Expand an atomic clear operation.
5595 void _atomic_clear (BOOL *obj, enum memmodel)
5596 EXP is the call expression. */
5597
5598static rtx
5599expand_builtin_atomic_clear (tree exp)
5600{
3754d046 5601 machine_mode mode;
10b744a3 5602 rtx mem, ret;
5603 enum memmodel model;
5604
5605 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5606 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5607 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5608
a372f7ca 5609 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
10b744a3 5610 {
086f4e33 5611 warning (OPT_Winvalid_memory_model,
5612 "invalid memory model for %<__atomic_store%>");
5613 model = MEMMODEL_SEQ_CST;
10b744a3 5614 }
5615
5616 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
 5617   Failing that, a plain store is emitted below. The only way this can
5618 fail is if the bool type is larger than a word size. Unlikely, but
5619 handle it anyway for completeness. Assume a single threaded model since
5620 there is no atomic support in this case, and no barriers are required. */
5621 ret = expand_atomic_store (mem, const0_rtx, model, true);
5622 if (!ret)
5623 emit_move_insn (mem, const0_rtx);
5624 return const0_rtx;
5625}
5626
5627/* Expand an atomic test_and_set operation.
5628 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5629 EXP is the call expression. */
5630
5631static rtx
7821cde1 5632expand_builtin_atomic_test_and_set (tree exp, rtx target)
10b744a3 5633{
7821cde1 5634 rtx mem;
10b744a3 5635 enum memmodel model;
3754d046 5636 machine_mode mode;
10b744a3 5637
5638 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5639 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5640 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5641
7821cde1 5642 return expand_atomic_test_and_set (target, mem, model);
10b744a3 5643}
5644
5645
1cd6e20d 5646/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5647 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5648
5649static tree
5650fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5651{
5652 int size;
3754d046 5653 machine_mode mode;
1cd6e20d 5654 unsigned int mode_align, type_align;
5655
5656 if (TREE_CODE (arg0) != INTEGER_CST)
5657 return NULL_TREE;
b6a5fc45 5658
1cd6e20d 5659 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5660 mode = mode_for_size (size, MODE_INT, 0);
5661 mode_align = GET_MODE_ALIGNMENT (mode);
5662
5663 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5664 type_align = mode_align;
5665 else
5666 {
5667 tree ttype = TREE_TYPE (arg1);
5668
5669 /* This function is usually invoked and folded immediately by the front
5670 end before anything else has a chance to look at it. The pointer
5671 parameter at this point is usually cast to a void *, so check for that
5672 and look past the cast. */
d09ef31a 5673 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
1cd6e20d 5674 && VOID_TYPE_P (TREE_TYPE (ttype)))
5675 arg1 = TREE_OPERAND (arg1, 0);
5676
5677 ttype = TREE_TYPE (arg1);
5678 gcc_assert (POINTER_TYPE_P (ttype));
5679
5680 /* Get the underlying type of the object. */
5681 ttype = TREE_TYPE (ttype);
5682 type_align = TYPE_ALIGN (ttype);
5683 }
5684
47ae02b7 5685 /* If the object has smaller alignment, the lock free routines cannot
1cd6e20d 5686 be used. */
5687 if (type_align < mode_align)
06308d2a 5688 return boolean_false_node;
1cd6e20d 5689
5690 /* Check if a compare_and_swap pattern exists for the mode which represents
5691 the required size. The pattern is not allowed to fail, so the existence
5692 of the pattern indicates support is present. */
29139cdc 5693 if (can_compare_and_swap_p (mode, true))
06308d2a 5694 return boolean_true_node;
1cd6e20d 5695 else
06308d2a 5696 return boolean_false_node;
1cd6e20d 5697}
5698
5699/* Return true if the parameters to call EXP represent an object which will
5700 always generate lock free instructions. The first argument represents the
5701 size of the object, and the second parameter is a pointer to the object
5702 itself. If NULL is passed for the object, then the result is based on
5703 typical alignment for an object of the specified size. Otherwise return
5704 false. */
5705
5706static rtx
5707expand_builtin_atomic_always_lock_free (tree exp)
5708{
5709 tree size;
5710 tree arg0 = CALL_EXPR_ARG (exp, 0);
5711 tree arg1 = CALL_EXPR_ARG (exp, 1);
5712
5713 if (TREE_CODE (arg0) != INTEGER_CST)
5714 {
5715 error ("non-constant argument 1 to __atomic_always_lock_free");
5716 return const0_rtx;
5717 }
5718
5719 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
06308d2a 5720 if (size == boolean_true_node)
1cd6e20d 5721 return const1_rtx;
5722 return const0_rtx;
5723}
5724
 5725/* Return boolean_true_node if it can be determined that object ARG1 of size
 5726   ARG0 is lock free on this architecture; otherwise return NULL_TREE. */
5727
5728static tree
5729fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5730{
5731 if (!flag_inline_atomics)
5732 return NULL_TREE;
5733
5734 /* If it isn't always lock free, don't generate a result. */
06308d2a 5735 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5736 return boolean_true_node;
1cd6e20d 5737
5738 return NULL_TREE;
5739}
5740
 5741/* Return const1_rtx if the parameters to call EXP represent an object which
 5742   is known to be lock free on this architecture. The first argument is the
 5743   size of the object, and the second parameter is a pointer to the object
 5744   itself. If NULL is passed for the object, then the result is based on
 5745   typical alignment for an object of the specified size. Otherwise return
 5746   NULL_RTX. */
5747
5748static rtx
5749expand_builtin_atomic_is_lock_free (tree exp)
5750{
5751 tree size;
5752 tree arg0 = CALL_EXPR_ARG (exp, 0);
5753 tree arg1 = CALL_EXPR_ARG (exp, 1);
5754
5755 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5756 {
5757 error ("non-integer argument 1 to __atomic_is_lock_free");
5758 return NULL_RTX;
5759 }
5760
5761 if (!flag_inline_atomics)
5762 return NULL_RTX;
5763
5764 /* If the value is known at compile time, return the RTX for it. */
5765 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
06308d2a 5766 if (size == boolean_true_node)
1cd6e20d 5767 return const1_rtx;
5768
5769 return NULL_RTX;
5770}
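/* A minimal usage sketch of the two lock-free queries handled above; the
   first always folds to a compile-time constant, the second may fall back
   to a libatomic call when the answer is not known at compile time.

     _Bool
     int_always_lock_free (void)
     {
       return __atomic_always_lock_free (sizeof (int), 0);
     }

     _Bool
     long_double_lock_free (long double *p)
     {
       return __atomic_is_lock_free (sizeof (long double), p);
     }
*/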
5771
1cd6e20d 5772/* Expand the __atomic_thread_fence intrinsic:
5773 void __atomic_thread_fence (enum memmodel)
5774 EXP is the CALL_EXPR. */
5775
5776static void
5777expand_builtin_atomic_thread_fence (tree exp)
5778{
fe54c06b 5779 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5780 expand_mem_thread_fence (model);
1cd6e20d 5781}
5782
5783/* Expand the __atomic_signal_fence intrinsic:
5784 void __atomic_signal_fence (enum memmodel)
5785 EXP is the CALL_EXPR. */
5786
5787static void
5788expand_builtin_atomic_signal_fence (tree exp)
5789{
fe54c06b 5790 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5791 expand_mem_signal_fence (model);
b6a5fc45 5792}
5793
5794/* Expand the __sync_synchronize intrinsic. */
5795
5796static void
2797f13a 5797expand_builtin_sync_synchronize (void)
b6a5fc45 5798{
a372f7ca 5799 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
b6a5fc45 5800}
5801
badaa04c 5802static rtx
5803expand_builtin_thread_pointer (tree exp, rtx target)
5804{
5805 enum insn_code icode;
5806 if (!validate_arglist (exp, VOID_TYPE))
5807 return const0_rtx;
5808 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5809 if (icode != CODE_FOR_nothing)
5810 {
5811 struct expand_operand op;
3ed779c3 5812      /* If the target is not suitable then create a new target. */
5813 if (target == NULL_RTX
5814 || !REG_P (target)
5815 || GET_MODE (target) != Pmode)
badaa04c 5816 target = gen_reg_rtx (Pmode);
5817 create_output_operand (&op, target, Pmode);
5818 expand_insn (icode, 1, &op);
5819 return target;
5820 }
5821 error ("__builtin_thread_pointer is not supported on this target");
5822 return const0_rtx;
5823}
5824
5825static void
5826expand_builtin_set_thread_pointer (tree exp)
5827{
5828 enum insn_code icode;
5829 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5830 return;
5831 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5832 if (icode != CODE_FOR_nothing)
5833 {
5834 struct expand_operand op;
5835 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5836 Pmode, EXPAND_NORMAL);
6f343c10 5837 create_input_operand (&op, val, Pmode);
badaa04c 5838 expand_insn (icode, 1, &op);
5839 return;
5840 }
5841 error ("__builtin_set_thread_pointer is not supported on this target");
5842}
5843
53800dbe 5844\f
0e80b01d 5845/* Emit code to restore the current value of stack. */
5846
5847static void
5848expand_stack_restore (tree var)
5849{
1e0c0b35 5850 rtx_insn *prev;
5851 rtx sa = expand_normal (var);
0e80b01d 5852
5853 sa = convert_memory_address (Pmode, sa);
5854
5855 prev = get_last_insn ();
5856 emit_stack_restore (SAVE_BLOCK, sa);
97354ae4 5857
5858 record_new_stack_level ();
5859
0e80b01d 5860 fixup_args_size_notes (prev, get_last_insn (), 0);
5861}
5862
0e80b01d 5863/* Emit code to save the current value of stack. */
5864
5865static rtx
5866expand_stack_save (void)
5867{
5868 rtx ret = NULL_RTX;
5869
0e80b01d 5870 emit_stack_save (SAVE_BLOCK, &ret);
5871 return ret;
5872}
5873
ca4c3545 5874
5875/* Expand OpenACC acc_on_device.
5876
5877 This has to happen late (that is, not in early folding; expand_builtin_*,
5878 rather than fold_builtin_*), as we have to act differently for host and
5879 acceleration device (ACCEL_COMPILER conditional). */
5880
5881static rtx
f212338e 5882expand_builtin_acc_on_device (tree exp, rtx target)
ca4c3545 5883{
5884 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5885 return NULL_RTX;
5886
5887 tree arg = CALL_EXPR_ARG (exp, 0);
5888
5889 /* Return (arg == v1 || arg == v2) ? 1 : 0. */
5890 machine_mode v_mode = TYPE_MODE (TREE_TYPE (arg));
5891 rtx v = expand_normal (arg), v1, v2;
f212338e 5892#ifdef ACCEL_COMPILER
ca4c3545 5893 v1 = GEN_INT (GOMP_DEVICE_NOT_HOST);
5894 v2 = GEN_INT (ACCEL_COMPILER_acc_device);
f212338e 5895#else
5896 v1 = GEN_INT (GOMP_DEVICE_NONE);
5897 v2 = GEN_INT (GOMP_DEVICE_HOST);
5898#endif
ca4c3545 5899 machine_mode target_mode = TYPE_MODE (integer_type_node);
15b4214c 5900 if (!target || !register_operand (target, target_mode))
ca4c3545 5901 target = gen_reg_rtx (target_mode);
5902 emit_move_insn (target, const1_rtx);
5903 rtx_code_label *done_label = gen_label_rtx ();
5904 do_compare_rtx_and_jump (v, v1, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5905 NULL, done_label, PROB_EVEN);
ca4c3545 5906 do_compare_rtx_and_jump (v, v2, EQ, false, v_mode, NULL_RTX,
f9a00e9e 5907 NULL, done_label, PROB_EVEN);
ca4c3545 5908 emit_move_insn (target, const0_rtx);
5909 emit_label (done_label);
5910
5911 return target;
5912}
5913
5914
53800dbe 5915/* Expand an expression EXP that calls a built-in function,
5916 with result going to TARGET if that's convenient
5917 (and in mode MODE if that's convenient).
5918 SUBTARGET may be used as the target for computing one of EXP's operands.
5919 IGNORE is nonzero if the value is to be ignored. */
5920
5921rtx
3754d046 5922expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
aecda0d6 5923 int ignore)
53800dbe 5924{
c6e6ecb1 5925 tree fndecl = get_callee_fndecl (exp);
53800dbe 5926 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3754d046 5927 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
67fa4078 5928 int flags;
53800dbe 5929
4e2f4ed5 5930 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5931 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5932
f9acf11a 5933 /* When ASan is enabled, we don't want to expand some memory/string
5934 builtins and rely on libsanitizer's hooks. This allows us to avoid
5935 redundant checks and be sure, that possible overflow will be detected
5936 by ASan. */
5937
5938 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5939 return expand_call (exp, target, ignore);
5940
53800dbe 5941 /* When not optimizing, generate calls to library functions for a certain
5942 set of builtins. */
cd9ff771 5943 if (!optimize
b6a5fc45 5944 && !called_as_built_in (fndecl)
73037a1e 5945 && fcode != BUILT_IN_FORK
5946 && fcode != BUILT_IN_EXECL
5947 && fcode != BUILT_IN_EXECV
5948 && fcode != BUILT_IN_EXECLP
5949 && fcode != BUILT_IN_EXECLE
5950 && fcode != BUILT_IN_EXECVP
5951 && fcode != BUILT_IN_EXECVE
2c281b15 5952 && fcode != BUILT_IN_ALLOCA
581bf1c2 5953 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
058a1b7a 5954 && fcode != BUILT_IN_FREE
5955 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5956 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5957 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5958 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5959 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5960 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5961 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5962 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5963 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5964 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5965 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5966 && fcode != BUILT_IN_CHKP_BNDRET)
cd9ff771 5967 return expand_call (exp, target, ignore);
53800dbe 5968
8d6d7930 5969 /* The built-in function expanders test for target == const0_rtx
5970 to determine whether the function's result will be ignored. */
5971 if (ignore)
5972 target = const0_rtx;
5973
5974 /* If the result of a pure or const built-in function is ignored, and
5975 none of its arguments are volatile, we can avoid expanding the
5976 built-in call and just evaluate the arguments for side-effects. */
5977 if (target == const0_rtx
67fa4078 5978 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5979 && !(flags & ECF_LOOPING_CONST_OR_PURE))
8d6d7930 5980 {
5981 bool volatilep = false;
5982 tree arg;
c2f47e15 5983 call_expr_arg_iterator iter;
8d6d7930 5984
c2f47e15 5985 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5986 if (TREE_THIS_VOLATILE (arg))
8d6d7930 5987 {
5988 volatilep = true;
5989 break;
5990 }
5991
5992 if (! volatilep)
5993 {
c2f47e15 5994 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5995 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
8d6d7930 5996 return const0_rtx;
5997 }
5998 }
5999
f21337ef 6000 /* expand_builtin_with_bounds is supposed to be used for
6001 instrumented builtin calls. */
058a1b7a 6002 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6003
53800dbe 6004 switch (fcode)
6005 {
4f35b1fc 6006 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 6007 case BUILT_IN_FABSD32:
6008 case BUILT_IN_FABSD64:
6009 case BUILT_IN_FABSD128:
c2f47e15 6010 target = expand_builtin_fabs (exp, target, subtarget);
78a74442 6011 if (target)
a0c938f0 6012 return target;
78a74442 6013 break;
6014
4f35b1fc 6015 CASE_FLT_FN (BUILT_IN_COPYSIGN):
c2f47e15 6016 target = expand_builtin_copysign (exp, target, subtarget);
270436f3 6017 if (target)
6018 return target;
6019 break;
6020
7d3f6cc7 6021 /* Just do a normal library call if we were unable to fold
6022 the values. */
4f35b1fc 6023 CASE_FLT_FN (BUILT_IN_CABS):
78a74442 6024 break;
53800dbe 6025
4f35b1fc 6026 CASE_FLT_FN (BUILT_IN_EXP):
6027 CASE_FLT_FN (BUILT_IN_EXP10):
6028 CASE_FLT_FN (BUILT_IN_POW10):
6029 CASE_FLT_FN (BUILT_IN_EXP2):
6030 CASE_FLT_FN (BUILT_IN_EXPM1):
6031 CASE_FLT_FN (BUILT_IN_LOGB):
4f35b1fc 6032 CASE_FLT_FN (BUILT_IN_LOG):
6033 CASE_FLT_FN (BUILT_IN_LOG10):
6034 CASE_FLT_FN (BUILT_IN_LOG2):
6035 CASE_FLT_FN (BUILT_IN_LOG1P):
6036 CASE_FLT_FN (BUILT_IN_TAN):
6037 CASE_FLT_FN (BUILT_IN_ASIN):
6038 CASE_FLT_FN (BUILT_IN_ACOS):
6039 CASE_FLT_FN (BUILT_IN_ATAN):
b3154a1f 6040 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
7f3be425 6041 /* Treat these like sqrt only if unsafe math optimizations are allowed,
6042 because of possible accuracy problems. */
6043 if (! flag_unsafe_math_optimizations)
53800dbe 6044 break;
4f35b1fc 6045 CASE_FLT_FN (BUILT_IN_SQRT):
6046 CASE_FLT_FN (BUILT_IN_FLOOR):
6047 CASE_FLT_FN (BUILT_IN_CEIL):
6048 CASE_FLT_FN (BUILT_IN_TRUNC):
6049 CASE_FLT_FN (BUILT_IN_ROUND):
6050 CASE_FLT_FN (BUILT_IN_NEARBYINT):
6051 CASE_FLT_FN (BUILT_IN_RINT):
53800dbe 6052 target = expand_builtin_mathfn (exp, target, subtarget);
6053 if (target)
6054 return target;
6055 break;
6056
7e0713b1 6057 CASE_FLT_FN (BUILT_IN_FMA):
6058 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6059 if (target)
6060 return target;
6061 break;
6062
a67a90e5 6063 CASE_FLT_FN (BUILT_IN_ILOGB):
6064 if (! flag_unsafe_math_optimizations)
6065 break;
69b779ea 6066 CASE_FLT_FN (BUILT_IN_ISINF):
cde061c1 6067 CASE_FLT_FN (BUILT_IN_FINITE):
6068 case BUILT_IN_ISFINITE:
8a1a9cb7 6069 case BUILT_IN_ISNORMAL:
f97eea22 6070 target = expand_builtin_interclass_mathfn (exp, target);
a67a90e5 6071 if (target)
6072 return target;
6073 break;
6074
80ff6494 6075 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 6076 CASE_FLT_FN (BUILT_IN_LCEIL):
6077 CASE_FLT_FN (BUILT_IN_LLCEIL):
6078 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 6079 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 6080 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ff1b14e4 6081 target = expand_builtin_int_roundingfn (exp, target);
ad52b9b7 6082 if (target)
6083 return target;
6084 break;
6085
80ff6494 6086 CASE_FLT_FN (BUILT_IN_IRINT):
7d3afc77 6087 CASE_FLT_FN (BUILT_IN_LRINT):
6088 CASE_FLT_FN (BUILT_IN_LLRINT):
80ff6494 6089 CASE_FLT_FN (BUILT_IN_IROUND):
ef2f1a10 6090 CASE_FLT_FN (BUILT_IN_LROUND):
6091 CASE_FLT_FN (BUILT_IN_LLROUND):
ff1b14e4 6092 target = expand_builtin_int_roundingfn_2 (exp, target);
7d3afc77 6093 if (target)
6094 return target;
6095 break;
6096
4f35b1fc 6097 CASE_FLT_FN (BUILT_IN_POWI):
f97eea22 6098 target = expand_builtin_powi (exp, target);
757c219d 6099 if (target)
6100 return target;
6101 break;
6102
4f35b1fc 6103 CASE_FLT_FN (BUILT_IN_ATAN2):
6104 CASE_FLT_FN (BUILT_IN_LDEXP):
73a954a1 6105 CASE_FLT_FN (BUILT_IN_SCALB):
6106 CASE_FLT_FN (BUILT_IN_SCALBN):
6107 CASE_FLT_FN (BUILT_IN_SCALBLN):
0fd605a5 6108 if (! flag_unsafe_math_optimizations)
6109 break;
ef722005 6110
6111 CASE_FLT_FN (BUILT_IN_FMOD):
6112 CASE_FLT_FN (BUILT_IN_REMAINDER):
6113 CASE_FLT_FN (BUILT_IN_DREM):
0810ff17 6114 CASE_FLT_FN (BUILT_IN_POW):
0fd605a5 6115 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6116 if (target)
6117 return target;
6118 break;
6119
d735c391 6120 CASE_FLT_FN (BUILT_IN_CEXPI):
f97eea22 6121 target = expand_builtin_cexpi (exp, target);
d735c391 6122 gcc_assert (target);
6123 return target;
6124
4f35b1fc 6125 CASE_FLT_FN (BUILT_IN_SIN):
6126 CASE_FLT_FN (BUILT_IN_COS):
6b43bae4 6127 if (! flag_unsafe_math_optimizations)
6128 break;
6129 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6130 if (target)
6131 return target;
6132 break;
6133
c3147c1a 6134 CASE_FLT_FN (BUILT_IN_SINCOS):
6135 if (! flag_unsafe_math_optimizations)
6136 break;
6137 target = expand_builtin_sincos (exp);
6138 if (target)
6139 return target;
6140 break;
6141
53800dbe 6142 case BUILT_IN_APPLY_ARGS:
6143 return expand_builtin_apply_args ();
6144
6145 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6146 FUNCTION with a copy of the parameters described by
6147 ARGUMENTS, and ARGSIZE. It returns a block of memory
 6148 allocated on the stack into which are stored all the registers
6149 that might possibly be used for returning the result of a
6150 function. ARGUMENTS is the value returned by
6151 __builtin_apply_args. ARGSIZE is the number of bytes of
6152 arguments that must be copied. ??? How should this value be
6153 computed? We'll also need a safe worst case value for varargs
6154 functions. */
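     /* A sketch of the intended source-level usage (TARGET_FN and the
	64-byte ARGSIZE below are only illustrative assumptions):

	     void *forward (void)
	     {
	       void *args = __builtin_apply_args ();
	       void *result
		 = __builtin_apply ((void (*) ()) target_fn, args, 64);
	       __builtin_return (result);
	     }

	expand_builtin_apply_args, expand_builtin_apply and
	expand_builtin_return implement the three steps used above.  */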
6155 case BUILT_IN_APPLY:
c2f47e15 6156 if (!validate_arglist (exp, POINTER_TYPE,
0eb671f7 6157 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
c2f47e15 6158 && !validate_arglist (exp, REFERENCE_TYPE,
0eb671f7 6159 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6160 return const0_rtx;
6161 else
6162 {
53800dbe 6163 rtx ops[3];
6164
c2f47e15 6165 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6166 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6167 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
53800dbe 6168
6169 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6170 }
6171
6172 /* __builtin_return (RESULT) causes the function to return the
6173 value described by RESULT. RESULT is address of the block of
6174 memory returned by __builtin_apply. */
6175 case BUILT_IN_RETURN:
c2f47e15 6176 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6177 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
53800dbe 6178 return const0_rtx;
6179
6180 case BUILT_IN_SAVEREGS:
a66c9326 6181 return expand_builtin_saveregs ();
53800dbe 6182
48dc2227 6183 case BUILT_IN_VA_ARG_PACK:
6184 /* All valid uses of __builtin_va_arg_pack () are removed during
6185 inlining. */
b8c23db3 6186 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
48dc2227 6187 return const0_rtx;
6188
4e1d7ea4 6189 case BUILT_IN_VA_ARG_PACK_LEN:
6190 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6191 inlining. */
b8c23db3 6192 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
4e1d7ea4 6193 return const0_rtx;
6194
53800dbe 6195 /* Return the address of the first anonymous stack arg. */
6196 case BUILT_IN_NEXT_ARG:
c2f47e15 6197 if (fold_builtin_next_arg (exp, false))
a0c938f0 6198 return const0_rtx;
79012a9d 6199 return expand_builtin_next_arg ();
53800dbe 6200
ac8fb6db 6201 case BUILT_IN_CLEAR_CACHE:
6202 target = expand_builtin___clear_cache (exp);
6203 if (target)
6204 return target;
6205 break;
6206
53800dbe 6207 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 6208 return expand_builtin_classify_type (exp);
53800dbe 6209
6210 case BUILT_IN_CONSTANT_P:
4ee9c684 6211 return const0_rtx;
53800dbe 6212
6213 case BUILT_IN_FRAME_ADDRESS:
6214 case BUILT_IN_RETURN_ADDRESS:
c2f47e15 6215 return expand_builtin_frame_address (fndecl, exp);
53800dbe 6216
6217 /* Returns the address of the area where the structure is returned.
6218 0 otherwise. */
6219 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
c2f47e15 6220 if (call_expr_nargs (exp) != 0
9342ee68 6221 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
e16ceb8e 6222 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9342ee68 6223 return const0_rtx;
53800dbe 6224 else
9342ee68 6225 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
53800dbe 6226
6227 case BUILT_IN_ALLOCA:
581bf1c2 6228 case BUILT_IN_ALLOCA_WITH_ALIGN:
990495a7 6229 /* If the allocation stems from the declaration of a variable-sized
6230 object, it cannot accumulate. */
a882d754 6231 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
53800dbe 6232 if (target)
6233 return target;
6234 break;
6235
4ee9c684 6236 case BUILT_IN_STACK_SAVE:
6237 return expand_stack_save ();
6238
6239 case BUILT_IN_STACK_RESTORE:
c2f47e15 6240 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
4ee9c684 6241 return const0_rtx;
6242
74bdbe96 6243 case BUILT_IN_BSWAP16:
42791117 6244 case BUILT_IN_BSWAP32:
6245 case BUILT_IN_BSWAP64:
74bdbe96 6246 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
42791117 6247 if (target)
6248 return target;
6249 break;
6250
4f35b1fc 6251 CASE_INT_FN (BUILT_IN_FFS):
c2f47e15 6252 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6253 subtarget, ffs_optab);
6a08d0ab 6254 if (target)
6255 return target;
6256 break;
6257
4f35b1fc 6258 CASE_INT_FN (BUILT_IN_CLZ):
c2f47e15 6259 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6260 subtarget, clz_optab);
6a08d0ab 6261 if (target)
6262 return target;
6263 break;
6264
4f35b1fc 6265 CASE_INT_FN (BUILT_IN_CTZ):
c2f47e15 6266 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6267 subtarget, ctz_optab);
6a08d0ab 6268 if (target)
6269 return target;
6270 break;
6271
d8492bd3 6272 CASE_INT_FN (BUILT_IN_CLRSB):
d8492bd3 6273 target = expand_builtin_unop (target_mode, exp, target,
6274 subtarget, clrsb_optab);
6275 if (target)
6276 return target;
6277 break;
6278
4f35b1fc 6279 CASE_INT_FN (BUILT_IN_POPCOUNT):
c2f47e15 6280 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6281 subtarget, popcount_optab);
6a08d0ab 6282 if (target)
6283 return target;
6284 break;
6285
4f35b1fc 6286 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 6287 target = expand_builtin_unop (target_mode, exp, target,
efb070c8 6288 subtarget, parity_optab);
53800dbe 6289 if (target)
6290 return target;
6291 break;
6292
6293 case BUILT_IN_STRLEN:
c2f47e15 6294 target = expand_builtin_strlen (exp, target, target_mode);
53800dbe 6295 if (target)
6296 return target;
6297 break;
6298
6299 case BUILT_IN_STRCPY:
a65c4d64 6300 target = expand_builtin_strcpy (exp, target);
53800dbe 6301 if (target)
6302 return target;
6303 break;
bf8e3599 6304
ed09096d 6305 case BUILT_IN_STRNCPY:
a65c4d64 6306 target = expand_builtin_strncpy (exp, target);
ed09096d 6307 if (target)
6308 return target;
6309 break;
bf8e3599 6310
3b824fa6 6311 case BUILT_IN_STPCPY:
dc369150 6312 target = expand_builtin_stpcpy (exp, target, mode);
3b824fa6 6313 if (target)
6314 return target;
6315 break;
6316
53800dbe 6317 case BUILT_IN_MEMCPY:
a65c4d64 6318 target = expand_builtin_memcpy (exp, target);
3b824fa6 6319 if (target)
6320 return target;
6321 break;
6322
6323 case BUILT_IN_MEMPCPY:
c2f47e15 6324 target = expand_builtin_mempcpy (exp, target, mode);
53800dbe 6325 if (target)
6326 return target;
6327 break;
6328
6329 case BUILT_IN_MEMSET:
c2f47e15 6330 target = expand_builtin_memset (exp, target, mode);
53800dbe 6331 if (target)
6332 return target;
6333 break;
6334
ffc83088 6335 case BUILT_IN_BZERO:
0b25db21 6336 target = expand_builtin_bzero (exp);
ffc83088 6337 if (target)
6338 return target;
6339 break;
6340
53800dbe 6341 case BUILT_IN_STRCMP:
a65c4d64 6342 target = expand_builtin_strcmp (exp, target);
53800dbe 6343 if (target)
6344 return target;
6345 break;
6346
ed09096d 6347 case BUILT_IN_STRNCMP:
6348 target = expand_builtin_strncmp (exp, target, mode);
6349 if (target)
6350 return target;
6351 break;
6352
071f1696 6353 case BUILT_IN_BCMP:
53800dbe 6354 case BUILT_IN_MEMCMP:
c2f47e15 6355 target = expand_builtin_memcmp (exp, target, mode);
53800dbe 6356 if (target)
6357 return target;
6358 break;
53800dbe 6359
6360 case BUILT_IN_SETJMP:
2c8a1497 6361 /* This should have been lowered to the builtins below. */
6362 gcc_unreachable ();
6363
6364 case BUILT_IN_SETJMP_SETUP:
6365 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6366 and the receiver label. */
c2f47e15 6367 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2c8a1497 6368 {
c2f47e15 6369 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
2c8a1497 6370 VOIDmode, EXPAND_NORMAL);
c2f47e15 6371 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
9ed997be 6372 rtx_insn *label_r = label_rtx (label);
2c8a1497 6373
6374 /* This is copied from the handling of non-local gotos. */
6375 expand_builtin_setjmp_setup (buf_addr, label_r);
6376 nonlocal_goto_handler_labels
a4de1c23 6377 = gen_rtx_INSN_LIST (VOIDmode, label_r,
2c8a1497 6378 nonlocal_goto_handler_labels);
6379 /* ??? Do not let expand_label treat us as such since we would
6380 not want to be both on the list of non-local labels and on
6381 the list of forced labels. */
6382 FORCED_LABEL (label) = 0;
6383 return const0_rtx;
6384 }
6385 break;
6386
2c8a1497 6387 case BUILT_IN_SETJMP_RECEIVER:
6388 /* __builtin_setjmp_receiver is passed the receiver label. */
c2f47e15 6389 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2c8a1497 6390 {
c2f47e15 6391 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
9ed997be 6392 rtx_insn *label_r = label_rtx (label);
2c8a1497 6393
6394 expand_builtin_setjmp_receiver (label_r);
6395 return const0_rtx;
6396 }
6b7f6858 6397 break;
53800dbe 6398
6399 /* __builtin_longjmp is passed a pointer to an array of five words.
6400 It's similar to the C library longjmp function but works with
6401 __builtin_setjmp above. */
6402 case BUILT_IN_LONGJMP:
c2f47e15 6403 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
53800dbe 6404 {
c2f47e15 6405 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8ec3c5c2 6406 VOIDmode, EXPAND_NORMAL);
c2f47e15 6407 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
53800dbe 6408
6409 if (value != const1_rtx)
6410 {
1e5fcbe2 6411 error ("%<__builtin_longjmp%> second argument must be 1");
53800dbe 6412 return const0_rtx;
6413 }
6414
6415 expand_builtin_longjmp (buf_addr, value);
6416 return const0_rtx;
6417 }
2c8a1497 6418 break;
53800dbe 6419
4ee9c684 6420 case BUILT_IN_NONLOCAL_GOTO:
c2f47e15 6421 target = expand_builtin_nonlocal_goto (exp);
4ee9c684 6422 if (target)
6423 return target;
6424 break;
6425
843d08a9 6426 /* This updates the setjmp buffer that is its argument with the value
6427 of the current stack pointer. */
6428 case BUILT_IN_UPDATE_SETJMP_BUF:
c2f47e15 6429 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
843d08a9 6430 {
6431 rtx buf_addr
c2f47e15 6432 = expand_normal (CALL_EXPR_ARG (exp, 0));
843d08a9 6433
6434 expand_builtin_update_setjmp_buf (buf_addr);
6435 return const0_rtx;
6436 }
6437 break;
6438
53800dbe 6439 case BUILT_IN_TRAP:
a0ef1725 6440 expand_builtin_trap ();
53800dbe 6441 return const0_rtx;
6442
d2b48f0c 6443 case BUILT_IN_UNREACHABLE:
6444 expand_builtin_unreachable ();
6445 return const0_rtx;
6446
4f35b1fc 6447 CASE_FLT_FN (BUILT_IN_SIGNBIT):
004e23c4 6448 case BUILT_IN_SIGNBITD32:
6449 case BUILT_IN_SIGNBITD64:
6450 case BUILT_IN_SIGNBITD128:
27f261ef 6451 target = expand_builtin_signbit (exp, target);
6452 if (target)
6453 return target;
6454 break;
6455
53800dbe 6456 /* Various hooks for the DWARF 2 __throw routine. */
6457 case BUILT_IN_UNWIND_INIT:
6458 expand_builtin_unwind_init ();
6459 return const0_rtx;
6460 case BUILT_IN_DWARF_CFA:
6461 return virtual_cfa_rtx;
6462#ifdef DWARF2_UNWIND_INFO
f8f023a5 6463 case BUILT_IN_DWARF_SP_COLUMN:
6464 return expand_builtin_dwarf_sp_column ();
695e919b 6465 case BUILT_IN_INIT_DWARF_REG_SIZES:
c2f47e15 6466 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
695e919b 6467 return const0_rtx;
53800dbe 6468#endif
6469 case BUILT_IN_FROB_RETURN_ADDR:
c2f47e15 6470 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6471 case BUILT_IN_EXTRACT_RETURN_ADDR:
c2f47e15 6472 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
53800dbe 6473 case BUILT_IN_EH_RETURN:
c2f47e15 6474 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6475 CALL_EXPR_ARG (exp, 1));
53800dbe 6476 return const0_rtx;
df4b504c 6477 case BUILT_IN_EH_RETURN_DATA_REGNO:
c2f47e15 6478 return expand_builtin_eh_return_data_regno (exp);
26093bf4 6479 case BUILT_IN_EXTEND_POINTER:
c2f47e15 6480 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
e38def9c 6481 case BUILT_IN_EH_POINTER:
6482 return expand_builtin_eh_pointer (exp);
6483 case BUILT_IN_EH_FILTER:
6484 return expand_builtin_eh_filter (exp);
6485 case BUILT_IN_EH_COPY_VALUES:
6486 return expand_builtin_eh_copy_values (exp);
26093bf4 6487
7ccc713a 6488 case BUILT_IN_VA_START:
c2f47e15 6489 return expand_builtin_va_start (exp);
a66c9326 6490 case BUILT_IN_VA_END:
c2f47e15 6491 return expand_builtin_va_end (exp);
a66c9326 6492 case BUILT_IN_VA_COPY:
c2f47e15 6493 return expand_builtin_va_copy (exp);
89cfe6e5 6494 case BUILT_IN_EXPECT:
c2f47e15 6495 return expand_builtin_expect (exp, target);
fca0886c 6496 case BUILT_IN_ASSUME_ALIGNED:
6497 return expand_builtin_assume_aligned (exp, target);
5e3608d8 6498 case BUILT_IN_PREFETCH:
c2f47e15 6499 expand_builtin_prefetch (exp);
5e3608d8 6500 return const0_rtx;
6501
4ee9c684 6502 case BUILT_IN_INIT_TRAMPOLINE:
c307f106 6503 return expand_builtin_init_trampoline (exp, true);
6504 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6505 return expand_builtin_init_trampoline (exp, false);
4ee9c684 6506 case BUILT_IN_ADJUST_TRAMPOLINE:
c2f47e15 6507 return expand_builtin_adjust_trampoline (exp);
4ee9c684 6508
73673831 6509 case BUILT_IN_FORK:
6510 case BUILT_IN_EXECL:
6511 case BUILT_IN_EXECV:
6512 case BUILT_IN_EXECLP:
6513 case BUILT_IN_EXECLE:
6514 case BUILT_IN_EXECVP:
6515 case BUILT_IN_EXECVE:
c2f47e15 6516 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
73673831 6517 if (target)
6518 return target;
6519 break;
53800dbe 6520
2797f13a 6521 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6522 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6523 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6524 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6525 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6526 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
1cd6e20d 6527 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
b6a5fc45 6528 if (target)
6529 return target;
6530 break;
6531
2797f13a 6532 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6533 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6534 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6535 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6536 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6537 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
1cd6e20d 6538 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
b6a5fc45 6539 if (target)
6540 return target;
6541 break;
6542
2797f13a 6543 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6544 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6545 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6546 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6547 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6548 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
1cd6e20d 6549 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
b6a5fc45 6550 if (target)
6551 return target;
6552 break;
6553
2797f13a 6554 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6555 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6556 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6557 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6558 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6559 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
1cd6e20d 6560 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
b6a5fc45 6561 if (target)
6562 return target;
6563 break;
6564
2797f13a 6565 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6566 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6567 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6568 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6569 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6570 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
1cd6e20d 6571 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
b6a5fc45 6572 if (target)
6573 return target;
6574 break;
6575
2797f13a 6576 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6577 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6578 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6579 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6580 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6581 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
1cd6e20d 6582 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
b6a5fc45 6583 if (target)
6584 return target;
6585 break;
6586
2797f13a 6587 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6588 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6589 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6590 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6591 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6592 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
1cd6e20d 6593 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
b6a5fc45 6594 if (target)
6595 return target;
6596 break;
6597
2797f13a 6598 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6599 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6600 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6601 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6602 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6603 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
1cd6e20d 6604 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
b6a5fc45 6605 if (target)
6606 return target;
6607 break;
6608
2797f13a 6609 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6610 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6611 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6612 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6613 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6614 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
1cd6e20d 6615 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
b6a5fc45 6616 if (target)
6617 return target;
6618 break;
6619
2797f13a 6620 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6621 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6622 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6623 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6624 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6625 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
1cd6e20d 6626 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
b6a5fc45 6627 if (target)
6628 return target;
6629 break;
6630
2797f13a 6631 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6632 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6633 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6634 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6635 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6636 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
1cd6e20d 6637 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
b6a5fc45 6638 if (target)
6639 return target;
6640 break;
6641
2797f13a 6642 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6643 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6644 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6645 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6646 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6647 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
1cd6e20d 6648 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
b6a5fc45 6649 if (target)
6650 return target;
6651 break;
6652
2797f13a 6653 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6654 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6655 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6656 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6657 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
a601d32a 6658 if (mode == VOIDmode)
6659 mode = TYPE_MODE (boolean_type_node);
b6a5fc45 6660 if (!target || !register_operand (target, mode))
6661 target = gen_reg_rtx (mode);
3e272de8 6662
2797f13a 6663 mode = get_builtin_sync_mode
6664 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
c2f47e15 6665 target = expand_builtin_compare_and_swap (mode, exp, true, target);
b6a5fc45 6666 if (target)
6667 return target;
6668 break;
6669
2797f13a 6670 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6671 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6672 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6673 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6674 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6675 mode = get_builtin_sync_mode
6676 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
c2f47e15 6677 target = expand_builtin_compare_and_swap (mode, exp, false, target);
b6a5fc45 6678 if (target)
6679 return target;
6680 break;
6681
2797f13a 6682 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6683 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6684 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6685 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6686 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6687 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6688 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
b6a5fc45 6689 if (target)
6690 return target;
6691 break;
6692
2797f13a 6693 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6694 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6695 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6696 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6697 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6698 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6699 expand_builtin_sync_lock_release (mode, exp);
b6a5fc45 6700 return const0_rtx;
6701
2797f13a 6702 case BUILT_IN_SYNC_SYNCHRONIZE:
6703 expand_builtin_sync_synchronize ();
b6a5fc45 6704 return const0_rtx;
6705
1cd6e20d 6706 case BUILT_IN_ATOMIC_EXCHANGE_1:
6707 case BUILT_IN_ATOMIC_EXCHANGE_2:
6708 case BUILT_IN_ATOMIC_EXCHANGE_4:
6709 case BUILT_IN_ATOMIC_EXCHANGE_8:
6710 case BUILT_IN_ATOMIC_EXCHANGE_16:
6711 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6712 target = expand_builtin_atomic_exchange (mode, exp, target);
6713 if (target)
6714 return target;
6715 break;
6716
6717 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6718 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6719 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6720 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6721 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2c201ad1 6722 {
6723 unsigned int nargs, z;
f1f41a6c 6724 vec<tree, va_gc> *vec;
2c201ad1 6725
6726 mode =
6727 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6728 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6729 if (target)
6730 return target;
6731
6732 /* If this is turned into an external library call, the weak parameter
6733 must be dropped to match the expected parameter list. */
6734 nargs = call_expr_nargs (exp);
f1f41a6c 6735 vec_alloc (vec, nargs - 1);
2c201ad1 6736 for (z = 0; z < 3; z++)
f1f41a6c 6737 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6738 /* Skip the boolean weak parameter. */
6739 for (z = 4; z < 6; z++)
f1f41a6c 6740 vec->quick_push (CALL_EXPR_ARG (exp, z));
2c201ad1 6741 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6742 break;
6743 }
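    /* For example (a sketch; P, E and D are arbitrary operands), a source
       call such as

	   __atomic_compare_exchange_n (p, &e, d, 0, __ATOMIC_SEQ_CST,
					__ATOMIC_ACQUIRE)

       keeps arguments 0-2 (p, &e, d) and 4-5 (the two memory orders) and
       drops the boolean weak flag at index 3, so that the rebuilt
       CALL_EXPR matches the five-argument parameter list expected of the
       external library routine.  */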
1cd6e20d 6744
6745 case BUILT_IN_ATOMIC_LOAD_1:
6746 case BUILT_IN_ATOMIC_LOAD_2:
6747 case BUILT_IN_ATOMIC_LOAD_4:
6748 case BUILT_IN_ATOMIC_LOAD_8:
6749 case BUILT_IN_ATOMIC_LOAD_16:
6750 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6751 target = expand_builtin_atomic_load (mode, exp, target);
6752 if (target)
6753 return target;
6754 break;
6755
6756 case BUILT_IN_ATOMIC_STORE_1:
6757 case BUILT_IN_ATOMIC_STORE_2:
6758 case BUILT_IN_ATOMIC_STORE_4:
6759 case BUILT_IN_ATOMIC_STORE_8:
6760 case BUILT_IN_ATOMIC_STORE_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6762 target = expand_builtin_atomic_store (mode, exp);
6763 if (target)
6764 return const0_rtx;
6765 break;
6766
6767 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6768 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6769 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6770 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6771 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6772 {
6773 enum built_in_function lib;
6774 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6775 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6776 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6777 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6778 ignore, lib);
6779 if (target)
6780 return target;
6781 break;
6782 }
6783 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6784 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6785 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6786 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6787 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6788 {
6789 enum built_in_function lib;
6790 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6791 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6792 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6793 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6794 ignore, lib);
6795 if (target)
6796 return target;
6797 break;
6798 }
6799 case BUILT_IN_ATOMIC_AND_FETCH_1:
6800 case BUILT_IN_ATOMIC_AND_FETCH_2:
6801 case BUILT_IN_ATOMIC_AND_FETCH_4:
6802 case BUILT_IN_ATOMIC_AND_FETCH_8:
6803 case BUILT_IN_ATOMIC_AND_FETCH_16:
6804 {
6805 enum built_in_function lib;
6806 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6807 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6808 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6809 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6810 ignore, lib);
6811 if (target)
6812 return target;
6813 break;
6814 }
6815 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6816 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6817 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6818 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6819 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6820 {
6821 enum built_in_function lib;
6822 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6823 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6824 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6825 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6826 ignore, lib);
6827 if (target)
6828 return target;
6829 break;
6830 }
6831 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6832 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6833 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6834 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6835 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6836 {
6837 enum built_in_function lib;
6838 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6839 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6840 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6841 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6842 ignore, lib);
6843 if (target)
6844 return target;
6845 break;
6846 }
6847 case BUILT_IN_ATOMIC_OR_FETCH_1:
6848 case BUILT_IN_ATOMIC_OR_FETCH_2:
6849 case BUILT_IN_ATOMIC_OR_FETCH_4:
6850 case BUILT_IN_ATOMIC_OR_FETCH_8:
6851 case BUILT_IN_ATOMIC_OR_FETCH_16:
6852 {
6853 enum built_in_function lib;
6854 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6855 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6856 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6857 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6858 ignore, lib);
6859 if (target)
6860 return target;
6861 break;
6862 }
6863 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6864 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6865 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6866 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6867 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6868 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6869 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6870 ignore, BUILT_IN_NONE);
6871 if (target)
6872 return target;
6873 break;
6874
6875 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6876 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6877 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6878 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6879 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6880 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6881 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6882 ignore, BUILT_IN_NONE);
6883 if (target)
6884 return target;
6885 break;
6886
6887 case BUILT_IN_ATOMIC_FETCH_AND_1:
6888 case BUILT_IN_ATOMIC_FETCH_AND_2:
6889 case BUILT_IN_ATOMIC_FETCH_AND_4:
6890 case BUILT_IN_ATOMIC_FETCH_AND_8:
6891 case BUILT_IN_ATOMIC_FETCH_AND_16:
6892 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6893 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6894 ignore, BUILT_IN_NONE);
6895 if (target)
6896 return target;
6897 break;
6898
6899 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6900 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6901 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6902 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6903 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6904 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6905 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6906 ignore, BUILT_IN_NONE);
6907 if (target)
6908 return target;
6909 break;
6910
6911 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6912 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6913 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6914 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6915 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6916 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6917 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6918 ignore, BUILT_IN_NONE);
6919 if (target)
6920 return target;
6921 break;
6922
6923 case BUILT_IN_ATOMIC_FETCH_OR_1:
6924 case BUILT_IN_ATOMIC_FETCH_OR_2:
6925 case BUILT_IN_ATOMIC_FETCH_OR_4:
6926 case BUILT_IN_ATOMIC_FETCH_OR_8:
6927 case BUILT_IN_ATOMIC_FETCH_OR_16:
6928 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6929 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6930 ignore, BUILT_IN_NONE);
6931 if (target)
6932 return target;
6933 break;
10b744a3 6934
6935 case BUILT_IN_ATOMIC_TEST_AND_SET:
7821cde1 6936 return expand_builtin_atomic_test_and_set (exp, target);
10b744a3 6937
6938 case BUILT_IN_ATOMIC_CLEAR:
6939 return expand_builtin_atomic_clear (exp);
1cd6e20d 6940
6941 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6942 return expand_builtin_atomic_always_lock_free (exp);
6943
6944 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6945 target = expand_builtin_atomic_is_lock_free (exp);
6946 if (target)
6947 return target;
6948 break;
6949
6950 case BUILT_IN_ATOMIC_THREAD_FENCE:
6951 expand_builtin_atomic_thread_fence (exp);
6952 return const0_rtx;
6953
6954 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6955 expand_builtin_atomic_signal_fence (exp);
6956 return const0_rtx;
6957
0a39fd54 6958 case BUILT_IN_OBJECT_SIZE:
6959 return expand_builtin_object_size (exp);
6960
6961 case BUILT_IN_MEMCPY_CHK:
6962 case BUILT_IN_MEMPCPY_CHK:
6963 case BUILT_IN_MEMMOVE_CHK:
6964 case BUILT_IN_MEMSET_CHK:
6965 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6966 if (target)
6967 return target;
6968 break;
6969
6970 case BUILT_IN_STRCPY_CHK:
6971 case BUILT_IN_STPCPY_CHK:
6972 case BUILT_IN_STRNCPY_CHK:
1063acde 6973 case BUILT_IN_STPNCPY_CHK:
0a39fd54 6974 case BUILT_IN_STRCAT_CHK:
b356dfef 6975 case BUILT_IN_STRNCAT_CHK:
0a39fd54 6976 case BUILT_IN_SNPRINTF_CHK:
6977 case BUILT_IN_VSNPRINTF_CHK:
6978 maybe_emit_chk_warning (exp, fcode);
6979 break;
6980
6981 case BUILT_IN_SPRINTF_CHK:
6982 case BUILT_IN_VSPRINTF_CHK:
6983 maybe_emit_sprintf_chk_warning (exp, fcode);
6984 break;
6985
2c281b15 6986 case BUILT_IN_FREE:
f74ea1c2 6987 if (warn_free_nonheap_object)
6988 maybe_emit_free_warning (exp);
2c281b15 6989 break;
6990
badaa04c 6991 case BUILT_IN_THREAD_POINTER:
6992 return expand_builtin_thread_pointer (exp, target);
6993
6994 case BUILT_IN_SET_THREAD_POINTER:
6995 expand_builtin_set_thread_pointer (exp);
6996 return const0_rtx;
6997
d037099f 6998 case BUILT_IN_CILK_DETACH:
6999 expand_builtin_cilk_detach (exp);
7000 return const0_rtx;
7001
7002 case BUILT_IN_CILK_POP_FRAME:
7003 expand_builtin_cilk_pop_frame (exp);
7004 return const0_rtx;
7005
058a1b7a 7006 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7007 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7008 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7009 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7010 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7011 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7012 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7013 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7014 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7015 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7016 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7017 /* We allow user CHKP builtins if Pointer Bounds
7018 Checker is off. */
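	 /* For instance (a sketch; P and N are arbitrary operands), with
	    instrumentation disabled __builtin___bnd_set_ptr_bounds (p, n)
	    expands to P itself, __builtin___bnd_get_ptr_lbound (p) to
	    zero, __builtin___bnd_get_ptr_ubound (p) to a minus-one size
	    constant, and the checking builtins expand to nothing.  */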
7019 if (!chkp_function_instrumented_p (current_function_decl))
7020 {
7021 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7022 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7023 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7024 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7025 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7026 return expand_normal (CALL_EXPR_ARG (exp, 0));
7027 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7028 return expand_normal (size_zero_node);
7029 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7030 return expand_normal (size_int (-1));
7031 else
7032 return const0_rtx;
7033 }
7034 /* FALLTHROUGH */
7035
7036 case BUILT_IN_CHKP_BNDMK:
7037 case BUILT_IN_CHKP_BNDSTX:
7038 case BUILT_IN_CHKP_BNDCL:
7039 case BUILT_IN_CHKP_BNDCU:
7040 case BUILT_IN_CHKP_BNDLDX:
7041 case BUILT_IN_CHKP_BNDRET:
7042 case BUILT_IN_CHKP_INTERSECT:
7043 case BUILT_IN_CHKP_NARROW:
7044 case BUILT_IN_CHKP_EXTRACT_LOWER:
7045 case BUILT_IN_CHKP_EXTRACT_UPPER:
 7046 /* A software-only implementation of Pointer Bounds Checker is not yet
 7047 implemented; target support is required. */
7048 error ("Your target platform does not support -fcheck-pointer-bounds");
7049 break;
7050
ca4c3545 7051 case BUILT_IN_ACC_ON_DEVICE:
7052 target = expand_builtin_acc_on_device (exp, target);
7053 if (target)
7054 return target;
7055 break;
7056
92482ee0 7057 default: /* just do library call, if unknown builtin */
146c1b4f 7058 break;
53800dbe 7059 }
7060
7061 /* The switch statement above can drop through to cause the function
7062 to be called normally. */
7063 return expand_call (exp, target, ignore);
7064}
650e4c94 7065
f21337ef 7066/* Similar to expand_builtin but is used for instrumented calls. */
7067
7068rtx
7069expand_builtin_with_bounds (tree exp, rtx target,
7070 rtx subtarget ATTRIBUTE_UNUSED,
7071 machine_mode mode, int ignore)
7072{
7073 tree fndecl = get_callee_fndecl (exp);
7074 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7075
7076 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7077
7078 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7079 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7080
7081 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7082 && fcode < END_CHKP_BUILTINS);
7083
7084 switch (fcode)
7085 {
7086 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7087 target = expand_builtin_memcpy_with_bounds (exp, target);
7088 if (target)
7089 return target;
7090 break;
7091
7092 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7093 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7094 if (target)
7095 return target;
7096 break;
7097
7098 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7099 target = expand_builtin_memset_with_bounds (exp, target, mode);
7100 if (target)
7101 return target;
7102 break;
7103
7104 default:
7105 break;
7106 }
7107
7108 /* The switch statement above can drop through to cause the function
7109 to be called normally. */
7110 return expand_call (exp, target, ignore);
7111 }
7112
805e22b2 7113/* Determine whether a tree node represents a call to a built-in
52203a9d 7114 function. If the tree T is a call to a built-in function with
7115 the right number of arguments of the appropriate types, return
7116 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7117 Otherwise the return value is END_BUILTINS. */
aecda0d6 7118
805e22b2 7119enum built_in_function
b7bf20db 7120builtin_mathfn_code (const_tree t)
805e22b2 7121{
b7bf20db 7122 const_tree fndecl, arg, parmlist;
7123 const_tree argtype, parmtype;
7124 const_call_expr_arg_iterator iter;
805e22b2 7125
7126 if (TREE_CODE (t) != CALL_EXPR
c2f47e15 7127 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
805e22b2 7128 return END_BUILTINS;
7129
c6e6ecb1 7130 fndecl = get_callee_fndecl (t);
7131 if (fndecl == NULL_TREE
52203a9d 7132 || TREE_CODE (fndecl) != FUNCTION_DECL
805e22b2 7133 || ! DECL_BUILT_IN (fndecl)
7134 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7135 return END_BUILTINS;
7136
52203a9d 7137 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
b7bf20db 7138 init_const_call_expr_arg_iterator (t, &iter);
52203a9d 7139 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
e9f80ff5 7140 {
52203a9d 7141 /* If a function doesn't take a variable number of arguments,
7142 the last element in the list will have type `void'. */
7143 parmtype = TREE_VALUE (parmlist);
7144 if (VOID_TYPE_P (parmtype))
7145 {
b7bf20db 7146 if (more_const_call_expr_args_p (&iter))
52203a9d 7147 return END_BUILTINS;
7148 return DECL_FUNCTION_CODE (fndecl);
7149 }
7150
b7bf20db 7151 if (! more_const_call_expr_args_p (&iter))
e9f80ff5 7152 return END_BUILTINS;
48e1416a 7153
b7bf20db 7154 arg = next_const_call_expr_arg (&iter);
c2f47e15 7155 argtype = TREE_TYPE (arg);
52203a9d 7156
7157 if (SCALAR_FLOAT_TYPE_P (parmtype))
7158 {
7159 if (! SCALAR_FLOAT_TYPE_P (argtype))
7160 return END_BUILTINS;
7161 }
7162 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7163 {
7164 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7165 return END_BUILTINS;
7166 }
7167 else if (POINTER_TYPE_P (parmtype))
7168 {
7169 if (! POINTER_TYPE_P (argtype))
7170 return END_BUILTINS;
7171 }
7172 else if (INTEGRAL_TYPE_P (parmtype))
7173 {
7174 if (! INTEGRAL_TYPE_P (argtype))
7175 return END_BUILTINS;
7176 }
7177 else
e9f80ff5 7178 return END_BUILTINS;
e9f80ff5 7179 }
7180
52203a9d 7181 /* Variable-length argument list. */
805e22b2 7182 return DECL_FUNCTION_CODE (fndecl);
7183}
7184
c2f47e15 7185/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7186 evaluate to a constant. */
650e4c94 7187
7188static tree
c2f47e15 7189fold_builtin_constant_p (tree arg)
650e4c94 7190{
650e4c94 7191 /* We return 1 for a numeric type that's known to be a constant
7192 value at compile-time or for an aggregate type that's a
7193 literal constant. */
c2f47e15 7194 STRIP_NOPS (arg);
650e4c94 7195
7196 /* If we know this is a constant, emit the constant of one. */
c2f47e15 7197 if (CONSTANT_CLASS_P (arg)
7198 || (TREE_CODE (arg) == CONSTRUCTOR
7199 && TREE_CONSTANT (arg)))
650e4c94 7200 return integer_one_node;
c2f47e15 7201 if (TREE_CODE (arg) == ADDR_EXPR)
adcfa3a3 7202 {
c2f47e15 7203 tree op = TREE_OPERAND (arg, 0);
adcfa3a3 7204 if (TREE_CODE (op) == STRING_CST
7205 || (TREE_CODE (op) == ARRAY_REF
7206 && integer_zerop (TREE_OPERAND (op, 1))
7207 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7208 return integer_one_node;
7209 }
650e4c94 7210
1fb4300c 7211 /* If this expression has side effects, show we don't know it to be a
7212 constant. Likewise if it's a pointer or aggregate type since in
 7213 those cases we only want literals, since those are only optimized
f97c71a1 7214 when generating RTL, not later.
7215 And finally, if we are compiling an initializer, not code, we
7216 need to return a definite result now; there's not going to be any
7217 more optimization done. */
c2f47e15 7218 if (TREE_SIDE_EFFECTS (arg)
7219 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7220 || POINTER_TYPE_P (TREE_TYPE (arg))
47be647d 7221 || cfun == 0
0b049e15 7222 || folding_initializer
7223 || force_folding_builtin_constant_p)
650e4c94 7224 return integer_zero_node;
7225
c2f47e15 7226 return NULL_TREE;
650e4c94 7227}
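
/* A few illustrative foldings of the above (a sketch; X and GLOBAL_INT
   are arbitrary non-constant variables):

     __builtin_constant_p (42)         -> 1   (CONSTANT_CLASS_P)
     __builtin_constant_p ("abc")      -> 1   (address of a STRING_CST)
     __builtin_constant_p (x++)        -> 0   (side effects)
     __builtin_constant_p (global_int) -> NULL_TREE, i.e. the decision is
					  deferred to later folding or to
					  RTL expansion.  */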
7228
76f5a783 7229/* Create a call to builtin_expect with PRED, EXPECTED and, if non-null,
 7230 PREDICTOR as its arguments and return it as a truthvalue. */
4ee9c684 7231
7232static tree
c83059be 7233build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7234 tree predictor)
4ee9c684 7235{
76f5a783 7236 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
4ee9c684 7237
b9a16870 7238 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
76f5a783 7239 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7240 ret_type = TREE_TYPE (TREE_TYPE (fn));
7241 pred_type = TREE_VALUE (arg_types);
7242 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7243
389dd41b 7244 pred = fold_convert_loc (loc, pred_type, pred);
7245 expected = fold_convert_loc (loc, expected_type, expected);
c83059be 7246 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7247 predictor);
76f5a783 7248
7249 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7250 build_int_cst (ret_type, 0));
7251}
7252
 7253/* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7254 NULL_TREE if no simplification is possible. */
7255
c83059be 7256tree
7257fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
76f5a783 7258{
083bada9 7259 tree inner, fndecl, inner_arg0;
76f5a783 7260 enum tree_code code;
7261
083bada9 7262 /* Distribute the expected value over short-circuiting operators.
7263 See through the cast from truthvalue_type_node to long. */
7264 inner_arg0 = arg0;
d09ef31a 7265 while (CONVERT_EXPR_P (inner_arg0)
083bada9 7266 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7267 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7268 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7269
76f5a783 7270 /* If this is a builtin_expect within a builtin_expect keep the
7271 inner one. See through a comparison against a constant. It
 7272 might have been added to create a truthvalue. */
083bada9 7273 inner = inner_arg0;
7274
76f5a783 7275 if (COMPARISON_CLASS_P (inner)
7276 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7277 inner = TREE_OPERAND (inner, 0);
7278
7279 if (TREE_CODE (inner) == CALL_EXPR
7280 && (fndecl = get_callee_fndecl (inner))
7281 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7282 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7283 return arg0;
7284
083bada9 7285 inner = inner_arg0;
76f5a783 7286 code = TREE_CODE (inner);
7287 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7288 {
7289 tree op0 = TREE_OPERAND (inner, 0);
7290 tree op1 = TREE_OPERAND (inner, 1);
7291
c83059be 7292 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7293 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
76f5a783 7294 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7295
389dd41b 7296 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
76f5a783 7297 }
7298
7299 /* If the argument isn't invariant then there's nothing else we can do. */
083bada9 7300 if (!TREE_CONSTANT (inner_arg0))
c2f47e15 7301 return NULL_TREE;
4ee9c684 7302
76f5a783 7303 /* If we expect that a comparison against the argument will fold to
7304 a constant return the constant. In practice, this means a true
7305 constant or the address of a non-weak symbol. */
083bada9 7306 inner = inner_arg0;
4ee9c684 7307 STRIP_NOPS (inner);
7308 if (TREE_CODE (inner) == ADDR_EXPR)
7309 {
7310 do
7311 {
7312 inner = TREE_OPERAND (inner, 0);
7313 }
7314 while (TREE_CODE (inner) == COMPONENT_REF
7315 || TREE_CODE (inner) == ARRAY_REF);
062b4460 7316 if ((TREE_CODE (inner) == VAR_DECL
7317 || TREE_CODE (inner) == FUNCTION_DECL)
7318 && DECL_WEAK (inner))
c2f47e15 7319 return NULL_TREE;
4ee9c684 7320 }
7321
76f5a783 7322 /* Otherwise, ARG0 already has the proper type for the return value. */
7323 return arg0;
4ee9c684 7324}
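
/* A sketch of the distribution performed above:

     __builtin_expect (a != 0 && b != 0, 1)

   is folded into roughly

     __builtin_expect (a != 0, 1) && __builtin_expect (b != 0, 1)

   so each short-circuit arm carries its own expectation, while a nested
   __builtin_expect (__builtin_expect (x, 1), 1) keeps only the inner
   call.  */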
7325
c2f47e15 7326/* Fold a call to __builtin_classify_type with argument ARG. */
27d0c333 7327
539a3a92 7328static tree
c2f47e15 7329fold_builtin_classify_type (tree arg)
539a3a92 7330{
c2f47e15 7331 if (arg == 0)
7002a1c8 7332 return build_int_cst (integer_type_node, no_type_class);
539a3a92 7333
7002a1c8 7334 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
539a3a92 7335}
7336
c2f47e15 7337/* Fold a call to __builtin_strlen with argument ARG. */
e6e27594 7338
7339static tree
c7cbde74 7340fold_builtin_strlen (location_t loc, tree type, tree arg)
e6e27594 7341{
c2f47e15 7342 if (!validate_arg (arg, POINTER_TYPE))
e6e27594 7343 return NULL_TREE;
7344 else
7345 {
c2f47e15 7346 tree len = c_strlen (arg, 0);
e6e27594 7347
7348 if (len)
c7cbde74 7349 return fold_convert_loc (loc, type, len);
e6e27594 7350
7351 return NULL_TREE;
7352 }
7353}
7354
92c43e3c 7355/* Fold a call to __builtin_inf or __builtin_huge_val. */
7356
7357static tree
389dd41b 7358fold_builtin_inf (location_t loc, tree type, int warn)
92c43e3c 7359{
aa870c1b 7360 REAL_VALUE_TYPE real;
7361
40f4dbd5 7362 /* __builtin_inff is intended to be usable to define INFINITY on all
7363 targets. If an infinity is not available, INFINITY expands "to a
7364 positive constant of type float that overflows at translation
7365 time", footnote "In this case, using INFINITY will violate the
7366 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7367 Thus we pedwarn to ensure this constraint violation is
7368 diagnosed. */
92c43e3c 7369 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
389dd41b 7370 pedwarn (loc, 0, "target format does not support infinity");
92c43e3c 7371
aa870c1b 7372 real_inf (&real);
7373 return build_real (type, real);
92c43e3c 7374}
7375
c2f47e15 7376/* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
b0db7939 7377
7378static tree
c2f47e15 7379fold_builtin_nan (tree arg, tree type, int quiet)
b0db7939 7380{
7381 REAL_VALUE_TYPE real;
7382 const char *str;
7383
c2f47e15 7384 if (!validate_arg (arg, POINTER_TYPE))
7385 return NULL_TREE;
7386 str = c_getstr (arg);
b0db7939 7387 if (!str)
c2f47e15 7388 return NULL_TREE;
b0db7939 7389
7390 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
c2f47e15 7391 return NULL_TREE;
b0db7939 7392
7393 return build_real (type, real);
7394}
7395
277f8dd2 7396/* Return true if the floating point expression T has an integer value.
7397 We also allow +Inf, -Inf and NaN to be considered integer values. */
7398
7399static bool
7400integer_valued_real_p (tree t)
7401{
7402 switch (TREE_CODE (t))
7403 {
7404 case FLOAT_EXPR:
7405 return true;
7406
7407 case ABS_EXPR:
7408 case SAVE_EXPR:
277f8dd2 7409 return integer_valued_real_p (TREE_OPERAND (t, 0));
7410
7411 case COMPOUND_EXPR:
41076ef6 7412 case MODIFY_EXPR:
277f8dd2 7413 case BIND_EXPR:
75a70cf9 7414 return integer_valued_real_p (TREE_OPERAND (t, 1));
277f8dd2 7415
7416 case PLUS_EXPR:
7417 case MINUS_EXPR:
7418 case MULT_EXPR:
7419 case MIN_EXPR:
7420 case MAX_EXPR:
7421 return integer_valued_real_p (TREE_OPERAND (t, 0))
7422 && integer_valued_real_p (TREE_OPERAND (t, 1));
7423
7424 case COND_EXPR:
7425 return integer_valued_real_p (TREE_OPERAND (t, 1))
7426 && integer_valued_real_p (TREE_OPERAND (t, 2));
7427
7428 case REAL_CST:
0570334c 7429 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
277f8dd2 7430
d09ef31a 7431 CASE_CONVERT:
277f8dd2 7432 {
7433 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7434 if (TREE_CODE (type) == INTEGER_TYPE)
7435 return true;
7436 if (TREE_CODE (type) == REAL_TYPE)
7437 return integer_valued_real_p (TREE_OPERAND (t, 0));
7438 break;
7439 }
7440
7441 case CALL_EXPR:
7442 switch (builtin_mathfn_code (t))
7443 {
4f35b1fc 7444 CASE_FLT_FN (BUILT_IN_CEIL):
7445 CASE_FLT_FN (BUILT_IN_FLOOR):
7446 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7447 CASE_FLT_FN (BUILT_IN_RINT):
7448 CASE_FLT_FN (BUILT_IN_ROUND):
7449 CASE_FLT_FN (BUILT_IN_TRUNC):
277f8dd2 7450 return true;
7451
d4a43a03 7452 CASE_FLT_FN (BUILT_IN_FMIN):
7453 CASE_FLT_FN (BUILT_IN_FMAX):
c2f47e15 7454 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7455 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
d4a43a03 7456
277f8dd2 7457 default:
7458 break;
7459 }
7460 break;
7461
7462 default:
7463 break;
7464 }
7465 return false;
7466}
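
/* Some expressions the predicate above accepts (a sketch; X and Y are
   arbitrary doubles, I an arbitrary int):

     (double) i                    FLOAT_EXPR
     fabs (floor (x))              ABS_EXPR of an integer rounding call
     fmin (trunc (x), ceil (y))    both fmin arguments are integer valued
     2.0                           REAL_CST with an integral value

   whereas x + 1.0 or sin (x) are not considered integer valued.  */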
7467
c2f47e15 7468/* FNDECL is assumed to be a builtin where truncation can be propagated
6528f4f4 7469 across (for instance floor((double)f) == (double)floorf (f)).
c2f47e15 7470 Do the transformation for a call with argument ARG. */
277f8dd2 7471
6528f4f4 7472static tree
389dd41b 7473fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
6528f4f4 7474{
6528f4f4 7475 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
277f8dd2 7476
c2f47e15 7477 if (!validate_arg (arg, REAL_TYPE))
7478 return NULL_TREE;
6528f4f4 7479
277f8dd2 7480 /* Integer rounding functions are idempotent. */
7481 if (fcode == builtin_mathfn_code (arg))
7482 return arg;
7483
7484 /* If argument is already integer valued, and we don't need to worry
7485 about setting errno, there's no need to perform rounding. */
7486 if (! flag_errno_math && integer_valued_real_p (arg))
7487 return arg;
7488
7489 if (optimize)
6528f4f4 7490 {
277f8dd2 7491 tree arg0 = strip_float_extensions (arg);
2426241c 7492 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
6528f4f4 7493 tree newtype = TREE_TYPE (arg0);
7494 tree decl;
7495
7496 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7497 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7498 return fold_convert_loc (loc, ftype,
7499 build_call_expr_loc (loc, decl, 1,
7500 fold_convert_loc (loc,
7501 newtype,
7502 arg0)));
6528f4f4 7503 }
c2f47e15 7504 return NULL_TREE;
6528f4f4 7505}
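
/* A sketch of the transformations above (F is a float, X a double, I an
   int; the narrowing case needs optimization enabled and the last case
   needs -fno-math-errno):

     floor ((double) f)  -> (double) floorf (f)
     floor (floor (x))   -> floor (x)
     floor ((double) i)  -> (double) i  */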
7506
c2f47e15 7507/* FNDECL is assumed to be a builtin which can narrow the FP type of
7508 the argument, for instance lround((double)f) -> lroundf (f).
7509 Do the transformation for a call with argument ARG. */
9ed65c7f 7510
7511static tree
389dd41b 7512fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
9ed65c7f 7513{
9ed65c7f 7514 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9ed65c7f 7515
c2f47e15 7516 if (!validate_arg (arg, REAL_TYPE))
7517 return NULL_TREE;
9ed65c7f 7518
7519 /* If argument is already integer valued, and we don't need to worry
7520 about setting errno, there's no need to perform rounding. */
7521 if (! flag_errno_math && integer_valued_real_p (arg))
389dd41b 7522 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7523 TREE_TYPE (TREE_TYPE (fndecl)), arg);
9ed65c7f 7524
7525 if (optimize)
7526 {
7527 tree ftype = TREE_TYPE (arg);
7528 tree arg0 = strip_float_extensions (arg);
7529 tree newtype = TREE_TYPE (arg0);
7530 tree decl;
7531
7532 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7533 && (decl = mathfn_built_in (newtype, fcode)))
389dd41b 7534 return build_call_expr_loc (loc, decl, 1,
7535 fold_convert_loc (loc, newtype, arg0));
9ed65c7f 7536 }
73a0da56 7537
80ff6494 7538 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7539 sizeof (int) == sizeof (long). */
7540 if (TYPE_PRECISION (integer_type_node)
7541 == TYPE_PRECISION (long_integer_type_node))
7542 {
7543 tree newfn = NULL_TREE;
7544 switch (fcode)
7545 {
7546 CASE_FLT_FN (BUILT_IN_ICEIL):
7547 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7548 break;
7549
7550 CASE_FLT_FN (BUILT_IN_IFLOOR):
7551 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7552 break;
7553
7554 CASE_FLT_FN (BUILT_IN_IROUND):
7555 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7556 break;
7557
7558 CASE_FLT_FN (BUILT_IN_IRINT):
7559 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7560 break;
7561
7562 default:
7563 break;
7564 }
7565
7566 if (newfn)
7567 {
7568 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7569 return fold_convert_loc (loc,
7570 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7571 }
7572 }
7573
73a0da56 7574 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7575 sizeof (long long) == sizeof (long). */
7576 if (TYPE_PRECISION (long_long_integer_type_node)
7577 == TYPE_PRECISION (long_integer_type_node))
7578 {
7579 tree newfn = NULL_TREE;
7580 switch (fcode)
7581 {
7582 CASE_FLT_FN (BUILT_IN_LLCEIL):
7583 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7584 break;
7585
7586 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7587 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7588 break;
7589
7590 CASE_FLT_FN (BUILT_IN_LLROUND):
7591 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7592 break;
7593
7594 CASE_FLT_FN (BUILT_IN_LLRINT):
7595 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7596 break;
7597
7598 default:
7599 break;
7600 }
7601
7602 if (newfn)
7603 {
389dd41b 7604 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7605 return fold_convert_loc (loc,
7606 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
73a0da56 7607 }
7608 }
7609
c2f47e15 7610 return NULL_TREE;
9ed65c7f 7611}
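
/* A sketch of the transformations above (F is a float, X a double):

     lround ((double) f) -> lroundf (f)             when optimizing
     iround (x)          -> (int) lround (x)        when int and long have
						     the same precision
     llround (x)         -> (long long) lround (x)  when long long and long
						     have the same precision  */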
7612
c2f47e15 7613/* Fold a call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7614 return type. Return NULL_TREE if no simplification can be made. */
c63f4ad3 7615
7616static tree
389dd41b 7617fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
c63f4ad3 7618{
c2f47e15 7619 tree res;
c63f4ad3 7620
b0ce8887 7621 if (!validate_arg (arg, COMPLEX_TYPE)
c63f4ad3 7622 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7623 return NULL_TREE;
7624
b4725390 7625 /* Calculate the result when the argument is a constant. */
7626 if (TREE_CODE (arg) == COMPLEX_CST
7627 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7628 type, mpfr_hypot)))
7629 return res;
48e1416a 7630
1af0d139 7631 if (TREE_CODE (arg) == COMPLEX_EXPR)
7632 {
7633 tree real = TREE_OPERAND (arg, 0);
7634 tree imag = TREE_OPERAND (arg, 1);
48e1416a 7635
1af0d139 7636 /* If either part is zero, cabs is fabs of the other. */
7637 if (real_zerop (real))
389dd41b 7638 return fold_build1_loc (loc, ABS_EXPR, type, imag);
1af0d139 7639 if (real_zerop (imag))
389dd41b 7640 return fold_build1_loc (loc, ABS_EXPR, type, real);
1af0d139 7641
7642 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7643 if (flag_unsafe_math_optimizations
7644 && operand_equal_p (real, imag, OEP_PURE_SAME))
7645 {
2e7ca27b 7646 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 7647 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
1af0d139 7648 STRIP_NOPS (real);
389dd41b 7649 return fold_build2_loc (loc, MULT_EXPR, type,
7650 fold_build1_loc (loc, ABS_EXPR, type, real),
2e7ca27b 7651 build_real (type, sqrt2_trunc));
1af0d139 7652 }
7653 }
c63f4ad3 7654
749891b2 7655 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7656 if (TREE_CODE (arg) == NEGATE_EXPR
7657 || TREE_CODE (arg) == CONJ_EXPR)
389dd41b 7658 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
749891b2 7659
7d3f6cc7 7660 /* Don't do this when optimizing for size. */
7661 if (flag_unsafe_math_optimizations
0bfd8d5c 7662 && optimize && optimize_function_for_speed_p (cfun))
c63f4ad3 7663 {
0da0dbfa 7664 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
c63f4ad3 7665
7666 if (sqrtfn != NULL_TREE)
7667 {
c2f47e15 7668 tree rpart, ipart, result;
c63f4ad3 7669
4ee9c684 7670 arg = builtin_save_expr (arg);
29a6518e 7671
389dd41b 7672 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7673 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
c63f4ad3 7674
4ee9c684 7675 rpart = builtin_save_expr (rpart);
7676 ipart = builtin_save_expr (ipart);
c63f4ad3 7677
389dd41b 7678 result = fold_build2_loc (loc, PLUS_EXPR, type,
7679 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7680 rpart, rpart),
389dd41b 7681 fold_build2_loc (loc, MULT_EXPR, type,
49d00087 7682 ipart, ipart));
c63f4ad3 7683
389dd41b 7684 return build_call_expr_loc (loc, sqrtfn, 1, result);
c63f4ad3 7685 }
7686 }
7687
7688 return NULL_TREE;
7689}
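
/* A sketch of the cabs foldings above (Z is a complex double, X a
   double):

     cabs (x + 0.0i)      -> fabs (x)
     cabs (x + x * 1.0i)  -> fabs (x) * sqrt (2)   with -funsafe-math-optimizations
     cabs (-z)            -> cabs (z)
     cabs (conj (z))      -> cabs (z)

   and, when additionally optimizing for speed, the remaining cases
   expand through sqrt (creal (z) * creal (z) + cimag (z) * cimag (z)).  */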
7690
c2373fdb 7691/* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7692 complex tree type of the result. If NEG is true, the imaginary
7693 zero is negative. */
7694
7695static tree
7696build_complex_cproj (tree type, bool neg)
7697{
7698 REAL_VALUE_TYPE rinf, rzero = dconst0;
7699
7700 real_inf (&rinf);
7701 rzero.sign = neg;
7702 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7703 build_real (TREE_TYPE (type), rzero));
7704}
7705
 7706/* Fold a call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7707 return type. Return NULL_TREE if no simplification can be made. */
7708
7709static tree
7710fold_builtin_cproj (location_t loc, tree arg, tree type)
7711{
7712 if (!validate_arg (arg, COMPLEX_TYPE)
7713 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7714 return NULL_TREE;
7715
7716 /* If there are no infinities, return arg. */
fe994837 7717 if (! HONOR_INFINITIES (type))
c2373fdb 7718 return non_lvalue_loc (loc, arg);
7719
7720 /* Calculate the result when the argument is a constant. */
7721 if (TREE_CODE (arg) == COMPLEX_CST)
7722 {
7723 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7724 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7725
7726 if (real_isinf (real) || real_isinf (imag))
7727 return build_complex_cproj (type, imag->sign);
7728 else
7729 return arg;
7730 }
b4c7e601 7731 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7732 {
7733 tree real = TREE_OPERAND (arg, 0);
7734 tree imag = TREE_OPERAND (arg, 1);
7735
7736 STRIP_NOPS (real);
7737 STRIP_NOPS (imag);
7738
7739 /* If the real part is inf and the imag part is known to be
7740 nonnegative, return (inf + 0i). Remember side-effects are
7741 possible in the imag part. */
7742 if (TREE_CODE (real) == REAL_CST
7743 && real_isinf (TREE_REAL_CST_PTR (real))
7744 && tree_expr_nonnegative_p (imag))
7745 return omit_one_operand_loc (loc, type,
7746 build_complex_cproj (type, false),
7747 arg);
7748
7749 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7750 Remember side-effects are possible in the real part. */
7751 if (TREE_CODE (imag) == REAL_CST
7752 && real_isinf (TREE_REAL_CST_PTR (imag)))
7753 return
7754 omit_one_operand_loc (loc, type,
7755 build_complex_cproj (type, TREE_REAL_CST_PTR
7756 (imag)->sign), arg);
7757 }
c2373fdb 7758
7759 return NULL_TREE;
7760}
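/* Illustrative examples: when infinities are not honored the call simply
   folds to its argument; a finite constant such as 1.0 + 2.0i likewise
   folds to itself; and any constant with an infinite part, e.g.
   INFINITY - 2.0i, folds to the INFINITY - 0i value produced by
   build_complex_cproj, the zero taking the sign of the imaginary part as
   C99 cproj requires.  */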
7761
c2f47e15 7762/* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7763 Return NULL_TREE if no simplification can be made. */
e6e27594 7764
7765static tree
389dd41b 7766fold_builtin_sqrt (location_t loc, tree arg, tree type)
e6e27594 7767{
7768
7769 enum built_in_function fcode;
b4e8ab0c 7770 tree res;
c2f47e15 7771
7772 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7773 return NULL_TREE;
7774
b4e8ab0c 7775 /* Calculate the result when the argument is a constant. */
7776 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7777 return res;
48e1416a 7778
e6e27594 7779 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7780 fcode = builtin_mathfn_code (arg);
7781 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7782 {
c2f47e15 7783 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
389dd41b 7784 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7785 CALL_EXPR_ARG (arg, 0),
49d00087 7786 build_real (type, dconsthalf));
389dd41b 7787 return build_call_expr_loc (loc, expfn, 1, arg);
e6e27594 7788 }
7789
7790 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7791 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7792 {
7793 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7794
7795 if (powfn)
7796 {
c2f47e15 7797 tree arg0 = CALL_EXPR_ARG (arg, 0);
e6e27594 7798 tree tree_root;
7799 /* The inner root was either sqrt or cbrt. */
57510da6 7800 /* This was a conditional expression but it triggered a bug
18381619 7801 in Sun C 5.5. */
ce6cd837 7802 REAL_VALUE_TYPE dconstroot;
7803 if (BUILTIN_SQRT_P (fcode))
7804 dconstroot = dconsthalf;
7805 else
7806 dconstroot = dconst_third ();
e6e27594 7807
7808 /* Adjust for the outer root. */
7809 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7810 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7811 tree_root = build_real (type, dconstroot);
389dd41b 7812 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
e6e27594 7813 }
7814 }
7815
bc33117f 7816 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
e6e27594 7817 if (flag_unsafe_math_optimizations
7818 && (fcode == BUILT_IN_POW
7819 || fcode == BUILT_IN_POWF
7820 || fcode == BUILT_IN_POWL))
7821 {
c2f47e15 7822 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7823 tree arg0 = CALL_EXPR_ARG (arg, 0);
7824 tree arg1 = CALL_EXPR_ARG (arg, 1);
bc33117f 7825 tree narg1;
7826 if (!tree_expr_nonnegative_p (arg0))
7827 arg0 = build1 (ABS_EXPR, type, arg0);
389dd41b 7828 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 7829 build_real (type, dconsthalf));
389dd41b 7830 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
e6e27594 7831 }
7832
7833 return NULL_TREE;
7834}
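/* Illustrative examples (the compositions all require
   -funsafe-math-optimizations): sqrt (exp (x)) becomes exp (x * 0.5),
   sqrt (cbrt (x)) becomes pow (x, 1.0/6.0), and sqrt (pow (x, y)) becomes
   pow (fabs (x), y * 0.5).  A constant argument whose result is exact,
   such as sqrt (4.0), is evaluated (to 2.0) by the MPFR call above.  */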
7835
c2f47e15 7836/* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7837 Return NULL_TREE if no simplification can be made. */
7838
e6e27594 7839static tree
389dd41b 7840fold_builtin_cbrt (location_t loc, tree arg, tree type)
e6e27594 7841{
e6e27594 7842 const enum built_in_function fcode = builtin_mathfn_code (arg);
29f4cd78 7843 tree res;
e6e27594 7844
c2f47e15 7845 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7846 return NULL_TREE;
7847
29f4cd78 7848 /* Calculate the result when the argument is a constant. */
7849 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7850 return res;
e6e27594 7851
cdfeb715 7852 if (flag_unsafe_math_optimizations)
e6e27594 7853 {
cdfeb715 7854 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7855 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 7856 {
c2f47e15 7857 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7858 const REAL_VALUE_TYPE third_trunc =
7910b2fb 7859 real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7860 arg = fold_build2_loc (loc, MULT_EXPR, type,
c2f47e15 7861 CALL_EXPR_ARG (arg, 0),
49d00087 7862 build_real (type, third_trunc));
389dd41b 7863 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 7864 }
e6e27594 7865
cdfeb715 7866 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7867 if (BUILTIN_SQRT_P (fcode))
a0c938f0 7868 {
cdfeb715 7869 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
e6e27594 7870
cdfeb715 7871 if (powfn)
7872 {
c2f47e15 7873 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7874 tree tree_root;
7910b2fb 7875 REAL_VALUE_TYPE dconstroot = dconst_third ();
cdfeb715 7876
7877 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7878 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7879 tree_root = build_real (type, dconstroot);
389dd41b 7880 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7881 }
e6e27594 7882 }
7883
cdfeb715 7884 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7885 if (BUILTIN_CBRT_P (fcode))
a0c938f0 7886 {
c2f47e15 7887 tree arg0 = CALL_EXPR_ARG (arg, 0);
cdfeb715 7888 if (tree_expr_nonnegative_p (arg0))
7889 {
7890 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7891
7892 if (powfn)
a0c938f0 7893 {
cdfeb715 7894 tree tree_root;
7895 REAL_VALUE_TYPE dconstroot;
a0c938f0 7896
3fa759a9 7897 real_arithmetic (&dconstroot, MULT_EXPR,
7910b2fb 7898 dconst_third_ptr (), dconst_third_ptr ());
cdfeb715 7899 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7900 tree_root = build_real (type, dconstroot);
389dd41b 7901 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
cdfeb715 7902 }
7903 }
7904 }
a0c938f0 7905
cdfeb715 7906 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
48e1416a 7907 if (fcode == BUILT_IN_POW
c2f47e15 7908 || fcode == BUILT_IN_POWF
cdfeb715 7909 || fcode == BUILT_IN_POWL)
a0c938f0 7910 {
c2f47e15 7911 tree arg00 = CALL_EXPR_ARG (arg, 0);
7912 tree arg01 = CALL_EXPR_ARG (arg, 1);
cdfeb715 7913 if (tree_expr_nonnegative_p (arg00))
7914 {
c2f47e15 7915 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
cdfeb715 7916 const REAL_VALUE_TYPE dconstroot
7910b2fb 7917 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 7918 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
49d00087 7919 build_real (type, dconstroot));
389dd41b 7920 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
cdfeb715 7921 }
7922 }
e6e27594 7923 }
7924 return NULL_TREE;
7925}
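/* Illustrative examples (all under -funsafe-math-optimizations):
   cbrt (exp (x)) becomes exp (x * (1.0/3.0)), cbrt (sqrt (x)) becomes
   pow (x, 1.0/6.0), and for provably nonnegative x, cbrt (cbrt (x))
   becomes pow (x, 1.0/9.0) and cbrt (pow (x, y)) becomes
   pow (x, y / 3.0).  */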
7926
c2f47e15 7927/* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7928 TYPE is the type of the return value. Return NULL_TREE if no
7929 simplification can be made. */
7930
e6e27594 7931static tree
389dd41b 7932fold_builtin_cos (location_t loc,
7933 tree arg, tree type, tree fndecl)
e6e27594 7934{
e6ab33d8 7935 tree res, narg;
e6e27594 7936
c2f47e15 7937 if (!validate_arg (arg, REAL_TYPE))
e6e27594 7938 return NULL_TREE;
7939
bffb7645 7940 /* Calculate the result when the argument is a constant. */
728bac60 7941 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
bffb7645 7942 return res;
48e1416a 7943
e6e27594 7944 /* Optimize cos(-x) into cos (x). */
e6ab33d8 7945 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7946 return build_call_expr_loc (loc, fndecl, 1, narg);
e6e27594 7947
7948 return NULL_TREE;
7949}
7950
c2f47e15 7951/* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7952 Return NULL_TREE if no simplification can be made. */
7953
cacdc1af 7954static tree
389dd41b 7955fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
cacdc1af 7956{
c2f47e15 7957 if (validate_arg (arg, REAL_TYPE))
cacdc1af 7958 {
cacdc1af 7959 tree res, narg;
7960
7961 /* Calculate the result when the argument is a constant. */
7962 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7963 return res;
48e1416a 7964
cacdc1af 7965 /* Optimize cosh(-x) into cosh (x). */
7966 if ((narg = fold_strip_sign_ops (arg)))
389dd41b 7967 return build_call_expr_loc (loc, fndecl, 1, narg);
cacdc1af 7968 }
48e1416a 7969
cacdc1af 7970 return NULL_TREE;
7971}
7972
239d491a 7973/* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7974 argument ARG. TYPE is the type of the return value. Return
7975 NULL_TREE if no simplification can be made. */
7976
7977static tree
965d0f29 7978fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7979 bool hyper)
239d491a 7980{
7981 if (validate_arg (arg, COMPLEX_TYPE)
7982 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7983 {
7984 tree tmp;
7985
239d491a 7986 /* Calculate the result when the argument is a constant. */
7987 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7988 return tmp;
48e1416a 7989
239d491a 7990 /* Optimize fn(-x) into fn(x). */
7991 if ((tmp = fold_strip_sign_ops (arg)))
389dd41b 7992 return build_call_expr_loc (loc, fndecl, 1, tmp);
239d491a 7993 }
7994
7995 return NULL_TREE;
7996}
7997
c2f47e15 7998/* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7999 Return NULL_TREE if no simplification can be made. */
8000
e6e27594 8001static tree
c2f47e15 8002fold_builtin_tan (tree arg, tree type)
e6e27594 8003{
8004 enum built_in_function fcode;
29f4cd78 8005 tree res;
e6e27594 8006
c2f47e15 8007 if (!validate_arg (arg, REAL_TYPE))
e6e27594 8008 return NULL_TREE;
8009
bffb7645 8010 /* Calculate the result when the argument is a constant. */
728bac60 8011 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
bffb7645 8012 return res;
48e1416a 8013
e6e27594 8014 /* Optimize tan(atan(x)) = x. */
8015 fcode = builtin_mathfn_code (arg);
8016 if (flag_unsafe_math_optimizations
8017 && (fcode == BUILT_IN_ATAN
8018 || fcode == BUILT_IN_ATANF
8019 || fcode == BUILT_IN_ATANL))
c2f47e15 8020 return CALL_EXPR_ARG (arg, 0);
e6e27594 8021
8022 return NULL_TREE;
8023}
8024
d735c391 8025/* Fold function call to builtin sincos, sincosf, or sincosl. Return
8026 NULL_TREE if no simplification can be made. */
8027
8028static tree
389dd41b 8029fold_builtin_sincos (location_t loc,
8030 tree arg0, tree arg1, tree arg2)
d735c391 8031{
c2f47e15 8032 tree type;
d735c391 8033 tree res, fn, call;
8034
c2f47e15 8035 if (!validate_arg (arg0, REAL_TYPE)
8036 || !validate_arg (arg1, POINTER_TYPE)
8037 || !validate_arg (arg2, POINTER_TYPE))
d735c391 8038 return NULL_TREE;
8039
d735c391 8040 type = TREE_TYPE (arg0);
d735c391 8041
8042 /* Calculate the result when the argument is a constant. */
8043 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
8044 return res;
8045
8046 /* Canonicalize sincos to cexpi. */
30f690e0 8047 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8048 return NULL_TREE;
d735c391 8049 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
8050 if (!fn)
8051 return NULL_TREE;
8052
389dd41b 8053 call = build_call_expr_loc (loc, fn, 1, arg0);
d735c391 8054 call = builtin_save_expr (call);
8055
a75b1c71 8056 return build2 (COMPOUND_EXPR, void_type_node,
d735c391 8057 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8058 build_fold_indirect_ref_loc (loc, arg1),
d735c391 8059 build1 (IMAGPART_EXPR, type, call)),
8060 build2 (MODIFY_EXPR, void_type_node,
389dd41b 8061 build_fold_indirect_ref_loc (loc, arg2),
d735c391 8062 build1 (REALPART_EXPR, type, call)));
8063}
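/* Illustrative example: on targets whose libc provides the C99 complex
   math functions, a call
       sincos (x, &s, &c);
   is canonicalized into roughly
       __complex__ double t = __builtin_cexpi (x);
       s = __imag__ t;  c = __real__ t;
   so that a single call carries both results.  */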
8064
c5bb2c4b 8065/* Fold function call to builtin cexp, cexpf, or cexpl. Return
8066 NULL_TREE if no simplification can be made. */
8067
8068static tree
389dd41b 8069fold_builtin_cexp (location_t loc, tree arg0, tree type)
c5bb2c4b 8070{
c2f47e15 8071 tree rtype;
c5bb2c4b 8072 tree realp, imagp, ifn;
239d491a 8073 tree res;
c5bb2c4b 8074
239d491a 8075 if (!validate_arg (arg0, COMPLEX_TYPE)
b0ce8887 8076 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
c5bb2c4b 8077 return NULL_TREE;
8078
239d491a 8079 /* Calculate the result when the argument is a constant. */
8080 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
8081 return res;
48e1416a 8082
c5bb2c4b 8083 rtype = TREE_TYPE (TREE_TYPE (arg0));
8084
 8085   /* In case we can figure out the real part of arg0 and it is constant zero,
8086 fold to cexpi. */
30f690e0 8087 if (!targetm.libc_has_function (function_c99_math_complex))
2a6b4c77 8088 return NULL_TREE;
c5bb2c4b 8089 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
8090 if (!ifn)
8091 return NULL_TREE;
8092
389dd41b 8093 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
c5bb2c4b 8094 && real_zerop (realp))
8095 {
389dd41b 8096 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
8097 return build_call_expr_loc (loc, ifn, 1, narg);
c5bb2c4b 8098 }
8099
 8100   /* In case we can easily decompose the real and imaginary parts, split cexp
8101 to exp (r) * cexpi (i). */
8102 if (flag_unsafe_math_optimizations
8103 && realp)
8104 {
8105 tree rfn, rcall, icall;
8106
8107 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
8108 if (!rfn)
8109 return NULL_TREE;
8110
389dd41b 8111 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
c5bb2c4b 8112 if (!imagp)
8113 return NULL_TREE;
8114
389dd41b 8115 icall = build_call_expr_loc (loc, ifn, 1, imagp);
c5bb2c4b 8116 icall = builtin_save_expr (icall);
389dd41b 8117 rcall = build_call_expr_loc (loc, rfn, 1, realp);
c5bb2c4b 8118 rcall = builtin_save_expr (rcall);
389dd41b 8119 return fold_build2_loc (loc, COMPLEX_EXPR, type,
8120 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8121 rcall,
389dd41b 8122 fold_build1_loc (loc, REALPART_EXPR,
8123 rtype, icall)),
8124 fold_build2_loc (loc, MULT_EXPR, rtype,
71bf42bb 8125 rcall,
389dd41b 8126 fold_build1_loc (loc, IMAGPART_EXPR,
8127 rtype, icall)));
c5bb2c4b 8128 }
8129
8130 return NULL_TREE;
8131}
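/* Illustrative examples: with a C99 complex-capable libc, an argument
   with a literal zero real part folds directly, e.g. cexp (y * I)
   becoming __builtin_cexpi (y); with -funsafe-math-optimizations a
   decomposable argument r + y*I is instead rewritten as the products
   exp (r) * __real__ __builtin_cexpi (y) and
   exp (r) * __imag__ __builtin_cexpi (y), combined into one complex
   value.  */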
8132
c2f47e15 8133/* Fold function call to builtin trunc, truncf or truncl with argument ARG.
8134 Return NULL_TREE if no simplification can be made. */
277f8dd2 8135
8136static tree
389dd41b 8137fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
277f8dd2 8138{
c2f47e15 8139 if (!validate_arg (arg, REAL_TYPE))
8140 return NULL_TREE;
277f8dd2 8141
8142 /* Optimize trunc of constant value. */
f96bd2bf 8143 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8144 {
8145 REAL_VALUE_TYPE r, x;
2426241c 8146 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8147
8148 x = TREE_REAL_CST (arg);
8149 real_trunc (&r, TYPE_MODE (type), &x);
8150 return build_real (type, r);
8151 }
8152
389dd41b 8153 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8154}
8155
c2f47e15 8156/* Fold function call to builtin floor, floorf or floorl with argument ARG.
8157 Return NULL_TREE if no simplification can be made. */
277f8dd2 8158
8159static tree
389dd41b 8160fold_builtin_floor (location_t loc, tree fndecl, tree arg)
277f8dd2 8161{
c2f47e15 8162 if (!validate_arg (arg, REAL_TYPE))
8163 return NULL_TREE;
277f8dd2 8164
8165 /* Optimize floor of constant value. */
f96bd2bf 8166 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8167 {
8168 REAL_VALUE_TYPE x;
8169
8170 x = TREE_REAL_CST (arg);
8171 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8172 {
2426241c 8173 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8174 REAL_VALUE_TYPE r;
8175
8176 real_floor (&r, TYPE_MODE (type), &x);
8177 return build_real (type, r);
8178 }
8179 }
8180
acc2b92e 8181 /* Fold floor (x) where x is nonnegative to trunc (x). */
8182 if (tree_expr_nonnegative_p (arg))
30fe8286 8183 {
8184 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
8185 if (truncfn)
389dd41b 8186 return build_call_expr_loc (loc, truncfn, 1, arg);
30fe8286 8187 }
acc2b92e 8188
389dd41b 8189 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8190}
8191
c2f47e15 8192/* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
8193 Return NULL_TREE if no simplification can be made. */
277f8dd2 8194
8195static tree
389dd41b 8196fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
277f8dd2 8197{
c2f47e15 8198 if (!validate_arg (arg, REAL_TYPE))
8199 return NULL_TREE;
277f8dd2 8200
8201 /* Optimize ceil of constant value. */
f96bd2bf 8202 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
277f8dd2 8203 {
8204 REAL_VALUE_TYPE x;
8205
8206 x = TREE_REAL_CST (arg);
8207 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8208 {
2426241c 8209 tree type = TREE_TYPE (TREE_TYPE (fndecl));
277f8dd2 8210 REAL_VALUE_TYPE r;
8211
8212 real_ceil (&r, TYPE_MODE (type), &x);
8213 return build_real (type, r);
8214 }
8215 }
8216
389dd41b 8217 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
277f8dd2 8218}
8219
c2f47e15 8220/* Fold function call to builtin round, roundf or roundl with argument ARG.
8221 Return NULL_TREE if no simplification can be made. */
89ab3887 8222
8223static tree
389dd41b 8224fold_builtin_round (location_t loc, tree fndecl, tree arg)
89ab3887 8225{
c2f47e15 8226 if (!validate_arg (arg, REAL_TYPE))
8227 return NULL_TREE;
89ab3887 8228
34f17811 8229 /* Optimize round of constant value. */
f96bd2bf 8230 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
89ab3887 8231 {
8232 REAL_VALUE_TYPE x;
8233
8234 x = TREE_REAL_CST (arg);
8235 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
8236 {
2426241c 8237 tree type = TREE_TYPE (TREE_TYPE (fndecl));
89ab3887 8238 REAL_VALUE_TYPE r;
8239
8240 real_round (&r, TYPE_MODE (type), &x);
8241 return build_real (type, r);
8242 }
8243 }
8244
389dd41b 8245 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
89ab3887 8246}
8247
34f17811 8248/* Fold function call to builtin lround, lroundf or lroundl (or the
c2f47e15 8249 corresponding long long versions) and other rounding functions. ARG
8250 is the argument to the call. Return NULL_TREE if no simplification
8251 can be made. */
34f17811 8252
8253static tree
389dd41b 8254fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
34f17811 8255{
c2f47e15 8256 if (!validate_arg (arg, REAL_TYPE))
8257 return NULL_TREE;
34f17811 8258
8259 /* Optimize lround of constant value. */
f96bd2bf 8260 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
34f17811 8261 {
8262 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8263
776a7bab 8264 if (real_isfinite (&x))
34f17811 8265 {
2426241c 8266 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
ca9b061d 8267 tree ftype = TREE_TYPE (arg);
34f17811 8268 REAL_VALUE_TYPE r;
e913b5cd 8269 bool fail = false;
34f17811 8270
ad52b9b7 8271 switch (DECL_FUNCTION_CODE (fndecl))
8272 {
80ff6494 8273 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 8274 CASE_FLT_FN (BUILT_IN_LFLOOR):
8275 CASE_FLT_FN (BUILT_IN_LLFLOOR):
ad52b9b7 8276 real_floor (&r, TYPE_MODE (ftype), &x);
8277 break;
8278
80ff6494 8279 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 8280 CASE_FLT_FN (BUILT_IN_LCEIL):
8281 CASE_FLT_FN (BUILT_IN_LLCEIL):
ac148751 8282 real_ceil (&r, TYPE_MODE (ftype), &x);
8283 break;
8284
80ff6494 8285 CASE_FLT_FN (BUILT_IN_IROUND):
4f35b1fc 8286 CASE_FLT_FN (BUILT_IN_LROUND):
8287 CASE_FLT_FN (BUILT_IN_LLROUND):
ad52b9b7 8288 real_round (&r, TYPE_MODE (ftype), &x);
8289 break;
8290
8291 default:
8292 gcc_unreachable ();
8293 }
8294
ab2c1de8 8295 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
e913b5cd 8296 if (!fail)
8297 return wide_int_to_tree (itype, val);
34f17811 8298 }
8299 }
8300
acc2b92e 8301 switch (DECL_FUNCTION_CODE (fndecl))
8302 {
8303 CASE_FLT_FN (BUILT_IN_LFLOOR):
8304 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8305 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8306 if (tree_expr_nonnegative_p (arg))
389dd41b 8307 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8308 TREE_TYPE (TREE_TYPE (fndecl)), arg);
acc2b92e 8309 break;
8310 default:;
8311 }
8312
389dd41b 8313 return fold_fixed_mathfn (loc, fndecl, arg);
34f17811 8314}
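/* Illustrative examples: constant arguments are folded outright, e.g.
   lround (2.5) to 3 and ifloor (-2.25) to -3, provided the value fits the
   integer return type; in addition, lfloor (x) and llfloor (x) with a
   provably nonnegative x degenerate to a plain FIX_TRUNC_EXPR, i.e. an
   ordinary truncating float-to-integer conversion.  */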
8315
70fb4c07 8316/* Fold function call to builtin ffs, clz, ctz, popcount and parity
c2f47e15 8317 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8318 the argument to the call. Return NULL_TREE if no simplification can
8319 be made. */
70fb4c07 8320
8321static tree
c2f47e15 8322fold_builtin_bitop (tree fndecl, tree arg)
70fb4c07 8323{
c2f47e15 8324 if (!validate_arg (arg, INTEGER_TYPE))
70fb4c07 8325 return NULL_TREE;
8326
8327 /* Optimize for constant argument. */
f96bd2bf 8328 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
70fb4c07 8329 {
1cee90ad 8330 tree type = TREE_TYPE (arg);
796b6678 8331 int result;
70fb4c07 8332
8333 switch (DECL_FUNCTION_CODE (fndecl))
8334 {
4f35b1fc 8335 CASE_INT_FN (BUILT_IN_FFS):
ab2c1de8 8336 result = wi::ffs (arg);
70fb4c07 8337 break;
8338
4f35b1fc 8339 CASE_INT_FN (BUILT_IN_CLZ):
1cee90ad 8340 if (wi::ne_p (arg, 0))
8341 result = wi::clz (arg);
8342 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8343 result = TYPE_PRECISION (type);
70fb4c07 8344 break;
8345
4f35b1fc 8346 CASE_INT_FN (BUILT_IN_CTZ):
1cee90ad 8347 if (wi::ne_p (arg, 0))
8348 result = wi::ctz (arg);
8349 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8350 result = TYPE_PRECISION (type);
70fb4c07 8351 break;
8352
6aaa1f9e 8353 CASE_INT_FN (BUILT_IN_CLRSB):
ab2c1de8 8354 result = wi::clrsb (arg);
6aaa1f9e 8355 break;
8356
4f35b1fc 8357 CASE_INT_FN (BUILT_IN_POPCOUNT):
ab2c1de8 8358 result = wi::popcount (arg);
70fb4c07 8359 break;
8360
4f35b1fc 8361 CASE_INT_FN (BUILT_IN_PARITY):
ab2c1de8 8362 result = wi::parity (arg);
70fb4c07 8363 break;
8364
8365 default:
64db345d 8366 gcc_unreachable ();
70fb4c07 8367 }
8368
796b6678 8369 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
70fb4c07 8370 }
8371
8372 return NULL_TREE;
8373}
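/* Illustrative examples of the constant folding above:
   __builtin_popcount (0xff) folds to 8, __builtin_parity (7) to 1,
   __builtin_ffs (0x10) to 5, and on a target with 32-bit int
   __builtin_clz (1) folds to 31 and __builtin_ctz (8) to 3.  */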
8374
74bdbe96 8375/* Fold function call to builtin_bswap and the short, long and long long
42791117 8376 variants. Return NULL_TREE if no simplification can be made. */
8377static tree
c2f47e15 8378fold_builtin_bswap (tree fndecl, tree arg)
42791117 8379{
c2f47e15 8380 if (! validate_arg (arg, INTEGER_TYPE))
8381 return NULL_TREE;
42791117 8382
8383 /* Optimize constant value. */
f96bd2bf 8384 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
42791117 8385 {
74bdbe96 8386 tree type = TREE_TYPE (TREE_TYPE (fndecl));
42791117 8387
42791117 8388 switch (DECL_FUNCTION_CODE (fndecl))
8389 {
74bdbe96 8390 case BUILT_IN_BSWAP16:
42791117 8391 case BUILT_IN_BSWAP32:
8392 case BUILT_IN_BSWAP64:
8393 {
e913b5cd 8394 signop sgn = TYPE_SIGN (type);
ddb1be65 8395 tree result =
796b6678 8396 wide_int_to_tree (type,
8397 wide_int::from (arg, TYPE_PRECISION (type),
8398 sgn).bswap ());
e913b5cd 8399 return result;
42791117 8400 }
42791117 8401 default:
8402 gcc_unreachable ();
8403 }
42791117 8404 }
8405
8406 return NULL_TREE;
8407}
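/* Illustrative examples: __builtin_bswap16 (0x1234) folds to 0x3412,
   __builtin_bswap32 (0x12345678) to 0x78563412, and
   __builtin_bswap64 (0x0102030405060708) to 0x0807060504030201.  */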
c2f47e15 8408
f0c477f2 8409/* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8410 NULL_TREE if no simplification can be made. */
8411
8412static tree
389dd41b 8413fold_builtin_hypot (location_t loc, tree fndecl,
8414 tree arg0, tree arg1, tree type)
f0c477f2 8415{
e6ab33d8 8416 tree res, narg0, narg1;
f0c477f2 8417
c2f47e15 8418 if (!validate_arg (arg0, REAL_TYPE)
8419 || !validate_arg (arg1, REAL_TYPE))
f0c477f2 8420 return NULL_TREE;
8421
8422 /* Calculate the result when the argument is a constant. */
8423 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8424 return res;
48e1416a 8425
6c95f21c 8426 /* If either argument to hypot has a negate or abs, strip that off.
8427 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
e6ab33d8 8428 narg0 = fold_strip_sign_ops (arg0);
8429 narg1 = fold_strip_sign_ops (arg1);
8430 if (narg0 || narg1)
8431 {
48e1416a 8432 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
c2f47e15 8433 narg1 ? narg1 : arg1);
6c95f21c 8434 }
48e1416a 8435
f0c477f2 8436 /* If either argument is zero, hypot is fabs of the other. */
8437 if (real_zerop (arg0))
389dd41b 8438 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
f0c477f2 8439 else if (real_zerop (arg1))
389dd41b 8440 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
48e1416a 8441
6c95f21c 8442 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8443 if (flag_unsafe_math_optimizations
8444 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
f0c477f2 8445 {
2e7ca27b 8446 const REAL_VALUE_TYPE sqrt2_trunc
7910b2fb 8447 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
389dd41b 8448 return fold_build2_loc (loc, MULT_EXPR, type,
8449 fold_build1_loc (loc, ABS_EXPR, type, arg0),
2e7ca27b 8450 build_real (type, sqrt2_trunc));
f0c477f2 8451 }
8452
f0c477f2 8453 return NULL_TREE;
8454}
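/* Illustrative examples: hypot (3.0, 4.0) is evaluated (to 5.0) via MPFR,
   sign-changing wrappers are stripped so hypot (-x, fabs (y)) becomes
   hypot (x, y), hypot (x, 0.0) folds to fabs (x), and with
   -funsafe-math-optimizations hypot (x, x) becomes fabs (x) * sqrt (2).  */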
8455
8456
e6e27594 8457/* Fold a builtin function call to pow, powf, or powl. Return
8458 NULL_TREE if no simplification can be made. */
8459static tree
389dd41b 8460fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
e6e27594 8461{
f0c477f2 8462 tree res;
e6e27594 8463
c2f47e15 8464 if (!validate_arg (arg0, REAL_TYPE)
8465 || !validate_arg (arg1, REAL_TYPE))
e6e27594 8466 return NULL_TREE;
8467
f0c477f2 8468 /* Calculate the result when the argument is a constant. */
8469 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8470 return res;
8471
e6e27594 8472 /* Optimize pow(1.0,y) = 1.0. */
8473 if (real_onep (arg0))
389dd41b 8474 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
e6e27594 8475
8476 if (TREE_CODE (arg1) == REAL_CST
f96bd2bf 8477 && !TREE_OVERFLOW (arg1))
e6e27594 8478 {
198d9bbe 8479 REAL_VALUE_TYPE cint;
e6e27594 8480 REAL_VALUE_TYPE c;
198d9bbe 8481 HOST_WIDE_INT n;
8482
e6e27594 8483 c = TREE_REAL_CST (arg1);
8484
8485 /* Optimize pow(x,0.0) = 1.0. */
8486 if (REAL_VALUES_EQUAL (c, dconst0))
389dd41b 8487 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
e6e27594 8488 arg0);
8489
8490 /* Optimize pow(x,1.0) = x. */
8491 if (REAL_VALUES_EQUAL (c, dconst1))
8492 return arg0;
8493
8494 /* Optimize pow(x,-1.0) = 1.0/x. */
8495 if (REAL_VALUES_EQUAL (c, dconstm1))
389dd41b 8496 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8497 build_real (type, dconst1), arg0);
e6e27594 8498
8499 /* Optimize pow(x,0.5) = sqrt(x). */
8500 if (flag_unsafe_math_optimizations
8501 && REAL_VALUES_EQUAL (c, dconsthalf))
8502 {
8503 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8504
8505 if (sqrtfn != NULL_TREE)
389dd41b 8506 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
e6e27594 8507 }
8508
feb5b3eb 8509 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8510 if (flag_unsafe_math_optimizations)
8511 {
8512 const REAL_VALUE_TYPE dconstroot
7910b2fb 8513 = real_value_truncate (TYPE_MODE (type), dconst_third ());
feb5b3eb 8514
8515 if (REAL_VALUES_EQUAL (c, dconstroot))
8516 {
8517 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8518 if (cbrtfn != NULL_TREE)
389dd41b 8519 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
feb5b3eb 8520 }
8521 }
8522
198d9bbe 8523 /* Check for an integer exponent. */
8524 n = real_to_integer (&c);
e913b5cd 8525 real_from_integer (&cint, VOIDmode, n, SIGNED);
198d9bbe 8526 if (real_identical (&c, &cint))
e6e27594 8527 {
a2b30b48 8528 /* Attempt to evaluate pow at compile-time, unless this should
8529 raise an exception. */
198d9bbe 8530 if (TREE_CODE (arg0) == REAL_CST
a2b30b48 8531 && !TREE_OVERFLOW (arg0)
8532 && (n > 0
8533 || (!flag_trapping_math && !flag_errno_math)
8534 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
e6e27594 8535 {
8536 REAL_VALUE_TYPE x;
8537 bool inexact;
8538
8539 x = TREE_REAL_CST (arg0);
8540 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8541 if (flag_unsafe_math_optimizations || !inexact)
8542 return build_real (type, x);
8543 }
198d9bbe 8544
8545 /* Strip sign ops from even integer powers. */
8546 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8547 {
8548 tree narg0 = fold_strip_sign_ops (arg0);
8549 if (narg0)
389dd41b 8550 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
198d9bbe 8551 }
e6e27594 8552 }
8553 }
8554
cdfeb715 8555 if (flag_unsafe_math_optimizations)
e6e27594 8556 {
cdfeb715 8557 const enum built_in_function fcode = builtin_mathfn_code (arg0);
e6e27594 8558
cdfeb715 8559 /* Optimize pow(expN(x),y) = expN(x*y). */
8560 if (BUILTIN_EXPONENT_P (fcode))
a0c938f0 8561 {
c2f47e15 8562 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8563 tree arg = CALL_EXPR_ARG (arg0, 0);
389dd41b 8564 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8565 return build_call_expr_loc (loc, expfn, 1, arg);
cdfeb715 8566 }
e6e27594 8567
cdfeb715 8568 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8569 if (BUILTIN_SQRT_P (fcode))
a0c938f0 8570 {
c2f47e15 8571 tree narg0 = CALL_EXPR_ARG (arg0, 0);
389dd41b 8572 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8573 build_real (type, dconsthalf));
389dd41b 8574 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
cdfeb715 8575 }
8576
8577 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8578 if (BUILTIN_CBRT_P (fcode))
a0c938f0 8579 {
c2f47e15 8580 tree arg = CALL_EXPR_ARG (arg0, 0);
cdfeb715 8581 if (tree_expr_nonnegative_p (arg))
8582 {
8583 const REAL_VALUE_TYPE dconstroot
7910b2fb 8584 = real_value_truncate (TYPE_MODE (type), dconst_third ());
389dd41b 8585 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
49d00087 8586 build_real (type, dconstroot));
389dd41b 8587 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
cdfeb715 8588 }
8589 }
a0c938f0 8590
49e436b5 8591 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
c2f47e15 8592 if (fcode == BUILT_IN_POW
8593 || fcode == BUILT_IN_POWF
8594 || fcode == BUILT_IN_POWL)
a0c938f0 8595 {
c2f47e15 8596 tree arg00 = CALL_EXPR_ARG (arg0, 0);
49e436b5 8597 if (tree_expr_nonnegative_p (arg00))
8598 {
8599 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8600 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8601 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8602 }
cdfeb715 8603 }
e6e27594 8604 }
cdfeb715 8605
e6e27594 8606 return NULL_TREE;
8607}
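/* Illustrative examples: pow (x, 0.0) folds to 1.0 and pow (x, 1.0) to x;
   pow (x, -1.0) becomes 1.0 / x; with -funsafe-math-optimizations
   pow (x, 0.5) becomes sqrt (x) and pow (sqrt (x), y) becomes
   pow (x, y * 0.5); and a constant call with an integral exponent such as
   pow (2.0, 10.0) is evaluated to 1024.0 at compile time.  */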
8608
c2f47e15 8609/* Fold a builtin function call to powi, powif, or powil with argument ARG.
8610 Return NULL_TREE if no simplification can be made. */
b4d0c20c 8611static tree
389dd41b 8612fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
c2f47e15 8613 tree arg0, tree arg1, tree type)
b4d0c20c 8614{
c2f47e15 8615 if (!validate_arg (arg0, REAL_TYPE)
8616 || !validate_arg (arg1, INTEGER_TYPE))
b4d0c20c 8617 return NULL_TREE;
8618
8619 /* Optimize pow(1.0,y) = 1.0. */
8620 if (real_onep (arg0))
389dd41b 8621 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
b4d0c20c 8622
e913b5cd 8623 if (tree_fits_shwi_p (arg1))
b4d0c20c 8624 {
e913b5cd 8625 HOST_WIDE_INT c = tree_to_shwi (arg1);
b4d0c20c 8626
8627 /* Evaluate powi at compile-time. */
8628 if (TREE_CODE (arg0) == REAL_CST
f96bd2bf 8629 && !TREE_OVERFLOW (arg0))
b4d0c20c 8630 {
8631 REAL_VALUE_TYPE x;
8632 x = TREE_REAL_CST (arg0);
8633 real_powi (&x, TYPE_MODE (type), &x, c);
8634 return build_real (type, x);
8635 }
8636
8637 /* Optimize pow(x,0) = 1.0. */
8638 if (c == 0)
389dd41b 8639 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
b4d0c20c 8640 arg0);
8641
8642 /* Optimize pow(x,1) = x. */
8643 if (c == 1)
8644 return arg0;
8645
8646 /* Optimize pow(x,-1) = 1.0/x. */
8647 if (c == -1)
389dd41b 8648 return fold_build2_loc (loc, RDIV_EXPR, type,
49d00087 8649 build_real (type, dconst1), arg0);
b4d0c20c 8650 }
8651
8652 return NULL_TREE;
8653}
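/* Illustrative examples: __builtin_powi (x, 0) folds to 1.0,
   __builtin_powi (x, 1) to x, __builtin_powi (x, -1) to 1.0 / x, and a
   constant base is evaluated outright, e.g. __builtin_powi (2.0, 10)
   folds to 1024.0.  */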
8654
8918c507 8655/* A subroutine of fold_builtin to fold the various exponent
c2f47e15 8656 functions. Return NULL_TREE if no simplification can be made.
debf9994 8657 FUNC is the corresponding MPFR exponent function. */
8918c507 8658
8659static tree
389dd41b 8660fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
debf9994 8661 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8918c507 8662{
c2f47e15 8663 if (validate_arg (arg, REAL_TYPE))
8918c507 8664 {
8918c507 8665 tree type = TREE_TYPE (TREE_TYPE (fndecl));
29f4cd78 8666 tree res;
48e1416a 8667
debf9994 8668 /* Calculate the result when the argument is a constant. */
728bac60 8669 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
debf9994 8670 return res;
8918c507 8671
8672 /* Optimize expN(logN(x)) = x. */
8673 if (flag_unsafe_math_optimizations)
a0c938f0 8674 {
8918c507 8675 const enum built_in_function fcode = builtin_mathfn_code (arg);
8676
debf9994 8677 if ((func == mpfr_exp
8918c507 8678 && (fcode == BUILT_IN_LOG
8679 || fcode == BUILT_IN_LOGF
8680 || fcode == BUILT_IN_LOGL))
debf9994 8681 || (func == mpfr_exp2
8918c507 8682 && (fcode == BUILT_IN_LOG2
8683 || fcode == BUILT_IN_LOG2F
8684 || fcode == BUILT_IN_LOG2L))
debf9994 8685 || (func == mpfr_exp10
8918c507 8686 && (fcode == BUILT_IN_LOG10
8687 || fcode == BUILT_IN_LOG10F
8688 || fcode == BUILT_IN_LOG10L)))
389dd41b 8689 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8918c507 8690 }
8691 }
8692
c2f47e15 8693 return NULL_TREE;
8918c507 8694}
8695
7959b13b 8696/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
8697 arguments to the call, and TYPE is its return type.
8698 Return NULL_TREE if no simplification can be made. */
8699
8700static tree
389dd41b 8701fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7959b13b 8702{
8703 if (!validate_arg (arg1, POINTER_TYPE)
8704 || !validate_arg (arg2, INTEGER_TYPE)
8705 || !validate_arg (len, INTEGER_TYPE))
8706 return NULL_TREE;
8707 else
8708 {
8709 const char *p1;
8710
8711 if (TREE_CODE (arg2) != INTEGER_CST
e913b5cd 8712 || !tree_fits_uhwi_p (len))
7959b13b 8713 return NULL_TREE;
8714
8715 p1 = c_getstr (arg1);
8716 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
8717 {
8718 char c;
8719 const char *r;
8720 tree tem;
8721
8722 if (target_char_cast (arg2, &c))
8723 return NULL_TREE;
8724
e913b5cd 8725 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7959b13b 8726
8727 if (r == NULL)
8728 return build_int_cst (TREE_TYPE (arg1), 0);
8729
2cc66f2a 8730 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
389dd41b 8731 return fold_convert_loc (loc, type, tem);
7959b13b 8732 }
8733 return NULL_TREE;
8734 }
8735}
8736
c2f47e15 8737/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8738 Return NULL_TREE if no simplification can be made. */
9c8a1629 8739
8740static tree
389dd41b 8741fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8742{
c4fef134 8743 const char *p1, *p2;
9c8a1629 8744
c2f47e15 8745 if (!validate_arg (arg1, POINTER_TYPE)
8746 || !validate_arg (arg2, POINTER_TYPE)
8747 || !validate_arg (len, INTEGER_TYPE))
8748 return NULL_TREE;
9c8a1629 8749
8750 /* If the LEN parameter is zero, return zero. */
8751 if (integer_zerop (len))
389dd41b 8752 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8753 arg1, arg2);
9c8a1629 8754
8755 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8756 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8757 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
c4fef134 8758
8759 p1 = c_getstr (arg1);
8760 p2 = c_getstr (arg2);
8761
8762 /* If all arguments are constant, and the value of len is not greater
8763 than the lengths of arg1 and arg2, evaluate at compile-time. */
e913b5cd 8764 if (tree_fits_uhwi_p (len) && p1 && p2
c4fef134 8765 && compare_tree_int (len, strlen (p1) + 1) <= 0
8766 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8767 {
e913b5cd 8768 const int r = memcmp (p1, p2, tree_to_uhwi (len));
c4fef134 8769
8770 if (r > 0)
8771 return integer_one_node;
8772 else if (r < 0)
8773 return integer_minus_one_node;
8774 else
8775 return integer_zero_node;
8776 }
8777
8778 /* If len parameter is one, return an expression corresponding to
 8779      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
e913b5cd 8780 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8781 {
8782 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8783 tree cst_uchar_ptr_node
8784 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8785
389dd41b 8786 tree ind1
8787 = fold_convert_loc (loc, integer_type_node,
8788 build1 (INDIRECT_REF, cst_uchar_node,
8789 fold_convert_loc (loc,
8790 cst_uchar_ptr_node,
c4fef134 8791 arg1)));
389dd41b 8792 tree ind2
8793 = fold_convert_loc (loc, integer_type_node,
8794 build1 (INDIRECT_REF, cst_uchar_node,
8795 fold_convert_loc (loc,
8796 cst_uchar_ptr_node,
c4fef134 8797 arg2)));
389dd41b 8798 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
c4fef134 8799 }
9c8a1629 8800
c2f47e15 8801 return NULL_TREE;
9c8a1629 8802}
8803
c2f47e15 8804/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8805 Return NULL_TREE if no simplification can be made. */
9c8a1629 8806
8807static tree
389dd41b 8808fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9c8a1629 8809{
9c8a1629 8810 const char *p1, *p2;
8811
c2f47e15 8812 if (!validate_arg (arg1, POINTER_TYPE)
8813 || !validate_arg (arg2, POINTER_TYPE))
8814 return NULL_TREE;
9c8a1629 8815
8816 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8817 if (operand_equal_p (arg1, arg2, 0))
c4fef134 8818 return integer_zero_node;
9c8a1629 8819
8820 p1 = c_getstr (arg1);
8821 p2 = c_getstr (arg2);
8822
8823 if (p1 && p2)
8824 {
9c8a1629 8825 const int i = strcmp (p1, p2);
8826 if (i < 0)
c4fef134 8827 return integer_minus_one_node;
9c8a1629 8828 else if (i > 0)
c4fef134 8829 return integer_one_node;
9c8a1629 8830 else
c4fef134 8831 return integer_zero_node;
8832 }
8833
8834 /* If the second arg is "", return *(const unsigned char*)arg1. */
8835 if (p2 && *p2 == '\0')
8836 {
8837 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8838 tree cst_uchar_ptr_node
8839 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8840
389dd41b 8841 return fold_convert_loc (loc, integer_type_node,
8842 build1 (INDIRECT_REF, cst_uchar_node,
8843 fold_convert_loc (loc,
8844 cst_uchar_ptr_node,
8845 arg1)));
c4fef134 8846 }
8847
8848 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8849 if (p1 && *p1 == '\0')
8850 {
8851 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8852 tree cst_uchar_ptr_node
8853 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8854
389dd41b 8855 tree temp
8856 = fold_convert_loc (loc, integer_type_node,
8857 build1 (INDIRECT_REF, cst_uchar_node,
8858 fold_convert_loc (loc,
8859 cst_uchar_ptr_node,
c4fef134 8860 arg2)));
389dd41b 8861 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9c8a1629 8862 }
8863
c2f47e15 8864 return NULL_TREE;
9c8a1629 8865}
8866
c2f47e15 8867/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8868 Return NULL_TREE if no simplification can be made. */
9c8a1629 8869
8870static tree
389dd41b 8871fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9c8a1629 8872{
9c8a1629 8873 const char *p1, *p2;
8874
c2f47e15 8875 if (!validate_arg (arg1, POINTER_TYPE)
8876 || !validate_arg (arg2, POINTER_TYPE)
8877 || !validate_arg (len, INTEGER_TYPE))
8878 return NULL_TREE;
9c8a1629 8879
8880 /* If the LEN parameter is zero, return zero. */
8881 if (integer_zerop (len))
389dd41b 8882 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
c4fef134 8883 arg1, arg2);
9c8a1629 8884
8885 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8886 if (operand_equal_p (arg1, arg2, 0))
389dd41b 8887 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9c8a1629 8888
8889 p1 = c_getstr (arg1);
8890 p2 = c_getstr (arg2);
8891
e913b5cd 8892 if (tree_fits_uhwi_p (len) && p1 && p2)
9c8a1629 8893 {
e913b5cd 8894 const int i = strncmp (p1, p2, tree_to_uhwi (len));
c4fef134 8895 if (i > 0)
8896 return integer_one_node;
8897 else if (i < 0)
8898 return integer_minus_one_node;
9c8a1629 8899 else
c4fef134 8900 return integer_zero_node;
8901 }
8902
8903 /* If the second arg is "", and the length is greater than zero,
8904 return *(const unsigned char*)arg1. */
8905 if (p2 && *p2 == '\0'
8906 && TREE_CODE (len) == INTEGER_CST
8907 && tree_int_cst_sgn (len) == 1)
8908 {
8909 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8910 tree cst_uchar_ptr_node
8911 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8912
389dd41b 8913 return fold_convert_loc (loc, integer_type_node,
8914 build1 (INDIRECT_REF, cst_uchar_node,
8915 fold_convert_loc (loc,
8916 cst_uchar_ptr_node,
8917 arg1)));
c4fef134 8918 }
8919
8920 /* If the first arg is "", and the length is greater than zero,
8921 return -*(const unsigned char*)arg2. */
8922 if (p1 && *p1 == '\0'
8923 && TREE_CODE (len) == INTEGER_CST
8924 && tree_int_cst_sgn (len) == 1)
8925 {
8926 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8927 tree cst_uchar_ptr_node
8928 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8929
389dd41b 8930 tree temp = fold_convert_loc (loc, integer_type_node,
8931 build1 (INDIRECT_REF, cst_uchar_node,
8932 fold_convert_loc (loc,
8933 cst_uchar_ptr_node,
8934 arg2)));
8935 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
c4fef134 8936 }
8937
8938 /* If len parameter is one, return an expression corresponding to
 8939      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
e913b5cd 8940 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
c4fef134 8941 {
8942 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
4f1b71c0 8943 tree cst_uchar_ptr_node
8944 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8945
389dd41b 8946 tree ind1 = fold_convert_loc (loc, integer_type_node,
8947 build1 (INDIRECT_REF, cst_uchar_node,
8948 fold_convert_loc (loc,
8949 cst_uchar_ptr_node,
8950 arg1)));
8951 tree ind2 = fold_convert_loc (loc, integer_type_node,
8952 build1 (INDIRECT_REF, cst_uchar_node,
8953 fold_convert_loc (loc,
8954 cst_uchar_ptr_node,
8955 arg2)));
8956 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9c8a1629 8957 }
8958
c2f47e15 8959 return NULL_TREE;
9c8a1629 8960}
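/* Illustrative examples: strncmp (s, t, 0) and strncmp (s, s, n) fold to
   0; a fully constant call such as strncmp ("abc", "abd", 3) is evaluated
   on the host and returned as -1; strncmp (s, "", n) with a positive
   constant n becomes *(const unsigned char *) s; and a length of exactly
   1 becomes the difference of the first bytes of the two arguments.  */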
8961
c2f47e15 8962/* Fold function call to builtin signbit, signbitf or signbitl with argument
8963 ARG. Return NULL_TREE if no simplification can be made. */
27f261ef 8964
8965static tree
389dd41b 8966fold_builtin_signbit (location_t loc, tree arg, tree type)
27f261ef 8967{
c2f47e15 8968 if (!validate_arg (arg, REAL_TYPE))
27f261ef 8969 return NULL_TREE;
8970
27f261ef 8971 /* If ARG is a compile-time constant, determine the result. */
8972 if (TREE_CODE (arg) == REAL_CST
f96bd2bf 8973 && !TREE_OVERFLOW (arg))
27f261ef 8974 {
8975 REAL_VALUE_TYPE c;
8976
8977 c = TREE_REAL_CST (arg);
385f3f36 8978 return (REAL_VALUE_NEGATIVE (c)
8979 ? build_one_cst (type)
8980 : build_zero_cst (type));
27f261ef 8981 }
8982
8983 /* If ARG is non-negative, the result is always zero. */
8984 if (tree_expr_nonnegative_p (arg))
389dd41b 8985 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
27f261ef 8986
8987 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
fe994837 8988 if (!HONOR_SIGNED_ZEROS (arg))
de67cbb8 8989 return fold_convert (type,
8990 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8991 build_real (TREE_TYPE (arg), dconst0)));
27f261ef 8992
8993 return NULL_TREE;
8994}
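/* Illustrative examples: signbit (-3.0) folds to 1 and signbit (2.5) to
   0; signbit (fabs (x)) folds to 0 because the argument is known
   nonnegative; and for formats without signed zeros the call degenerates
   to the comparison x < 0.0.  */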
8995
c2f47e15 8996/* Fold function call to builtin copysign, copysignf or copysignl with
8997 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8998 be made. */
467214fd 8999
9000static tree
389dd41b 9001fold_builtin_copysign (location_t loc, tree fndecl,
9002 tree arg1, tree arg2, tree type)
467214fd 9003{
c2f47e15 9004 tree tem;
467214fd 9005
c2f47e15 9006 if (!validate_arg (arg1, REAL_TYPE)
9007 || !validate_arg (arg2, REAL_TYPE))
467214fd 9008 return NULL_TREE;
9009
467214fd 9010 /* copysign(X,X) is X. */
9011 if (operand_equal_p (arg1, arg2, 0))
389dd41b 9012 return fold_convert_loc (loc, type, arg1);
467214fd 9013
9014 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9015 if (TREE_CODE (arg1) == REAL_CST
9016 && TREE_CODE (arg2) == REAL_CST
f96bd2bf 9017 && !TREE_OVERFLOW (arg1)
9018 && !TREE_OVERFLOW (arg2))
467214fd 9019 {
9020 REAL_VALUE_TYPE c1, c2;
9021
9022 c1 = TREE_REAL_CST (arg1);
9023 c2 = TREE_REAL_CST (arg2);
749680e2 9024 /* c1.sign := c2.sign. */
467214fd 9025 real_copysign (&c1, &c2);
9026 return build_real (type, c1);
467214fd 9027 }
9028
9029 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9030 Remember to evaluate Y for side-effects. */
9031 if (tree_expr_nonnegative_p (arg2))
389dd41b 9032 return omit_one_operand_loc (loc, type,
9033 fold_build1_loc (loc, ABS_EXPR, type, arg1),
467214fd 9034 arg2);
9035
198d9bbe 9036 /* Strip sign changing operations for the first argument. */
9037 tem = fold_strip_sign_ops (arg1);
9038 if (tem)
389dd41b 9039 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
198d9bbe 9040
467214fd 9041 return NULL_TREE;
9042}
9043
c2f47e15 9044/* Fold a call to builtin isascii with argument ARG. */
d49367d4 9045
9046static tree
389dd41b 9047fold_builtin_isascii (location_t loc, tree arg)
d49367d4 9048{
c2f47e15 9049 if (!validate_arg (arg, INTEGER_TYPE))
9050 return NULL_TREE;
d49367d4 9051 else
9052 {
9053 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
c90b5d40 9054 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9055 build_int_cst (integer_type_node,
c90b5d40 9056 ~ (unsigned HOST_WIDE_INT) 0x7f));
389dd41b 9057 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7002a1c8 9058 arg, integer_zero_node);
d49367d4 9059 }
9060}
9061
c2f47e15 9062/* Fold a call to builtin toascii with argument ARG. */
d49367d4 9063
9064static tree
389dd41b 9065fold_builtin_toascii (location_t loc, tree arg)
d49367d4 9066{
c2f47e15 9067 if (!validate_arg (arg, INTEGER_TYPE))
9068 return NULL_TREE;
48e1416a 9069
c2f47e15 9070 /* Transform toascii(c) -> (c & 0x7f). */
389dd41b 9071 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7002a1c8 9072 build_int_cst (integer_type_node, 0x7f));
d49367d4 9073}
9074
c2f47e15 9075/* Fold a call to builtin isdigit with argument ARG. */
df1cf42e 9076
9077static tree
389dd41b 9078fold_builtin_isdigit (location_t loc, tree arg)
df1cf42e 9079{
c2f47e15 9080 if (!validate_arg (arg, INTEGER_TYPE))
9081 return NULL_TREE;
df1cf42e 9082 else
9083 {
9084 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
624d37a6 9085 /* According to the C standard, isdigit is unaffected by locale.
9086 However, it definitely is affected by the target character set. */
624d37a6 9087 unsigned HOST_WIDE_INT target_digit0
9088 = lang_hooks.to_target_charset ('0');
9089
9090 if (target_digit0 == 0)
9091 return NULL_TREE;
9092
389dd41b 9093 arg = fold_convert_loc (loc, unsigned_type_node, arg);
c90b5d40 9094 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9095 build_int_cst (unsigned_type_node, target_digit0));
389dd41b 9096 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
f2532264 9097 build_int_cst (unsigned_type_node, 9));
df1cf42e 9098 }
9099}
27f261ef 9100
c2f47e15 9101/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
d1aade50 9102
9103static tree
389dd41b 9104fold_builtin_fabs (location_t loc, tree arg, tree type)
d1aade50 9105{
c2f47e15 9106 if (!validate_arg (arg, REAL_TYPE))
9107 return NULL_TREE;
d1aade50 9108
389dd41b 9109 arg = fold_convert_loc (loc, type, arg);
d1aade50 9110 if (TREE_CODE (arg) == REAL_CST)
9111 return fold_abs_const (arg, type);
389dd41b 9112 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9113}
9114
c2f47e15 9115/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
d1aade50 9116
9117static tree
389dd41b 9118fold_builtin_abs (location_t loc, tree arg, tree type)
d1aade50 9119{
c2f47e15 9120 if (!validate_arg (arg, INTEGER_TYPE))
9121 return NULL_TREE;
d1aade50 9122
389dd41b 9123 arg = fold_convert_loc (loc, type, arg);
d1aade50 9124 if (TREE_CODE (arg) == INTEGER_CST)
9125 return fold_abs_const (arg, type);
389dd41b 9126 return fold_build1_loc (loc, ABS_EXPR, type, arg);
d1aade50 9127}
9128
b9be572e 9129/* Fold a fma operation with arguments ARG[012]. */
9130
9131tree
9132fold_fma (location_t loc ATTRIBUTE_UNUSED,
9133 tree type, tree arg0, tree arg1, tree arg2)
9134{
9135 if (TREE_CODE (arg0) == REAL_CST
9136 && TREE_CODE (arg1) == REAL_CST
9137 && TREE_CODE (arg2) == REAL_CST)
9138 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9139
9140 return NULL_TREE;
9141}
9142
9143/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9144
9145static tree
9146fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9147{
9148 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 9149 && validate_arg (arg1, REAL_TYPE)
9150 && validate_arg (arg2, REAL_TYPE))
b9be572e 9151 {
9152 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9153 if (tem)
9154 return tem;
9155
9156 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9157 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9158 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9159 }
9160 return NULL_TREE;
9161}
9162
d4a43a03 9163/* Fold a call to builtin fmin or fmax. */
9164
9165static tree
389dd41b 9166fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9167 tree type, bool max)
d4a43a03 9168{
c2f47e15 9169 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
d4a43a03 9170 {
d4a43a03 9171 /* Calculate the result when the argument is a constant. */
9172 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9173
9174 if (res)
9175 return res;
9176
61fe3555 9177 /* If either argument is NaN, return the other one. Avoid the
9178 transformation if we get (and honor) a signalling NaN. Using
9179 omit_one_operand() ensures we create a non-lvalue. */
9180 if (TREE_CODE (arg0) == REAL_CST
9181 && real_isnan (&TREE_REAL_CST (arg0))
fe994837 9182 && (! HONOR_SNANS (arg0)
61fe3555 9183 || ! TREE_REAL_CST (arg0).signalling))
389dd41b 9184 return omit_one_operand_loc (loc, type, arg1, arg0);
61fe3555 9185 if (TREE_CODE (arg1) == REAL_CST
9186 && real_isnan (&TREE_REAL_CST (arg1))
fe994837 9187 && (! HONOR_SNANS (arg1)
61fe3555 9188 || ! TREE_REAL_CST (arg1).signalling))
389dd41b 9189 return omit_one_operand_loc (loc, type, arg0, arg1);
61fe3555 9190
d4a43a03 9191 /* Transform fmin/fmax(x,x) -> x. */
9192 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
389dd41b 9193 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9194
d4a43a03 9195 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9196 functions to return the numeric arg if the other one is NaN.
9197 These tree codes don't honor that, so only transform if
9198 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9199 handled, so we don't have to worry about it either. */
9200 if (flag_finite_math_only)
389dd41b 9201 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9202 fold_convert_loc (loc, type, arg0),
9203 fold_convert_loc (loc, type, arg1));
d4a43a03 9204 }
9205 return NULL_TREE;
9206}
9207
abe4dcf6 9208/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9209
9210static tree
389dd41b 9211fold_builtin_carg (location_t loc, tree arg, tree type)
abe4dcf6 9212{
239d491a 9213 if (validate_arg (arg, COMPLEX_TYPE)
9214 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
abe4dcf6 9215 {
9216 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
48e1416a 9217
abe4dcf6 9218 if (atan2_fn)
9219 {
c2f47e15 9220 tree new_arg = builtin_save_expr (arg);
389dd41b 9221 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9222 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9223 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
abe4dcf6 9224 }
9225 }
48e1416a 9226
abe4dcf6 9227 return NULL_TREE;
9228}
9229
cb2b9385 9230/* Fold a call to builtin logb/ilogb. */
9231
9232static tree
389dd41b 9233fold_builtin_logb (location_t loc, tree arg, tree rettype)
cb2b9385 9234{
9235 if (! validate_arg (arg, REAL_TYPE))
9236 return NULL_TREE;
48e1416a 9237
cb2b9385 9238 STRIP_NOPS (arg);
48e1416a 9239
cb2b9385 9240 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9241 {
9242 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9243
cb2b9385 9244 switch (value->cl)
9245 {
9246 case rvc_nan:
9247 case rvc_inf:
9248 /* If arg is Inf or NaN and we're logb, return it. */
9249 if (TREE_CODE (rettype) == REAL_TYPE)
7695fea9 9250 {
9251 /* For logb(-Inf) we have to return +Inf. */
9252 if (real_isinf (value) && real_isneg (value))
9253 {
9254 REAL_VALUE_TYPE tem;
9255 real_inf (&tem);
9256 return build_real (rettype, tem);
9257 }
9258 return fold_convert_loc (loc, rettype, arg);
9259 }
cb2b9385 9260 /* Fall through... */
9261 case rvc_zero:
 9262	  /* Zero may set errno and/or raise an exception for logb; also
9263 for ilogb we don't know FP_ILOGB0. */
9264 return NULL_TREE;
9265 case rvc_normal:
9266 /* For normal numbers, proceed iff radix == 2. In GCC,
9267 normalized significands are in the range [0.5, 1.0). We
9268 want the exponent as if they were [1.0, 2.0) so get the
9269 exponent and subtract 1. */
9270 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
389dd41b 9271 return fold_convert_loc (loc, rettype,
7002a1c8 9272 build_int_cst (integer_type_node,
389dd41b 9273 REAL_EXP (value)-1));
cb2b9385 9274 break;
9275 }
9276 }
48e1416a 9277
cb2b9385 9278 return NULL_TREE;
9279}
9280
9281/* Fold a call to builtin significand, if radix == 2. */
9282
9283static tree
389dd41b 9284fold_builtin_significand (location_t loc, tree arg, tree rettype)
cb2b9385 9285{
9286 if (! validate_arg (arg, REAL_TYPE))
9287 return NULL_TREE;
48e1416a 9288
cb2b9385 9289 STRIP_NOPS (arg);
48e1416a 9290
cb2b9385 9291 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9292 {
9293 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
48e1416a 9294
cb2b9385 9295 switch (value->cl)
9296 {
9297 case rvc_zero:
9298 case rvc_nan:
9299 case rvc_inf:
9300 /* If arg is +-0, +-Inf or +-NaN, then return it. */
389dd41b 9301 return fold_convert_loc (loc, rettype, arg);
cb2b9385 9302 case rvc_normal:
9303 /* For normal numbers, proceed iff radix == 2. */
9304 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9305 {
9306 REAL_VALUE_TYPE result = *value;
9307 /* In GCC, normalized significands are in the range [0.5,
9308 1.0). We want them to be [1.0, 2.0) so set the
9309 exponent to 1. */
9310 SET_REAL_EXP (&result, 1);
9311 return build_real (rettype, result);
9312 }
9313 break;
9314 }
9315 }
48e1416a 9316
cb2b9385 9317 return NULL_TREE;
9318}
9319
3838b9ae 9320/* Fold a call to builtin frexp, we can assume the base is 2. */
9321
9322static tree
389dd41b 9323fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
3838b9ae 9324{
9325 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9326 return NULL_TREE;
48e1416a 9327
3838b9ae 9328 STRIP_NOPS (arg0);
48e1416a 9329
3838b9ae 9330 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9331 return NULL_TREE;
48e1416a 9332
389dd41b 9333 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3838b9ae 9334
9335 /* Proceed if a valid pointer type was passed in. */
9336 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9337 {
9338 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9339 tree frac, exp;
48e1416a 9340
3838b9ae 9341 switch (value->cl)
9342 {
9343 case rvc_zero:
9344 /* For +-0, return (*exp = 0, +-0). */
9345 exp = integer_zero_node;
9346 frac = arg0;
9347 break;
9348 case rvc_nan:
9349 case rvc_inf:
9350 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
389dd41b 9351 return omit_one_operand_loc (loc, rettype, arg0, arg1);
3838b9ae 9352 case rvc_normal:
9353 {
9354 /* Since the frexp function always expects base 2, and in
9355 GCC normalized significands are already in the range
9356 [0.5, 1.0), we have exactly what frexp wants. */
9357 REAL_VALUE_TYPE frac_rvt = *value;
9358 SET_REAL_EXP (&frac_rvt, 0);
9359 frac = build_real (rettype, frac_rvt);
7002a1c8 9360 exp = build_int_cst (integer_type_node, REAL_EXP (value));
3838b9ae 9361 }
9362 break;
9363 default:
9364 gcc_unreachable ();
9365 }
48e1416a 9366
3838b9ae 9367 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
389dd41b 9368 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
3838b9ae 9369 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9370 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
3838b9ae 9371 }
9372
9373 return NULL_TREE;
9374}
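
/* For illustration (an editorial sketch, not from the original file):
   since GCC already stores significands in [0.5, 1.0), 24.0 is
   0.75 * 2**5, and a constant call such as

       double frac;  int e;
       frac = __builtin_frexp (24.0, &e);    frac == 0.75, e == 5

   folds to the compound expression (*&e = 5, 0.75) built above.  */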
9375
7587301b 9376/* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9377 then we can assume the base is two. If it's false, then we have to
9378 check the mode of the TYPE parameter in certain cases. */
9379
9380static tree
389dd41b 9381fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9382 tree type, bool ldexp)
7587301b 9383{
9384 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9385 {
9386 STRIP_NOPS (arg0);
9387 STRIP_NOPS (arg1);
9388
9389 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9390 if (real_zerop (arg0) || integer_zerop (arg1)
9391 || (TREE_CODE (arg0) == REAL_CST
776a7bab 9392 && !real_isfinite (&TREE_REAL_CST (arg0))))
389dd41b 9393 return omit_one_operand_loc (loc, type, arg0, arg1);
48e1416a 9394
7587301b 9395 /* If both arguments are constant, then try to evaluate it. */
9396 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9397 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
e913b5cd 9398 && tree_fits_shwi_p (arg1))
7587301b 9399 {
9400 /* Bound the maximum adjustment to twice the range of the
9401 mode's valid exponents. Use abs to ensure the range is
9402 positive as a sanity check. */
48e1416a 9403 const long max_exp_adj = 2 *
7587301b 9404 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9405 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9406
9407 /* Get the user-requested adjustment. */
e913b5cd 9408 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
48e1416a 9409
7587301b 9410 /* The requested adjustment must be inside this range. This
9411 is a preliminary cap to avoid things like overflow, we
9412 may still fail to compute the result for other reasons. */
9413 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9414 {
9415 REAL_VALUE_TYPE initial_result;
48e1416a 9416
7587301b 9417 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9418
9419 /* Ensure we didn't overflow. */
9420 if (! real_isinf (&initial_result))
9421 {
9422 const REAL_VALUE_TYPE trunc_result
9423 = real_value_truncate (TYPE_MODE (type), initial_result);
48e1416a 9424
7587301b 9425 /* Only proceed if the target mode can hold the
9426 resulting value. */
9427 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9428 return build_real (type, trunc_result);
9429 }
9430 }
9431 }
9432 }
9433
9434 return NULL_TREE;
9435}
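
/* For illustration (an editorial sketch, not from the original file):
   the inverse of the frexp fold.  With constant operands,

       __builtin_ldexp (0.75, 5)   ==>  24.0
       __builtin_ldexp (x, 0)      ==>  x
       __builtin_scalbn (0.75, 5)  ==>  24.0   (radix-2 modes only)

   whereas adjustments outside the +/- 2*(emax - emin) window, or
   results that overflow the target mode, are left to the library.  */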
9436
ebf8b4f5 9437/* Fold a call to builtin modf. */
9438
9439static tree
389dd41b 9440fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
ebf8b4f5 9441{
9442 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9443 return NULL_TREE;
48e1416a 9444
ebf8b4f5 9445 STRIP_NOPS (arg0);
48e1416a 9446
ebf8b4f5 9447 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9448 return NULL_TREE;
48e1416a 9449
389dd41b 9450 arg1 = build_fold_indirect_ref_loc (loc, arg1);
ebf8b4f5 9451
9452 /* Proceed if a valid pointer type was passed in. */
9453 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9454 {
9455 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9456 REAL_VALUE_TYPE trunc, frac;
9457
9458 switch (value->cl)
9459 {
9460 case rvc_nan:
9461 case rvc_zero:
9462 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9463 trunc = frac = *value;
9464 break;
9465 case rvc_inf:
9466 /* For +-Inf, return (*arg1 = arg0, +-0). */
9467 frac = dconst0;
9468 frac.sign = value->sign;
9469 trunc = *value;
9470 break;
9471 case rvc_normal:
9472 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9473 real_trunc (&trunc, VOIDmode, value);
9474 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9475 /* If the original number was negative and already
9476 integral, then the fractional part is -0.0. */
9477 if (value->sign && frac.cl == rvc_zero)
9478 frac.sign = value->sign;
9479 break;
9480 }
48e1416a 9481
ebf8b4f5 9482 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
389dd41b 9483 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
ebf8b4f5 9484 build_real (rettype, trunc));
9485 TREE_SIDE_EFFECTS (arg1) = 1;
389dd41b 9486 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
ebf8b4f5 9487 build_real (rettype, frac));
9488 }
48e1416a 9489
ebf8b4f5 9490 return NULL_TREE;
9491}
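
/* For illustration (an editorial sketch, not from the original file):
   the constant folds produced above follow C99 modf semantics:

       double ip;
       __builtin_modf (2.5, &ip)   ==>  (*&ip = 2.0, 0.5)
       __builtin_modf (-2.0, &ip)  ==>  (*&ip = -2.0, -0.0)
       __builtin_modf (inf, &ip)   ==>  (*&ip = inf, 0.0)

   note the -0.0 fraction for a negative integral argument, which the
   rvc_normal case preserves explicitly.  */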
9492
a65c4d64 9493/* Given a location LOC, an interclass builtin function decl FNDECL
 9494 and its single argument ARG, return a folded expression computing
 9495 the same, or NULL_TREE if we either couldn't or didn't want to fold
 9496 (the latter happens if there's an RTL instruction available). */
9497
9498static tree
9499fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9500{
3754d046 9501 machine_mode mode;
a65c4d64 9502
9503 if (!validate_arg (arg, REAL_TYPE))
9504 return NULL_TREE;
9505
9506 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9507 return NULL_TREE;
9508
9509 mode = TYPE_MODE (TREE_TYPE (arg));
9510
9511 /* If there is no optab, try generic code. */
9512 switch (DECL_FUNCTION_CODE (fndecl))
9513 {
9514 tree result;
9515
9516 CASE_FLT_FN (BUILT_IN_ISINF):
9517 {
9518 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
b9a16870 9519 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
a65c4d64 9520 tree const type = TREE_TYPE (arg);
9521 REAL_VALUE_TYPE r;
9522 char buf[128];
9523
9524 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9525 real_from_string (&r, buf);
9526 result = build_call_expr (isgr_fn, 2,
9527 fold_build1_loc (loc, ABS_EXPR, type, arg),
9528 build_real (type, r));
9529 return result;
9530 }
9531 CASE_FLT_FN (BUILT_IN_FINITE):
9532 case BUILT_IN_ISFINITE:
9533 {
9534 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
b9a16870 9535 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
a65c4d64 9536 tree const type = TREE_TYPE (arg);
9537 REAL_VALUE_TYPE r;
9538 char buf[128];
9539
9540 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9541 real_from_string (&r, buf);
9542 result = build_call_expr (isle_fn, 2,
9543 fold_build1_loc (loc, ABS_EXPR, type, arg),
9544 build_real (type, r));
9545 /*result = fold_build2_loc (loc, UNGT_EXPR,
9546 TREE_TYPE (TREE_TYPE (fndecl)),
9547 fold_build1_loc (loc, ABS_EXPR, type, arg),
9548 build_real (type, r));
9549 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9550 TREE_TYPE (TREE_TYPE (fndecl)),
9551 result);*/
9552 return result;
9553 }
9554 case BUILT_IN_ISNORMAL:
9555 {
9556 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9557 islessequal(fabs(x),DBL_MAX). */
b9a16870 9558 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9559 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
a65c4d64 9560 tree const type = TREE_TYPE (arg);
9561 REAL_VALUE_TYPE rmax, rmin;
9562 char buf[128];
9563
9564 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9565 real_from_string (&rmax, buf);
9566 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9567 real_from_string (&rmin, buf);
9568 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9569 result = build_call_expr (isle_fn, 2, arg,
9570 build_real (type, rmax));
9571 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
9572 build_call_expr (isge_fn, 2, arg,
9573 build_real (type, rmin)));
9574 return result;
9575 }
9576 default:
9577 break;
9578 }
9579
9580 return NULL_TREE;
9581}
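
/* For illustration (an editorial sketch, not from the original file):
   when no machine instruction is available, the generic rewrites above
   expand, for a double argument, roughly to

       isinf (x)     ==>  isgreater (fabs (x), DBL_MAX)
       isfinite (x)  ==>  islessequal (fabs (x), DBL_MAX)
       isnormal (x)  ==>  islessequal (fabs (x), DBL_MAX)
                          & isgreaterequal (fabs (x), DBL_MIN)

   where DBL_MAX and DBL_MIN stand for the largest finite and smallest
   normal values of the argument's mode.  */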
9582
726069ba 9583/* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_isfinite.
c2f47e15 9584 ARG is the argument for the call; BUILTIN_INDEX selects the classification. */
726069ba 9585
9586static tree
389dd41b 9587fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
726069ba 9588{
726069ba 9589 tree type = TREE_TYPE (TREE_TYPE (fndecl));
726069ba 9590 REAL_VALUE_TYPE r;
9591
c2f47e15 9592 if (!validate_arg (arg, REAL_TYPE))
d43cee80 9593 return NULL_TREE;
726069ba 9594
726069ba 9595 switch (builtin_index)
9596 {
9597 case BUILT_IN_ISINF:
fe994837 9598 if (!HONOR_INFINITIES (arg))
389dd41b 9599 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9600
9601 if (TREE_CODE (arg) == REAL_CST)
9602 {
9603 r = TREE_REAL_CST (arg);
9604 if (real_isinf (&r))
9605 return real_compare (GT_EXPR, &r, &dconst0)
9606 ? integer_one_node : integer_minus_one_node;
9607 else
9608 return integer_zero_node;
9609 }
9610
9611 return NULL_TREE;
9612
c319d56a 9613 case BUILT_IN_ISINF_SIGN:
9614 {
9615 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9616 /* In a boolean context, GCC will fold the inner COND_EXPR to
9617 1. So e.g. "if (isinf_sign(x))" would be folded to just
9618 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9619 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
b9a16870 9620 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
c319d56a 9621 tree tmp = NULL_TREE;
9622
9623 arg = builtin_save_expr (arg);
9624
9625 if (signbit_fn && isinf_fn)
9626 {
389dd41b 9627 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9628 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
c319d56a 9629
389dd41b 9630 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9631 signbit_call, integer_zero_node);
389dd41b 9632 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
c319d56a 9633 isinf_call, integer_zero_node);
48e1416a 9634
389dd41b 9635 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
c319d56a 9636 integer_minus_one_node, integer_one_node);
389dd41b 9637 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9638 isinf_call, tmp,
c319d56a 9639 integer_zero_node);
9640 }
9641
9642 return tmp;
9643 }
9644
cde061c1 9645 case BUILT_IN_ISFINITE:
93633022 9646 if (!HONOR_NANS (arg)
fe994837 9647 && !HONOR_INFINITIES (arg))
389dd41b 9648 return omit_one_operand_loc (loc, type, integer_one_node, arg);
726069ba 9649
9650 if (TREE_CODE (arg) == REAL_CST)
9651 {
9652 r = TREE_REAL_CST (arg);
776a7bab 9653 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
726069ba 9654 }
9655
9656 return NULL_TREE;
9657
9658 case BUILT_IN_ISNAN:
93633022 9659 if (!HONOR_NANS (arg))
389dd41b 9660 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
726069ba 9661
9662 if (TREE_CODE (arg) == REAL_CST)
9663 {
9664 r = TREE_REAL_CST (arg);
9665 return real_isnan (&r) ? integer_one_node : integer_zero_node;
9666 }
9667
9668 arg = builtin_save_expr (arg);
389dd41b 9669 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
726069ba 9670
9671 default:
64db345d 9672 gcc_unreachable ();
726069ba 9673 }
9674}
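
/* For illustration (an editorial sketch, not from the original file):
   the isinf_sign rewrite above classifies the sign of an infinity,

       __builtin_isinf_sign (-inf)  ==>  -1
       __builtin_isinf_sign (inf)   ==>   1
       __builtin_isinf_sign (1.0)   ==>   0

   and a non-constant isnan (x) becomes the unordered self-comparison
   of x with itself, which is true exactly when x is a NaN.  */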
9675
19fbe3a4 9676/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9677 This builtin will generate code to return the appropriate floating
9678 point classification depending on the value of the floating point
9679 number passed in. The possible return values must be supplied as
921b27c0 9680 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
19fbe3a4 9681 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9682 one floating point argument which is "type generic". */
9683
9684static tree
9d884767 9685fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
19fbe3a4 9686{
921b27c0 9687 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9688 arg, type, res, tmp;
3754d046 9689 machine_mode mode;
19fbe3a4 9690 REAL_VALUE_TYPE r;
9691 char buf[128];
48e1416a 9692
19fbe3a4 9693 /* Verify the required arguments in the original call. */
9d884767 9694 if (nargs != 6
9695 || !validate_arg (args[0], INTEGER_TYPE)
9696 || !validate_arg (args[1], INTEGER_TYPE)
9697 || !validate_arg (args[2], INTEGER_TYPE)
9698 || !validate_arg (args[3], INTEGER_TYPE)
9699 || !validate_arg (args[4], INTEGER_TYPE)
9700 || !validate_arg (args[5], REAL_TYPE))
19fbe3a4 9701 return NULL_TREE;
48e1416a 9702
9d884767 9703 fp_nan = args[0];
9704 fp_infinite = args[1];
9705 fp_normal = args[2];
9706 fp_subnormal = args[3];
9707 fp_zero = args[4];
9708 arg = args[5];
19fbe3a4 9709 type = TREE_TYPE (arg);
9710 mode = TYPE_MODE (type);
389dd41b 9711 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
19fbe3a4 9712
48e1416a 9713 /* fpclassify(x) ->
19fbe3a4 9714 isnan(x) ? FP_NAN :
921b27c0 9715 (fabs(x) == Inf ? FP_INFINITE :
19fbe3a4 9716 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9717 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
48e1416a 9718
389dd41b 9719 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9720 build_real (type, dconst0));
389dd41b 9721 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9722 tmp, fp_zero, fp_subnormal);
19fbe3a4 9723
9724 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9725 real_from_string (&r, buf);
389dd41b 9726 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9727 arg, build_real (type, r));
9728 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
48e1416a 9729
19fbe3a4 9730 if (HONOR_INFINITIES (mode))
9731 {
9732 real_inf (&r);
389dd41b 9733 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
19fbe3a4 9734 build_real (type, r));
389dd41b 9735 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9736 fp_infinite, res);
19fbe3a4 9737 }
9738
9739 if (HONOR_NANS (mode))
9740 {
389dd41b 9741 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9742 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
19fbe3a4 9743 }
48e1416a 9744
19fbe3a4 9745 return res;
9746}
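
/* For illustration (an editorial sketch, not from the original file):
   a C library's <math.h> typically maps the macro onto this builtin
   with the classification constants first and the value last, e.g.

       #define fpclassify(x) \
         __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, \
                               FP_SUBNORMAL, FP_ZERO, (x))

   so the nested COND_EXPRs built above evaluate to FP_NORMAL for 1.0,
   FP_ZERO for 0.0, and FP_NAN for a NaN when NaNs are honored.  */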
9747
9bc9f15f 9748/* Fold a call to an unordered comparison function such as
d5019fe8 9749 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
c2f47e15 9750 being called and ARG0 and ARG1 are the arguments for the call.
726069ba 9751 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9752 the opposite of the desired result. UNORDERED_CODE is used
9753 for modes that can hold NaNs and ORDERED_CODE is used for
9754 the rest. */
9bc9f15f 9755
9756static tree
389dd41b 9757fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9bc9f15f 9758 enum tree_code unordered_code,
9759 enum tree_code ordered_code)
9760{
859f903a 9761 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9bc9f15f 9762 enum tree_code code;
6978db0d 9763 tree type0, type1;
9764 enum tree_code code0, code1;
9765 tree cmp_type = NULL_TREE;
9bc9f15f 9766
6978db0d 9767 type0 = TREE_TYPE (arg0);
9768 type1 = TREE_TYPE (arg1);
a0c938f0 9769
6978db0d 9770 code0 = TREE_CODE (type0);
9771 code1 = TREE_CODE (type1);
a0c938f0 9772
6978db0d 9773 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9774 /* Choose the wider of two real types. */
9775 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9776 ? type0 : type1;
9777 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9778 cmp_type = type0;
9779 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9780 cmp_type = type1;
a0c938f0 9781
389dd41b 9782 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9783 arg1 = fold_convert_loc (loc, cmp_type, arg1);
859f903a 9784
9785 if (unordered_code == UNORDERED_EXPR)
9786 {
93633022 9787 if (!HONOR_NANS (arg0))
389dd41b 9788 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9789 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
859f903a 9790 }
9bc9f15f 9791
93633022 9792 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
389dd41b 9793 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9794 fold_build2_loc (loc, code, type, arg0, arg1));
9bc9f15f 9795}
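
/* For illustration (an editorial sketch, not from the original file):
   the inverted codes passed in by the callers below mean that, e.g.,

       isgreater (x, y)  ==>  !(x unle y)   when NaNs are honored
       isgreater (x, y)  ==>  !(x <= y)     when they are not

   so the macro stays quiet on unordered operands instead of raising
   the "invalid" exception that a raw x > y would.  */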
9796
0c93c8a9 9797/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
 9798 arithmetic if it can never overflow, or into internal functions that
 9799 return both the result of the arithmetic and an overflow flag in
 9800 a complex integer result, or some other check for overflow. */
9801
9802static tree
9803fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9804 tree arg0, tree arg1, tree arg2)
9805{
9806 enum internal_fn ifn = IFN_LAST;
9807 tree type = TREE_TYPE (TREE_TYPE (arg2));
9808 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9809 switch (fcode)
9810 {
9811 case BUILT_IN_ADD_OVERFLOW:
9812 case BUILT_IN_SADD_OVERFLOW:
9813 case BUILT_IN_SADDL_OVERFLOW:
9814 case BUILT_IN_SADDLL_OVERFLOW:
9815 case BUILT_IN_UADD_OVERFLOW:
9816 case BUILT_IN_UADDL_OVERFLOW:
9817 case BUILT_IN_UADDLL_OVERFLOW:
9818 ifn = IFN_ADD_OVERFLOW;
9819 break;
9820 case BUILT_IN_SUB_OVERFLOW:
9821 case BUILT_IN_SSUB_OVERFLOW:
9822 case BUILT_IN_SSUBL_OVERFLOW:
9823 case BUILT_IN_SSUBLL_OVERFLOW:
9824 case BUILT_IN_USUB_OVERFLOW:
9825 case BUILT_IN_USUBL_OVERFLOW:
9826 case BUILT_IN_USUBLL_OVERFLOW:
9827 ifn = IFN_SUB_OVERFLOW;
9828 break;
9829 case BUILT_IN_MUL_OVERFLOW:
9830 case BUILT_IN_SMUL_OVERFLOW:
9831 case BUILT_IN_SMULL_OVERFLOW:
9832 case BUILT_IN_SMULLL_OVERFLOW:
9833 case BUILT_IN_UMUL_OVERFLOW:
9834 case BUILT_IN_UMULL_OVERFLOW:
9835 case BUILT_IN_UMULLL_OVERFLOW:
9836 ifn = IFN_MUL_OVERFLOW;
9837 break;
9838 default:
9839 gcc_unreachable ();
9840 }
9841 tree ctype = build_complex_type (type);
9842 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9843 2, arg0, arg1);
9844 tree tgt = save_expr (call);
9845 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9846 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9847 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9848 tree store
9849 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9850 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9851}
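
/* For illustration (an editorial sketch, not from the original file;
   handle_overflow is a hypothetical caller):

       int r;
       if (__builtin_add_overflow (a, b, &r))
         handle_overflow ();

   is lowered by the code above to, in essence,

       tmp = IFN_ADD_OVERFLOW (a, b);
       r = REALPART_EXPR <tmp>;
       if ((_Bool) IMAGPART_EXPR <tmp>)
         handle_overflow ();

   with the complex integer carrying the arithmetic result in its real
   part and the overflow flag in its imaginary part.  */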
9852
c2f47e15 9853/* Fold a call to built-in function FNDECL with 0 arguments.
e80cc485 9854 This function returns NULL_TREE if no simplification was possible. */
650e4c94 9855
4ee9c684 9856static tree
e80cc485 9857fold_builtin_0 (location_t loc, tree fndecl)
650e4c94 9858{
e9f80ff5 9859 tree type = TREE_TYPE (TREE_TYPE (fndecl));
c2f47e15 9860 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
189b3398 9861 switch (fcode)
650e4c94 9862 {
c2f47e15 9863 CASE_FLT_FN (BUILT_IN_INF):
9864 case BUILT_IN_INFD32:
9865 case BUILT_IN_INFD64:
9866 case BUILT_IN_INFD128:
389dd41b 9867 return fold_builtin_inf (loc, type, true);
7c2f0500 9868
c2f47e15 9869 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
389dd41b 9870 return fold_builtin_inf (loc, type, false);
7c2f0500 9871
c2f47e15 9872 case BUILT_IN_CLASSIFY_TYPE:
9873 return fold_builtin_classify_type (NULL_TREE);
7c2f0500 9874
c2f47e15 9875 default:
9876 break;
9877 }
9878 return NULL_TREE;
9879}
7c2f0500 9880
c2f47e15 9881/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
e80cc485 9882 This function returns NULL_TREE if no simplification was possible. */
7c2f0500 9883
c2f47e15 9884static tree
e80cc485 9885fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
c2f47e15 9886{
9887 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9888 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9889 switch (fcode)
9890 {
650e4c94 9891 case BUILT_IN_CONSTANT_P:
7c2f0500 9892 {
c2f47e15 9893 tree val = fold_builtin_constant_p (arg0);
7c2f0500 9894
7c2f0500 9895 /* Gimplification will pull the CALL_EXPR for the builtin out of
9896 an if condition. When not optimizing, we'll not CSE it back.
9897 To avoid link error types of regressions, return false now. */
9898 if (!val && !optimize)
9899 val = integer_zero_node;
9900
9901 return val;
9902 }
650e4c94 9903
539a3a92 9904 case BUILT_IN_CLASSIFY_TYPE:
c2f47e15 9905 return fold_builtin_classify_type (arg0);
539a3a92 9906
650e4c94 9907 case BUILT_IN_STRLEN:
c7cbde74 9908 return fold_builtin_strlen (loc, type, arg0);
650e4c94 9909
4f35b1fc 9910 CASE_FLT_FN (BUILT_IN_FABS):
8aa32773 9911 case BUILT_IN_FABSD32:
9912 case BUILT_IN_FABSD64:
9913 case BUILT_IN_FABSD128:
389dd41b 9914 return fold_builtin_fabs (loc, arg0, type);
d1aade50 9915
9916 case BUILT_IN_ABS:
9917 case BUILT_IN_LABS:
9918 case BUILT_IN_LLABS:
9919 case BUILT_IN_IMAXABS:
389dd41b 9920 return fold_builtin_abs (loc, arg0, type);
c63f4ad3 9921
4f35b1fc 9922 CASE_FLT_FN (BUILT_IN_CONJ):
239d491a 9923 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9924 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9925 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
c2f47e15 9926 break;
36d3581d 9927
4f35b1fc 9928 CASE_FLT_FN (BUILT_IN_CREAL):
239d491a 9929 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9930 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7082509e 9931 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
c2f47e15 9932 break;
36d3581d 9933
4f35b1fc 9934 CASE_FLT_FN (BUILT_IN_CIMAG):
b0ce8887 9935 if (validate_arg (arg0, COMPLEX_TYPE)
9936 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
389dd41b 9937 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
c2f47e15 9938 break;
36d3581d 9939
503733d5 9940 CASE_FLT_FN (BUILT_IN_CCOS):
9af5ce0c 9941 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
48e1416a 9942
503733d5 9943 CASE_FLT_FN (BUILT_IN_CCOSH):
9af5ce0c 9944 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
48e1416a 9945
c2373fdb 9946 CASE_FLT_FN (BUILT_IN_CPROJ):
9af5ce0c 9947 return fold_builtin_cproj (loc, arg0, type);
c2373fdb 9948
239d491a 9949 CASE_FLT_FN (BUILT_IN_CSIN):
9950 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9951 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9952 return do_mpc_arg1 (arg0, type, mpc_sin);
c2f47e15 9953 break;
48e1416a 9954
239d491a 9955 CASE_FLT_FN (BUILT_IN_CSINH):
9956 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9957 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9958 return do_mpc_arg1 (arg0, type, mpc_sinh);
9959 break;
48e1416a 9960
239d491a 9961 CASE_FLT_FN (BUILT_IN_CTAN):
9962 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9963 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9964 return do_mpc_arg1 (arg0, type, mpc_tan);
9965 break;
48e1416a 9966
239d491a 9967 CASE_FLT_FN (BUILT_IN_CTANH):
9968 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9969 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9970 return do_mpc_arg1 (arg0, type, mpc_tanh);
9971 break;
48e1416a 9972
239d491a 9973 CASE_FLT_FN (BUILT_IN_CLOG):
9974 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9975 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9976 return do_mpc_arg1 (arg0, type, mpc_log);
9977 break;
48e1416a 9978
239d491a 9979 CASE_FLT_FN (BUILT_IN_CSQRT):
9980 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9981 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
239d491a 9982 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9983 break;
48e1416a 9984
0e7e6e7f 9985 CASE_FLT_FN (BUILT_IN_CASIN):
9986 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9987 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9988 return do_mpc_arg1 (arg0, type, mpc_asin);
9989 break;
48e1416a 9990
0e7e6e7f 9991 CASE_FLT_FN (BUILT_IN_CACOS):
9992 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9993 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 9994 return do_mpc_arg1 (arg0, type, mpc_acos);
9995 break;
48e1416a 9996
0e7e6e7f 9997 CASE_FLT_FN (BUILT_IN_CATAN):
9998 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 9999 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10000 return do_mpc_arg1 (arg0, type, mpc_atan);
10001 break;
48e1416a 10002
0e7e6e7f 10003 CASE_FLT_FN (BUILT_IN_CASINH):
10004 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10006 return do_mpc_arg1 (arg0, type, mpc_asinh);
10007 break;
48e1416a 10008
0e7e6e7f 10009 CASE_FLT_FN (BUILT_IN_CACOSH):
10010 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10012 return do_mpc_arg1 (arg0, type, mpc_acosh);
10013 break;
48e1416a 10014
0e7e6e7f 10015 CASE_FLT_FN (BUILT_IN_CATANH):
10016 if (validate_arg (arg0, COMPLEX_TYPE)
48e1416a 10017 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
0e7e6e7f 10018 return do_mpc_arg1 (arg0, type, mpc_atanh);
10019 break;
48e1416a 10020
4f35b1fc 10021 CASE_FLT_FN (BUILT_IN_CABS):
389dd41b 10022 return fold_builtin_cabs (loc, arg0, type, fndecl);
c63f4ad3 10023
abe4dcf6 10024 CASE_FLT_FN (BUILT_IN_CARG):
389dd41b 10025 return fold_builtin_carg (loc, arg0, type);
abe4dcf6 10026
4f35b1fc 10027 CASE_FLT_FN (BUILT_IN_SQRT):
389dd41b 10028 return fold_builtin_sqrt (loc, arg0, type);
805e22b2 10029
4f35b1fc 10030 CASE_FLT_FN (BUILT_IN_CBRT):
389dd41b 10031 return fold_builtin_cbrt (loc, arg0, type);
3bc5c41b 10032
728bac60 10033 CASE_FLT_FN (BUILT_IN_ASIN):
c2f47e15 10034 if (validate_arg (arg0, REAL_TYPE))
10035 return do_mpfr_arg1 (arg0, type, mpfr_asin,
728bac60 10036 &dconstm1, &dconst1, true);
10037 break;
10038
10039 CASE_FLT_FN (BUILT_IN_ACOS):
c2f47e15 10040 if (validate_arg (arg0, REAL_TYPE))
10041 return do_mpfr_arg1 (arg0, type, mpfr_acos,
728bac60 10042 &dconstm1, &dconst1, true);
10043 break;
10044
10045 CASE_FLT_FN (BUILT_IN_ATAN):
c2f47e15 10046 if (validate_arg (arg0, REAL_TYPE))
10047 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
728bac60 10048 break;
10049
10050 CASE_FLT_FN (BUILT_IN_ASINH):
c2f47e15 10051 if (validate_arg (arg0, REAL_TYPE))
10052 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
728bac60 10053 break;
10054
10055 CASE_FLT_FN (BUILT_IN_ACOSH):
c2f47e15 10056 if (validate_arg (arg0, REAL_TYPE))
10057 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
728bac60 10058 &dconst1, NULL, true);
10059 break;
10060
10061 CASE_FLT_FN (BUILT_IN_ATANH):
c2f47e15 10062 if (validate_arg (arg0, REAL_TYPE))
10063 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
728bac60 10064 &dconstm1, &dconst1, false);
10065 break;
10066
4f35b1fc 10067 CASE_FLT_FN (BUILT_IN_SIN):
c2f47e15 10068 if (validate_arg (arg0, REAL_TYPE))
10069 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
728bac60 10070 break;
77e89269 10071
4f35b1fc 10072 CASE_FLT_FN (BUILT_IN_COS):
389dd41b 10073 return fold_builtin_cos (loc, arg0, type, fndecl);
77e89269 10074
728bac60 10075 CASE_FLT_FN (BUILT_IN_TAN):
c2f47e15 10076 return fold_builtin_tan (arg0, type);
d735c391 10077
c5bb2c4b 10078 CASE_FLT_FN (BUILT_IN_CEXP):
389dd41b 10079 return fold_builtin_cexp (loc, arg0, type);
c5bb2c4b 10080
d735c391 10081 CASE_FLT_FN (BUILT_IN_CEXPI):
c2f47e15 10082 if (validate_arg (arg0, REAL_TYPE))
10083 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10084 break;
d92f994c 10085
728bac60 10086 CASE_FLT_FN (BUILT_IN_SINH):
c2f47e15 10087 if (validate_arg (arg0, REAL_TYPE))
10088 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
728bac60 10089 break;
10090
10091 CASE_FLT_FN (BUILT_IN_COSH):
389dd41b 10092 return fold_builtin_cosh (loc, arg0, type, fndecl);
728bac60 10093
10094 CASE_FLT_FN (BUILT_IN_TANH):
c2f47e15 10095 if (validate_arg (arg0, REAL_TYPE))
10096 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
728bac60 10097 break;
10098
29f4cd78 10099 CASE_FLT_FN (BUILT_IN_ERF):
c2f47e15 10100 if (validate_arg (arg0, REAL_TYPE))
10101 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
29f4cd78 10102 break;
10103
10104 CASE_FLT_FN (BUILT_IN_ERFC):
c2f47e15 10105 if (validate_arg (arg0, REAL_TYPE))
10106 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
29f4cd78 10107 break;
10108
32dba52b 10109 CASE_FLT_FN (BUILT_IN_TGAMMA):
c2f47e15 10110 if (validate_arg (arg0, REAL_TYPE))
10111 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
32dba52b 10112 break;
48e1416a 10113
4f35b1fc 10114 CASE_FLT_FN (BUILT_IN_EXP):
389dd41b 10115 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
467214fd 10116
4f35b1fc 10117 CASE_FLT_FN (BUILT_IN_EXP2):
389dd41b 10118 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
467214fd 10119
4f35b1fc 10120 CASE_FLT_FN (BUILT_IN_EXP10):
10121 CASE_FLT_FN (BUILT_IN_POW10):
389dd41b 10122 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
467214fd 10123
29f4cd78 10124 CASE_FLT_FN (BUILT_IN_EXPM1):
c2f47e15 10125 if (validate_arg (arg0, REAL_TYPE))
10126 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
f8dad9b4 10127 break;
48e1416a 10128
4f35b1fc 10129 CASE_FLT_FN (BUILT_IN_LOG):
f8dad9b4 10130 if (validate_arg (arg0, REAL_TYPE))
10131 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
10132 break;
467214fd 10133
4f35b1fc 10134 CASE_FLT_FN (BUILT_IN_LOG2):
f8dad9b4 10135 if (validate_arg (arg0, REAL_TYPE))
10136 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
10137 break;
467214fd 10138
4f35b1fc 10139 CASE_FLT_FN (BUILT_IN_LOG10):
f8dad9b4 10140 if (validate_arg (arg0, REAL_TYPE))
10141 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
10142 break;
29f4cd78 10143
10144 CASE_FLT_FN (BUILT_IN_LOG1P):
c2f47e15 10145 if (validate_arg (arg0, REAL_TYPE))
10146 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
29f4cd78 10147 &dconstm1, NULL, false);
10148 break;
805e22b2 10149
65dd1378 10150 CASE_FLT_FN (BUILT_IN_J0):
10151 if (validate_arg (arg0, REAL_TYPE))
10152 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10153 NULL, NULL, 0);
10154 break;
10155
10156 CASE_FLT_FN (BUILT_IN_J1):
10157 if (validate_arg (arg0, REAL_TYPE))
10158 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10159 NULL, NULL, 0);
10160 break;
6ff9eeff 10161
10162 CASE_FLT_FN (BUILT_IN_Y0):
10163 if (validate_arg (arg0, REAL_TYPE))
10164 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10165 &dconst0, NULL, false);
10166 break;
10167
10168 CASE_FLT_FN (BUILT_IN_Y1):
10169 if (validate_arg (arg0, REAL_TYPE))
10170 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10171 &dconst0, NULL, false);
10172 break;
65dd1378 10173
4f35b1fc 10174 CASE_FLT_FN (BUILT_IN_NAN):
c4503c0a 10175 case BUILT_IN_NAND32:
10176 case BUILT_IN_NAND64:
10177 case BUILT_IN_NAND128:
c2f47e15 10178 return fold_builtin_nan (arg0, type, true);
b0db7939 10179
4f35b1fc 10180 CASE_FLT_FN (BUILT_IN_NANS):
c2f47e15 10181 return fold_builtin_nan (arg0, type, false);
b0db7939 10182
4f35b1fc 10183 CASE_FLT_FN (BUILT_IN_FLOOR):
389dd41b 10184 return fold_builtin_floor (loc, fndecl, arg0);
277f8dd2 10185
4f35b1fc 10186 CASE_FLT_FN (BUILT_IN_CEIL):
389dd41b 10187 return fold_builtin_ceil (loc, fndecl, arg0);
277f8dd2 10188
4f35b1fc 10189 CASE_FLT_FN (BUILT_IN_TRUNC):
389dd41b 10190 return fold_builtin_trunc (loc, fndecl, arg0);
277f8dd2 10191
4f35b1fc 10192 CASE_FLT_FN (BUILT_IN_ROUND):
389dd41b 10193 return fold_builtin_round (loc, fndecl, arg0);
89ab3887 10194
4f35b1fc 10195 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10196 CASE_FLT_FN (BUILT_IN_RINT):
389dd41b 10197 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
6528f4f4 10198
80ff6494 10199 CASE_FLT_FN (BUILT_IN_ICEIL):
4f35b1fc 10200 CASE_FLT_FN (BUILT_IN_LCEIL):
10201 CASE_FLT_FN (BUILT_IN_LLCEIL):
10202 CASE_FLT_FN (BUILT_IN_LFLOOR):
80ff6494 10203 CASE_FLT_FN (BUILT_IN_IFLOOR):
4f35b1fc 10204 CASE_FLT_FN (BUILT_IN_LLFLOOR):
80ff6494 10205 CASE_FLT_FN (BUILT_IN_IROUND):
a0c938f0 10206 CASE_FLT_FN (BUILT_IN_LROUND):
4f35b1fc 10207 CASE_FLT_FN (BUILT_IN_LLROUND):
389dd41b 10208 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
34f17811 10209
80ff6494 10210 CASE_FLT_FN (BUILT_IN_IRINT):
4f35b1fc 10211 CASE_FLT_FN (BUILT_IN_LRINT):
10212 CASE_FLT_FN (BUILT_IN_LLRINT):
389dd41b 10213 return fold_fixed_mathfn (loc, fndecl, arg0);
9ed65c7f 10214
74bdbe96 10215 case BUILT_IN_BSWAP16:
42791117 10216 case BUILT_IN_BSWAP32:
10217 case BUILT_IN_BSWAP64:
c2f47e15 10218 return fold_builtin_bswap (fndecl, arg0);
42791117 10219
4f35b1fc 10220 CASE_INT_FN (BUILT_IN_FFS):
10221 CASE_INT_FN (BUILT_IN_CLZ):
10222 CASE_INT_FN (BUILT_IN_CTZ):
6aaa1f9e 10223 CASE_INT_FN (BUILT_IN_CLRSB):
4f35b1fc 10224 CASE_INT_FN (BUILT_IN_POPCOUNT):
10225 CASE_INT_FN (BUILT_IN_PARITY):
c2f47e15 10226 return fold_builtin_bitop (fndecl, arg0);
9c8a1629 10227
4f35b1fc 10228 CASE_FLT_FN (BUILT_IN_SIGNBIT):
389dd41b 10229 return fold_builtin_signbit (loc, arg0, type);
27f261ef 10230
cb2b9385 10231 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
389dd41b 10232 return fold_builtin_significand (loc, arg0, type);
cb2b9385 10233
10234 CASE_FLT_FN (BUILT_IN_ILOGB):
10235 CASE_FLT_FN (BUILT_IN_LOGB):
389dd41b 10236 return fold_builtin_logb (loc, arg0, type);
cb2b9385 10237
d49367d4 10238 case BUILT_IN_ISASCII:
389dd41b 10239 return fold_builtin_isascii (loc, arg0);
d49367d4 10240
10241 case BUILT_IN_TOASCII:
389dd41b 10242 return fold_builtin_toascii (loc, arg0);
d49367d4 10243
df1cf42e 10244 case BUILT_IN_ISDIGIT:
389dd41b 10245 return fold_builtin_isdigit (loc, arg0);
467214fd 10246
4f35b1fc 10247 CASE_FLT_FN (BUILT_IN_FINITE):
c4503c0a 10248 case BUILT_IN_FINITED32:
10249 case BUILT_IN_FINITED64:
10250 case BUILT_IN_FINITED128:
cde061c1 10251 case BUILT_IN_ISFINITE:
a65c4d64 10252 {
10253 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10254 if (ret)
10255 return ret;
10256 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10257 }
726069ba 10258
4f35b1fc 10259 CASE_FLT_FN (BUILT_IN_ISINF):
c4503c0a 10260 case BUILT_IN_ISINFD32:
10261 case BUILT_IN_ISINFD64:
10262 case BUILT_IN_ISINFD128:
a65c4d64 10263 {
10264 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10265 if (ret)
10266 return ret;
10267 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10268 }
10269
10270 case BUILT_IN_ISNORMAL:
10271 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
726069ba 10272
c319d56a 10273 case BUILT_IN_ISINF_SIGN:
389dd41b 10274 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
c319d56a 10275
4f35b1fc 10276 CASE_FLT_FN (BUILT_IN_ISNAN):
c4503c0a 10277 case BUILT_IN_ISNAND32:
10278 case BUILT_IN_ISNAND64:
10279 case BUILT_IN_ISNAND128:
389dd41b 10280 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
c2f47e15 10281
663870fc 10282 case BUILT_IN_FREE:
10283 if (integer_zerop (arg0))
10284 return build_empty_stmt (loc);
10285 break;
10286
c2f47e15 10287 default:
10288 break;
10289 }
10290
10291 return NULL_TREE;
10292
10293}
10294
10295/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
e80cc485 10296 This function returns NULL_TREE if no simplification was possible. */
c2f47e15 10297
10298static tree
e80cc485 10299fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
c2f47e15 10300{
10301 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10302 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10303
10304 switch (fcode)
10305 {
65dd1378 10306 CASE_FLT_FN (BUILT_IN_JN):
10307 if (validate_arg (arg0, INTEGER_TYPE)
10308 && validate_arg (arg1, REAL_TYPE))
10309 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10310 break;
6ff9eeff 10311
10312 CASE_FLT_FN (BUILT_IN_YN):
10313 if (validate_arg (arg0, INTEGER_TYPE)
10314 && validate_arg (arg1, REAL_TYPE))
10315 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10316 &dconst0, false);
10317 break;
e5407ca6 10318
10319 CASE_FLT_FN (BUILT_IN_DREM):
10320 CASE_FLT_FN (BUILT_IN_REMAINDER):
10321 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10322 && validate_arg (arg1, REAL_TYPE))
e5407ca6 10323 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10324 break;
e84da7c1 10325
10326 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10327 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10328 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10329 && validate_arg (arg1, POINTER_TYPE))
e84da7c1 10330 return do_mpfr_lgamma_r (arg0, arg1, type);
10331 break;
c2f47e15 10332
10333 CASE_FLT_FN (BUILT_IN_ATAN2):
10334 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10335 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10336 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10337 break;
10338
10339 CASE_FLT_FN (BUILT_IN_FDIM):
10340 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10341 && validate_arg (arg1, REAL_TYPE))
c2f47e15 10342 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10343 break;
10344
10345 CASE_FLT_FN (BUILT_IN_HYPOT):
389dd41b 10346 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
c2f47e15 10347
c699fab8 10348 CASE_FLT_FN (BUILT_IN_CPOW):
10349 if (validate_arg (arg0, COMPLEX_TYPE)
10350 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10351 && validate_arg (arg1, COMPLEX_TYPE)
48e1416a 10352 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
652d9409 10353 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
c699fab8 10354 break;
c699fab8 10355
7587301b 10356 CASE_FLT_FN (BUILT_IN_LDEXP):
389dd41b 10357 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
7587301b 10358 CASE_FLT_FN (BUILT_IN_SCALBN):
10359 CASE_FLT_FN (BUILT_IN_SCALBLN):
389dd41b 10360 return fold_builtin_load_exponent (loc, arg0, arg1,
10361 type, /*ldexp=*/false);
7587301b 10362
3838b9ae 10363 CASE_FLT_FN (BUILT_IN_FREXP):
389dd41b 10364 return fold_builtin_frexp (loc, arg0, arg1, type);
3838b9ae 10365
ebf8b4f5 10366 CASE_FLT_FN (BUILT_IN_MODF):
389dd41b 10367 return fold_builtin_modf (loc, arg0, arg1, type);
ebf8b4f5 10368
c2f47e15 10369 case BUILT_IN_STRSTR:
389dd41b 10370 return fold_builtin_strstr (loc, arg0, arg1, type);
c2f47e15 10371
c2f47e15 10372 case BUILT_IN_STRSPN:
389dd41b 10373 return fold_builtin_strspn (loc, arg0, arg1);
c2f47e15 10374
10375 case BUILT_IN_STRCSPN:
389dd41b 10376 return fold_builtin_strcspn (loc, arg0, arg1);
c2f47e15 10377
10378 case BUILT_IN_STRCHR:
10379 case BUILT_IN_INDEX:
389dd41b 10380 return fold_builtin_strchr (loc, arg0, arg1, type);
c2f47e15 10381
10382 case BUILT_IN_STRRCHR:
10383 case BUILT_IN_RINDEX:
389dd41b 10384 return fold_builtin_strrchr (loc, arg0, arg1, type);
c2f47e15 10385
c2f47e15 10386 case BUILT_IN_STRCMP:
389dd41b 10387 return fold_builtin_strcmp (loc, arg0, arg1);
c2f47e15 10388
10389 case BUILT_IN_STRPBRK:
389dd41b 10390 return fold_builtin_strpbrk (loc, arg0, arg1, type);
c2f47e15 10391
10392 case BUILT_IN_EXPECT:
c83059be 10393 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
c2f47e15 10394
10395 CASE_FLT_FN (BUILT_IN_POW):
389dd41b 10396 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
c2f47e15 10397
10398 CASE_FLT_FN (BUILT_IN_POWI):
389dd41b 10399 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
c2f47e15 10400
10401 CASE_FLT_FN (BUILT_IN_COPYSIGN):
389dd41b 10402 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
c2f47e15 10403
10404 CASE_FLT_FN (BUILT_IN_FMIN):
389dd41b 10405 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
c2f47e15 10406
10407 CASE_FLT_FN (BUILT_IN_FMAX):
389dd41b 10408 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
726069ba 10409
9bc9f15f 10410 case BUILT_IN_ISGREATER:
389dd41b 10411 return fold_builtin_unordered_cmp (loc, fndecl,
10412 arg0, arg1, UNLE_EXPR, LE_EXPR);
9bc9f15f 10413 case BUILT_IN_ISGREATEREQUAL:
389dd41b 10414 return fold_builtin_unordered_cmp (loc, fndecl,
10415 arg0, arg1, UNLT_EXPR, LT_EXPR);
9bc9f15f 10416 case BUILT_IN_ISLESS:
389dd41b 10417 return fold_builtin_unordered_cmp (loc, fndecl,
10418 arg0, arg1, UNGE_EXPR, GE_EXPR);
9bc9f15f 10419 case BUILT_IN_ISLESSEQUAL:
389dd41b 10420 return fold_builtin_unordered_cmp (loc, fndecl,
10421 arg0, arg1, UNGT_EXPR, GT_EXPR);
9bc9f15f 10422 case BUILT_IN_ISLESSGREATER:
389dd41b 10423 return fold_builtin_unordered_cmp (loc, fndecl,
10424 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9bc9f15f 10425 case BUILT_IN_ISUNORDERED:
389dd41b 10426 return fold_builtin_unordered_cmp (loc, fndecl,
10427 arg0, arg1, UNORDERED_EXPR,
d5019fe8 10428 NOP_EXPR);
9bc9f15f 10429
7c2f0500 10430 /* We do the folding for va_start in the expander. */
10431 case BUILT_IN_VA_START:
10432 break;
f0613857 10433
0a39fd54 10434 case BUILT_IN_OBJECT_SIZE:
c2f47e15 10435 return fold_builtin_object_size (arg0, arg1);
0a39fd54 10436
1cd6e20d 10437 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10438 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10439
10440 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10441 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10442
c2f47e15 10443 default:
10444 break;
10445 }
10446 return NULL_TREE;
10447}
10448
10449/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
e80cc485 10450 and ARG2.
c2f47e15 10451 This function returns NULL_TREE if no simplification was possible. */
10452
10453static tree
389dd41b 10454fold_builtin_3 (location_t loc, tree fndecl,
e80cc485 10455 tree arg0, tree arg1, tree arg2)
c2f47e15 10456{
10457 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10458 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10459 switch (fcode)
10460 {
10461
10462 CASE_FLT_FN (BUILT_IN_SINCOS):
389dd41b 10463 return fold_builtin_sincos (loc, arg0, arg1, arg2);
c2f47e15 10464
10465 CASE_FLT_FN (BUILT_IN_FMA):
b9be572e 10466 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
c2f47e15 10467 break;
10468
e5407ca6 10469 CASE_FLT_FN (BUILT_IN_REMQUO):
10470 if (validate_arg (arg0, REAL_TYPE)
9af5ce0c 10471 && validate_arg (arg1, REAL_TYPE)
10472 && validate_arg (arg2, POINTER_TYPE))
e5407ca6 10473 return do_mpfr_remquo (arg0, arg1, arg2);
10474 break;
e5407ca6 10475
c2f47e15 10476 case BUILT_IN_STRNCMP:
389dd41b 10477 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
c2f47e15 10478
7959b13b 10479 case BUILT_IN_MEMCHR:
389dd41b 10480 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
7959b13b 10481
c2f47e15 10482 case BUILT_IN_BCMP:
10483 case BUILT_IN_MEMCMP:
389dd41b 10484 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
c2f47e15 10485
c83059be 10486 case BUILT_IN_EXPECT:
10487 return fold_builtin_expect (loc, arg0, arg1, arg2);
10488
0c93c8a9 10489 case BUILT_IN_ADD_OVERFLOW:
10490 case BUILT_IN_SUB_OVERFLOW:
10491 case BUILT_IN_MUL_OVERFLOW:
10492 case BUILT_IN_SADD_OVERFLOW:
10493 case BUILT_IN_SADDL_OVERFLOW:
10494 case BUILT_IN_SADDLL_OVERFLOW:
10495 case BUILT_IN_SSUB_OVERFLOW:
10496 case BUILT_IN_SSUBL_OVERFLOW:
10497 case BUILT_IN_SSUBLL_OVERFLOW:
10498 case BUILT_IN_SMUL_OVERFLOW:
10499 case BUILT_IN_SMULL_OVERFLOW:
10500 case BUILT_IN_SMULLL_OVERFLOW:
10501 case BUILT_IN_UADD_OVERFLOW:
10502 case BUILT_IN_UADDL_OVERFLOW:
10503 case BUILT_IN_UADDLL_OVERFLOW:
10504 case BUILT_IN_USUB_OVERFLOW:
10505 case BUILT_IN_USUBL_OVERFLOW:
10506 case BUILT_IN_USUBLL_OVERFLOW:
10507 case BUILT_IN_UMUL_OVERFLOW:
10508 case BUILT_IN_UMULL_OVERFLOW:
10509 case BUILT_IN_UMULLL_OVERFLOW:
10510 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10511
650e4c94 10512 default:
10513 break;
10514 }
c2f47e15 10515 return NULL_TREE;
10516}
650e4c94 10517
c2f47e15 10518/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9d884767 10519 arguments. The trailing boolean parameter (true if the result of the
 10520 call is ignored) is currently unused. This function returns NULL_TREE if no
 10521 simplification was possible. */
48e1416a 10522
2165588a 10523tree
e80cc485 10524fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
c2f47e15 10525{
10526 tree ret = NULL_TREE;
a7f5bb2d 10527
c2f47e15 10528 switch (nargs)
10529 {
10530 case 0:
e80cc485 10531 ret = fold_builtin_0 (loc, fndecl);
c2f47e15 10532 break;
10533 case 1:
e80cc485 10534 ret = fold_builtin_1 (loc, fndecl, args[0]);
c2f47e15 10535 break;
10536 case 2:
e80cc485 10537 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
c2f47e15 10538 break;
10539 case 3:
e80cc485 10540 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
c2f47e15 10541 break;
c2f47e15 10542 default:
e80cc485 10543 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
c2f47e15 10544 break;
10545 }
10546 if (ret)
10547 {
75a70cf9 10548 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
389dd41b 10549 SET_EXPR_LOCATION (ret, loc);
c2f47e15 10550 TREE_NO_WARNING (ret) = 1;
10551 return ret;
10552 }
10553 return NULL_TREE;
10554}
10555
0e80b01d 10556/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10557 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10558 of arguments in ARGS to be omitted. OLDNARGS is the number of
10559 elements in ARGS. */
c2f47e15 10560
10561static tree
0e80b01d 10562rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10563 int skip, tree fndecl, int n, va_list newargs)
c2f47e15 10564{
0e80b01d 10565 int nargs = oldnargs - skip + n;
10566 tree *buffer;
c2f47e15 10567
0e80b01d 10568 if (n > 0)
c2f47e15 10569 {
0e80b01d 10570 int i, j;
c2f47e15 10571
0e80b01d 10572 buffer = XALLOCAVEC (tree, nargs);
10573 for (i = 0; i < n; i++)
10574 buffer[i] = va_arg (newargs, tree);
10575 for (j = skip; j < oldnargs; j++, i++)
10576 buffer[i] = args[j];
10577 }
10578 else
10579 buffer = args + skip;
19fbe3a4 10580
0e80b01d 10581 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10582}
c2f47e15 10583
198622c0 10584/* Return true if FNDECL shouldn't be folded right now.
10585 If a built-in function has an inline attribute always_inline
 10586 wrapper, defer folding it until after always_inline functions have
 10587 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10588 might not be performed. */
10589
51d2c51e 10590bool
198622c0 10591avoid_folding_inline_builtin (tree fndecl)
10592{
10593 return (DECL_DECLARED_INLINE_P (fndecl)
10594 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10595 && cfun
10596 && !cfun->always_inline_functions_inlined
10597 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10598}
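
/* For illustration (an editorial sketch, not from the original file):
   the kind of wrapper this deferral protects.  With -D_FORTIFY_SOURCE
   the C library typically provides something of roughly this shape:

       extern __inline __attribute__ ((__always_inline__)) char *
       strcpy (char *__dest, const char *__src)
       {
         return __builtin___strcpy_chk (__dest, __src,
                                        __builtin_object_size (__dest, 1));
       }

   Folding a strcpy call before the wrapper is inlined would bypass the
   object-size check, hence the deferral above.  */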
10599
4ee9c684 10600/* A wrapper function for builtin folding that prevents warnings for
491e04ef 10601 "statement without effect" and the like, caused by removing the
4ee9c684 10602 call node earlier than the warning is generated. */
10603
10604tree
389dd41b 10605fold_call_expr (location_t loc, tree exp, bool ignore)
4ee9c684 10606{
c2f47e15 10607 tree ret = NULL_TREE;
10608 tree fndecl = get_callee_fndecl (exp);
10609 if (fndecl
10610 && TREE_CODE (fndecl) == FUNCTION_DECL
48dc2227 10611 && DECL_BUILT_IN (fndecl)
10612 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10613 yet. Defer folding until we see all the arguments
10614 (after inlining). */
10615 && !CALL_EXPR_VA_ARG_PACK (exp))
10616 {
10617 int nargs = call_expr_nargs (exp);
10618
10619 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10620 instead last argument is __builtin_va_arg_pack (). Defer folding
10621 even in that case, until arguments are finalized. */
10622 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10623 {
10624 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10625 if (fndecl2
10626 && TREE_CODE (fndecl2) == FUNCTION_DECL
10627 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10628 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10629 return NULL_TREE;
10630 }
10631
198622c0 10632 if (avoid_folding_inline_builtin (fndecl))
10633 return NULL_TREE;
10634
c2f47e15 10635 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
97d67146 10636 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10637 CALL_EXPR_ARGP (exp), ignore);
c2f47e15 10638 else
10639 {
9d884767 10640 tree *args = CALL_EXPR_ARGP (exp);
10641 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
c2f47e15 10642 if (ret)
389dd41b 10643 return ret;
c2f47e15 10644 }
4ee9c684 10645 }
c2f47e15 10646 return NULL_TREE;
10647}
48e1416a 10648
9d884767 10649/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10650 N arguments are passed in the array ARGARRAY. Return a folded
10651 expression or NULL_TREE if no simplification was possible. */
805e22b2 10652
10653tree
9d884767 10654fold_builtin_call_array (location_t loc, tree,
d01f58f9 10655 tree fn,
10656 int n,
10657 tree *argarray)
7e15618b 10658{
9d884767 10659 if (TREE_CODE (fn) != ADDR_EXPR)
10660 return NULL_TREE;
c2f47e15 10661
9d884767 10662 tree fndecl = TREE_OPERAND (fn, 0);
10663 if (TREE_CODE (fndecl) == FUNCTION_DECL
10664 && DECL_BUILT_IN (fndecl))
10665 {
10666 /* If last argument is __builtin_va_arg_pack (), arguments to this
10667 function are not finalized yet. Defer folding until they are. */
10668 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10669 {
10670 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10671 if (fndecl2
10672 && TREE_CODE (fndecl2) == FUNCTION_DECL
10673 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
10674 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
10675 return NULL_TREE;
10676 }
10677 if (avoid_folding_inline_builtin (fndecl))
10678 return NULL_TREE;
10679 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10680 return targetm.fold_builtin (fndecl, n, argarray, false);
10681 else
10682 return fold_builtin_n (loc, fndecl, argarray, n, false);
10683 }
c2f47e15 10684
9d884767 10685 return NULL_TREE;
c2f47e15 10686}
10687
af1409ad 10688/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10689 along with N new arguments specified as the "..." parameters. SKIP
10690 is the number of arguments in EXP to be omitted. This function is used
10691 to do varargs-to-varargs transformations. */
10692
10693static tree
10694rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10695{
10696 va_list ap;
10697 tree t;
10698
10699 va_start (ap, n);
10700 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10701 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10702 va_end (ap);
c2f47e15 10703
af1409ad 10704 return t;
c2f47e15 10705}
10706
10707/* Validate a single argument ARG against a tree code CODE representing
10708 a type. */
48e1416a 10709
c2f47e15 10710static bool
b7bf20db 10711validate_arg (const_tree arg, enum tree_code code)
c2f47e15 10712{
10713 if (!arg)
10714 return false;
10715 else if (code == POINTER_TYPE)
10716 return POINTER_TYPE_P (TREE_TYPE (arg));
c7f617c2 10717 else if (code == INTEGER_TYPE)
10718 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
c2f47e15 10719 return code == TREE_CODE (TREE_TYPE (arg));
7e15618b 10720}
0eb671f7 10721
75a70cf9 10722/* This function validates the types of a function call argument list
10723 against a specified list of tree_codes. If the last specifier is a 0,
10724 that represents an ellipses, otherwise the last specifier must be a
10725 VOID_TYPE.
10726
10727 This is the GIMPLE version of validate_arglist. Eventually we want to
10728 completely convert builtins.c to work from GIMPLEs and the tree based
10729 validate_arglist will then be removed. */
10730
10731bool
1a91d914 10732validate_gimple_arglist (const gcall *call, ...)
75a70cf9 10733{
10734 enum tree_code code;
10735 bool res = 0;
10736 va_list ap;
10737 const_tree arg;
10738 size_t i;
10739
10740 va_start (ap, call);
10741 i = 0;
10742
10743 do
10744 {
d62e827b 10745 code = (enum tree_code) va_arg (ap, int);
75a70cf9 10746 switch (code)
10747 {
10748 case 0:
10749 /* This signifies an ellipses, any further arguments are all ok. */
10750 res = true;
10751 goto end;
10752 case VOID_TYPE:
10753 /* This signifies an endlink, if no arguments remain, return
10754 true, otherwise return false. */
10755 res = (i == gimple_call_num_args (call));
10756 goto end;
10757 default:
10758 /* If no parameters remain or the parameter's code does not
10759 match the specified code, return false. Otherwise continue
10760 checking any remaining arguments. */
10761 arg = gimple_call_arg (call, i++);
10762 if (!validate_arg (arg, code))
10763 goto end;
10764 break;
10765 }
10766 }
10767 while (1);
10768
10769 /* We need gotos here since we can only have one VA_CLOSE in a
10770 function. */
10771 end: ;
10772 va_end (ap);
10773
10774 return res;
10775}
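
/* For illustration (an editorial sketch, not from the original file):
   callers list the expected argument type codes and terminate with
   VOID_TYPE, or with 0 to accept arbitrary trailing arguments, e.g.

       validate_gimple_arglist (call, REAL_TYPE, REAL_TYPE, VOID_TYPE)

   checks for exactly two floating-point arguments.  */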
10776
fc2a2dcb 10777/* Default target-specific builtin expander that does nothing. */
10778
10779rtx
aecda0d6 10780default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10781 rtx target ATTRIBUTE_UNUSED,
10782 rtx subtarget ATTRIBUTE_UNUSED,
3754d046 10783 machine_mode mode ATTRIBUTE_UNUSED,
aecda0d6 10784 int ignore ATTRIBUTE_UNUSED)
fc2a2dcb 10785{
10786 return NULL_RTX;
10787}
c7926a82 10788
01537105 10789/* Returns true if EXP represents data that would potentially reside
10790 in a readonly section. */
10791
b9ea678c 10792bool
01537105 10793readonly_data_expr (tree exp)
10794{
10795 STRIP_NOPS (exp);
10796
9ff0637e 10797 if (TREE_CODE (exp) != ADDR_EXPR)
10798 return false;
10799
10800 exp = get_base_address (TREE_OPERAND (exp, 0));
10801 if (!exp)
10802 return false;
10803
10804 /* Make sure we call decl_readonly_section only for trees it
10805 can handle (since it returns true for everything it doesn't
10806 understand). */
491e04ef 10807 if (TREE_CODE (exp) == STRING_CST
9ff0637e 10808 || TREE_CODE (exp) == CONSTRUCTOR
10809 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10810 return decl_readonly_section (exp, 0);
01537105 10811 else
10812 return false;
10813}
4ee9c684 10814
c2f47e15 10815/* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10816 to the call, and TYPE is its return type.
4ee9c684 10817
c2f47e15 10818 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10819 simplified form of the call as a tree.
10820
10821 The simplified form may be a constant or other expression which
10822 computes the same value, but in a more efficient manner (including
10823 calls to other builtin functions).
10824
10825 The call may contain arguments which need to be evaluated, but
10826 which are not useful to determine the result of the call. In
10827 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10828 COMPOUND_EXPR will be an argument which must be evaluated.
10829 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10830 COMPOUND_EXPR in the chain will contain the tree for the simplified
10831 form of the builtin function call. */
10832
10833static tree
389dd41b 10834fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10835{
c2f47e15 10836 if (!validate_arg (s1, POINTER_TYPE)
10837 || !validate_arg (s2, POINTER_TYPE))
10838 return NULL_TREE;
4ee9c684 10839 else
10840 {
4ee9c684 10841 tree fn;
10842 const char *p1, *p2;
10843
10844 p2 = c_getstr (s2);
10845 if (p2 == NULL)
c2f47e15 10846 return NULL_TREE;
4ee9c684 10847
10848 p1 = c_getstr (s1);
10849 if (p1 != NULL)
10850 {
10851 const char *r = strstr (p1, p2);
daa1d5f5 10852 tree tem;
4ee9c684 10853
4ee9c684 10854 if (r == NULL)
779b4c41 10855 return build_int_cst (TREE_TYPE (s1), 0);
c0c67e38 10856
10857 /* Return an offset into the constant string argument. */
2cc66f2a 10858 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10859 return fold_convert_loc (loc, type, tem);
4ee9c684 10860 }
10861
7efa231c 10862 /* The argument is const char *, and the result is char *, so we need
10863 a type conversion here to avoid a warning. */
4ee9c684 10864 if (p2[0] == '\0')
389dd41b 10865 return fold_convert_loc (loc, type, s1);
4ee9c684 10866
10867 if (p2[1] != '\0')
c2f47e15 10868 return NULL_TREE;
4ee9c684 10869
b9a16870 10870 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10871 if (!fn)
c2f47e15 10872 return NULL_TREE;
4ee9c684 10873
10874 /* New argument list transforming strstr(s1, s2) to
10875 strchr(s1, s2[0]). */
7002a1c8 10876 return build_call_expr_loc (loc, fn, 2, s1,
10877 build_int_cst (integer_type_node, p2[0]));
4ee9c684 10878 }
10879}
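
/* For illustration (an editorial sketch, not from the original file):
   the three simplifications performed above are

       strstr (s, "")         ==>  (char *) s
       strstr ("hello", "l")  ==>  "hello" + 2
       strstr (s, "l")        ==>  strchr (s, 'l')

   only an empty or single-character constant second argument enables
   the rewrites when the first argument is not a constant string.  */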
10880
c2f47e15 10881/* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10882 the call, and TYPE is its return type.
4ee9c684 10883
c2f47e15 10884 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10885 simplified form of the call as a tree.
10886
10887 The simplified form may be a constant or other expression which
10888 computes the same value, but in a more efficient manner (including
10889 calls to other builtin functions).
10890
10891 The call may contain arguments which need to be evaluated, but
10892 which are not useful to determine the result of the call. In
10893 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10894 COMPOUND_EXPR will be an argument which must be evaluated.
10895 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10896 COMPOUND_EXPR in the chain will contain the tree for the simplified
10897 form of the builtin function call. */
10898
10899static tree
389dd41b 10900fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10901{
c2f47e15 10902 if (!validate_arg (s1, POINTER_TYPE)
10903 || !validate_arg (s2, INTEGER_TYPE))
10904 return NULL_TREE;
4ee9c684 10905 else
10906 {
4ee9c684 10907 const char *p1;
10908
10909 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10910 return NULL_TREE;
4ee9c684 10911
10912 p1 = c_getstr (s1);
10913 if (p1 != NULL)
10914 {
10915 char c;
10916 const char *r;
daa1d5f5 10917 tree tem;
4ee9c684 10918
10919 if (target_char_cast (s2, &c))
c2f47e15 10920 return NULL_TREE;
4ee9c684 10921
10922 r = strchr (p1, c);
10923
10924 if (r == NULL)
779b4c41 10925 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10926
10927 /* Return an offset into the constant string argument. */
2cc66f2a 10928 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10929 return fold_convert_loc (loc, type, tem);
4ee9c684 10930 }
c2f47e15 10931 return NULL_TREE;
4ee9c684 10932 }
10933}
10934
c2f47e15 10935/* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10936 the call, and TYPE is its return type.
4ee9c684 10937
c2f47e15 10938 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 10939 simplified form of the call as a tree.
10940
10941 The simplified form may be a constant or other expression which
10942 computes the same value, but in a more efficient manner (including
10943 calls to other builtin functions).
10944
10945 The call may contain arguments which need to be evaluated, but
10946 which are not useful to determine the result of the call. In
10947 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10948 COMPOUND_EXPR will be an argument which must be evaluated.
10949 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10950 COMPOUND_EXPR in the chain will contain the tree for the simplified
10951 form of the builtin function call. */
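/* For example (illustrative):
     strrchr ("hello", 'l')  ->  "hello" + 3
     strrchr (s, '\0')       ->  strchr (s, '\0')
   Other non-constant cases are left for the library call.  */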
10952
10953static tree
389dd41b 10954fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
4ee9c684 10955{
c2f47e15 10956 if (!validate_arg (s1, POINTER_TYPE)
10957 || !validate_arg (s2, INTEGER_TYPE))
10958 return NULL_TREE;
4ee9c684 10959 else
10960 {
4ee9c684 10961 tree fn;
10962 const char *p1;
10963
10964 if (TREE_CODE (s2) != INTEGER_CST)
c2f47e15 10965 return NULL_TREE;
4ee9c684 10966
10967 p1 = c_getstr (s1);
10968 if (p1 != NULL)
10969 {
10970 char c;
10971 const char *r;
daa1d5f5 10972 tree tem;
4ee9c684 10973
10974 if (target_char_cast (s2, &c))
c2f47e15 10975 return NULL_TREE;
4ee9c684 10976
10977 r = strrchr (p1, c);
10978
10979 if (r == NULL)
779b4c41 10980 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 10981
10982 /* Return an offset into the constant string argument. */
2cc66f2a 10983 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 10984 return fold_convert_loc (loc, type, tem);
4ee9c684 10985 }
10986
10987 if (! integer_zerop (s2))
c2f47e15 10988 return NULL_TREE;
4ee9c684 10989
b9a16870 10990 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 10991 if (!fn)
c2f47e15 10992 return NULL_TREE;
4ee9c684 10993
10994 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
389dd41b 10995 return build_call_expr_loc (loc, fn, 2, s1, s2);
4ee9c684 10996 }
10997}
10998
c2f47e15 10999/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11000 to the call, and TYPE is its return type.
4ee9c684 11001
c2f47e15 11002 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11003 simplified form of the call as a tree.
11004
11005 The simplified form may be a constant or other expression which
11006 computes the same value, but in a more efficient manner (including
11007 calls to other builtin functions).
11008
11009 The call may contain arguments which need to be evaluated, but
11010 which are not useful to determine the result of the call. In
11011 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11012 COMPOUND_EXPR will be an argument which must be evaluated.
11013 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11014 COMPOUND_EXPR in the chain will contain the tree for the simplified
11015 form of the builtin function call. */
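/* For example (illustrative):
     strpbrk ("hello", "lo")  ->  "hello" + 2
     strpbrk (s, "")          ->  null pointer (S1 still evaluated)
     strpbrk (s, "l")         ->  strchr (s, 'l')  */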
11016
11017static tree
389dd41b 11018fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
4ee9c684 11019{
c2f47e15 11020 if (!validate_arg (s1, POINTER_TYPE)
11021 || !validate_arg (s2, POINTER_TYPE))
11022 return NULL_TREE;
4ee9c684 11023 else
11024 {
4ee9c684 11025 tree fn;
11026 const char *p1, *p2;
11027
11028 p2 = c_getstr (s2);
11029 if (p2 == NULL)
c2f47e15 11030 return NULL_TREE;
4ee9c684 11031
11032 p1 = c_getstr (s1);
11033 if (p1 != NULL)
11034 {
11035 const char *r = strpbrk (p1, p2);
daa1d5f5 11036 tree tem;
4ee9c684 11037
11038 if (r == NULL)
779b4c41 11039 return build_int_cst (TREE_TYPE (s1), 0);
4ee9c684 11040
11041 /* Return an offset into the constant string argument. */
2cc66f2a 11042 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
389dd41b 11043 return fold_convert_loc (loc, type, tem);
4ee9c684 11044 }
11045
11046 if (p2[0] == '\0')
05abc81b 11047 /* strpbrk(x, "") == NULL.
11048 Evaluate and ignore s1 in case it had side-effects. */
389dd41b 11049 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
4ee9c684 11050
11051 if (p2[1] != '\0')
c2f47e15 11052 return NULL_TREE; /* Really call strpbrk. */
4ee9c684 11053
b9a16870 11054 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
4ee9c684 11055 if (!fn)
c2f47e15 11056 return NULL_TREE;
4ee9c684 11057
11058 /* New argument list transforming strpbrk(s1, s2) to
11059 strchr(s1, s2[0]). */
7002a1c8 11060 return build_call_expr_loc (loc, fn, 2, s1,
11061 build_int_cst (integer_type_node, p2[0]));
4ee9c684 11062 }
11063}
11064
c2f47e15 11065/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11066 to the call.
4ee9c684 11067
c2f47e15 11068 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11069 simplified form of the call as a tree.
11070
11071 The simplified form may be a constant or other expression which
11072 computes the same value, but in a more efficient manner (including
11073 calls to other builtin functions).
11074
11075 The call may contain arguments which need to be evaluated, but
11076 which are not useful to determine the result of the call. In
11077 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11078 COMPOUND_EXPR will be an argument which must be evaluated.
11079 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11080 COMPOUND_EXPR in the chain will contain the tree for the simplified
11081 form of the builtin function call. */
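/* For example (illustrative):
     strspn ("hello", "hel")  ->  4
     strspn (s, "")           ->  0  (both arguments still evaluated)  */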
11082
11083static tree
389dd41b 11084fold_builtin_strspn (location_t loc, tree s1, tree s2)
4ee9c684 11085{
c2f47e15 11086 if (!validate_arg (s1, POINTER_TYPE)
11087 || !validate_arg (s2, POINTER_TYPE))
11088 return NULL_TREE;
4ee9c684 11089 else
11090 {
4ee9c684 11091 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11092
11093 /* If both arguments are constants, evaluate at compile-time. */
11094 if (p1 && p2)
11095 {
11096 const size_t r = strspn (p1, p2);
547b938d 11097 return build_int_cst (size_type_node, r);
4ee9c684 11098 }
11099
c2f47e15 11100 /* If either argument is "", return NULL_TREE. */
4ee9c684 11101 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9bc9f15f 11102 /* Evaluate and ignore both arguments in case either one has
11103 side-effects. */
389dd41b 11104 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9bc9f15f 11105 s1, s2);
c2f47e15 11106 return NULL_TREE;
4ee9c684 11107 }
11108}
11109
c2f47e15 11110/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11111 to the call.
4ee9c684 11112
c2f47e15 11113 Return NULL_TREE if no simplification was possible, otherwise return the
4ee9c684 11114 simplified form of the call as a tree.
11115
11116 The simplified form may be a constant or other expression which
11117 computes the same value, but in a more efficient manner (including
11118 calls to other builtin functions).
11119
11120 The call may contain arguments which need to be evaluated, but
11121 which are not useful to determine the result of the call. In
11122 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11123 COMPOUND_EXPR will be an argument which must be evaluated.
11124 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11125 COMPOUND_EXPR in the chain will contain the tree for the simplified
11126 form of the builtin function call. */
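/* For example (illustrative):
     strcspn ("hello", "lo")  ->  2
     strcspn ("", s)          ->  0  (S2 still evaluated)
     strcspn (s, "")          ->  strlen (s)  */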
11127
11128static tree
389dd41b 11129fold_builtin_strcspn (location_t loc, tree s1, tree s2)
4ee9c684 11130{
c2f47e15 11131 if (!validate_arg (s1, POINTER_TYPE)
11132 || !validate_arg (s2, POINTER_TYPE))
11133 return NULL_TREE;
4ee9c684 11134 else
11135 {
4ee9c684 11136 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11137
11138 /* If both arguments are constants, evaluate at compile-time. */
11139 if (p1 && p2)
11140 {
11141 const size_t r = strcspn (p1, p2);
547b938d 11142 return build_int_cst (size_type_node, r);
4ee9c684 11143 }
11144
c2f47e15 11145 /* If the first argument is "", return NULL_TREE. */
4ee9c684 11146 if (p1 && *p1 == '\0')
11147 {
11148 /* Evaluate and ignore argument s2 in case it has
11149 side-effects. */
389dd41b 11150 return omit_one_operand_loc (loc, size_type_node,
39761420 11151 size_zero_node, s2);
4ee9c684 11152 }
11153
11154 /* If the second argument is "", return __builtin_strlen(s1). */
11155 if (p2 && *p2 == '\0')
11156 {
b9a16870 11157 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
4ee9c684 11158
11159 /* If the replacement _DECL isn't initialized, don't do the
11160 transformation. */
11161 if (!fn)
c2f47e15 11162 return NULL_TREE;
4ee9c684 11163
389dd41b 11164 return build_call_expr_loc (loc, fn, 1, s1);
4ee9c684 11165 }
c2f47e15 11166 return NULL_TREE;
4ee9c684 11167 }
11168}
11169
c2f47e15 11170/* Fold the next_arg or va_start call EXP. Returns true if there was an error
743b0c6a 11171 produced. False otherwise. This is done so that we don't output the error
11172 or warning twice or three times. */
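/* For example (illustrative): in
     void f (int a, int b, ...) { va_list ap; va_start (ap, a); ... }
   the second argument of va_start is not the last named parameter, so a
   warning is issued; in a function declared without "..." any use of
   va_start is an error.  */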
75a70cf9 11173
743b0c6a 11174bool
c2f47e15 11175fold_builtin_next_arg (tree exp, bool va_start_p)
4ee9c684 11176{
11177 tree fntype = TREE_TYPE (current_function_decl);
c2f47e15 11178 int nargs = call_expr_nargs (exp);
11179 tree arg;
d98fd4a4 11180  /* There is a good chance the current input_location points inside the
 11181     definition of the va_start macro (perhaps on the token for the
 11182     builtin) in a system header, so warnings will not be emitted there.
 11183     Use the location in real source code instead.  */
11184 source_location current_location =
11185 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
11186 NULL);
4ee9c684 11187
257d99c3 11188 if (!stdarg_p (fntype))
743b0c6a 11189 {
11190 error ("%<va_start%> used in function with fixed args");
11191 return true;
11192 }
c2f47e15 11193
11194 if (va_start_p)
79012a9d 11195 {
c2f47e15 11196      if (nargs != 2)
11197 {
11198 error ("wrong number of arguments to function %<va_start%>");
11199 return true;
11200 }
11201 arg = CALL_EXPR_ARG (exp, 1);
79012a9d 11202 }
11203 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
11204 when we checked the arguments and if needed issued a warning. */
c2f47e15 11205 else
4ee9c684 11206 {
c2f47e15 11207 if (nargs == 0)
11208 {
11209 /* Evidently an out of date version of <stdarg.h>; can't validate
11210 va_start's second argument, but can still work as intended. */
d98fd4a4 11211 warning_at (current_location,
7edb1062 11212 OPT_Wvarargs,
11213 "%<__builtin_next_arg%> called without an argument");
c2f47e15 11214 return true;
11215 }
11216 else if (nargs > 1)
a0c938f0 11217 {
c2f47e15 11218 error ("wrong number of arguments to function %<__builtin_next_arg%>");
a0c938f0 11219 return true;
11220 }
c2f47e15 11221 arg = CALL_EXPR_ARG (exp, 0);
11222 }
11223
a8dd994c 11224 if (TREE_CODE (arg) == SSA_NAME)
11225 arg = SSA_NAME_VAR (arg);
11226
c2f47e15 11227 /* We destructively modify the call to be __builtin_va_start (ap, 0)
48e1416a 11228 or __builtin_next_arg (0) the first time we see it, after checking
c2f47e15 11229 the arguments and if needed issuing a warning. */
11230 if (!integer_zerop (arg))
11231 {
11232 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
79012a9d 11233
4ee9c684 11234 /* Strip off all nops for the sake of the comparison. This
11235 is not quite the same as STRIP_NOPS. It does more.
11236 We must also strip off INDIRECT_EXPR for C++ reference
11237 parameters. */
72dd6141 11238 while (CONVERT_EXPR_P (arg)
4ee9c684 11239 || TREE_CODE (arg) == INDIRECT_REF)
11240 arg = TREE_OPERAND (arg, 0);
11241 if (arg != last_parm)
a0c938f0 11242 {
b08cf617 11243	  /* FIXME: Sometimes the tree optimizers hand us something that
 11244	     is not the last argument even though the user did use the
 11245	     last argument.  We just warn and treat the arg as if it were
 11246	     the last one, which means wrong code may be generated because
 11247	     of it.  */
d98fd4a4 11248 warning_at (current_location,
7edb1062 11249 OPT_Wvarargs,
d98fd4a4 11250 "second parameter of %<va_start%> not last named argument");
743b0c6a 11251 }
24158ad7 11252
11253 /* Undefined by C99 7.15.1.4p4 (va_start):
11254 "If the parameter parmN is declared with the register storage
11255 class, with a function or array type, or with a type that is
11256 not compatible with the type that results after application of
11257 the default argument promotions, the behavior is undefined."
11258 */
11259 else if (DECL_REGISTER (arg))
d98fd4a4 11260 {
11261 warning_at (current_location,
7edb1062 11262 OPT_Wvarargs,
d98fd4a4 11263 "undefined behaviour when second parameter of "
11264 "%<va_start%> is declared with %<register%> storage");
11265 }
24158ad7 11266
79012a9d 11267 /* We want to verify the second parameter just once before the tree
a0c938f0 11268 optimizers are run and then avoid keeping it in the tree,
11269 as otherwise we could warn even for correct code like:
11270 void foo (int i, ...)
11271 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
c2f47e15 11272 if (va_start_p)
11273 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
11274 else
11275 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
743b0c6a 11276 }
11277 return false;
4ee9c684 11278}
11279
11280
c2f47e15 11281/* Expand a call EXP to __builtin_object_size. */
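/* If the call survives to expansion time the object size is still
   unknown, so e.g. __builtin_object_size (p, 0) expands to (size_t) -1
   while __builtin_object_size (p, 2) expands to (size_t) 0.  */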
0a39fd54 11282
f7715905 11283static rtx
0a39fd54 11284expand_builtin_object_size (tree exp)
11285{
11286 tree ost;
11287 int object_size_type;
11288 tree fndecl = get_callee_fndecl (exp);
0a39fd54 11289
c2f47e15 11290 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
0a39fd54 11291 {
b8c23db3 11292 error ("%Kfirst argument of %D must be a pointer, second integer constant",
11293 exp, fndecl);
0a39fd54 11294 expand_builtin_trap ();
11295 return const0_rtx;
11296 }
11297
c2f47e15 11298 ost = CALL_EXPR_ARG (exp, 1);
0a39fd54 11299 STRIP_NOPS (ost);
11300
11301 if (TREE_CODE (ost) != INTEGER_CST
11302 || tree_int_cst_sgn (ost) < 0
11303 || compare_tree_int (ost, 3) > 0)
11304 {
b8c23db3 11305 error ("%Klast argument of %D is not integer constant between 0 and 3",
11306 exp, fndecl);
0a39fd54 11307 expand_builtin_trap ();
11308 return const0_rtx;
11309 }
11310
e913b5cd 11311 object_size_type = tree_to_shwi (ost);
0a39fd54 11312
11313 return object_size_type < 2 ? constm1_rtx : const0_rtx;
11314}
11315
11316/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
11317 FCODE is the BUILT_IN_* to use.
c2f47e15 11318 Return NULL_RTX if we failed; the caller should emit a normal call,
0a39fd54 11319 otherwise try to get the result in TARGET, if convenient (and in
11320 mode MODE if that's convenient). */
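/* For example (illustrative): when the length is a known constant that
   fits in the object size, a checked call such as
     __memcpy_chk (d, s, 16, 32)
   is expanded as a plain memcpy (d, s, 16); if the constant length
   exceeds the object size, the "will always overflow" warning is
   emitted and the checked library call is kept.  */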
11321
11322static rtx
3754d046 11323expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
0a39fd54 11324 enum built_in_function fcode)
11325{
0a39fd54 11326 tree dest, src, len, size;
11327
c2f47e15 11328 if (!validate_arglist (exp,
0a39fd54 11329 POINTER_TYPE,
11330 fcode == BUILT_IN_MEMSET_CHK
11331 ? INTEGER_TYPE : POINTER_TYPE,
11332 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
c2f47e15 11333 return NULL_RTX;
0a39fd54 11334
c2f47e15 11335 dest = CALL_EXPR_ARG (exp, 0);
11336 src = CALL_EXPR_ARG (exp, 1);
11337 len = CALL_EXPR_ARG (exp, 2);
11338 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11339
e913b5cd 11340 if (! tree_fits_uhwi_p (size))
c2f47e15 11341 return NULL_RTX;
0a39fd54 11342
e913b5cd 11343 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
0a39fd54 11344 {
11345 tree fn;
11346
11347 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
11348 {
b430e8d9 11349 warning_at (tree_nonartificial_location (exp),
11350 0, "%Kcall to %D will always overflow destination buffer",
11351 exp, get_callee_fndecl (exp));
c2f47e15 11352 return NULL_RTX;
0a39fd54 11353 }
11354
0a39fd54 11355 fn = NULL_TREE;
11356 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
11357 mem{cpy,pcpy,move,set} is available. */
11358 switch (fcode)
11359 {
11360 case BUILT_IN_MEMCPY_CHK:
b9a16870 11361 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
0a39fd54 11362 break;
11363 case BUILT_IN_MEMPCPY_CHK:
b9a16870 11364 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
0a39fd54 11365 break;
11366 case BUILT_IN_MEMMOVE_CHK:
b9a16870 11367 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
0a39fd54 11368 break;
11369 case BUILT_IN_MEMSET_CHK:
b9a16870 11370 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
0a39fd54 11371 break;
11372 default:
11373 break;
11374 }
11375
11376 if (! fn)
c2f47e15 11377 return NULL_RTX;
0a39fd54 11378
0568e9c1 11379 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
a65c4d64 11380 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11381 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11382 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11383 }
11384 else if (fcode == BUILT_IN_MEMSET_CHK)
c2f47e15 11385 return NULL_RTX;
0a39fd54 11386 else
11387 {
957d0361 11388 unsigned int dest_align = get_pointer_alignment (dest);
0a39fd54 11389
11390 /* If DEST is not a pointer type, call the normal function. */
11391 if (dest_align == 0)
c2f47e15 11392 return NULL_RTX;
0a39fd54 11393
11394 /* If SRC and DEST are the same (and not volatile), do nothing. */
11395 if (operand_equal_p (src, dest, 0))
11396 {
11397 tree expr;
11398
11399 if (fcode != BUILT_IN_MEMPCPY_CHK)
11400 {
11401 /* Evaluate and ignore LEN in case it has side-effects. */
11402 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11403 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11404 }
11405
2cc66f2a 11406 expr = fold_build_pointer_plus (dest, len);
0a39fd54 11407 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11408 }
11409
11410 /* __memmove_chk special case. */
11411 if (fcode == BUILT_IN_MEMMOVE_CHK)
11412 {
957d0361 11413 unsigned int src_align = get_pointer_alignment (src);
0a39fd54 11414
11415 if (src_align == 0)
c2f47e15 11416 return NULL_RTX;
0a39fd54 11417
11418 /* If src is categorized for a readonly section we can use
11419 normal __memcpy_chk. */
11420 if (readonly_data_expr (src))
11421 {
b9a16870 11422 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
0a39fd54 11423 if (!fn)
c2f47e15 11424 return NULL_RTX;
0568e9c1 11425 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11426 dest, src, len, size);
a65c4d64 11427 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11428 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
0a39fd54 11429 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11430 }
11431 }
c2f47e15 11432 return NULL_RTX;
0a39fd54 11433 }
11434}
11435
11436/* Emit warning if a buffer overflow is detected at compile time. */
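/* For example (illustrative): with char buf[4], a call folded to
   __strcpy_chk (buf, "hello", 4) needs 6 bytes including the
   terminating NUL but the destination object is only 4 bytes, so the
   "will always overflow destination buffer" warning is emitted.  */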
11437
11438static void
11439maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11440{
c2f47e15 11441 int is_strlen = 0;
0a39fd54 11442 tree len, size;
b430e8d9 11443 location_t loc = tree_nonartificial_location (exp);
0a39fd54 11444
11445 switch (fcode)
11446 {
11447 case BUILT_IN_STRCPY_CHK:
11448 case BUILT_IN_STPCPY_CHK:
11449 /* For __strcat_chk the warning will be emitted only if overflowing
11450 by at least strlen (dest) + 1 bytes. */
11451 case BUILT_IN_STRCAT_CHK:
c2f47e15 11452 len = CALL_EXPR_ARG (exp, 1);
11453 size = CALL_EXPR_ARG (exp, 2);
0a39fd54 11454 is_strlen = 1;
11455 break;
b356dfef 11456 case BUILT_IN_STRNCAT_CHK:
0a39fd54 11457 case BUILT_IN_STRNCPY_CHK:
1063acde 11458 case BUILT_IN_STPNCPY_CHK:
c2f47e15 11459 len = CALL_EXPR_ARG (exp, 2);
11460 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11461 break;
11462 case BUILT_IN_SNPRINTF_CHK:
11463 case BUILT_IN_VSNPRINTF_CHK:
c2f47e15 11464 len = CALL_EXPR_ARG (exp, 1);
11465 size = CALL_EXPR_ARG (exp, 3);
0a39fd54 11466 break;
11467 default:
11468 gcc_unreachable ();
11469 }
11470
0a39fd54 11471 if (!len || !size)
11472 return;
11473
e913b5cd 11474 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11475 return;
11476
11477 if (is_strlen)
11478 {
11479 len = c_strlen (len, 1);
e913b5cd 11480 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
0a39fd54 11481 return;
11482 }
b356dfef 11483 else if (fcode == BUILT_IN_STRNCAT_CHK)
11484 {
c2f47e15 11485 tree src = CALL_EXPR_ARG (exp, 1);
e913b5cd 11486 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
b356dfef 11487 return;
11488 src = c_strlen (src, 1);
e913b5cd 11489 if (! src || ! tree_fits_uhwi_p (src))
b356dfef 11490 {
b430e8d9 11491 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
11492 exp, get_callee_fndecl (exp));
b356dfef 11493 return;
11494 }
11495 else if (tree_int_cst_lt (src, size))
11496 return;
11497 }
e913b5cd 11498 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
0a39fd54 11499 return;
11500
b430e8d9 11501 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
11502 exp, get_callee_fndecl (exp));
0a39fd54 11503}
11504
11505/* Emit warning if a buffer overflow is detected at compile time
11506 in __sprintf_chk/__vsprintf_chk calls. */
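/* For example (illustrative): with char buf[4], a call folded to
   __sprintf_chk (buf, 0, 4, "hello") has a format of known length 5,
   which does not fit in the 4-byte object, so the overflow warning is
   emitted.  */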
11507
11508static void
11509maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11510{
1e4adcfc 11511 tree size, len, fmt;
0a39fd54 11512 const char *fmt_str;
c2f47e15 11513 int nargs = call_expr_nargs (exp);
0a39fd54 11514
11515 /* Verify the required arguments in the original call. */
48e1416a 11516
c2f47e15 11517 if (nargs < 4)
0a39fd54 11518 return;
c2f47e15 11519 size = CALL_EXPR_ARG (exp, 2);
11520 fmt = CALL_EXPR_ARG (exp, 3);
0a39fd54 11521
e913b5cd 11522 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
0a39fd54 11523 return;
11524
11525 /* Check whether the format is a literal string constant. */
11526 fmt_str = c_getstr (fmt);
11527 if (fmt_str == NULL)
11528 return;
11529
d4473c84 11530 if (!init_target_chars ())
99eabcc1 11531 return;
11532
0a39fd54 11533 /* If the format doesn't contain % args or %%, we know its size. */
99eabcc1 11534 if (strchr (fmt_str, target_percent) == 0)
0a39fd54 11535 len = build_int_cstu (size_type_node, strlen (fmt_str));
 11536  /* If the format is "%s" and the first ... argument is a string literal,
11537 we know it too. */
c2f47e15 11538 else if (fcode == BUILT_IN_SPRINTF_CHK
11539 && strcmp (fmt_str, target_percent_s) == 0)
0a39fd54 11540 {
11541 tree arg;
11542
c2f47e15 11543 if (nargs < 5)
0a39fd54 11544 return;
c2f47e15 11545 arg = CALL_EXPR_ARG (exp, 4);
0a39fd54 11546 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11547 return;
11548
11549 len = c_strlen (arg, 1);
e913b5cd 11550 if (!len || ! tree_fits_uhwi_p (len))
0a39fd54 11551 return;
11552 }
11553 else
11554 return;
11555
11556 if (! tree_int_cst_lt (len, size))
b430e8d9 11557 warning_at (tree_nonartificial_location (exp),
11558 0, "%Kcall to %D will always overflow destination buffer",
11559 exp, get_callee_fndecl (exp));
0a39fd54 11560}
11561
2c281b15 11562/* Emit warning if a free is called with address of a variable. */
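/* For example (illustrative): given
     int x;  ...  free (&x);
   the "attempt to free a non-heap object" warning is emitted, since the
   argument is the address of an automatic variable.  */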
11563
11564static void
11565maybe_emit_free_warning (tree exp)
11566{
11567 tree arg = CALL_EXPR_ARG (exp, 0);
11568
11569 STRIP_NOPS (arg);
11570 if (TREE_CODE (arg) != ADDR_EXPR)
11571 return;
11572
11573 arg = get_base_address (TREE_OPERAND (arg, 0));
182cf5a9 11574 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
2c281b15 11575 return;
11576
11577 if (SSA_VAR_P (arg))
f74ea1c2 11578 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11579 "%Kattempt to free a non-heap object %qD", exp, arg);
2c281b15 11580 else
f74ea1c2 11581 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11582 "%Kattempt to free a non-heap object", exp);
2c281b15 11583}
11584
c2f47e15 11585/* Fold a call to __builtin_object_size with arguments PTR and OST,
11586 if possible. */
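/* For example (illustrative): with char buf[10],
     __builtin_object_size (&buf[2], 0)  ->  8
   while a pointer whose object is not yet known is left unfolded so
   that later passes may still determine it.  */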
0a39fd54 11587
f7715905 11588static tree
c2f47e15 11589fold_builtin_object_size (tree ptr, tree ost)
0a39fd54 11590{
a6caa15f 11591 unsigned HOST_WIDE_INT bytes;
0a39fd54 11592 int object_size_type;
11593
c2f47e15 11594 if (!validate_arg (ptr, POINTER_TYPE)
11595 || !validate_arg (ost, INTEGER_TYPE))
11596 return NULL_TREE;
0a39fd54 11597
0a39fd54 11598 STRIP_NOPS (ost);
11599
11600 if (TREE_CODE (ost) != INTEGER_CST
11601 || tree_int_cst_sgn (ost) < 0
11602 || compare_tree_int (ost, 3) > 0)
c2f47e15 11603 return NULL_TREE;
0a39fd54 11604
e913b5cd 11605 object_size_type = tree_to_shwi (ost);
0a39fd54 11606
11607 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11608 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11609 and (size_t) 0 for types 2 and 3. */
11610 if (TREE_SIDE_EFFECTS (ptr))
697bbc3f 11611 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
0a39fd54 11612
11613 if (TREE_CODE (ptr) == ADDR_EXPR)
a6caa15f 11614 {
6da74b21 11615 bytes = compute_builtin_object_size (ptr, object_size_type);
11616 if (wi::fits_to_tree_p (bytes, size_type_node))
11617 return build_int_cstu (size_type_node, bytes);
a6caa15f 11618 }
0a39fd54 11619 else if (TREE_CODE (ptr) == SSA_NAME)
11620 {
0a39fd54 11621 /* If object size is not known yet, delay folding until
11622 later. Maybe subsequent passes will help determining
11623 it. */
11624 bytes = compute_builtin_object_size (ptr, object_size_type);
a6caa15f 11625 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
6da74b21 11626 && wi::fits_to_tree_p (bytes, size_type_node))
11627 return build_int_cstu (size_type_node, bytes);
0a39fd54 11628 }
11629
a6caa15f 11630 return NULL_TREE;
0a39fd54 11631}
11632
0e80b01d 11633/* Builtins with folding operations that operate on "..." arguments
11634 need special handling; we need to store the arguments in a convenient
11635 data structure before attempting any folding. Fortunately there are
11636 only a few builtins that fall into this category. FNDECL is the
e80cc485 11637   function, ARGS holds the arguments to the call and NARGS their number.  */
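/* For example (illustrative), a call such as
     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL, FP_SUBNORMAL,
                           FP_ZERO, x)
   is routed through fold_builtin_fpclassify, which can reduce it to the
   matching class constant when X is a constant.  */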
0e80b01d 11638
11639static tree
e80cc485 11640fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
0e80b01d 11641{
11642 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11643 tree ret = NULL_TREE;
11644
11645 switch (fcode)
11646 {
0e80b01d 11647 case BUILT_IN_FPCLASSIFY:
9d884767 11648 ret = fold_builtin_fpclassify (loc, args, nargs);
0e80b01d 11649 break;
11650
11651 default:
11652 break;
11653 }
11654 if (ret)
11655 {
11656 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11657 SET_EXPR_LOCATION (ret, loc);
11658 TREE_NO_WARNING (ret) = 1;
11659 return ret;
11660 }
11661 return NULL_TREE;
11662}
11663
99eabcc1 11664/* Initialize format string characters in the target charset. */
11665
b9ea678c 11666bool
99eabcc1 11667init_target_chars (void)
11668{
11669 static bool init;
11670 if (!init)
11671 {
11672 target_newline = lang_hooks.to_target_charset ('\n');
11673 target_percent = lang_hooks.to_target_charset ('%');
11674 target_c = lang_hooks.to_target_charset ('c');
11675 target_s = lang_hooks.to_target_charset ('s');
11676 if (target_newline == 0 || target_percent == 0 || target_c == 0
11677 || target_s == 0)
11678 return false;
11679
11680 target_percent_c[0] = target_percent;
11681 target_percent_c[1] = target_c;
11682 target_percent_c[2] = '\0';
11683
11684 target_percent_s[0] = target_percent;
11685 target_percent_s[1] = target_s;
11686 target_percent_s[2] = '\0';
11687
11688 target_percent_s_newline[0] = target_percent;
11689 target_percent_s_newline[1] = target_s;
11690 target_percent_s_newline[2] = target_newline;
11691 target_percent_s_newline[3] = '\0';
a0c938f0 11692
99eabcc1 11693 init = true;
11694 }
11695 return true;
11696}
bffb7645 11697
f0c477f2 11698/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11699 and no overflow/underflow occurred. INEXACT is true if M was not
fa7637bd 11700 exactly calculated. TYPE is the tree type for the result. This
f0c477f2 11701 function assumes that you cleared the MPFR flags and then
11702 calculated M to see if anything subsequently set a flag prior to
11703 entering this function. Return NULL_TREE if any checks fail. */
11704
11705static tree
d4473c84 11706do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
f0c477f2 11707{
11708 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11709 overflow/underflow occurred. If -frounding-math, proceed iff the
11710 result of calling FUNC was exact. */
d4473c84 11711 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
f0c477f2 11712 && (!flag_rounding_math || !inexact))
11713 {
11714 REAL_VALUE_TYPE rr;
11715
66fa16e6 11716 real_from_mpfr (&rr, m, type, GMP_RNDN);
f0c477f2 11717 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11718 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11719	 but the mpfr_t is not, then we underflowed in the
11720 conversion. */
776a7bab 11721 if (real_isfinite (&rr)
f0c477f2 11722 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11723 {
11724 REAL_VALUE_TYPE rmode;
11725
11726 real_convert (&rmode, TYPE_MODE (type), &rr);
11727 /* Proceed iff the specified mode can hold the value. */
11728 if (real_identical (&rmode, &rr))
11729 return build_real (type, rmode);
11730 }
11731 }
11732 return NULL_TREE;
11733}
11734
239d491a 11735/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11736 number and no overflow/underflow occurred. INEXACT is true if M
11737 was not exactly calculated. TYPE is the tree type for the result.
11738 This function assumes that you cleared the MPFR flags and then
11739 calculated M to see if anything subsequently set a flag prior to
652d9409 11740 entering this function. Return NULL_TREE if any checks fail, if
11741 FORCE_CONVERT is true, then bypass the checks. */
239d491a 11742
11743static tree
652d9409 11744do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
239d491a 11745{
11746 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11747 overflow/underflow occurred. If -frounding-math, proceed iff the
11748 result of calling FUNC was exact. */
652d9409 11749 if (force_convert
11750 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11751 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11752 && (!flag_rounding_math || !inexact)))
239d491a 11753 {
11754 REAL_VALUE_TYPE re, im;
11755
b0e7c4d4 11756 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11757 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
239d491a 11758 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11759 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11761	 but the mpfr_t is not, then we underflowed in the
11761 conversion. */
652d9409 11762 if (force_convert
11763 || (real_isfinite (&re) && real_isfinite (&im)
11764 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11765 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
239d491a 11766 {
11767 REAL_VALUE_TYPE re_mode, im_mode;
11768
11769 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11770 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11771 /* Proceed iff the specified mode can hold the value. */
652d9409 11772 if (force_convert
11773 || (real_identical (&re_mode, &re)
11774 && real_identical (&im_mode, &im)))
239d491a 11775 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11776 build_real (TREE_TYPE (type), im_mode));
11777 }
11778 }
11779 return NULL_TREE;
11780}
239d491a 11781
bffb7645 11782/* If argument ARG is a REAL_CST, call the one-argument mpfr function
11783 FUNC on it and return the resulting value as a tree with type TYPE.
728bac60 11784 If MIN and/or MAX are not NULL, then the supplied ARG must be
11785 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11786 acceptable values, otherwise they are not. The mpfr precision is
11787 set to the precision of TYPE. We assume that function FUNC returns
11788 zero if the result could be calculated exactly within the requested
11789 precision. */
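/* For example (illustrative), a caller folding asin of a REAL_CST might
   use
     do_mpfr_arg1 (arg, type, mpfr_asin, &dconstm1, &dconst1, true);
   so that only arguments in the closed interval [-1, 1] are accepted.  */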
bffb7645 11790
11791static tree
728bac60 11792do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11793 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11794 bool inclusive)
bffb7645 11795{
11796 tree result = NULL_TREE;
48e1416a 11797
bffb7645 11798 STRIP_NOPS (arg);
11799
bd7d6fa4 11800 /* To proceed, MPFR must exactly represent the target floating point
11801 format, which only happens when the target base equals two. */
11802 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11803 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
bffb7645 11804 {
f0c477f2 11805 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
bffb7645 11806
776a7bab 11807 if (real_isfinite (ra)
f0c477f2 11808 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11809 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
bffb7645 11810 {
e2eb2b7f 11811 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11812 const int prec = fmt->p;
11813 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
debf9994 11814 int inexact;
bffb7645 11815 mpfr_t m;
11816
11817 mpfr_init2 (m, prec);
66fa16e6 11818 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11819 mpfr_clear_flags ();
e2eb2b7f 11820 inexact = func (m, m, rnd);
f0c477f2 11821 result = do_mpfr_ckconv (m, type, inexact);
bffb7645 11822 mpfr_clear (m);
11823 }
11824 }
48e1416a 11825
bffb7645 11826 return result;
11827}
f0c477f2 11828
11829/* If argument ARG is a REAL_CST, call the two-argument mpfr function
11830 FUNC on it and return the resulting value as a tree with type TYPE.
11831 The mpfr precision is set to the precision of TYPE. We assume that
11832 function FUNC returns zero if the result could be calculated
11833 exactly within the requested precision. */
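/* For example (illustrative), folding atan2 with two REAL_CST arguments
   might use
     do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
   and get back a REAL_CST of TYPE when MPFR can compute it safely.  */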
11834
11835static tree
11836do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11837 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11838{
11839 tree result = NULL_TREE;
48e1416a 11840
f0c477f2 11841 STRIP_NOPS (arg1);
11842 STRIP_NOPS (arg2);
11843
bd7d6fa4 11844 /* To proceed, MPFR must exactly represent the target floating point
11845 format, which only happens when the target base equals two. */
11846 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11847 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11848 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
f0c477f2 11849 {
11850 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11851 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11852
776a7bab 11853 if (real_isfinite (ra1) && real_isfinite (ra2))
f0c477f2 11854 {
e2eb2b7f 11855 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11856 const int prec = fmt->p;
11857 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
f0c477f2 11858 int inexact;
11859 mpfr_t m1, m2;
11860
11861 mpfr_inits2 (prec, m1, m2, NULL);
66fa16e6 11862 mpfr_from_real (m1, ra1, GMP_RNDN);
11863 mpfr_from_real (m2, ra2, GMP_RNDN);
d4473c84 11864 mpfr_clear_flags ();
e2eb2b7f 11865 inexact = func (m1, m1, m2, rnd);
f0c477f2 11866 result = do_mpfr_ckconv (m1, type, inexact);
11867 mpfr_clears (m1, m2, NULL);
11868 }
11869 }
48e1416a 11870
f0c477f2 11871 return result;
11872}
d92f994c 11873
9917422b 11874/* If argument ARG is a REAL_CST, call the three-argument mpfr function
11875 FUNC on it and return the resulting value as a tree with type TYPE.
11876 The mpfr precision is set to the precision of TYPE. We assume that
11877 function FUNC returns zero if the result could be calculated
11878 exactly within the requested precision. */
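/* For example (illustrative), folding fma with three REAL_CST arguments
   might use
     do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
   computing the fused multiply-add in one correctly rounded step.  */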
11879
11880static tree
11881do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11882 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11883{
11884 tree result = NULL_TREE;
48e1416a 11885
9917422b 11886 STRIP_NOPS (arg1);
11887 STRIP_NOPS (arg2);
11888 STRIP_NOPS (arg3);
11889
bd7d6fa4 11890 /* To proceed, MPFR must exactly represent the target floating point
11891 format, which only happens when the target base equals two. */
11892 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11893 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11894 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11895 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
9917422b 11896 {
11897 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11898 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11899 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11900
776a7bab 11901 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
9917422b 11902 {
e2eb2b7f 11903 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11904 const int prec = fmt->p;
11905 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
9917422b 11906 int inexact;
11907 mpfr_t m1, m2, m3;
11908
11909 mpfr_inits2 (prec, m1, m2, m3, NULL);
66fa16e6 11910 mpfr_from_real (m1, ra1, GMP_RNDN);
11911 mpfr_from_real (m2, ra2, GMP_RNDN);
11912 mpfr_from_real (m3, ra3, GMP_RNDN);
d4473c84 11913 mpfr_clear_flags ();
e2eb2b7f 11914 inexact = func (m1, m1, m2, m3, rnd);
9917422b 11915 result = do_mpfr_ckconv (m1, type, inexact);
11916 mpfr_clears (m1, m2, m3, NULL);
11917 }
11918 }
48e1416a 11919
9917422b 11920 return result;
11921}
11922
d92f994c 11923/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11924 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
d735c391 11925 If ARG_SINP and ARG_COSP are NULL then the result is returned
11926 as a complex value.
d92f994c 11927 The type is taken from the type of ARG and is used for setting the
11928 precision of the calculation and results. */
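/* For example (illustrative), sincos (0.0, &s, &c) can be folded to a
   COMPOUND_EXPR storing 0.0 in *s and 1.0 in *c, while the cexpi-style
   use (both pointer arguments NULL) yields the complex constant
   1.0 + 0.0i.  */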
11929
11930static tree
11931do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11932{
bd7d6fa4 11933 tree const type = TREE_TYPE (arg);
d92f994c 11934 tree result = NULL_TREE;
48e1416a 11935
d92f994c 11936 STRIP_NOPS (arg);
48e1416a 11937
bd7d6fa4 11938 /* To proceed, MPFR must exactly represent the target floating point
11939 format, which only happens when the target base equals two. */
11940 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
f96bd2bf 11941 && TREE_CODE (arg) == REAL_CST
11942 && !TREE_OVERFLOW (arg))
d92f994c 11943 {
11944 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11945
776a7bab 11946 if (real_isfinite (ra))
d92f994c 11947 {
e2eb2b7f 11948 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11949 const int prec = fmt->p;
11950 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
d92f994c 11951 tree result_s, result_c;
11952 int inexact;
11953 mpfr_t m, ms, mc;
11954
11955 mpfr_inits2 (prec, m, ms, mc, NULL);
66fa16e6 11956 mpfr_from_real (m, ra, GMP_RNDN);
d4473c84 11957 mpfr_clear_flags ();
e2eb2b7f 11958 inexact = mpfr_sin_cos (ms, mc, m, rnd);
d92f994c 11959 result_s = do_mpfr_ckconv (ms, type, inexact);
11960 result_c = do_mpfr_ckconv (mc, type, inexact);
11961 mpfr_clears (m, ms, mc, NULL);
11962 if (result_s && result_c)
11963 {
d735c391 11964 /* If we are to return in a complex value do so. */
11965 if (!arg_sinp && !arg_cosp)
11966 return build_complex (build_complex_type (type),
11967 result_c, result_s);
11968
d92f994c 11969 /* Dereference the sin/cos pointer arguments. */
11970 arg_sinp = build_fold_indirect_ref (arg_sinp);
11971 arg_cosp = build_fold_indirect_ref (arg_cosp);
 11973	      /* Proceed if valid pointer types were passed in.  */
11973 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11974 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11975 {
11976 /* Set the values. */
41076ef6 11977 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
35cc02b5 11978 result_s);
d92f994c 11979 TREE_SIDE_EFFECTS (result_s) = 1;
41076ef6 11980 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
35cc02b5 11981 result_c);
d92f994c 11982 TREE_SIDE_EFFECTS (result_c) = 1;
11983 /* Combine the assignments into a compound expr. */
11984 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11985 result_s, result_c));
11986 }
11987 }
11988 }
11989 }
11990 return result;
11991}
65dd1378 11992
65dd1378 11993/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11994 two-argument mpfr order N Bessel function FUNC on them and return
11995 the resulting value as a tree with type TYPE. The mpfr precision
11996 is set to the precision of TYPE. We assume that function FUNC
11997 returns zero if the result could be calculated exactly within the
11998 requested precision. */
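/* For example (illustrative), folding jn (2, x) for a REAL_CST X might
   use
     do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
   computing the order-2 Bessel function of the first kind.  */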
11999static tree
12000do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
12001 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
12002 const REAL_VALUE_TYPE *min, bool inclusive)
12003{
12004 tree result = NULL_TREE;
12005
12006 STRIP_NOPS (arg1);
12007 STRIP_NOPS (arg2);
12008
12009 /* To proceed, MPFR must exactly represent the target floating point
12010 format, which only happens when the target base equals two. */
12011 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
e913b5cd 12012 && tree_fits_shwi_p (arg1)
65dd1378 12013 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
12014 {
e913b5cd 12015 const HOST_WIDE_INT n = tree_to_shwi (arg1);
65dd1378 12016 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
12017
12018 if (n == (long)n
776a7bab 12019 && real_isfinite (ra)
65dd1378 12020 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
12021 {
e2eb2b7f 12022 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12023 const int prec = fmt->p;
12024 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
65dd1378 12025 int inexact;
12026 mpfr_t m;
12027
12028 mpfr_init2 (m, prec);
12029 mpfr_from_real (m, ra, GMP_RNDN);
12030 mpfr_clear_flags ();
e2eb2b7f 12031 inexact = func (m, n, m, rnd);
65dd1378 12032 result = do_mpfr_ckconv (m, type, inexact);
12033 mpfr_clear (m);
12034 }
12035 }
48e1416a 12036
65dd1378 12037 return result;
12038}
e5407ca6 12039
12040/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
12041 the pointer *(ARG_QUO) and return the result. The type is taken
12042 from the type of ARG0 and is used for setting the precision of the
12043 calculation and results. */
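/* For example (illustrative), remquo (5.0, 3.0, &q) can be folded to a
   COMPOUND_EXPR that stores 2 in *q and yields the remainder -1.0,
   matching what the library call would produce at run time.  */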
12044
12045static tree
12046do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
12047{
12048 tree const type = TREE_TYPE (arg0);
12049 tree result = NULL_TREE;
48e1416a 12050
e5407ca6 12051 STRIP_NOPS (arg0);
12052 STRIP_NOPS (arg1);
48e1416a 12053
e5407ca6 12054 /* To proceed, MPFR must exactly represent the target floating point
12055 format, which only happens when the target base equals two. */
12056 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12057 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
12058 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
12059 {
12060 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
12061 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
12062
776a7bab 12063 if (real_isfinite (ra0) && real_isfinite (ra1))
e5407ca6 12064 {
e2eb2b7f 12065 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12066 const int prec = fmt->p;
12067 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e5407ca6 12068 tree result_rem;
12069 long integer_quo;
12070 mpfr_t m0, m1;
12071
12072 mpfr_inits2 (prec, m0, m1, NULL);
12073 mpfr_from_real (m0, ra0, GMP_RNDN);
12074 mpfr_from_real (m1, ra1, GMP_RNDN);
12075 mpfr_clear_flags ();
e2eb2b7f 12076 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
e5407ca6 12077 /* Remquo is independent of the rounding mode, so pass
12078 inexact=0 to do_mpfr_ckconv(). */
12079 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
12080 mpfr_clears (m0, m1, NULL);
12081 if (result_rem)
12082 {
12083 /* MPFR calculates quo in the host's long so it may
12084 return more bits in quo than the target int can hold
12085 if sizeof(host long) > sizeof(target int). This can
12086 happen even for native compilers in LP64 mode. In
 12087	     these cases, reduce the quo value modulo the largest
 12088	     number that the target int can hold, while leaving one
12089 bit for the sign. */
12090 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
12091 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
12092
12093 /* Dereference the quo pointer argument. */
12094 arg_quo = build_fold_indirect_ref (arg_quo);
12095 /* Proceed iff a valid pointer type was passed in. */
12096 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
12097 {
12098 /* Set the value. */
7002a1c8 12099 tree result_quo
12100 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
12101 build_int_cst (TREE_TYPE (arg_quo),
12102 integer_quo));
e5407ca6 12103 TREE_SIDE_EFFECTS (result_quo) = 1;
12104 /* Combine the quo assignment with the rem. */
12105 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12106 result_quo, result_rem));
12107 }
12108 }
12109 }
12110 }
12111 return result;
12112}
e84da7c1 12113
12114/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
12115 resulting value as a tree with type TYPE. The mpfr precision is
12116 set to the precision of TYPE. We assume that this mpfr function
12117 returns zero if the result could be calculated exactly within the
12118 requested precision. In addition, the integer pointer represented
12119 by ARG_SG will be dereferenced and set to the appropriate signgam
12120 (-1,1) value. */
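/* For example (illustrative), lgamma_r (0.5, &sg) can be folded to a
   COMPOUND_EXPR that stores 1 in *sg (gamma (0.5) is positive) and
   yields log (sqrt (pi)) as the result.  */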
12121
12122static tree
12123do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
12124{
12125 tree result = NULL_TREE;
12126
12127 STRIP_NOPS (arg);
48e1416a 12128
e84da7c1 12129 /* To proceed, MPFR must exactly represent the target floating point
12130 format, which only happens when the target base equals two. Also
12131 verify ARG is a constant and that ARG_SG is an int pointer. */
12132 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
12133 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
12134 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
12135 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
12136 {
12137 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
12138
12139 /* In addition to NaN and Inf, the argument cannot be zero or a
12140 negative integer. */
776a7bab 12141 if (real_isfinite (ra)
e84da7c1 12142 && ra->cl != rvc_zero
9af5ce0c 12143 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
e84da7c1 12144 {
e2eb2b7f 12145 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
12146 const int prec = fmt->p;
12147 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e84da7c1 12148 int inexact, sg;
12149 mpfr_t m;
12150 tree result_lg;
12151
12152 mpfr_init2 (m, prec);
12153 mpfr_from_real (m, ra, GMP_RNDN);
12154 mpfr_clear_flags ();
e2eb2b7f 12155 inexact = mpfr_lgamma (m, &sg, m, rnd);
e84da7c1 12156 result_lg = do_mpfr_ckconv (m, type, inexact);
12157 mpfr_clear (m);
12158 if (result_lg)
12159 {
12160 tree result_sg;
12161
12162 /* Dereference the arg_sg pointer argument. */
12163 arg_sg = build_fold_indirect_ref (arg_sg);
12164 /* Assign the signgam value into *arg_sg. */
12165 result_sg = fold_build2 (MODIFY_EXPR,
12166 TREE_TYPE (arg_sg), arg_sg,
7002a1c8 12167 build_int_cst (TREE_TYPE (arg_sg), sg));
e84da7c1 12168 TREE_SIDE_EFFECTS (result_sg) = 1;
12169 /* Combine the signgam assignment with the lgamma result. */
12170 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
12171 result_sg, result_lg));
12172 }
12173 }
12174 }
12175
12176 return result;
12177}
75a70cf9 12178
239d491a 12179/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
12180 function FUNC on it and return the resulting value as a tree with
12181 type TYPE. The mpfr precision is set to the precision of TYPE. We
12182 assume that function FUNC returns zero if the result could be
12183 calculated exactly within the requested precision. */
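/* For example (illustrative), folding csin of a COMPLEX_CST argument
   might use
     do_mpc_arg1 (arg, type, mpc_sin);
   and get back a COMPLEX_CST of TYPE when the computation is exact
   enough for the target format.  */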
12184
12185static tree
12186do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
12187{
12188 tree result = NULL_TREE;
48e1416a 12189
239d491a 12190 STRIP_NOPS (arg);
12191
12192 /* To proceed, MPFR must exactly represent the target floating point
12193 format, which only happens when the target base equals two. */
12194 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
12195 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
12196 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
12197 {
12198 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
12199 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
12200
12201 if (real_isfinite (re) && real_isfinite (im))
12202 {
12203 const struct real_format *const fmt =
12204 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12205 const int prec = fmt->p;
12206 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
44d89feb 12207 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
239d491a 12208 int inexact;
12209 mpc_t m;
48e1416a 12210
239d491a 12211 mpc_init2 (m, prec);
9af5ce0c 12212 mpfr_from_real (mpc_realref (m), re, rnd);
12213 mpfr_from_real (mpc_imagref (m), im, rnd);
239d491a 12214 mpfr_clear_flags ();
44d89feb 12215 inexact = func (m, m, crnd);
652d9409 12216 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
239d491a 12217 mpc_clear (m);
12218 }
12219 }
12220
12221 return result;
12222}
c699fab8 12223
12224/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
12225 mpc function FUNC on it and return the resulting value as a tree
12226 with type TYPE. The mpfr precision is set to the precision of
12227 TYPE. We assume that function FUNC returns zero if the result
652d9409 12228 could be calculated exactly within the requested precision. If
12229 DO_NONFINITE is true, then fold expressions containing Inf or NaN
12230 in the arguments and/or results. */
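/* For example (illustrative), folding cpow with two COMPLEX_CST
   arguments might use
     do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);
   with DO_NONFINITE zero, while a caller that wants Inf/NaN operands
   folded as well passes a nonzero DO_NONFINITE.  */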
c699fab8 12231
63e89698 12232tree
652d9409 12233do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
c699fab8 12234 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
12235{
12236 tree result = NULL_TREE;
48e1416a 12237
c699fab8 12238 STRIP_NOPS (arg0);
12239 STRIP_NOPS (arg1);
12240
12241 /* To proceed, MPFR must exactly represent the target floating point
12242 format, which only happens when the target base equals two. */
12243 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
12244 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
12245 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
12246 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
12247 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
12248 {
12249 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
12250 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
12251 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
12252 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
12253
652d9409 12254 if (do_nonfinite
12255 || (real_isfinite (re0) && real_isfinite (im0)
12256 && real_isfinite (re1) && real_isfinite (im1)))
c699fab8 12257 {
12258 const struct real_format *const fmt =
12259 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
12260 const int prec = fmt->p;
12261 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
12262 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
12263 int inexact;
12264 mpc_t m0, m1;
48e1416a 12265
c699fab8 12266 mpc_init2 (m0, prec);
12267 mpc_init2 (m1, prec);
9af5ce0c 12268 mpfr_from_real (mpc_realref (m0), re0, rnd);
12269 mpfr_from_real (mpc_imagref (m0), im0, rnd);
12270 mpfr_from_real (mpc_realref (m1), re1, rnd);
12271 mpfr_from_real (mpc_imagref (m1), im1, rnd);
c699fab8 12272 mpfr_clear_flags ();
12273 inexact = func (m0, m0, m1, crnd);
652d9409 12274 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
c699fab8 12275 mpc_clear (m0);
12276 mpc_clear (m1);
12277 }
12278 }
12279
12280 return result;
12281}
239d491a 12282
75a70cf9 12283/* A wrapper function for builtin folding that prevents warnings for
12284 "statement without effect" and the like, caused by removing the
12285 call node earlier than the warning is generated. */
12286
12287tree
1a91d914 12288fold_call_stmt (gcall *stmt, bool ignore)
75a70cf9 12289{
12290 tree ret = NULL_TREE;
12291 tree fndecl = gimple_call_fndecl (stmt);
389dd41b 12292 location_t loc = gimple_location (stmt);
75a70cf9 12293 if (fndecl
12294 && TREE_CODE (fndecl) == FUNCTION_DECL
12295 && DECL_BUILT_IN (fndecl)
12296 && !gimple_call_va_arg_pack_p (stmt))
12297 {
12298 int nargs = gimple_call_num_args (stmt);
9845fb99 12299 tree *args = (nargs > 0
12300 ? gimple_call_arg_ptr (stmt, 0)
12301 : &error_mark_node);
75a70cf9 12302
198622c0 12303 if (avoid_folding_inline_builtin (fndecl))
12304 return NULL_TREE;
75a70cf9 12305 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12306 {
9845fb99 12307 return targetm.fold_builtin (fndecl, nargs, args, ignore);
75a70cf9 12308 }
12309 else
12310 {
9d884767 12311 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
75a70cf9 12312 if (ret)
12313 {
12314 /* Propagate location information from original call to
12315 expansion of builtin. Otherwise things like
12316 maybe_emit_chk_warning, that operate on the expansion
12317 of a builtin, will use the wrong location information. */
12318 if (gimple_has_location (stmt))
12319 {
12320 tree realret = ret;
12321 if (TREE_CODE (ret) == NOP_EXPR)
12322 realret = TREE_OPERAND (ret, 0);
12323 if (CAN_HAVE_LOCATION_P (realret)
12324 && !EXPR_HAS_LOCATION (realret))
389dd41b 12325 SET_EXPR_LOCATION (realret, loc);
75a70cf9 12326 return realret;
12327 }
12328 return ret;
12329 }
12330 }
12331 }
12332 return NULL_TREE;
12333}
7bfefa9d 12334
b9a16870 12335/* Look up the function in builtin_decl that corresponds to DECL
7bfefa9d 12336 and set ASMSPEC as its user assembler name. DECL must be a
12337 function decl that declares a builtin. */
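/* For example (illustrative), a declaration such as
     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("xmemcpy");
   makes both the builtin and the block-move library calls emitted by
   the compiler use the symbol "xmemcpy".  */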
12338
12339void
12340set_builtin_user_assembler_name (tree decl, const char *asmspec)
12341{
12342 tree builtin;
12343 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
12344 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
12345 && asmspec != 0);
12346
b9a16870 12347 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
4d8e0d6d 12348 set_user_assembler_name (builtin, asmspec);
7bfefa9d 12349 switch (DECL_FUNCTION_CODE (decl))
12350 {
12351 case BUILT_IN_MEMCPY:
12352 init_block_move_fn (asmspec);
12353 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
12354 break;
12355 case BUILT_IN_MEMSET:
12356 init_block_clear_fn (asmspec);
12357 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
12358 break;
12359 case BUILT_IN_MEMMOVE:
12360 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
12361 break;
12362 case BUILT_IN_MEMCMP:
12363 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
12364 break;
12365 case BUILT_IN_ABORT:
12366 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
12367 break;
5a80a58b 12368 case BUILT_IN_FFS:
12369 if (INT_TYPE_SIZE < BITS_PER_WORD)
12370 {
12371 set_user_assembler_libfunc ("ffs", asmspec);
12372 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
12373 MODE_INT, 0), "ffs");
12374 }
12375 break;
7bfefa9d 12376 default:
12377 break;
12378 }
12379}
a6b74a67 12380
12381/* Return true if DECL is a builtin that expands to a constant or similarly
12382 simple code. */
12383bool
12384is_simple_builtin (tree decl)
12385{
12386 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12387 switch (DECL_FUNCTION_CODE (decl))
12388 {
12389 /* Builtins that expand to constants. */
12390 case BUILT_IN_CONSTANT_P:
12391 case BUILT_IN_EXPECT:
12392 case BUILT_IN_OBJECT_SIZE:
12393 case BUILT_IN_UNREACHABLE:
12394 /* Simple register moves or loads from stack. */
fca0886c 12395 case BUILT_IN_ASSUME_ALIGNED:
a6b74a67 12396 case BUILT_IN_RETURN_ADDRESS:
12397 case BUILT_IN_EXTRACT_RETURN_ADDR:
12398 case BUILT_IN_FROB_RETURN_ADDR:
12399 case BUILT_IN_RETURN:
12400 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
12401 case BUILT_IN_FRAME_ADDRESS:
12402 case BUILT_IN_VA_END:
12403 case BUILT_IN_STACK_SAVE:
12404 case BUILT_IN_STACK_RESTORE:
12405 /* Exception state returns or moves registers around. */
12406 case BUILT_IN_EH_FILTER:
12407 case BUILT_IN_EH_POINTER:
12408 case BUILT_IN_EH_COPY_VALUES:
12409 return true;
12410
12411 default:
12412 return false;
12413 }
12414
12415 return false;
12416}
12417
 12418/* Return true if DECL is a builtin that is not expensive, i.e., one that
 12419   is most probably expanded inline into reasonably simple code.  This is a
12420 superset of is_simple_builtin. */
12421bool
12422is_inexpensive_builtin (tree decl)
12423{
12424 if (!decl)
12425 return false;
12426 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
12427 return true;
12428 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
12429 switch (DECL_FUNCTION_CODE (decl))
12430 {
12431 case BUILT_IN_ABS:
12432 case BUILT_IN_ALLOCA:
581bf1c2 12433 case BUILT_IN_ALLOCA_WITH_ALIGN:
74bdbe96 12434 case BUILT_IN_BSWAP16:
a6b74a67 12435 case BUILT_IN_BSWAP32:
12436 case BUILT_IN_BSWAP64:
12437 case BUILT_IN_CLZ:
12438 case BUILT_IN_CLZIMAX:
12439 case BUILT_IN_CLZL:
12440 case BUILT_IN_CLZLL:
12441 case BUILT_IN_CTZ:
12442 case BUILT_IN_CTZIMAX:
12443 case BUILT_IN_CTZL:
12444 case BUILT_IN_CTZLL:
12445 case BUILT_IN_FFS:
12446 case BUILT_IN_FFSIMAX:
12447 case BUILT_IN_FFSL:
12448 case BUILT_IN_FFSLL:
12449 case BUILT_IN_IMAXABS:
12450 case BUILT_IN_FINITE:
12451 case BUILT_IN_FINITEF:
12452 case BUILT_IN_FINITEL:
12453 case BUILT_IN_FINITED32:
12454 case BUILT_IN_FINITED64:
12455 case BUILT_IN_FINITED128:
12456 case BUILT_IN_FPCLASSIFY:
12457 case BUILT_IN_ISFINITE:
12458 case BUILT_IN_ISINF_SIGN:
12459 case BUILT_IN_ISINF:
12460 case BUILT_IN_ISINFF:
12461 case BUILT_IN_ISINFL:
12462 case BUILT_IN_ISINFD32:
12463 case BUILT_IN_ISINFD64:
12464 case BUILT_IN_ISINFD128:
12465 case BUILT_IN_ISNAN:
12466 case BUILT_IN_ISNANF:
12467 case BUILT_IN_ISNANL:
12468 case BUILT_IN_ISNAND32:
12469 case BUILT_IN_ISNAND64:
12470 case BUILT_IN_ISNAND128:
12471 case BUILT_IN_ISNORMAL:
12472 case BUILT_IN_ISGREATER:
12473 case BUILT_IN_ISGREATEREQUAL:
12474 case BUILT_IN_ISLESS:
12475 case BUILT_IN_ISLESSEQUAL:
12476 case BUILT_IN_ISLESSGREATER:
12477 case BUILT_IN_ISUNORDERED:
12478 case BUILT_IN_VA_ARG_PACK:
12479 case BUILT_IN_VA_ARG_PACK_LEN:
12480 case BUILT_IN_VA_COPY:
12481 case BUILT_IN_TRAP:
12482 case BUILT_IN_SAVEREGS:
12483 case BUILT_IN_POPCOUNTL:
12484 case BUILT_IN_POPCOUNTLL:
12485 case BUILT_IN_POPCOUNTIMAX:
12486 case BUILT_IN_POPCOUNT:
12487 case BUILT_IN_PARITYL:
12488 case BUILT_IN_PARITYLL:
12489 case BUILT_IN_PARITYIMAX:
12490 case BUILT_IN_PARITY:
12491 case BUILT_IN_LABS:
12492 case BUILT_IN_LLABS:
12493 case BUILT_IN_PREFETCH:
ca4c3545 12494 case BUILT_IN_ACC_ON_DEVICE:
a6b74a67 12495 return true;
12496
12497 default:
12498 return is_simple_builtin (decl);
12499 }
12500
12501 return false;
12502}