/* Expand builtin functions.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "alias.h"
#include "fold-const.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "internal-fn.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "asan.h"
#include "cilk.h"
#include "cgraph.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Setup an array of builtin_info_type, make sure each element decl is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static rtx c_readstr (const char *, machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
                                        machine_mode, int, tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
                                        enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
                                      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
                          const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
                          int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
                              int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
                              const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
          || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}
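
/* Illustrative note added by the editor (not part of the original source):
   under the checks above, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__sync_fetch_and_add") return true, while a plain
   "memcpy" returns false, because only the prefixed "internal" names are
   matched.  */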


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in alignp and N in
   *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
   *alignp and any bit-offset to *bitposp.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
         alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
         allows the low bit to be used as a virtual bit, we know
         that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
        align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
          && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
        {
          ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
          ptr_bitmask *= BITS_PER_UNIT;
          align = ptr_bitmask & -ptr_bitmask;
          addr = TREE_OPERAND (addr, 0);
        }

      known_alignment
        = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
         has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
        {
          if (TMR_INDEX (exp))
            {
              unsigned HOST_WIDE_INT step = 1;
              if (TMR_STEP (exp))
                step = TREE_INT_CST_LOW (TMR_STEP (exp));
              align = MIN (align, (step & -step) * BITS_PER_UNIT);
            }
          if (TMR_INDEX2 (exp))
            align = BITS_PER_UNIT;
          known_alignment = false;
        }

      /* When EXP is an actual memory reference then we can use
         TYPE_ALIGN of a pointer indirection to derive alignment.
         Do so only if get_pointer_alignment_1 did not reveal absolute
         alignment knowledge and if using that alignment would
         improve the situation.  */
      if (!addr_p && !known_alignment
          && TYPE_ALIGN (TREE_TYPE (exp)) > align)
        align = TYPE_ALIGN (TREE_TYPE (exp));
      else
        {
          /* Else adjust bitpos accordingly.  */
          bitpos += ptr_bitpos;
          if (TREE_CODE (exp) == MEM_REF
              || TREE_CODE (exp) == TARGET_MEM_REF)
            bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
        }
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
         wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
        align = (unsigned) CONSTANT_ALIGNMENT (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
        {
          unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
          if (inner)
            align = MIN (align, inner);
        }
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Otherwise return false
   and store BITS_PER_UNIT to *alignp and any bit-offset to *bitposp.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
                        unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}
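
/* Illustrative example added by the editor (not part of the original
   source): if get_object_alignment_1 reports align == 64 and bitpos == 16,
   the object is known to sit 2 bytes past a 64-bit boundary, so the code
   above returns bitpos & -bitpos, i.e. 16 bits (2 bytes), as the usable
   alignment.  */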

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in alignp and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
                         unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
                                   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
                                          &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
        {
          unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
          if (trailing_zeros < HOST_BITS_PER_INT)
            {
              unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
              if (inner)
                align = MIN (align, inner);
            }
        }
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
           && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
        {
          *bitposp = ptr_misalign * BITS_PER_UNIT;
          *alignp = ptr_align * BITS_PER_UNIT;
          /* We cannot really tell whether this result is an approximation.  */
          return true;
        }
      else
        {
          *bitposp = 0;
          *alignp = BITS_PER_UNIT;
          return false;
        }
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
                  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}
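
/* Illustrative example added by the editor (not part of the original
   source): for a pointer P known to be 8-byte aligned, the
   POINTER_PLUS_EXPR "P + 2" is handled by the recursion above as
   align == 64, bitpos == 16, so get_pointer_alignment reports 16 bits --
   the alignment that can still be guaranteed for the adjusted pointer.  */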

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
        return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
         compute the offset to the following null if we don't know where to
         start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
        if (ptr[i] == 0)
          return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
         has no internal zero bytes.  We can assume that the offset falls
         within the bounds of the string; otherwise, the programmer deserves
         what he gets.  Subtract the offset from the length of the string,
         and return that.  This would perhaps not be valid if we were dealing
         with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
          && !TREE_NO_WARNING (src))
        {
          warning_at (loc, 0, "offset outside bounds of constant string");
          TREE_NO_WARNING (src) = 1;
        }
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}
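
/* Illustrative example added by the editor (not part of the original
   source): for an argument that string_constant resolves to the literal
   "hello" with a constant offset of 1, the code above folds the length to
   ssize_int (4).  Anything that does not resolve to a string constant
   simply yields NULL_TREE and is left to the runtime strlen.  */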

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
           || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}
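
/* Illustrative example added by the editor (not part of the original
   source): for the tree form of "foo" + 1 this returns the host pointer to
   "oo"; for a non-constant source, or an offset past the end of the
   literal, it returns 0.  */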

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */

static rtx
c_readstr (const char *str, machine_mode mode)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
        j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
          && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
        j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch)
        ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
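
/* Illustrative worked example added by the editor (not part of the
   original source): reading "ab" in 32-bit SImode on a little-endian
   target places 'a' (0x61) in the low byte and 'b' (0x62) in the next
   one; once the NUL terminator is reached CH stays zero, so the constant
   produced is 0x00006261.  */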

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
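
/* Illustrative example added by the editor (not part of the original
   source): for the INTEGER_CST 'A' (65) this stores 65 in *P and returns 0;
   a non-constant argument, or a target char wider than a HOST_WIDE_INT,
   makes it fail with 1 so callers fall back to the generic expansion.  */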

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
          && (TREE_CODE (exp) == PARM_DECL
              || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
         frame address we return, because target-specific definitions will
         override us.  Therefore frame pointer elimination is OK, and using
         the soft frame pointer is OK.

         For a nonzero count, or a zero count with __builtin_frame_address,
         we require a stable offset from the current frame pointer to the
         previous one, so we must use the hard frame pointer, and
         we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
        tem = frame_pointer_rtx;
      else
        {
          tem = hard_frame_pointer_rtx;

          /* Tell reload not to eliminate the frame pointer.  */
          crtl->accesses_prior_frames = 1;
        }
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
         frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
                        plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                           GET_MODE_SIZE (Pmode))),
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
                  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
                            plus_constant (Pmode, buf_addr,
                                           2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
         previously set to the start of the virtual area corresponding to
         the stacked variables when we branched here and now needs to be
         adjusted to the actual hardware fp value.

         Assignments to virtual registers are converted by
         instantiate_virtual_regs into the corresponding assignment
         to the underlying register (fp in this case) that makes
         the original assignment true.
         So the following insn will actually be decrementing fp by
         STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
         Mark it used (so that the previous assignment remains live once
         the frame pointer is eliminated) and clobbered (to represent the
         implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
         frame pointer, we don't need to restore it.  We assume here
         that if such an elimination is present, it can always be used.
         This is the case on all known machines; if we don't make this
         assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
        if (elim_regs[i].from == ARG_POINTER_REGNUM
            && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
          break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
        {
          /* Now restore our arg pointer from the address at which it
             was saved in our stack frame.  */
          emit_move_insn (crtl->args.internal_arg_pointer,
                          copy_to_reg (get_arg_pointer_save_area ()));
        }
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded to current
     function  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
                                               GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
                                                   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
         from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
        /* We have to pass a value to the nonlocal_goto pattern that will
           get copied into the static_chain pointer, but it does not matter
           what that value is, because builtin_setjmp does not use it.  */
        emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
        {
          lab = copy_to_reg (lab);

          emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
          emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

          emit_move_insn (hard_frame_pointer_rtx, fp);
          emit_stack_restore (SAVE_NONLOCAL, stack);

          emit_use (hard_frame_pointer_rtx);
          emit_use (stack_pointer_rtx);
          emit_indirect_jump (lab);
        }
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
        {
        case 0:
          /* This signifies an ellipsis, any further arguments are all ok.  */
          res = true;
          goto end;
        case VOID_TYPE:
          /* This signifies an endlink, if no arguments remain, return
             true, otherwise return false.  */
          res = !more_const_call_expr_args_p (&iter);
          goto end;
        default:
          /* If no parameters remain or the parameter's code does not
             match the specified code, return false.  Otherwise continue
             checking any remaining arguments.  */
          arg = next_const_call_expr_arg (&iter);
          if (!validate_arg (arg, code))
            goto end;
          break;
        }
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}
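
/* Illustrative example added by the editor (not part of the original
   source): validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   accepts exactly a pointer argument followed by an integer argument,
   while a trailing 0 instead of VOID_TYPE would allow any extra
   arguments.  */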

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
                      plus_constant (Pmode, r_save_area,
                                     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
         not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
         conservatively assume that the target function makes use of it.
         The prologue of functions with nonlocal gotos must therefore
         initialize the GP register to the appropriate value, and we
         must then make sure that this value is live at the point
         of the jump.  (Note that this doesn't necessarily apply
         to targets with a nonlocal_goto pattern; they are free
         to implement it in their own way.  Note also that this is
         a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
          break;
        }
      else if (CALL_P (insn))
        break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
                   memory_address
                   (sa_mode,
                    plus_constant (Pmode, buf_addr,
                                   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
               " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
        return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
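
/* Illustrative example added by the editor (not part of the original
   source): for __builtin_prefetch (p) the missing arguments default as
   described above, so the expansion behaves like
   __builtin_prefetch (p, 0, 3) -- a read prefetch with maximal temporal
   locality.  */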

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
         && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
                     build_array_type (char_type_node,
                                       build_range_type (sizetype,
                                                         size_one_node, len)),
                     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
           && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
                                                     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
                         build_array_type (char_type_node,
                                           build_range_type (sizetype,
                                                             size_zero_node,
                                                             NULL)),
                         exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size (void)
{
  static int size = -1;
  int align;
  unsigned int regno;
  machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            mode = targetm.calls.get_raw_arg_mode (regno);

            gcc_assert (mode != VOIDmode);

            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            apply_args_mode[regno] = VOIDmode;
          }
    }
  return size;
}
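
/* Illustrative note added by the editor (not part of the original source):
   on a hypothetical target with a 4-byte Pmode, no structure-value slot and
   two 4-byte argument registers, the loop above yields a 12-byte block --
   4 bytes for the incoming arg pointer plus 4 bytes per argument register,
   each rounded up to its mode alignment.  */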
1371
1372/* Return the size required for the block returned by __builtin_apply,
1373 and initialize apply_result_mode. */
1374
1375static int
4682ae04 1376apply_result_size (void)
28f4ec01
BS
1377{
1378 static int size = -1;
1379 int align, regno;
ef4bddc2 1380 machine_mode mode;
28f4ec01
BS
1381
1382 /* The values computed by this function never change. */
1383 if (size < 0)
1384 {
1385 size = 0;
1386
1387 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
82f81f18 1388 if (targetm.calls.function_value_regno_p (regno))
28f4ec01 1389 {
ffa88471 1390 mode = targetm.calls.get_raw_result_mode (regno);
33521f7d 1391
298e6adc 1392 gcc_assert (mode != VOIDmode);
28f4ec01
BS
1393
1394 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1395 if (size % align != 0)
1396 size = CEIL (size, align) * align;
1397 size += GET_MODE_SIZE (mode);
1398 apply_result_mode[regno] = mode;
1399 }
1400 else
1401 apply_result_mode[regno] = VOIDmode;
1402
1403 /* Allow targets that use untyped_call and untyped_return to override
1404 the size so that machine-specific information can be stored here. */
1405#ifdef APPLY_RESULT_SIZE
1406 size = APPLY_RESULT_SIZE;
1407#endif
1408 }
1409 return size;
1410}

/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (int savep, rtx result)
{
  int regno, size, align, nelts;
  machine_mode mode;
  rtx reg, mem;
  rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = adjust_address (result, mode, size);
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (mem, reg)
			    : gen_rtx_SET (reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 (void)
{
  rtx registers, tem;
  int size, align, regno;
  machine_mode mode;
  rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

	emit_move_insn (adjust_address (registers, mode, size), tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  tem = copy_to_reg (crtl->args.internal_arg_pointer);
  /* We need the pointer as the caller actually passed it to us, not
     as we might have pretended it was passed.  Make sure it's a valid
     operand, as emit_move_insn isn't expected to handle a PLUS.  */
  if (STACK_GROWS_DOWNWARD)
    tem
      = force_operand (plus_constant (Pmode, tem,
				      crtl->args.pretend_args_size),
		       NULL_RTX);
  emit_move_insn (adjust_address (registers, Pmode, 0), tem);

  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_incoming_value)
    {
      emit_move_insn (adjust_address (registers, Pmode, size),
		      copy_to_reg (struct_incoming_value));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */

static rtx
expand_builtin_apply_args (void)
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    rtx_insn *seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the insns after the NOTE that starts the function.
       If this is inside a start_sequence, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  If internal_arg_pointer is a non-virtual pseudo,
       it needs to be placed after the function that initializes
       that pseudo.  */
    push_topmost_sequence ();
    if (REG_P (crtl->args.internal_arg_pointer)
	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
      emit_insn_before (seq, parm_birth_insn);
    else
      emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
{
  int size, align, regno;
  machine_mode mode;
  rtx incoming_args, result, reg, dest, src;
  rtx_call_insn *call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;
  rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);

  arguments = convert_memory_address (Pmode, arguments);

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
  if (!STACK_GROWS_DOWNWARD)
    incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
					 incoming_args, 0, OPTAB_LIB_WIDEN);

  /* Push a new argument block and copy the arguments.  Do not allow
     the (potential) memcpy call below to interfere with our stack
     manipulations.  */
  do_pending_stack_adjust ();
  NO_DEFER_POP;

  /* Save the stack with nonlocal if available.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
  else
    emit_stack_save (SAVE_BLOCK, &old_stack_level);

  /* Allocate a block of memory onto the stack and copy the memory
     arguments to the outgoing arguments address.  We can pass TRUE
     as the 4th argument because we just saved the stack pointer
     and will restore it right after the call.  */
  allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);

  /* Set DRAP flag to true, even though allocate_dynamic_stack_space
     may have already set current_function_calls_alloca to true.
     current_function_calls_alloca won't be set if argsize is zero,
     so we have to guarantee need_drap is true here.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  dest = virtual_outgoing_args_rtx;
  if (!STACK_GROWS_DOWNWARD)
    {
      if (CONST_INT_P (argsize))
	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
      else
	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
    }
  dest = gen_rtx_MEM (BLKmode, dest);
  set_mem_align (dest, PARM_BOUNDARY);
  src = gen_rtx_MEM (BLKmode, incoming_args);
  set_mem_align (src, PARM_BOUNDARY);
  emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);
  set_mem_align (arguments, PARM_BOUNDARY);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg, adjust_address (arguments, mode, size));
	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value, adjust_address (arguments, Pmode, size));
      emit_move_insn (struct_value, value);
      if (REG_P (struct_value))
	use_reg (&call_fusage, struct_value);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
  if (targetm.have_untyped_call ())
    {
      rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
      emit_call_insn (targetm.gen_untyped_call (mem, result,
						result_vector (1, result)));
    }
  else if (targetm.have_call_value ())
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    gcc_assert (!valreg); /* have_untyped_call required.  */

	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_insn (targetm.gen_call_value (valreg,
					 gen_rtx_MEM (FUNCTION_MODE, function),
					 const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
    }
  else
    gcc_unreachable ();

  /* Find the CALL insn we just emitted, and attach the register usage
     information.  */
  call_insn = last_call_insn ();
  add_function_usage_to (call_insn, call_fusage);

  /* Restore the stack.  */
  if (targetm.have_save_stack_nonlocal ())
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
  else
    emit_stack_restore (SAVE_BLOCK, old_stack_level);
  fixup_args_size_notes (call_insn, get_last_insn (), 0);

  OK_DEFER_POP;

  /* Return the address of the result block.  */
  result = copy_addr_to_reg (XEXP (result, 0));
  return convert_memory_address (ptr_mode, result);
}

/* Perform an untyped return.  */

static void
expand_builtin_return (rtx result)
{
  int size, align, regno;
  machine_mode mode;
  rtx reg;
  rtx_insn *call_fusage = 0;

  result = convert_memory_address (Pmode, result);

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

  if (targetm.have_untyped_return ())
    {
      rtx vector = result_vector (0, result);
      emit_jump_insn (targetm.gen_untyped_return (result, vector));
      emit_barrier ();
      return;
    }

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg, adjust_address (result, mode, size));

	push_to_sequence (call_fusage);
	emit_use (reg);
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insn (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_naked_return ();
}
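
/* Illustrative example (not part of the original source): the three builtins
   expanded above implement GCC's "constructing calls" extension, typically
   used to forward all of the current function's arguments to another
   function, e.g.

       void *args = __builtin_apply_args ();
       void *ret  = __builtin_apply ((void (*) ()) target, args, 128);
       __builtin_return (ret);

   The byte count (128 here) is a caller-supplied upper bound on the size of
   the argument area; a suitable value is target-dependent.  */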

/* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */

static enum type_class
type_to_class (tree type)
{
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:	   return void_type_class;
    case INTEGER_TYPE:	   return integer_type_class;
    case ENUMERAL_TYPE:	   return enumeral_type_class;
    case BOOLEAN_TYPE:	   return boolean_type_class;
    case POINTER_TYPE:	   return pointer_type_class;
    case REFERENCE_TYPE:   return reference_type_class;
    case OFFSET_TYPE:	   return offset_type_class;
    case REAL_TYPE:	   return real_type_class;
    case COMPLEX_TYPE:	   return complex_type_class;
    case FUNCTION_TYPE:	   return function_type_class;
    case METHOD_TYPE:	   return method_type_class;
    case RECORD_TYPE:	   return record_type_class;
    case UNION_TYPE:
    case QUAL_UNION_TYPE:  return union_type_class;
    case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
				   ? string_type_class : array_type_class);
    case LANG_TYPE:	   return lang_type_class;
    default:		   return no_type_class;
    }
}

/* Expand a call EXP to __builtin_classify_type.  */

static rtx
expand_builtin_classify_type (tree exp)
{
  if (call_expr_nargs (exp))
    return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
  return GEN_INT (no_type_class);
}
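
/* Illustrative example (not part of the original source): in user code,
   __builtin_classify_type is mostly seen in vararg-checking macros, e.g.

       __builtin_classify_type (1)     -> integer_type_class
       __builtin_classify_type (1.0)   -> real_type_class

   The result depends only on the class of the argument's type, as computed
   by type_to_class above.  */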

/* This helper macro, meant to be used in mathfn_built_in below,
   determines which among a set of three builtin math functions is
   appropriate for a given type mode.  The `F' and `L' cases are
   automatically generated from the `double' case.  */
#define CASE_MATHFN(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
  fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
  fcodel = BUILT_IN_MATHFN##L ; break;
/* Similar to above, but appends _R after any F/L suffix.  */
#define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
  case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
  fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
  fcodel = BUILT_IN_MATHFN##L_R ; break;

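/* Illustrative expansion (not part of the original source): inside the
   switch in mathfn_built_in_1, CASE_MATHFN (BUILT_IN_SIN) expands to

       case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
	 fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
	 fcodel = BUILT_IN_SINL; break;

   so one macro line covers the double, float and long double variants.  */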
/* Return the mathematical function equivalent to FN but operating directly
   on TYPE, if available.  If IMPLICIT is true use the implicit builtin
   declaration, otherwise use the explicit declaration.  If we can't do the
   conversion, return zero.  */

static tree
mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
{
  enum built_in_function fcode, fcodef, fcodel, fcode2;

  switch (fn)
    {
      CASE_MATHFN (BUILT_IN_ACOS)
      CASE_MATHFN (BUILT_IN_ACOSH)
      CASE_MATHFN (BUILT_IN_ASIN)
      CASE_MATHFN (BUILT_IN_ASINH)
      CASE_MATHFN (BUILT_IN_ATAN)
      CASE_MATHFN (BUILT_IN_ATAN2)
      CASE_MATHFN (BUILT_IN_ATANH)
      CASE_MATHFN (BUILT_IN_CBRT)
      CASE_MATHFN (BUILT_IN_CEIL)
      CASE_MATHFN (BUILT_IN_CEXPI)
      CASE_MATHFN (BUILT_IN_COPYSIGN)
      CASE_MATHFN (BUILT_IN_COS)
      CASE_MATHFN (BUILT_IN_COSH)
      CASE_MATHFN (BUILT_IN_DREM)
      CASE_MATHFN (BUILT_IN_ERF)
      CASE_MATHFN (BUILT_IN_ERFC)
      CASE_MATHFN (BUILT_IN_EXP)
      CASE_MATHFN (BUILT_IN_EXP10)
      CASE_MATHFN (BUILT_IN_EXP2)
      CASE_MATHFN (BUILT_IN_EXPM1)
      CASE_MATHFN (BUILT_IN_FABS)
      CASE_MATHFN (BUILT_IN_FDIM)
      CASE_MATHFN (BUILT_IN_FLOOR)
      CASE_MATHFN (BUILT_IN_FMA)
      CASE_MATHFN (BUILT_IN_FMAX)
      CASE_MATHFN (BUILT_IN_FMIN)
      CASE_MATHFN (BUILT_IN_FMOD)
      CASE_MATHFN (BUILT_IN_FREXP)
      CASE_MATHFN (BUILT_IN_GAMMA)
      CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
      CASE_MATHFN (BUILT_IN_HUGE_VAL)
      CASE_MATHFN (BUILT_IN_HYPOT)
      CASE_MATHFN (BUILT_IN_ILOGB)
      CASE_MATHFN (BUILT_IN_ICEIL)
      CASE_MATHFN (BUILT_IN_IFLOOR)
      CASE_MATHFN (BUILT_IN_INF)
      CASE_MATHFN (BUILT_IN_IRINT)
      CASE_MATHFN (BUILT_IN_IROUND)
      CASE_MATHFN (BUILT_IN_ISINF)
      CASE_MATHFN (BUILT_IN_J0)
      CASE_MATHFN (BUILT_IN_J1)
      CASE_MATHFN (BUILT_IN_JN)
      CASE_MATHFN (BUILT_IN_LCEIL)
      CASE_MATHFN (BUILT_IN_LDEXP)
      CASE_MATHFN (BUILT_IN_LFLOOR)
      CASE_MATHFN (BUILT_IN_LGAMMA)
      CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
      CASE_MATHFN (BUILT_IN_LLCEIL)
      CASE_MATHFN (BUILT_IN_LLFLOOR)
      CASE_MATHFN (BUILT_IN_LLRINT)
      CASE_MATHFN (BUILT_IN_LLROUND)
      CASE_MATHFN (BUILT_IN_LOG)
      CASE_MATHFN (BUILT_IN_LOG10)
      CASE_MATHFN (BUILT_IN_LOG1P)
      CASE_MATHFN (BUILT_IN_LOG2)
      CASE_MATHFN (BUILT_IN_LOGB)
      CASE_MATHFN (BUILT_IN_LRINT)
      CASE_MATHFN (BUILT_IN_LROUND)
      CASE_MATHFN (BUILT_IN_MODF)
      CASE_MATHFN (BUILT_IN_NAN)
      CASE_MATHFN (BUILT_IN_NANS)
      CASE_MATHFN (BUILT_IN_NEARBYINT)
      CASE_MATHFN (BUILT_IN_NEXTAFTER)
      CASE_MATHFN (BUILT_IN_NEXTTOWARD)
      CASE_MATHFN (BUILT_IN_POW)
      CASE_MATHFN (BUILT_IN_POWI)
      CASE_MATHFN (BUILT_IN_POW10)
      CASE_MATHFN (BUILT_IN_REMAINDER)
      CASE_MATHFN (BUILT_IN_REMQUO)
      CASE_MATHFN (BUILT_IN_RINT)
      CASE_MATHFN (BUILT_IN_ROUND)
      CASE_MATHFN (BUILT_IN_SCALB)
      CASE_MATHFN (BUILT_IN_SCALBLN)
      CASE_MATHFN (BUILT_IN_SCALBN)
      CASE_MATHFN (BUILT_IN_SIGNBIT)
      CASE_MATHFN (BUILT_IN_SIGNIFICAND)
      CASE_MATHFN (BUILT_IN_SIN)
      CASE_MATHFN (BUILT_IN_SINCOS)
      CASE_MATHFN (BUILT_IN_SINH)
      CASE_MATHFN (BUILT_IN_SQRT)
      CASE_MATHFN (BUILT_IN_TAN)
      CASE_MATHFN (BUILT_IN_TANH)
      CASE_MATHFN (BUILT_IN_TGAMMA)
      CASE_MATHFN (BUILT_IN_TRUNC)
      CASE_MATHFN (BUILT_IN_Y0)
      CASE_MATHFN (BUILT_IN_Y1)
      CASE_MATHFN (BUILT_IN_YN)

    default:
      return NULL_TREE;
    }

  if (TYPE_MAIN_VARIANT (type) == double_type_node)
    fcode2 = fcode;
  else if (TYPE_MAIN_VARIANT (type) == float_type_node)
    fcode2 = fcodef;
  else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
    fcode2 = fcodel;
  else
    return NULL_TREE;

  if (implicit_p && !builtin_decl_implicit_p (fcode2))
    return NULL_TREE;

  return builtin_decl_explicit (fcode2);
}

/* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */

tree
mathfn_built_in (tree type, enum built_in_function fn)
{
  return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
}
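
/* Illustrative usage (not part of the original source): the fallback paths
   later in this file use this helper roughly as

       tree fndecl = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_FLOOR);

   which yields the decl for floor, floorf or floorl depending on whether
   TREE_TYPE (arg) is double, float or long double, or NULL_TREE if the
   implicit declaration is unavailable.  */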

/* If errno must be maintained, expand the RTL to check if the result,
   TARGET, of a built-in function call, EXP, is NaN, and if so set
   errno to EDOM.  */

static void
expand_errno_check (tree exp, rtx target)
{
  rtx_code_label *lab = gen_label_rtx ();

  /* Test the result; if it is NaN, set errno=EDOM because
     the argument was not in the domain.  */
  do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
			   NULL_RTX, NULL, lab,
			   /* The jump is very likely.  */
			   REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));

#ifdef TARGET_EDOM
  /* If this built-in doesn't throw an exception, set errno directly.  */
  if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
    {
#ifdef GEN_ERRNO_RTX
      rtx errno_rtx = GEN_ERRNO_RTX;
#else
      rtx errno_rtx
	= gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif
      emit_move_insn (errno_rtx,
		      gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
      emit_label (lab);
      return;
    }
#endif

  /* Make sure the library call isn't expanded as a tail call.  */
  CALL_EXPR_TAILCALL (exp) = 0;

  /* We can't set errno=EDOM directly; let the library call do it.
     Pop the arguments right away in case the call gets deleted.  */
  NO_DEFER_POP;
  expand_call (exp, target, 0);
  OK_DEFER_POP;
  emit_label (lab);
}

/* Expand a call to one of the builtin math functions (sqrt, exp, or log).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  bool errno_set = false;
  bool try_widening = false;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SQRT):
      errno_set = ! tree_expr_nonnegative_p (arg);
      try_widening = true;
      builtin_optab = sqrt_optab;
      break;
    CASE_FLT_FN (BUILT_IN_EXP):
      errno_set = true; builtin_optab = exp_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP10):
    CASE_FLT_FN (BUILT_IN_POW10):
      errno_set = true; builtin_optab = exp10_optab; break;
    CASE_FLT_FN (BUILT_IN_EXP2):
      errno_set = true; builtin_optab = exp2_optab; break;
    CASE_FLT_FN (BUILT_IN_EXPM1):
      errno_set = true; builtin_optab = expm1_optab; break;
    CASE_FLT_FN (BUILT_IN_LOGB):
      errno_set = true; builtin_optab = logb_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG):
      errno_set = true; builtin_optab = log_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG10):
      errno_set = true; builtin_optab = log10_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG2):
      errno_set = true; builtin_optab = log2_optab; break;
    CASE_FLT_FN (BUILT_IN_LOG1P):
      errno_set = true; builtin_optab = log1p_optab; break;
    CASE_FLT_FN (BUILT_IN_ASIN):
      builtin_optab = asin_optab; break;
    CASE_FLT_FN (BUILT_IN_ACOS):
      builtin_optab = acos_optab; break;
    CASE_FLT_FN (BUILT_IN_TAN):
      builtin_optab = tan_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN):
      builtin_optab = atan_optab; break;
    CASE_FLT_FN (BUILT_IN_FLOOR):
      builtin_optab = floor_optab; break;
    CASE_FLT_FN (BUILT_IN_CEIL):
      builtin_optab = ceil_optab; break;
    CASE_FLT_FN (BUILT_IN_TRUNC):
      builtin_optab = btrunc_optab; break;
    CASE_FLT_FN (BUILT_IN_ROUND):
      builtin_optab = round_optab; break;
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
      builtin_optab = nearbyint_optab;
      if (flag_trapping_math)
	break;
      /* Else fallthrough and expand as rint.  */
    CASE_FLT_FN (BUILT_IN_RINT):
      builtin_optab = rint_optab; break;
    CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
      builtin_optab = significand_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  /* Before working hard, check whether the instruction is available, but try
     to widen the mode for specific operations.  */
  if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
       || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
      && (!errno_set || !optimize_insn_for_size_p ()))
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  if (errno_set)
	    expand_errno_check (exp, result);

	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}

/* Expand a call to the builtin binary math functions (pow and atan2).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, result;
  rtx_insn *insns;
  int op1_type = REAL_TYPE;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1;
  machine_mode mode;
  bool errno_set = true;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
    CASE_FLT_FN (BUILT_IN_LDEXP):
      op1_type = INTEGER_TYPE;
    default:
      break;
    }

  if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_POW):
      builtin_optab = pow_optab; break;
    CASE_FLT_FN (BUILT_IN_ATAN2):
      builtin_optab = atan2_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALB):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      builtin_optab = scalb_optab; break;
    CASE_FLT_FN (BUILT_IN_SCALBN):
    CASE_FLT_FN (BUILT_IN_SCALBLN):
      if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
	return 0;
      /* Fall through...  */
    CASE_FLT_FN (BUILT_IN_LDEXP):
      builtin_optab = ldexp_optab; break;
    CASE_FLT_FN (BUILT_IN_FMOD):
      builtin_optab = fmod_optab; break;
    CASE_FLT_FN (BUILT_IN_REMAINDER):
    CASE_FLT_FN (BUILT_IN_DREM):
      builtin_optab = remainder_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  if (! flag_errno_math || ! HONOR_NANS (mode))
    errno_set = false;

  if (errno_set && optimize_insn_for_size_p ())
    return 0;

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_binop (mode, builtin_optab, op0, op1,
			 result, 0, OPTAB_DIRECT);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  if (errno_set)
    expand_errno_check (exp, result);

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}

/* Expand a call to the builtin ternary math functions (fma).
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0, op1, op2, result;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg0, arg1, arg2;
  machine_mode mode;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  arg2 = CALL_EXPR_ARG (exp, 2);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_FMA):
      builtin_optab = fma_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  result = gen_reg_rtx (mode);

  /* Always stabilize the argument list.  */
  CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
  CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
  CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);

  op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
  op1 = expand_normal (arg1);
  op2 = expand_normal (arg2);

  start_sequence ();

  /* Compute into RESULT.
     Set RESULT to wherever the result comes back.  */
  result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
			      result, 0);

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns) and call the library function
     with the stabilized argument list.  */
  if (result == 0)
    {
      end_sequence ();
      return expand_call (exp, target, target == const0_rtx);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insn (insns);

  return result;
}

/* Expand a call to the builtin sin and cos math functions.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's
   operands.  */

static rtx
expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
{
  optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_COS):
      builtin_optab = sincos_optab; break;
    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Check if the sincos insn is available, otherwise fall back
     to the sin or cos insn.  */
  if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_FLT_FN (BUILT_IN_SIN):
	builtin_optab = sin_optab; break;
      CASE_FLT_FN (BUILT_IN_COS):
	builtin_optab = cos_optab; break;
      default:
	gcc_unreachable ();
      }

  /* Before working hard, check whether the instruction is available.  */
  if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      /* Compute into RESULT.
	 Set RESULT to wherever the result comes back.  */
      if (builtin_optab == sincos_optab)
	{
	  int ok;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_SIN):
	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
	      break;
	    CASE_FLT_FN (BUILT_IN_COS):
	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  gcc_assert (ok);
	}
      else
	result = expand_unop (mode, builtin_optab, op0, result, 0);

      if (result != 0)
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  return expand_call (exp, target, target == const0_rtx);
}

/* Given an interclass math builtin decl FNDECL and its argument ARG,
   return an RTL instruction code that implements the functionality.
   If that isn't possible or available, return CODE_FOR_nothing.  */

static enum insn_code
interclass_mathfn_icode (tree arg, tree fndecl)
{
  bool errno_set = false;
  optab builtin_optab = unknown_optab;
  machine_mode mode;

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ILOGB):
      errno_set = true; builtin_optab = ilogb_optab; break;
    CASE_FLT_FN (BUILT_IN_ISINF):
      builtin_optab = isinf_optab; break;
    case BUILT_IN_ISNORMAL:
    case BUILT_IN_ISFINITE:
    CASE_FLT_FN (BUILT_IN_FINITE):
    case BUILT_IN_FINITED32:
    case BUILT_IN_FINITED64:
    case BUILT_IN_FINITED128:
    case BUILT_IN_ISINFD32:
    case BUILT_IN_ISINFD64:
    case BUILT_IN_ISINFD128:
      /* These builtins have no optabs (yet).  */
      break;
    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && errno_set)
    return CODE_FOR_nothing;

  /* Optab mode depends on the mode of the input argument.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (builtin_optab)
    return optab_handler (builtin_optab, mode);
  return CODE_FOR_nothing;
}

/* Expand a call to one of the builtin math functions that operate on a
   floating point argument and output an integer result (ilogb, isinf,
   isnan, etc).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_interclass_mathfn (tree exp, rtx target)
{
  enum insn_code icode = CODE_FOR_nothing;
  rtx op0;
  tree fndecl = get_callee_fndecl (exp);
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  icode = interclass_mathfn_icode (arg, fndecl);
  mode = TYPE_MODE (TREE_TYPE (arg));

  if (icode != CODE_FOR_nothing)
    {
      struct expand_operand ops[1];
      rtx_insn *last = get_last_insn ();
      tree orig_arg = arg;

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      if (mode != GET_MODE (op0))
	op0 = convert_to_mode (mode, op0, 0);

      create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_legitimize_operands (icode, 0, 1, ops)
	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
	return ops[0].value;

      delete_insns_since (last);
      CALL_EXPR_ARG (exp, 0) = orig_arg;
    }

  return NULL_RTX;
}

/* Expand a call to the builtin sincos math function.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function in-line.  EXP is the expression that is a call to the builtin
   function.  */

static rtx
expand_builtin_sincos (tree exp)
{
  rtx op0, op1, op2, target1, target2;
  machine_mode mode;
  tree arg, sinp, cosp;
  int result;
  location_t loc = EXPR_LOCATION (exp);
  tree alias_type, alias_off;

  if (!validate_arglist (exp, REAL_TYPE,
			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  sinp = CALL_EXPR_ARG (exp, 1);
  cosp = CALL_EXPR_ARG (exp, 2);

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Check if sincos insn is available, otherwise emit the call.  */
  if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
    return NULL_RTX;

  target1 = gen_reg_rtx (mode);
  target2 = gen_reg_rtx (mode);

  op0 = expand_normal (arg);
  alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
  alias_off = build_int_cst (alias_type, 0);
  op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					sinp, alias_off));
  op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
					cosp, alias_off));

  /* Compute into target1 and target2.
     Set TARGET to wherever the result comes back.  */
  result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
  gcc_assert (result);

  /* Move target1 and target2 to the memory locations indicated
     by op1 and op2.  */
  emit_move_insn (op1, target1);
  emit_move_insn (op2, target2);

  return const0_rtx;
}
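
/* Illustrative example (not part of the original source): for a source-level
   call

       sincos (x, &s, &c);

   on a target whose sincos optab is implemented, the expansion above computes
   both values through one instruction pattern and stores them through the two
   pointer arguments; otherwise NULL_RTX is returned and a normal library call
   to sincos is emitted instead.  */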

/* Expand a call to the internal cexpi builtin to the sincos math function.
   EXP is the expression that is a call to the builtin function; if convenient,
   the result should be placed in TARGET.  */

static rtx
expand_builtin_cexpi (tree exp, rtx target)
{
  tree fndecl = get_callee_fndecl (exp);
  tree arg, type;
  machine_mode mode;
  rtx op0, op1, op2;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  type = TREE_TYPE (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));

  /* Try expanding via a sincos optab, falling back to emitting a libcall
     to sincos or cexp.  We are sure we have sincos or cexp because cexpi
     is only generated from calls to sincos or cexp, or when either of them
     is known to be available.  */
  if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
    {
      op1 = gen_reg_rtx (mode);
      op2 = gen_reg_rtx (mode);

      op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);

      /* Compute into op1 and op2.  */
      expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
    }
  else if (targetm.libc_has_function (function_sincos))
    {
      tree call, fn = NULL_TREE;
      tree top1, top2;
      rtx op1a, op2a;

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
      else
	gcc_unreachable ();

      op1 = assign_temp (TREE_TYPE (arg), 1, 1);
      op2 = assign_temp (TREE_TYPE (arg), 1, 1);
      op1a = copy_addr_to_reg (XEXP (op1, 0));
      op2a = copy_addr_to_reg (XEXP (op2, 0));
      top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
      top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);

      /* Make sure not to fold the sincos call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
				      call, 3, arg, top1, top2));
    }
  else
    {
      tree call, fn = NULL_TREE, narg;
      tree ctype = build_complex_type (type);

      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	fn = builtin_decl_explicit (BUILT_IN_CEXP);
      else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
      else
	gcc_unreachable ();

      /* If we don't have a decl for cexp create one.  This is the
	 friendliest fallback if the user calls __builtin_cexpi
	 without full target C99 function support.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;
	  const char *name = NULL;

	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
	    name = "cexpf";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
	    name = "cexp";
	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
	    name = "cexpl";

	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
	  fn = build_fn_decl (name, fntype);
	}

      narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
			      build_real (type, dconst0), arg);

      /* Make sure not to fold the cexp call again.  */
      call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      return expand_expr (build_call_nary (ctype, call, 1, narg),
			  target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now build the proper return type.  */
  return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
			      make_tree (TREE_TYPE (arg), op2),
			      make_tree (TREE_TYPE (arg), op1)),
		      target, VOIDmode, EXPAND_NORMAL);
}
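
/* Illustrative summary (not part of the original source): for
   __builtin_cexpi (x) the three branches above correspond to

       1.  a sincos optab:  sin and cos computed by one insn pattern;
       2.  libc sincos:     a call sincos (x, &s, &c);
       3.  otherwise:       a call cexp (0.0 + x*1i),

   with the final COMPLEX_EXPR packing cos x as the real part and sin x as
   the imaginary part in cases 1 and 2.  */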

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

static tree
build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  return fn;
}
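
/* Illustrative usage (not part of the original source): the fallback paths
   later in this file rebuild a one-argument call roughly as

       exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
				    1, arg);

   and hand the unfolded CALL_EXPR straight to the expanders, which keeps the
   call from being simplified back into the builtin currently being lowered.  */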

/* Expand a call to one of the builtin rounding functions gcc defines
   as an extension (lfloor and lceil).  As these are gcc extensions we
   do not need to worry about setting errno to EDOM.
   If expanding via optab fails, lower expression to (int)(floor(x)).
   EXP is the expression that is a call to the builtin function;
   if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0, tmp;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  enum built_in_function fallback_fn;
  tree fallback_fndecl;
  machine_mode mode;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_ICEIL):
    CASE_FLT_FN (BUILT_IN_LCEIL):
    CASE_FLT_FN (BUILT_IN_LLCEIL):
      builtin_optab = lceil_optab;
      fallback_fn = BUILT_IN_CEIL;
      break;

    CASE_FLT_FN (BUILT_IN_IFLOOR):
    CASE_FLT_FN (BUILT_IN_LFLOOR):
    CASE_FLT_FN (BUILT_IN_LLFLOOR):
      builtin_optab = lfloor_optab;
      fallback_fn = BUILT_IN_FLOOR;
      break;

    default:
      gcc_unreachable ();
    }

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  target = gen_reg_rtx (mode);

  /* Wrap the computation of the argument in a SAVE_EXPR, as we may
     need to expand the argument again.  This way, we will not perform
     side-effects more than once.  */
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

  op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

  start_sequence ();

  /* Compute into TARGET.  */
  if (expand_sfix_optab (target, op0, builtin_optab))
    {
      /* Output the entire sequence.  */
      insns = get_insns ();
      end_sequence ();
      emit_insn (insns);
      return target;
    }

  /* If we were unable to expand via the builtin, stop the sequence
     (without outputting the insns).  */
  end_sequence ();

  /* Fall back to floating point rounding optab.  */
  fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);

  /* For non-C99 targets we may end up without a fallback fndecl here
     if the user called __builtin_lfloor directly.  In this case emit
     a call to the floor/ceil variants nevertheless.  This should result
     in the best user experience for not full C99 targets.  */
  if (fallback_fndecl == NULL_TREE)
    {
      tree fntype;
      const char *name = NULL;

      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_ICEIL:
	case BUILT_IN_LCEIL:
	case BUILT_IN_LLCEIL:
	  name = "ceil";
	  break;
	case BUILT_IN_ICEILF:
	case BUILT_IN_LCEILF:
	case BUILT_IN_LLCEILF:
	  name = "ceilf";
	  break;
	case BUILT_IN_ICEILL:
	case BUILT_IN_LCEILL:
	case BUILT_IN_LLCEILL:
	  name = "ceill";
	  break;
	case BUILT_IN_IFLOOR:
	case BUILT_IN_LFLOOR:
	case BUILT_IN_LLFLOOR:
	  name = "floor";
	  break;
	case BUILT_IN_IFLOORF:
	case BUILT_IN_LFLOORF:
	case BUILT_IN_LLFLOORF:
	  name = "floorf";
	  break;
	case BUILT_IN_IFLOORL:
	case BUILT_IN_LFLOORL:
	case BUILT_IN_LLFLOORL:
	  name = "floorl";
	  break;
	default:
	  gcc_unreachable ();
	}

      fntype = build_function_type_list (TREE_TYPE (arg),
					 TREE_TYPE (arg), NULL_TREE);
      fallback_fndecl = build_fn_decl (name, fntype);
    }

  exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

  tmp = expand_normal (exp);
  tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));

  /* Truncate the result of the floating point optab to integer
     via expand_fix ().  */
  target = gen_reg_rtx (mode);
  expand_fix (target, tmp, 0);

  return target;
}
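
/* Illustrative example (not part of the original source): on a target with
   no lfloor pattern, a call such as

       long l = __builtin_lfloor (x);

   is lowered by the fallback path above to roughly

       long l = (long) floor (x);

   i.e. a call to the floor decl obtained from mathfn_built_in, followed by
   an expand_fix truncation to the integer mode of the result.  */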

/* Expand a call to one of the builtin math functions doing integer
   conversion (lrint).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_int_roundingfn_2 (tree exp, rtx target)
{
  convert_optab builtin_optab;
  rtx op0;
  rtx_insn *insns;
  tree fndecl = get_callee_fndecl (exp);
  tree arg;
  machine_mode mode;
  enum built_in_function fallback_fn = BUILT_IN_NONE;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    gcc_unreachable ();

  arg = CALL_EXPR_ARG (exp, 0);

  switch (DECL_FUNCTION_CODE (fndecl))
    {
    CASE_FLT_FN (BUILT_IN_IRINT):
      fallback_fn = BUILT_IN_LRINT;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_LLRINT):
      builtin_optab = lrint_optab;
      break;

    CASE_FLT_FN (BUILT_IN_IROUND):
      fallback_fn = BUILT_IN_LROUND;
      /* FALLTHRU */
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_LLROUND):
      builtin_optab = lround_optab;
      break;

    default:
      gcc_unreachable ();
    }

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
    return NULL_RTX;

  /* Make a suitable register to place result in.  */
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* There's no easy way to detect the case we need to set EDOM.  */
  if (!flag_errno_math)
    {
      rtx result = gen_reg_rtx (mode);

      /* Wrap the computation of the argument in a SAVE_EXPR, as we may
	 need to expand the argument again.  This way, we will not perform
	 side-effects more than once.  */
      CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);

      op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);

      start_sequence ();

      if (expand_sfix_optab (result, op0, builtin_optab))
	{
	  /* Output the entire sequence.  */
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn (insns);
	  return result;
	}

      /* If we were unable to expand via the builtin, stop the sequence
	 (without outputting the insns) and call the library function
	 with the stabilized argument list.  */
      end_sequence ();
    }

  if (fallback_fn != BUILT_IN_NONE)
    {
      /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
	 targets, (int) round (x) should never be transformed into
	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
	 a call to lround in the hope that the target provides at least some
	 C99 functions.  This should result in the best user experience for
	 not full C99 targets.  */
      tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
						fallback_fn, 0);

      exp = build_call_nofold_loc (EXPR_LOCATION (exp),
				   fallback_fndecl, 1, arg);

      target = expand_call (exp, NULL_RTX, target == const0_rtx);
      target = maybe_emit_group_store (target, TREE_TYPE (exp));
      return convert_to_mode (mode, target, 0);
    }

  return expand_call (exp, target, target == const0_rtx);
}

/* Expand a call to the powi built-in mathematical function.  Return NULL_RTX
   if a normal call should be emitted rather than expanding the function
   in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.  */

static rtx
expand_builtin_powi (tree exp, rtx target)
{
  tree arg0, arg1;
  rtx op0, op1;
  machine_mode mode;
  machine_mode mode2;

  if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg0 = CALL_EXPR_ARG (exp, 0);
  arg1 = CALL_EXPR_ARG (exp, 1);
  mode = TYPE_MODE (TREE_TYPE (exp));

  /* Emit a libcall to libgcc.  */

  /* Mode of the 2nd argument must match that of an int.  */
  mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);

  if (target == NULL_RTX)
    target = gen_reg_rtx (mode);

  op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != mode)
    op0 = convert_to_mode (mode, op0, 0);
  op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
  if (GET_MODE (op1) != mode2)
    op1 = convert_to_mode (mode2, op1, 0);

  target = emit_library_call_value (optab_libfunc (powi_optab, mode),
				    target, LCT_CONST, mode, 2,
				    op0, mode, op1, mode2);

  return target;
}
2949
b8698a0f 2950/* Expand expression EXP which is a call to the strlen builtin. Return
5039610b 2951 NULL_RTX if we failed; the caller should emit a normal call, otherwise
0e9295cf 2952 try to get the result in TARGET, if convenient. */
3bdf5ad1 2953
28f4ec01 2954static rtx
5039610b 2955expand_builtin_strlen (tree exp, rtx target,
ef4bddc2 2956 machine_mode target_mode)
28f4ec01 2957{
5039610b
SL
2958 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
28f4ec01
BS
2960 else
2961 {
a5c7d693 2962 struct expand_operand ops[4];
dd05e4fa 2963 rtx pat;
5039610b
SL
2964 tree len;
2965 tree src = CALL_EXPR_ARG (exp, 0);
58f4cf2a
DM
2966 rtx src_reg;
2967 rtx_insn *before_strlen;
ef4bddc2 2968 machine_mode insn_mode = target_mode;
a544cfd2 2969 enum insn_code icode = CODE_FOR_nothing;
1be38ccb 2970 unsigned int align;
712b7a05
RS
2971
2972 /* If the length can be computed at compile-time, return it. */
ae808627 2973 len = c_strlen (src, 0);
712b7a05 2974 if (len)
8c9b38d7 2975 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
712b7a05 2976
ae808627
JJ
2977 /* If the length can be computed at compile-time and is a constant
2978 integer, but there are side-effects in src, evaluate
2979 src for side-effects, then return len.
2980 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2981 can be optimized into: i++; x = 3; */
2982 len = c_strlen (src, 1);
2983 if (len && TREE_CODE (len) == INTEGER_CST)
2984 {
2985 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2986 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2987 }
2988
0eb77834 2989 align = get_pointer_alignment (src) / BITS_PER_UNIT;
28f4ec01 2990
28f4ec01
BS
2991 /* If SRC is not a pointer type, don't do this operation inline. */
2992 if (align == 0)
5039610b 2993 return NULL_RTX;
28f4ec01 2994
dd05e4fa 2995 /* Bail out if we can't compute strlen in the right mode. */
28f4ec01
BS
2996 while (insn_mode != VOIDmode)
2997 {
947131ba 2998 icode = optab_handler (strlen_optab, insn_mode);
28f4ec01 2999 if (icode != CODE_FOR_nothing)
54e43c67 3000 break;
28f4ec01
BS
3001
3002 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3003 }
3004 if (insn_mode == VOIDmode)
5039610b 3005 return NULL_RTX;
28f4ec01 3006
dd05e4fa
RH
3007 /* Make a place to hold the source address. We will not expand
3008 the actual source until we are sure that the expansion will
3009 not fail -- there are trees that cannot be expanded twice. */
3010 src_reg = gen_reg_rtx (Pmode);
28f4ec01 3011
dd05e4fa
RH
3012 /* Mark the beginning of the strlen sequence so we can emit the
3013 source operand later. */
5ab2f7b7 3014 before_strlen = get_last_insn ();
28f4ec01 3015
a5c7d693
RS
3016 create_output_operand (&ops[0], target, insn_mode);
3017 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3018 create_integer_operand (&ops[2], 0);
3019 create_integer_operand (&ops[3], align);
3020 if (!maybe_expand_insn (icode, 4, ops))
5039610b 3021 return NULL_RTX;
dd05e4fa
RH
3022
3023 /* Now that we are assured of success, expand the source. */
3024 start_sequence ();
fa465762 3025 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
dd05e4fa 3026 if (pat != src_reg)
fa465762
L
3027 {
3028#ifdef POINTERS_EXTEND_UNSIGNED
3029 if (GET_MODE (pat) != Pmode)
3030 pat = convert_to_mode (Pmode, pat,
3031 POINTERS_EXTEND_UNSIGNED);
3032#endif
3033 emit_move_insn (src_reg, pat);
3034 }
2f937369 3035 pat = get_insns ();
dd05e4fa 3036 end_sequence ();
fca9f642
RH
3037
3038 if (before_strlen)
3039 emit_insn_after (pat, before_strlen);
3040 else
3041 emit_insn_before (pat, get_insns ());
28f4ec01
BS
3042
3043 /* Return the value in the proper mode for this function. */
a5c7d693
RS
3044 if (GET_MODE (ops[0].value) == target_mode)
3045 target = ops[0].value;
28f4ec01 3046 else if (target != 0)
a5c7d693 3047 convert_move (target, ops[0].value, 0);
28f4ec01 3048 else
a5c7d693 3049 target = convert_to_mode (target_mode, ops[0].value, 0);
dd05e4fa
RH
3050
3051 return target;
28f4ec01
BS
3052 }
3053}
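/* Illustrative sketch (not part of GCC): the constant-length case handled
   above.  When c_strlen can compute the length at compile time, the call is
   replaced by that constant; any side effects in the argument are still
   evaluated.  The helper name below is invented for the example.  */
#include <string.h>

static size_t
strlen_fold_sketch (int i)
{
  /* Expands to roughly: i++; return 3;  -- no runtime strlen call.  */
  return strlen (i++ ? "xfoo" + 1 : "bar");
}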
3054
57814e5e
JJ
3055/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3056 bytes from constant string DATA + OFFSET and return it as target
3057 constant. */
3058
3059static rtx
4682ae04 3060builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
ef4bddc2 3061 machine_mode mode)
57814e5e
JJ
3062{
3063 const char *str = (const char *) data;
3064
298e6adc
NS
3065 gcc_assert (offset >= 0
3066 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3067 <= strlen (str) + 1));
57814e5e
JJ
3068
3069 return c_readstr (str + offset, mode);
3070}
3071
3918b108 3072/* LEN specifies the length of the block for the memcpy/memset operation.
82bb7d4e
JH
3073 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3074 In some cases we can make a very likely guess on the max size, which
3075 we then store in PROBABLE_MAX_SIZE. */
3918b108
JH
3076
3077static void
3078determine_block_size (tree len, rtx len_rtx,
3079 unsigned HOST_WIDE_INT *min_size,
82bb7d4e
JH
3080 unsigned HOST_WIDE_INT *max_size,
3081 unsigned HOST_WIDE_INT *probable_max_size)
3918b108
JH
3082{
3083 if (CONST_INT_P (len_rtx))
3084 {
2738b4c7 3085 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3918b108
JH
3086 return;
3087 }
3088 else
3089 {
807e902e 3090 wide_int min, max;
82bb7d4e
JH
3091 enum value_range_type range_type = VR_UNDEFINED;
3092
3093 /* Determine bounds from the type. */
3094 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3095 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3096 else
3097 *min_size = 0;
3098 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2738b4c7
JJ
3099 *probable_max_size = *max_size
3100 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
82bb7d4e
JH
3101 else
3102 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3103
3104 if (TREE_CODE (len) == SSA_NAME)
3105 range_type = get_range_info (len, &min, &max);
3106 if (range_type == VR_RANGE)
3918b108 3107 {
807e902e 3108 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3918b108 3109 *min_size = min.to_uhwi ();
807e902e 3110 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
82bb7d4e 3111 *probable_max_size = *max_size = max.to_uhwi ();
3918b108 3112 }
82bb7d4e 3113 else if (range_type == VR_ANTI_RANGE)
3918b108 3114 {
70ec86ee 3115 /* An anti range 0...N lets us determine that the minimal size is N+1. */
807e902e 3116 if (min == 0)
82bb7d4e 3117 {
807e902e
KZ
3118 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3119 *min_size = max.to_uhwi () + 1;
82bb7d4e
JH
3120 }
3121 /* Code like
3122
3123 int n;
3124 if (n < 100)
70ec86ee 3125 memcpy (a, b, n)
82bb7d4e
JH
3126
3127 produces an anti range allowing negative values of N. We can
3128 still use that information and guess that N is not negative.
3129 */
807e902e
KZ
3130 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3131 *probable_max_size = min.to_uhwi () - 1;
3918b108
JH
3132 }
3133 }
3134 gcc_checking_assert (*max_size <=
3135 (unsigned HOST_WIDE_INT)
3136 GET_MODE_MASK (GET_MODE (len_rtx)));
3137}
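/* Illustrative sketch (not part of GCC): the VR_RANGE clamping done by
   determine_block_size, restated on plain integers.  Type-derived bounds
   are tightened by a known [lo, hi] value range; all names are invented
   for the example.  */
static void
block_size_sketch (unsigned long type_min, unsigned long type_max,
		   int have_range, unsigned long lo, unsigned long hi,
		   unsigned long *min_size, unsigned long *max_size)
{
  *min_size = type_min;
  *max_size = type_max;
  if (have_range)
    {
      if (*min_size < lo)
	*min_size = lo;
      if (*max_size > hi)
	*max_size = hi;
    }
}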
3138
edcf72f3
IE
3139/* Helper function to do the actual work for expand_builtin_memcpy. */
3140
3141static rtx
3142expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
3143{
3144 const char *src_str;
3145 unsigned int src_align = get_pointer_alignment (src);
3146 unsigned int dest_align = get_pointer_alignment (dest);
3147 rtx dest_mem, src_mem, dest_addr, len_rtx;
3148 HOST_WIDE_INT expected_size = -1;
3149 unsigned int expected_align = 0;
3150 unsigned HOST_WIDE_INT min_size;
3151 unsigned HOST_WIDE_INT max_size;
3152 unsigned HOST_WIDE_INT probable_max_size;
3153
3154 /* If DEST is not a pointer type, call the normal function. */
3155 if (dest_align == 0)
3156 return NULL_RTX;
3157
3158 /* If SRC is not a pointer type, don't do this
3159 operation in-line. */
3160 if (src_align == 0)
3161 return NULL_RTX;
3162
3163 if (currently_expanding_gimple_stmt)
3164 stringop_block_profile (currently_expanding_gimple_stmt,
3165 &expected_align, &expected_size);
3166
3167 if (expected_align < dest_align)
3168 expected_align = dest_align;
3169 dest_mem = get_memory_rtx (dest, len);
3170 set_mem_align (dest_mem, dest_align);
3171 len_rtx = expand_normal (len);
3172 determine_block_size (len, len_rtx, &min_size, &max_size,
3173 &probable_max_size);
3174 src_str = c_getstr (src);
3175
3176 /* If SRC is a string constant and block move would be done
3177 by pieces, we can avoid loading the string from memory
3178 and only store the computed constants. */
3179 if (src_str
3180 && CONST_INT_P (len_rtx)
3181 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3182 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3183 CONST_CAST (char *, src_str),
3184 dest_align, false))
3185 {
3186 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3187 builtin_memcpy_read_str,
3188 CONST_CAST (char *, src_str),
3189 dest_align, false, 0);
3190 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3191 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3192 return dest_mem;
3193 }
3194
3195 src_mem = get_memory_rtx (src, len);
3196 set_mem_align (src_mem, src_align);
3197
3198 /* Copy word part most expediently. */
3199 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3200 CALL_EXPR_TAILCALL (exp)
3201 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3202 expected_align, expected_size,
3203 min_size, max_size, probable_max_size);
3204
3205 if (dest_addr == 0)
3206 {
3207 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3208 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3209 }
3210
3211 return dest_addr;
3212}
3213
5039610b
SL
3214/* Expand a call EXP to the memcpy builtin.
3215 Return NULL_RTX if we failed; the caller should emit a normal call,
9cb65f92 3216 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e 3217 mode MODE if that's convenient). */
5039610b 3218
28f4ec01 3219static rtx
44e10129 3220expand_builtin_memcpy (tree exp, rtx target)
28f4ec01 3221{
5039610b
SL
3222 if (!validate_arglist (exp,
3223 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3224 return NULL_RTX;
28f4ec01
BS
3225 else
3226 {
5039610b
SL
3227 tree dest = CALL_EXPR_ARG (exp, 0);
3228 tree src = CALL_EXPR_ARG (exp, 1);
3229 tree len = CALL_EXPR_ARG (exp, 2);
edcf72f3
IE
3230 return expand_builtin_memcpy_args (dest, src, len, target, exp);
3231 }
3232}
57814e5e 3233
edcf72f3
IE
3234/* Expand an instrumented call EXP to the memcpy builtin.
3235 Return NULL_RTX if we failed; the caller should emit a normal call,
3236 otherwise try to get the result in TARGET, if convenient (and in
3237 mode MODE if that's convenient). */
28f4ec01 3238
edcf72f3
IE
3239static rtx
3240expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3241{
3242 if (!validate_arglist (exp,
3243 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3244 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3245 INTEGER_TYPE, VOID_TYPE))
3246 return NULL_RTX;
3247 else
3248 {
3249 tree dest = CALL_EXPR_ARG (exp, 0);
3250 tree src = CALL_EXPR_ARG (exp, 2);
3251 tree len = CALL_EXPR_ARG (exp, 4);
3252 rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
28f4ec01 3253
edcf72f3
IE
3254 /* Return src bounds with the result. */
3255 if (res)
aa0f70e6 3256 {
30975f63 3257 rtx bnd = force_reg (targetm.chkp_bound_mode (),
edcf72f3
IE
3258 expand_normal (CALL_EXPR_ARG (exp, 1)));
3259 res = chkp_join_splitted_slot (res, bnd);
aa0f70e6 3260 }
edcf72f3 3261 return res;
28f4ec01
BS
3262 }
3263}
3264
5039610b
SL
3265/* Expand a call EXP to the mempcpy builtin.
3266 Return NULL_RTX if we failed; the caller should emit a normal call,
e3e9f108 3267 otherwise try to get the result in TARGET, if convenient (and in
8fd3cf4e
JJ
3268 mode MODE if that's convenient). If ENDP is 0 return the
3269 destination pointer, if ENDP is 1 return the end pointer ala
3270 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3271 stpcpy. */
e3e9f108
JJ
3272
3273static rtx
ef4bddc2 3274expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
e3e9f108 3275{
5039610b
SL
3276 if (!validate_arglist (exp,
3277 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3278 return NULL_RTX;
3279 else
3280 {
3281 tree dest = CALL_EXPR_ARG (exp, 0);
3282 tree src = CALL_EXPR_ARG (exp, 1);
3283 tree len = CALL_EXPR_ARG (exp, 2);
3284 return expand_builtin_mempcpy_args (dest, src, len,
edcf72f3
IE
3285 target, mode, /*endp=*/ 1,
3286 exp);
3287 }
3288}
3289
3290/* Expand an instrumented call EXP to the mempcpy builtin.
3291 Return NULL_RTX if we failed; the caller should emit a normal call,
3292 otherwise try to get the result in TARGET, if convenient (and in
3293 mode MODE if that's convenient). */
3294
3295static rtx
3296expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3297{
3298 if (!validate_arglist (exp,
3299 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3300 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3301 INTEGER_TYPE, VOID_TYPE))
3302 return NULL_RTX;
3303 else
3304 {
3305 tree dest = CALL_EXPR_ARG (exp, 0);
3306 tree src = CALL_EXPR_ARG (exp, 2);
3307 tree len = CALL_EXPR_ARG (exp, 4);
3308 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3309 mode, 1, exp);
3310
3311 /* Return src bounds with the result. */
3312 if (res)
3313 {
30975f63 3314 rtx bnd = force_reg (targetm.chkp_bound_mode (),
edcf72f3
IE
3315 expand_normal (CALL_EXPR_ARG (exp, 1)));
3316 res = chkp_join_splitted_slot (res, bnd);
3317 }
3318 return res;
5039610b
SL
3319 }
3320}
3321
3322/* Helper function to do the actual work for expand_builtin_mempcpy. The
3323 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3324 so that this can also be called without constructing an actual CALL_EXPR.
44e10129
MM
3325 The other arguments and return value are the same as for
3326 expand_builtin_mempcpy. */
5039610b
SL
3327
3328static rtx
44e10129 3329expand_builtin_mempcpy_args (tree dest, tree src, tree len,
edcf72f3
IE
3330 rtx target, machine_mode mode, int endp,
3331 tree orig_exp)
5039610b 3332{
edcf72f3
IE
3333 tree fndecl = get_callee_fndecl (orig_exp);
3334
5039610b 3335 /* If return value is ignored, transform mempcpy into memcpy. */
edcf72f3
IE
3336 if (target == const0_rtx
3337 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3338 && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3339 {
3340 tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3341 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3342 dest, src, len);
3343 return expand_expr (result, target, mode, EXPAND_NORMAL);
3344 }
3345 else if (target == const0_rtx
3346 && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
8fd3cf4e 3347 {
e79983f4 3348 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
aa493694
JJ
3349 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3350 dest, src, len);
0d2a6e08 3351 return expand_expr (result, target, mode, EXPAND_NORMAL);
8fd3cf4e 3352 }
e3e9f108
JJ
3353 else
3354 {
8fd3cf4e 3355 const char *src_str;
0eb77834
RG
3356 unsigned int src_align = get_pointer_alignment (src);
3357 unsigned int dest_align = get_pointer_alignment (dest);
8fd3cf4e 3358 rtx dest_mem, src_mem, len_rtx;
c22cacf3 3359
4d9ef6a9 3360 /* If either SRC or DEST is not a pointer type, don't do this
c22cacf3 3361 operation in-line. */
4d9ef6a9 3362 if (dest_align == 0 || src_align == 0)
5039610b 3363 return NULL_RTX;
8fd3cf4e 3364
ea82015c 3365 /* If LEN is not constant, call the normal function. */
cc269bb6 3366 if (! tree_fits_uhwi_p (len))
5039610b 3367 return NULL_RTX;
33521f7d 3368
84217346 3369 len_rtx = expand_normal (len);
8fd3cf4e 3370 src_str = c_getstr (src);
e3e9f108 3371
8fd3cf4e
JJ
3372 /* If SRC is a string constant and block move would be done
3373 by pieces, we can avoid loading the string from memory
3374 and only store the computed constants. */
3375 if (src_str
481683e1 3376 && CONST_INT_P (len_rtx)
8fd3cf4e
JJ
3377 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3378 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
f883e0a7
KG
3379 CONST_CAST (char *, src_str),
3380 dest_align, false))
8fd3cf4e 3381 {
435bb2a1 3382 dest_mem = get_memory_rtx (dest, len);
8fd3cf4e
JJ
3383 set_mem_align (dest_mem, dest_align);
3384 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3385 builtin_memcpy_read_str,
f883e0a7
KG
3386 CONST_CAST (char *, src_str),
3387 dest_align, false, endp);
8fd3cf4e 3388 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5ae6cd0d 3389 dest_mem = convert_memory_address (ptr_mode, dest_mem);
8fd3cf4e 3390 return dest_mem;
e3e9f108
JJ
3391 }
3392
481683e1 3393 if (CONST_INT_P (len_rtx)
8fd3cf4e
JJ
3394 && can_move_by_pieces (INTVAL (len_rtx),
3395 MIN (dest_align, src_align)))
3396 {
435bb2a1 3397 dest_mem = get_memory_rtx (dest, len);
8fd3cf4e 3398 set_mem_align (dest_mem, dest_align);
435bb2a1 3399 src_mem = get_memory_rtx (src, len);
8fd3cf4e
JJ
3400 set_mem_align (src_mem, src_align);
3401 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3402 MIN (dest_align, src_align), endp);
3403 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5ae6cd0d 3404 dest_mem = convert_memory_address (ptr_mode, dest_mem);
8fd3cf4e
JJ
3405 return dest_mem;
3406 }
3407
5039610b 3408 return NULL_RTX;
e3e9f108
JJ
3409 }
3410}
3411
5039610b 3412/* Expand into a movstr instruction, if one is available. Return NULL_RTX if
beed8fc0
AO
3413 we failed; the caller should emit a normal call, otherwise try to
3414 get the result in TARGET, if convenient. If ENDP is 0 return the
3415 destination pointer, if ENDP is 1 return the end pointer ala
3416 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3417 stpcpy. */
3418
3419static rtx
3420expand_movstr (tree dest, tree src, rtx target, int endp)
3421{
a5c7d693 3422 struct expand_operand ops[3];
beed8fc0
AO
3423 rtx dest_mem;
3424 rtx src_mem;
beed8fc0 3425
7cff0471 3426 if (!targetm.have_movstr ())
5039610b 3427 return NULL_RTX;
beed8fc0 3428
435bb2a1
JJ
3429 dest_mem = get_memory_rtx (dest, NULL);
3430 src_mem = get_memory_rtx (src, NULL);
beed8fc0
AO
3431 if (!endp)
3432 {
3433 target = force_reg (Pmode, XEXP (dest_mem, 0));
3434 dest_mem = replace_equiv_address (dest_mem, target);
beed8fc0
AO
3435 }
3436
a5c7d693
RS
3437 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3438 create_fixed_operand (&ops[1], dest_mem);
3439 create_fixed_operand (&ops[2], src_mem);
7cff0471 3440 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
7c5425fa 3441 return NULL_RTX;
beed8fc0 3442
a5c7d693 3443 if (endp && target != const0_rtx)
7ce3fc8f 3444 {
a5c7d693
RS
3445 target = ops[0].value;
3446 /* movstr is supposed to set end to the address of the NUL
3447 terminator. If the caller requested a mempcpy-like return value,
3448 adjust it. */
3449 if (endp == 1)
3450 {
0a81f074
RS
3451 rtx tem = plus_constant (GET_MODE (target),
3452 gen_lowpart (GET_MODE (target), target), 1);
a5c7d693
RS
3453 emit_move_insn (target, force_operand (tem, NULL_RTX));
3454 }
7ce3fc8f 3455 }
beed8fc0
AO
3456 return target;
3457}
3458
b8698a0f
L
3459/* Expand expression EXP, which is a call to the strcpy builtin. Return
3460 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5039610b 3461 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 3462 convenient). */
fed3cef0 3463
28f4ec01 3464static rtx
44e10129 3465expand_builtin_strcpy (tree exp, rtx target)
28f4ec01 3466{
5039610b
SL
3467 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3468 {
3469 tree dest = CALL_EXPR_ARG (exp, 0);
3470 tree src = CALL_EXPR_ARG (exp, 1);
44e10129 3471 return expand_builtin_strcpy_args (dest, src, target);
5039610b
SL
3472 }
3473 return NULL_RTX;
3474}
3475
3476/* Helper function to do the actual work for expand_builtin_strcpy. The
3477 arguments to the builtin_strcpy call DEST and SRC are broken out
3478 so that this can also be called without constructing an actual CALL_EXPR.
3479 The other arguments and return value are the same as for
3480 expand_builtin_strcpy. */
3481
3482static rtx
44e10129 3483expand_builtin_strcpy_args (tree dest, tree src, rtx target)
5039610b 3484{
5039610b 3485 return expand_movstr (dest, src, target, /*endp=*/0);
28f4ec01
BS
3486}
3487
5039610b
SL
3488/* Expand a call EXP to the stpcpy builtin.
3489 Return NULL_RTX if we failed; the caller should emit a normal call,
9cb65f92
KG
3490 otherwise try to get the result in TARGET, if convenient (and in
3491 mode MODE if that's convenient). */
3492
3493static rtx
ef4bddc2 3494expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
9cb65f92 3495{
5039610b 3496 tree dst, src;
db3927fb 3497 location_t loc = EXPR_LOCATION (exp);
5039610b
SL
3498
3499 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3500 return NULL_RTX;
3501
3502 dst = CALL_EXPR_ARG (exp, 0);
3503 src = CALL_EXPR_ARG (exp, 1);
3504
beed8fc0 3505 /* If return value is ignored, transform stpcpy into strcpy. */
e79983f4 3506 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
ad4319ec 3507 {
e79983f4 3508 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
aa493694 3509 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
0d2a6e08 3510 return expand_expr (result, target, mode, EXPAND_NORMAL);
ad4319ec 3511 }
9cb65f92
KG
3512 else
3513 {
5039610b 3514 tree len, lenp1;
beed8fc0 3515 rtx ret;
e3e9f108 3516
8fd3cf4e 3517 /* Ensure we get an actual string whose length can be evaluated at
c22cacf3
MS
3518 compile-time, not an expression containing a string. This is
3519 because the latter will potentially produce pessimized code
3520 when used to produce the return value. */
ae808627 3521 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
5039610b 3522 return expand_movstr (dst, src, target, /*endp=*/2);
9cb65f92 3523
db3927fb 3524 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
44e10129 3525 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
edcf72f3
IE
3526 target, mode, /*endp=*/2,
3527 exp);
beed8fc0
AO
3528
3529 if (ret)
3530 return ret;
3531
3532 if (TREE_CODE (len) == INTEGER_CST)
3533 {
84217346 3534 rtx len_rtx = expand_normal (len);
beed8fc0 3535
481683e1 3536 if (CONST_INT_P (len_rtx))
beed8fc0 3537 {
44e10129 3538 ret = expand_builtin_strcpy_args (dst, src, target);
beed8fc0
AO
3539
3540 if (ret)
3541 {
3542 if (! target)
58ec6ece
SE
3543 {
3544 if (mode != VOIDmode)
3545 target = gen_reg_rtx (mode);
3546 else
3547 target = gen_reg_rtx (GET_MODE (ret));
3548 }
beed8fc0
AO
3549 if (GET_MODE (target) != GET_MODE (ret))
3550 ret = gen_lowpart (GET_MODE (target), ret);
3551
0a81f074 3552 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
7ce3fc8f 3553 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
298e6adc 3554 gcc_assert (ret);
beed8fc0
AO
3555
3556 return target;
3557 }
3558 }
3559 }
3560
5039610b 3561 return expand_movstr (dst, src, target, /*endp=*/2);
9cb65f92
KG
3562 }
3563}
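/* Illustrative sketch (not part of GCC): the identity used above when the
   source length is known.  stpcpy is expanded through mempcpy with length
   strlen (src) + 1, and the returned pointer addresses the terminating NUL.
   The helper name is invented for the example.  */
#include <string.h>

static char *
stpcpy_sketch (char *dst, const char *src)
{
  size_t len = strlen (src);	/* known at compile time in the case above */
  memcpy (dst, src, len + 1);	/* copy including the NUL */
  return dst + len;		/* stpcpy's return value */
}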
3564
57814e5e
JJ
3565/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3566 bytes from constant string DATA + OFFSET and return it as target
3567 constant. */
3568
14a43348 3569rtx
4682ae04 3570builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
ef4bddc2 3571 machine_mode mode)
57814e5e
JJ
3572{
3573 const char *str = (const char *) data;
3574
3575 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3576 return const0_rtx;
3577
3578 return c_readstr (str + offset, mode);
3579}
3580
b8698a0f 3581/* Expand expression EXP, which is a call to the strncpy builtin. Return
5039610b 3582 NULL_RTX if we failed; the caller should emit a normal call. */
da9e9f08
KG
3583
3584static rtx
44e10129 3585expand_builtin_strncpy (tree exp, rtx target)
da9e9f08 3586{
db3927fb 3587 location_t loc = EXPR_LOCATION (exp);
5039610b
SL
3588
3589 if (validate_arglist (exp,
3590 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
da9e9f08 3591 {
5039610b
SL
3592 tree dest = CALL_EXPR_ARG (exp, 0);
3593 tree src = CALL_EXPR_ARG (exp, 1);
3594 tree len = CALL_EXPR_ARG (exp, 2);
3595 tree slen = c_strlen (src, 1);
57814e5e 3596
559837f7 3597 /* We must be passed a constant len and src parameter. */
cc269bb6 3598 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
5039610b 3599 return NULL_RTX;
da9e9f08 3600
db3927fb 3601 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
da9e9f08
KG
3602
3603 /* We're required to pad with trailing zeros if the requested
c22cacf3 3604 len is greater than strlen(s2)+1. In that case try to
57814e5e 3605 use store_by_pieces; if that fails, punt. */
da9e9f08 3606 if (tree_int_cst_lt (slen, len))
57814e5e 3607 {
0eb77834 3608 unsigned int dest_align = get_pointer_alignment (dest);
5039610b 3609 const char *p = c_getstr (src);
57814e5e
JJ
3610 rtx dest_mem;
3611
cc269bb6 3612 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
ae7e9ddd 3613 || !can_store_by_pieces (tree_to_uhwi (len),
57814e5e 3614 builtin_strncpy_read_str,
f883e0a7
KG
3615 CONST_CAST (char *, p),
3616 dest_align, false))
5039610b 3617 return NULL_RTX;
57814e5e 3618
435bb2a1 3619 dest_mem = get_memory_rtx (dest, len);
ae7e9ddd 3620 store_by_pieces (dest_mem, tree_to_uhwi (len),
57814e5e 3621 builtin_strncpy_read_str,
f883e0a7 3622 CONST_CAST (char *, p), dest_align, false, 0);
44e10129 3623 dest_mem = force_operand (XEXP (dest_mem, 0), target);
5ae6cd0d 3624 dest_mem = convert_memory_address (ptr_mode, dest_mem);
aa0f70e6 3625 return dest_mem;
57814e5e 3626 }
da9e9f08 3627 }
5039610b 3628 return NULL_RTX;
da9e9f08
KG
3629}
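/* Illustrative sketch (not part of GCC): the padding rule mentioned above.
   When LEN exceeds strlen (SRC) + 1, strncpy must zero-fill the rest of the
   destination, which is why the expansion insists that store_by_pieces can
   emit the whole block.  */
#include <string.h>

static void
strncpy_pad_sketch (char (*buf)[8])
{
  strncpy (*buf, "ab", sizeof *buf);
  /* *buf now holds 'a', 'b' followed by six '\0' bytes.  */
}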
3630
ab937357
JJ
3631/* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3632 bytes from constant string DATA + OFFSET and return it as target
3633 constant. */
3634
34d85166 3635rtx
4682ae04 3636builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
ef4bddc2 3637 machine_mode mode)
ab937357
JJ
3638{
3639 const char *c = (const char *) data;
f883e0a7 3640 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
ab937357
JJ
3641
3642 memset (p, *c, GET_MODE_SIZE (mode));
3643
3644 return c_readstr (p, mode);
3645}
3646
1a887f86
RS
3647/* Callback routine for store_by_pieces. Return the RTL of a register
3648 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3649 char value given in the RTL register data. For example, if mode is
3650 4 bytes wide, return the RTL for 0x01010101*data. */
3651
3652static rtx
4682ae04 3653builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
ef4bddc2 3654 machine_mode mode)
1a887f86
RS
3655{
3656 rtx target, coeff;
3657 size_t size;
3658 char *p;
3659
3660 size = GET_MODE_SIZE (mode);
5ab2f7b7
KH
3661 if (size == 1)
3662 return (rtx) data;
1a887f86 3663
f883e0a7 3664 p = XALLOCAVEC (char, size);
1a887f86
RS
3665 memset (p, 1, size);
3666 coeff = c_readstr (p, mode);
3667
5ab2f7b7 3668 target = convert_to_mode (mode, (rtx) data, 1);
1a887f86
RS
3669 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3670 return force_reg (mode, target);
3671}
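/* Illustrative sketch (not part of GCC): the multiplication trick used by
   builtin_memset_gen_str, shown on a 4-byte unsigned value.  Multiplying by
   a word of 0x01 bytes replicates the fill byte across the word.  */
#include <stdint.h>

static uint32_t
replicate_byte_sketch (uint8_t c)
{
  return (uint32_t) c * UINT32_C (0x01010101);	/* 0x2a -> 0x2a2a2a2a */
}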
3672
b8698a0f
L
3673/* Expand expression EXP, which is a call to the memset builtin. Return
3674 NULL_RTX if we failed; the caller should emit a normal call, otherwise
5039610b 3675 try to get the result in TARGET, if convenient (and in mode MODE if that's
c2bd38e8 3676 convenient). */
fed3cef0 3677
28f4ec01 3678static rtx
ef4bddc2 3679expand_builtin_memset (tree exp, rtx target, machine_mode mode)
28f4ec01 3680{
5039610b
SL
3681 if (!validate_arglist (exp,
3682 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3683 return NULL_RTX;
28f4ec01
BS
3684 else
3685 {
5039610b
SL
3686 tree dest = CALL_EXPR_ARG (exp, 0);
3687 tree val = CALL_EXPR_ARG (exp, 1);
3688 tree len = CALL_EXPR_ARG (exp, 2);
3689 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3690 }
3691}
28f4ec01 3692
edcf72f3
IE
3693/* Expand expression EXP, which is an instrumented call to the memset builtin.
3694 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3695 try to get the result in TARGET, if convenient (and in mode MODE if that's
3696 convenient). */
3697
3698static rtx
3699expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3700{
3701 if (!validate_arglist (exp,
3702 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3703 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3704 return NULL_RTX;
3705 else
3706 {
3707 tree dest = CALL_EXPR_ARG (exp, 0);
3708 tree val = CALL_EXPR_ARG (exp, 2);
3709 tree len = CALL_EXPR_ARG (exp, 3);
3710 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3711
3712 /* Return src bounds with the result. */
3713 if (res)
3714 {
30975f63 3715 rtx bnd = force_reg (targetm.chkp_bound_mode (),
edcf72f3
IE
3716 expand_normal (CALL_EXPR_ARG (exp, 1)));
3717 res = chkp_join_splitted_slot (res, bnd);
3718 }
3719 return res;
3720 }
3721}
3722
5039610b
SL
3723/* Helper function to do the actual work for expand_builtin_memset. The
3724 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3725 so that this can also be called without constructing an actual CALL_EXPR.
3726 The other arguments and return value are the same as for
3727 expand_builtin_memset. */
880864cf 3728
5039610b
SL
3729static rtx
3730expand_builtin_memset_args (tree dest, tree val, tree len,
ef4bddc2 3731 rtx target, machine_mode mode, tree orig_exp)
5039610b
SL
3732{
3733 tree fndecl, fn;
3734 enum built_in_function fcode;
ef4bddc2 3735 machine_mode val_mode;
5039610b
SL
3736 char c;
3737 unsigned int dest_align;
3738 rtx dest_mem, dest_addr, len_rtx;
3739 HOST_WIDE_INT expected_size = -1;
3740 unsigned int expected_align = 0;
3918b108
JH
3741 unsigned HOST_WIDE_INT min_size;
3742 unsigned HOST_WIDE_INT max_size;
82bb7d4e 3743 unsigned HOST_WIDE_INT probable_max_size;
28f4ec01 3744
0eb77834 3745 dest_align = get_pointer_alignment (dest);
079a182e 3746
5039610b
SL
3747 /* If DEST is not a pointer type, don't do this operation in-line. */
3748 if (dest_align == 0)
3749 return NULL_RTX;
c2bd38e8 3750
a5883ba0
MM
3751 if (currently_expanding_gimple_stmt)
3752 stringop_block_profile (currently_expanding_gimple_stmt,
3753 &expected_align, &expected_size);
726a989a 3754
5039610b
SL
3755 if (expected_align < dest_align)
3756 expected_align = dest_align;
880864cf 3757
5039610b
SL
3758 /* If the LEN parameter is zero, return DEST. */
3759 if (integer_zerop (len))
3760 {
3761 /* Evaluate and ignore VAL in case it has side-effects. */
3762 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3763 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3764 }
57e84f18 3765
5039610b
SL
3766 /* Stabilize the arguments in case we fail. */
3767 dest = builtin_save_expr (dest);
3768 val = builtin_save_expr (val);
3769 len = builtin_save_expr (len);
1a887f86 3770
5039610b 3771 len_rtx = expand_normal (len);
82bb7d4e
JH
3772 determine_block_size (len, len_rtx, &min_size, &max_size,
3773 &probable_max_size);
5039610b 3774 dest_mem = get_memory_rtx (dest, len);
8a445129 3775 val_mode = TYPE_MODE (unsigned_char_type_node);
1a887f86 3776
5039610b
SL
3777 if (TREE_CODE (val) != INTEGER_CST)
3778 {
3779 rtx val_rtx;
1a887f86 3780
5039610b 3781 val_rtx = expand_normal (val);
8a445129 3782 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
28f4ec01 3783
5039610b
SL
3784 /* Assume that we can memset by pieces if we can store
3785 the coefficients by pieces (in the required modes).
3786 We can't pass builtin_memset_gen_str as that emits RTL. */
3787 c = 1;
cc269bb6 3788 if (tree_fits_uhwi_p (len)
ae7e9ddd 3789 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
3790 builtin_memset_read_str, &c, dest_align,
3791 true))
5039610b 3792 {
8a445129 3793 val_rtx = force_reg (val_mode, val_rtx);
ae7e9ddd 3794 store_by_pieces (dest_mem, tree_to_uhwi (len),
cfa31150
SL
3795 builtin_memset_gen_str, val_rtx, dest_align,
3796 true, 0);
5039610b
SL
3797 }
3798 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3799 dest_align, expected_align,
82bb7d4e
JH
3800 expected_size, min_size, max_size,
3801 probable_max_size))
880864cf 3802 goto do_libcall;
b8698a0f 3803
5039610b
SL
3804 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3805 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3806 return dest_mem;
3807 }
28f4ec01 3808
5039610b
SL
3809 if (target_char_cast (val, &c))
3810 goto do_libcall;
ab937357 3811
5039610b
SL
3812 if (c)
3813 {
cc269bb6 3814 if (tree_fits_uhwi_p (len)
ae7e9ddd 3815 && can_store_by_pieces (tree_to_uhwi (len),
cfa31150
SL
3816 builtin_memset_read_str, &c, dest_align,
3817 true))
ae7e9ddd 3818 store_by_pieces (dest_mem, tree_to_uhwi (len),
cfa31150 3819 builtin_memset_read_str, &c, dest_align, true, 0);
8a445129
RS
3820 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3821 gen_int_mode (c, val_mode),
5039610b 3822 dest_align, expected_align,
82bb7d4e
JH
3823 expected_size, min_size, max_size,
3824 probable_max_size))
5039610b 3825 goto do_libcall;
b8698a0f 3826
5039610b
SL
3827 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3828 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3829 return dest_mem;
3830 }
ab937357 3831
5039610b
SL
3832 set_mem_align (dest_mem, dest_align);
3833 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3834 CALL_EXPR_TAILCALL (orig_exp)
3835 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3918b108 3836 expected_align, expected_size,
82bb7d4e
JH
3837 min_size, max_size,
3838 probable_max_size);
28f4ec01 3839
5039610b
SL
3840 if (dest_addr == 0)
3841 {
3842 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3843 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3844 }
28f4ec01 3845
5039610b 3846 return dest_addr;
880864cf 3847
5039610b
SL
3848 do_libcall:
3849 fndecl = get_callee_fndecl (orig_exp);
3850 fcode = DECL_FUNCTION_CODE (fndecl);
edcf72f3
IE
3851 if (fcode == BUILT_IN_MEMSET
3852 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
aa493694
JJ
3853 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3854 dest, val, len);
5039610b 3855 else if (fcode == BUILT_IN_BZERO)
aa493694
JJ
3856 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3857 dest, len);
5039610b
SL
3858 else
3859 gcc_unreachable ();
44e10129
MM
3860 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3861 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5039610b 3862 return expand_call (fn, target, target == const0_rtx);
28f4ec01
BS
3863}
3864
b8698a0f 3865/* Expand expression EXP, which is a call to the bzero builtin. Return
5039610b 3866 NULL_RTX if we failed; the caller should emit a normal call. */
5197bd50 3867
e3a709be 3868static rtx
8148fe65 3869expand_builtin_bzero (tree exp)
e3a709be 3870{
5039610b 3871 tree dest, size;
db3927fb 3872 location_t loc = EXPR_LOCATION (exp);
e3a709be 3873
5039610b 3874 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3477addf 3875 return NULL_RTX;
e3a709be 3876
5039610b
SL
3877 dest = CALL_EXPR_ARG (exp, 0);
3878 size = CALL_EXPR_ARG (exp, 1);
8d51ecf8 3879
3477addf 3880 /* New argument list transforming bzero(ptr x, int y) to
c2bd38e8
RS
3881 memset(ptr x, int 0, size_t y). This is done this way
3882 so that if it isn't expanded inline, we fall back to
3883 calling bzero instead of memset. */
8d51ecf8 3884
5039610b 3885 return expand_builtin_memset_args (dest, integer_zero_node,
0d82a1c8
RG
3886 fold_convert_loc (loc,
3887 size_type_node, size),
5039610b 3888 const0_rtx, VOIDmode, exp);
e3a709be
KG
3889}
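/* Illustrative sketch (not part of GCC): the rewrite performed above.
   bzero (p, n) is expanded exactly as memset (p, 0, n); only if that inline
   expansion fails does the call fall back to bzero itself.  */
#include <string.h>

static void
bzero_sketch (void *p, size_t n)
{
  memset (p, 0, n);
}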
3890
a666df60
RS
3891/* Try to expand cmpstr operation ICODE with the given operands.
3892 Return the result rtx on success, otherwise return null. */
3893
3894static rtx
3895expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3896 HOST_WIDE_INT align)
3897{
3898 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3899
3900 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3901 target = NULL_RTX;
3902
3903 struct expand_operand ops[4];
3904 create_output_operand (&ops[0], target, insn_mode);
3905 create_fixed_operand (&ops[1], arg1_rtx);
3906 create_fixed_operand (&ops[2], arg2_rtx);
3907 create_integer_operand (&ops[3], align);
3908 if (maybe_expand_insn (icode, 4, ops))
3909 return ops[0].value;
3910 return NULL_RTX;
3911}
3912
7f9f48be 3913/* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
a666df60
RS
3914 ARG3_TYPE is the type of ARG3_RTX. Return the result rtx on success,
3915 otherwise return null. */
3916
3917static rtx
7f9f48be
RS
3918expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3919 rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3920 HOST_WIDE_INT align)
a666df60
RS
3921{
3922 machine_mode insn_mode = insn_data[icode].operand[0].mode;
3923
3924 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3925 target = NULL_RTX;
3926
3927 struct expand_operand ops[5];
3928 create_output_operand (&ops[0], target, insn_mode);
3929 create_fixed_operand (&ops[1], arg1_rtx);
3930 create_fixed_operand (&ops[2], arg2_rtx);
3931 create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3932 TYPE_UNSIGNED (arg3_type));
3933 create_integer_operand (&ops[4], align);
3934 if (maybe_expand_insn (icode, 5, ops))
3935 return ops[0].value;
3936 return NULL_RTX;
3937}
3938
2be3b5ce 3939/* Expand expression EXP, which is a call to the memcmp built-in function.
9b0f6f5e 3940 Return NULL_RTX if we failed and the caller should emit a normal call,
7f9f48be 3941 otherwise try to get the result in TARGET, if convenient. */
5197bd50 3942
28f4ec01 3943static rtx
7f9f48be 3944expand_builtin_memcmp (tree exp, rtx target)
28f4ec01 3945{
5039610b
SL
3946 if (!validate_arglist (exp,
3947 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3948 return NULL_RTX;
c2bd38e8 3949
9b0f6f5e
NC
3950 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3951 implementing memcmp because it will stop if it encounters two
3952 zero bytes. */
7f9f48be
RS
3953 insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3954 if (icode == CODE_FOR_nothing)
3955 return NULL_RTX;
28f4ec01 3956
7f9f48be
RS
3957 tree arg1 = CALL_EXPR_ARG (exp, 0);
3958 tree arg2 = CALL_EXPR_ARG (exp, 1);
3959 tree len = CALL_EXPR_ARG (exp, 2);
358b8f01 3960
7f9f48be
RS
3961 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3962 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
28f4ec01 3963
7f9f48be
RS
3964 /* If we don't have POINTER_TYPE, call the function. */
3965 if (arg1_align == 0 || arg2_align == 0)
3966 return NULL_RTX;
28f4ec01 3967
7f9f48be
RS
3968 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3969 location_t loc = EXPR_LOCATION (exp);
3970 rtx arg1_rtx = get_memory_rtx (arg1, len);
3971 rtx arg2_rtx = get_memory_rtx (arg2, len);
3972 rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
28f4ec01 3973
7f9f48be
RS
3974 /* Set MEM_SIZE as appropriate. */
3975 if (CONST_INT_P (arg3_rtx))
3976 {
3977 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3978 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3979 }
6cbaec9e 3980
7f9f48be
RS
3981 rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3982 TREE_TYPE (len), arg3_rtx,
3983 MIN (arg1_align, arg2_align));
3984 if (result)
3985 {
3986 /* Return the value in the proper mode for this function. */
3987 if (GET_MODE (result) == mode)
3988 return result;
6cbaec9e 3989
7f9f48be
RS
3990 if (target != 0)
3991 {
3992 convert_move (target, result, 0);
3993 return target;
3994 }
8878e913 3995
28f4ec01 3996 return convert_to_mode (mode, result, 0);
7f9f48be 3997 }
28f4ec01 3998
7f9f48be
RS
3999 result = target;
4000 if (! (result != 0
4001 && REG_P (result) && GET_MODE (result) == mode
4002 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4003 result = gen_reg_rtx (mode);
4004
4005 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
4006 TYPE_MODE (integer_type_node), 3,
4007 XEXP (arg1_rtx, 0), Pmode,
4008 XEXP (arg2_rtx, 0), Pmode,
4009 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
4010 TYPE_UNSIGNED (sizetype)),
4011 TYPE_MODE (sizetype));
4012 return result;
c2bd38e8
RS
4013}
4014
5039610b 4015/* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
28f4ec01
BS
4016 if we failed; the caller should emit a normal call, otherwise try to get
4017 the result in TARGET, if convenient. */
fed3cef0 4018
28f4ec01 4019static rtx
44e10129 4020expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
28f4ec01 4021{
5039610b
SL
4022 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4023 return NULL_RTX;
8d51ecf8 4024
a666df60
RS
4025 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4026 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4027 if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
40c1d5f8
AS
4028 {
4029 rtx arg1_rtx, arg2_rtx;
40c1d5f8 4030 tree fndecl, fn;
5039610b
SL
4031 tree arg1 = CALL_EXPR_ARG (exp, 0);
4032 tree arg2 = CALL_EXPR_ARG (exp, 1);
a666df60 4033 rtx result = NULL_RTX;
c22cacf3 4034
0eb77834
RG
4035 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4036 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
40c1d5f8
AS
4037
4038 /* If we don't have POINTER_TYPE, call the function. */
4039 if (arg1_align == 0 || arg2_align == 0)
5039610b 4040 return NULL_RTX;
2be3b5ce 4041
40c1d5f8
AS
4042 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4043 arg1 = builtin_save_expr (arg1);
4044 arg2 = builtin_save_expr (arg2);
2be3b5ce 4045
435bb2a1
JJ
4046 arg1_rtx = get_memory_rtx (arg1, NULL);
4047 arg2_rtx = get_memory_rtx (arg2, NULL);
28f4ec01 4048
40c1d5f8 4049 /* Try to call cmpstrsi. */
a666df60
RS
4050 if (cmpstr_icode != CODE_FOR_nothing)
4051 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4052 MIN (arg1_align, arg2_align));
4053
40c1d5f8 4054 /* Try to determine at least one length and call cmpstrnsi. */
a666df60 4055 if (!result && cmpstrn_icode != CODE_FOR_nothing)
40c1d5f8
AS
4056 {
4057 tree len;
4058 rtx arg3_rtx;
4059
40c1d5f8
AS
4060 tree len1 = c_strlen (arg1, 1);
4061 tree len2 = c_strlen (arg2, 1);
4062
4063 if (len1)
4064 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4065 if (len2)
4066 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4067
4068 /* If we don't have a constant length for the first, use the length
4069 of the second, if we know it. We don't require a constant for
4070 this case; some cost analysis could be done if both are available
4071 but neither is constant. For now, assume they're equally cheap,
4072 unless one has side effects. If both strings have constant lengths,
4073 use the smaller. */
4074
4075 if (!len1)
4076 len = len2;
4077 else if (!len2)
4078 len = len1;
4079 else if (TREE_SIDE_EFFECTS (len1))
4080 len = len2;
4081 else if (TREE_SIDE_EFFECTS (len2))
4082 len = len1;
4083 else if (TREE_CODE (len1) != INTEGER_CST)
4084 len = len2;
4085 else if (TREE_CODE (len2) != INTEGER_CST)
4086 len = len1;
4087 else if (tree_int_cst_lt (len1, len2))
4088 len = len1;
4089 else
4090 len = len2;
4091
4092 /* If both arguments have side effects, we cannot optimize. */
a666df60
RS
4093 if (len && !TREE_SIDE_EFFECTS (len))
4094 {
4095 arg3_rtx = expand_normal (len);
7f9f48be
RS
4096 result = expand_cmpstrn_or_cmpmem
4097 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4098 arg3_rtx, MIN (arg1_align, arg2_align));
a666df60 4099 }
40c1d5f8 4100 }
c43fa1f5 4101
a666df60 4102 if (result)
40c1d5f8 4103 {
40c1d5f8 4104 /* Return the value in the proper mode for this function. */
a666df60 4105 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
40c1d5f8
AS
4106 if (GET_MODE (result) == mode)
4107 return result;
4108 if (target == 0)
4109 return convert_to_mode (mode, result, 0);
4110 convert_move (target, result, 0);
4111 return target;
4112 }
fed3cef0 4113
40c1d5f8
AS
4114 /* Expand the library call ourselves using a stabilized argument
4115 list to avoid re-evaluating the function's arguments twice. */
40c1d5f8 4116 fndecl = get_callee_fndecl (exp);
aa493694 4117 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
44e10129
MM
4118 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4119 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
40c1d5f8
AS
4120 return expand_call (fn, target, target == const0_rtx);
4121 }
5039610b 4122 return NULL_RTX;
2dee4af1 4123}
28f4ec01 4124
b8698a0f 4125/* Expand expression EXP, which is a call to the strncmp builtin. Return
5039610b 4126 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
da9e9f08 4127 the result in TARGET, if convenient. */
5197bd50 4128
da9e9f08 4129static rtx
44e10129 4130expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
ef4bddc2 4131 ATTRIBUTE_UNUSED machine_mode mode)
da9e9f08 4132{
44e10129 4133 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
db3927fb 4134
5039610b
SL
4135 if (!validate_arglist (exp,
4136 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4137 return NULL_RTX;
da9e9f08 4138
819c1488 4139 /* If c_strlen can determine an expression for one of the string
40c1d5f8 4140 lengths, and it doesn't have side effects, then emit cmpstrnsi
2be3b5ce 4141 using length MIN(strlen(string)+1, arg3). */
a666df60
RS
4142 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4143 if (cmpstrn_icode != CODE_FOR_nothing)
2be3b5ce
RS
4144 {
4145 tree len, len1, len2;
4146 rtx arg1_rtx, arg2_rtx, arg3_rtx;
a666df60 4147 rtx result;
8148fe65 4148 tree fndecl, fn;
5039610b
SL
4149 tree arg1 = CALL_EXPR_ARG (exp, 0);
4150 tree arg2 = CALL_EXPR_ARG (exp, 1);
4151 tree arg3 = CALL_EXPR_ARG (exp, 2);
c2bd38e8 4152
0eb77834
RG
4153 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4154 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
8d51ecf8 4155
ae808627
JJ
4156 len1 = c_strlen (arg1, 1);
4157 len2 = c_strlen (arg2, 1);
2be3b5ce
RS
4158
4159 if (len1)
db3927fb 4160 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
2be3b5ce 4161 if (len2)
db3927fb 4162 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
2be3b5ce
RS
4163
4164 /* If we don't have a constant length for the first, use the length
4165 of the second, if we know it. We don't require a constant for
4166 this case; some cost analysis could be done if both are available
4167 but neither is constant. For now, assume they're equally cheap,
4168 unless one has side effects. If both strings have constant lengths,
4169 use the smaller. */
4170
4171 if (!len1)
4172 len = len2;
4173 else if (!len2)
4174 len = len1;
4175 else if (TREE_SIDE_EFFECTS (len1))
4176 len = len2;
4177 else if (TREE_SIDE_EFFECTS (len2))
4178 len = len1;
4179 else if (TREE_CODE (len1) != INTEGER_CST)
4180 len = len2;
4181 else if (TREE_CODE (len2) != INTEGER_CST)
4182 len = len1;
4183 else if (tree_int_cst_lt (len1, len2))
4184 len = len1;
4185 else
4186 len = len2;
819c1488 4187
2be3b5ce
RS
4188 /* If both arguments have side effects, we cannot optimize. */
4189 if (!len || TREE_SIDE_EFFECTS (len))
5039610b 4190 return NULL_RTX;
8d51ecf8 4191
2be3b5ce 4192 /* The actual new length parameter is MIN(len,arg3). */
db3927fb
AH
4193 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4194 fold_convert_loc (loc, TREE_TYPE (len), arg3));
2be3b5ce
RS
4195
4196 /* If we don't have POINTER_TYPE, call the function. */
4197 if (arg1_align == 0 || arg2_align == 0)
5039610b 4198 return NULL_RTX;
2be3b5ce 4199
44e10129
MM
4200 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4201 arg1 = builtin_save_expr (arg1);
4202 arg2 = builtin_save_expr (arg2);
4203 len = builtin_save_expr (len);
5197bd50 4204
44e10129
MM
4205 arg1_rtx = get_memory_rtx (arg1, len);
4206 arg2_rtx = get_memory_rtx (arg2, len);
4207 arg3_rtx = expand_normal (len);
7f9f48be
RS
4208 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4209 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4210 MIN (arg1_align, arg2_align));
a666df60 4211 if (result)
44e10129 4212 {
44e10129
MM
4213 /* Return the value in the proper mode for this function. */
4214 mode = TYPE_MODE (TREE_TYPE (exp));
4215 if (GET_MODE (result) == mode)
4216 return result;
4217 if (target == 0)
4218 return convert_to_mode (mode, result, 0);
4219 convert_move (target, result, 0);
4220 return target;
4221 }
5197bd50 4222
44e10129
MM
4223 /* Expand the library call ourselves using a stabilized argument
4224 list to avoid re-evaluating the function's arguments twice. */
4225 fndecl = get_callee_fndecl (exp);
aa493694
JJ
4226 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4227 arg1, arg2, len);
44e10129
MM
4228 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4229 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4230 return expand_call (fn, target, target == const0_rtx);
4231 }
5039610b 4232 return NULL_RTX;
d118937d
KG
4233}
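/* Illustrative sketch (not part of GCC): the length bound computed above.
   When one string's length is known, cmpstrnsi is handed the effective
   bound MIN (strlen (s) + 1, n), since the comparison cannot usefully look
   past the first NUL.  The helper name is invented for the example.  */
#include <string.h>

static size_t
strncmp_bound_sketch (const char *known, size_t n)
{
  size_t len = strlen (known) + 1;	/* constant at compile time above */
  return len < n ? len : n;		/* MIN (strlen (s) + 1, n) */
}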
4234
d3707adb
RH
4235/* Expand a call to __builtin_saveregs, generating the result in TARGET,
4236 if that's convenient. */
fed3cef0 4237
d3707adb 4238rtx
4682ae04 4239expand_builtin_saveregs (void)
28f4ec01 4240{
58f4cf2a
DM
4241 rtx val;
4242 rtx_insn *seq;
28f4ec01
BS
4243
4244 /* Don't do __builtin_saveregs more than once in a function.
4245 Save the result of the first call and reuse it. */
4246 if (saveregs_value != 0)
4247 return saveregs_value;
28f4ec01 4248
d3707adb
RH
4249 /* When this function is called, it means that registers must be
4250 saved on entry to this function. So we migrate the call to the
4251 first insn of this function. */
4252
4253 start_sequence ();
28f4ec01 4254
d3707adb 4255 /* Do whatever the machine needs done in this case. */
61f71b34 4256 val = targetm.calls.expand_builtin_saveregs ();
28f4ec01 4257
d3707adb
RH
4258 seq = get_insns ();
4259 end_sequence ();
28f4ec01 4260
d3707adb 4261 saveregs_value = val;
28f4ec01 4262
2f937369
DM
4263 /* Put the insns after the NOTE that starts the function. If this
4264 is inside a start_sequence, make the outer-level insn chain current, so
d3707adb
RH
4265 the code is placed at the start of the function. */
4266 push_topmost_sequence ();
242229bb 4267 emit_insn_after (seq, entry_of_function ());
d3707adb
RH
4268 pop_topmost_sequence ();
4269
4270 return val;
28f4ec01
BS
4271}
4272
8870e212 4273/* Expand a call to __builtin_next_arg. */
5197bd50 4274
28f4ec01 4275static rtx
8870e212 4276expand_builtin_next_arg (void)
28f4ec01 4277{
8870e212
JJ
4278 /* Checking arguments is already done in fold_builtin_next_arg
4279 that must be called before this function. */
4319e38c 4280 return expand_binop (ptr_mode, add_optab,
38173d38
JH
4281 crtl->args.internal_arg_pointer,
4282 crtl->args.arg_offset_rtx,
28f4ec01
BS
4283 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4284}
4285
d3707adb
RH
4286/* Make it easier for the backends by protecting the valist argument
4287 from multiple evaluations. */
4288
4289static tree
db3927fb 4290stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
d3707adb 4291{
35cbb299
KT
4292 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4293
70f34814
RG
4294 /* The current way of determining the type of valist is completely
4295 bogus. We should have the information on the va builtin instead. */
4296 if (!vatype)
4297 vatype = targetm.fn_abi_va_list (cfun->decl);
35cbb299
KT
4298
4299 if (TREE_CODE (vatype) == ARRAY_TYPE)
d3707adb 4300 {
9f720c3e
GK
4301 if (TREE_SIDE_EFFECTS (valist))
4302 valist = save_expr (valist);
8ebecc3b 4303
9f720c3e 4304 /* For this case, the backends will be expecting a pointer to
35cbb299
KT
4305 vatype, but it's possible we've actually been given an array
4306 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
9f720c3e
GK
4307 So fix it. */
4308 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
daf68dd7 4309 {
35cbb299 4310 tree p1 = build_pointer_type (TREE_TYPE (vatype));
db3927fb 4311 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
daf68dd7 4312 }
d3707adb 4313 }
8ebecc3b 4314 else
d3707adb 4315 {
70f34814 4316 tree pt = build_pointer_type (vatype);
8ebecc3b 4317
9f720c3e
GK
4318 if (! needs_lvalue)
4319 {
8ebecc3b
RH
4320 if (! TREE_SIDE_EFFECTS (valist))
4321 return valist;
8d51ecf8 4322
db3927fb 4323 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
d3707adb 4324 TREE_SIDE_EFFECTS (valist) = 1;
d3707adb 4325 }
9f720c3e 4326
8ebecc3b 4327 if (TREE_SIDE_EFFECTS (valist))
9f720c3e 4328 valist = save_expr (valist);
70f34814
RG
4329 valist = fold_build2_loc (loc, MEM_REF,
4330 vatype, valist, build_int_cst (pt, 0));
d3707adb
RH
4331 }
4332
4333 return valist;
4334}
4335
c35d187f
RH
4336/* The "standard" definition of va_list is void*. */
4337
4338tree
4339std_build_builtin_va_list (void)
4340{
4341 return ptr_type_node;
4342}
4343
35cbb299
KT
4344/* The "standard" abi va_list is va_list_type_node. */
4345
4346tree
4347std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4348{
4349 return va_list_type_node;
4350}
4351
4352/* The "standard" type of va_list is va_list_type_node. */
4353
4354tree
4355std_canonical_va_list_type (tree type)
4356{
4357 tree wtype, htype;
4358
4359 if (INDIRECT_REF_P (type))
4360 type = TREE_TYPE (type);
c3284718 4361 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
35cbb299 4362 type = TREE_TYPE (type);
35cbb299
KT
4363 wtype = va_list_type_node;
4364 htype = type;
e65d1ec6
KT
4365 /* Treat structure va_list types. */
4366 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4367 htype = TREE_TYPE (htype);
4368 else if (TREE_CODE (wtype) == ARRAY_TYPE)
35cbb299
KT
4369 {
4370 /* If va_list is an array type, the argument may have decayed
4371 to a pointer type, e.g. by being passed to another function.
4372 In that case, unwrap both types so that we can compare the
4373 underlying records. */
4374 if (TREE_CODE (htype) == ARRAY_TYPE
4375 || POINTER_TYPE_P (htype))
4376 {
4377 wtype = TREE_TYPE (wtype);
4378 htype = TREE_TYPE (htype);
4379 }
4380 }
4381 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4382 return va_list_type_node;
4383
4384 return NULL_TREE;
4385}
4386
d3707adb
RH
4387/* The "standard" implementation of va_start: just assign `nextarg' to
4388 the variable. */
5197bd50 4389
d3707adb 4390void
4682ae04 4391std_expand_builtin_va_start (tree valist, rtx nextarg)
d3707adb 4392{
508dabda
ILT
4393 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4394 convert_move (va_r, nextarg, 0);
d5e254e1
IE
4395
4396 /* We do not have any valid bounds for the pointer, so
4397 just store zero bounds for it. */
4398 if (chkp_function_instrumented_p (current_function_decl))
4399 chkp_expand_bounds_reset_for_mem (valist,
4400 make_tree (TREE_TYPE (valist),
4401 nextarg));
d3707adb
RH
4402}
4403
5039610b 4404/* Expand EXP, a call to __builtin_va_start. */
5197bd50 4405
d3707adb 4406static rtx
5039610b 4407expand_builtin_va_start (tree exp)
d3707adb
RH
4408{
4409 rtx nextarg;
5039610b 4410 tree valist;
db3927fb 4411 location_t loc = EXPR_LOCATION (exp);
d3707adb 4412
5039610b 4413 if (call_expr_nargs (exp) < 2)
c69c9b36 4414 {
db3927fb 4415 error_at (loc, "too few arguments to function %<va_start%>");
c69c9b36
JM
4416 return const0_rtx;
4417 }
d3707adb 4418
5039610b 4419 if (fold_builtin_next_arg (exp, true))
8870e212 4420 return const0_rtx;
d3147f64 4421
8870e212 4422 nextarg = expand_builtin_next_arg ();
db3927fb 4423 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
d3707adb 4424
d7bd8aeb
JJ
4425 if (targetm.expand_builtin_va_start)
4426 targetm.expand_builtin_va_start (valist, nextarg);
4427 else
4428 std_expand_builtin_va_start (valist, nextarg);
d3707adb
RH
4429
4430 return const0_rtx;
4431}
4432
5039610b 4433/* Expand EXP, a call to __builtin_va_end. */
3bdf5ad1 4434
d3707adb 4435static rtx
5039610b 4436expand_builtin_va_end (tree exp)
d3707adb 4437{
5039610b 4438 tree valist = CALL_EXPR_ARG (exp, 0);
daf68dd7 4439
daf68dd7
RH
4440 /* Evaluate for side effects, if needed. I hate macros that don't
4441 do that. */
4442 if (TREE_SIDE_EFFECTS (valist))
4443 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
d3707adb
RH
4444
4445 return const0_rtx;
4446}
4447
5039610b 4448/* Expand EXP, a call to __builtin_va_copy. We do this as a
d3707adb
RH
4449 builtin rather than just as an assignment in stdarg.h because of the
4450 nastiness of array-type va_list types. */
3bdf5ad1 4451
d3707adb 4452static rtx
5039610b 4453expand_builtin_va_copy (tree exp)
d3707adb
RH
4454{
4455 tree dst, src, t;
db3927fb 4456 location_t loc = EXPR_LOCATION (exp);
d3707adb 4457
5039610b
SL
4458 dst = CALL_EXPR_ARG (exp, 0);
4459 src = CALL_EXPR_ARG (exp, 1);
d3707adb 4460
db3927fb
AH
4461 dst = stabilize_va_list_loc (loc, dst, 1);
4462 src = stabilize_va_list_loc (loc, src, 0);
d3707adb 4463
35cbb299
KT
4464 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4465
4466 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
d3707adb 4467 {
35cbb299 4468 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
d3707adb
RH
4469 TREE_SIDE_EFFECTS (t) = 1;
4470 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4471 }
4472 else
4473 {
8ebecc3b
RH
4474 rtx dstb, srcb, size;
4475
4476 /* Evaluate to pointers. */
4477 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4478 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
35cbb299
KT
4479 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4480 NULL_RTX, VOIDmode, EXPAND_NORMAL);
8ebecc3b 4481
5ae6cd0d
MM
4482 dstb = convert_memory_address (Pmode, dstb);
4483 srcb = convert_memory_address (Pmode, srcb);
ce2d32cd 4484
8ebecc3b
RH
4485 /* "Dereference" to BLKmode memories. */
4486 dstb = gen_rtx_MEM (BLKmode, dstb);
ba4828e0 4487 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
35cbb299 4488 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b 4489 srcb = gen_rtx_MEM (BLKmode, srcb);
ba4828e0 4490 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
35cbb299 4491 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
8ebecc3b
RH
4492
4493 /* Copy. */
44bb111a 4494 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
d3707adb
RH
4495 }
4496
4497 return const0_rtx;
4498}
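/* Illustrative sketch (not part of builtins.c) of what the expander above
   implements.  With an array-type va_list, a plain assignment between two
   va_list objects would be ill-formed, so va_copy (which stdarg.h maps to
   __builtin_va_copy) has to block-copy the underlying object -- the
   emit_block_move path above.  */

#include <stdarg.h>

static int sum_twice (int n, ...)
{
  va_list ap, ap2;
  int total = 0;

  va_start (ap, n);
  va_copy (ap2, ap);            /* expands to __builtin_va_copy */
  for (int i = 0; i < n; i++)
    total += va_arg (ap, int);
  for (int i = 0; i < n; i++)
    total += va_arg (ap2, int);
  va_end (ap2);
  va_end (ap);
  return total;
}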
4499
28f4ec01
BS
4500/* Expand a call to one of the builtin functions __builtin_frame_address or
4501 __builtin_return_address. */
5197bd50 4502
28f4ec01 4503static rtx
5039610b 4504expand_builtin_frame_address (tree fndecl, tree exp)
28f4ec01 4505{
28f4ec01
BS
4506 /* The argument must be a nonnegative integer constant.
4507 It counts the number of frames to scan up the stack.
8423e57c
MS
4508 The value is either the frame pointer value or the return
4509 address saved in that frame. */
5039610b 4510 if (call_expr_nargs (exp) == 0)
28f4ec01
BS
4511 /* Warning about missing arg was already issued. */
4512 return const0_rtx;
cc269bb6 4513 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
28f4ec01 4514 {
8423e57c 4515 error ("invalid argument to %qD", fndecl);
28f4ec01
BS
4516 return const0_rtx;
4517 }
4518 else
4519 {
8423e57c
MS
4520 /* Number of frames to scan up the stack. */
4521 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4522
4523 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
28f4ec01
BS
4524
4525 /* Some ports cannot access arbitrary stack frames. */
4526 if (tem == NULL)
4527 {
8423e57c 4528 warning (0, "unsupported argument to %qD", fndecl);
28f4ec01
BS
4529 return const0_rtx;
4530 }
4531
8423e57c
MS
4532 if (count)
4533 {
4534 /* Warn since no effort is made to ensure that any frame
4535 beyond the current one exists or can be safely reached. */
4536 warning (OPT_Wframe_address, "calling %qD with "
4537 "a nonzero argument is unsafe", fndecl);
4538 }
4539
28f4ec01
BS
4540 /* For __builtin_frame_address, return what we've got. */
4541 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4542 return tem;
4543
f8cfc6aa 4544 if (!REG_P (tem)
28f4ec01 4545 && ! CONSTANT_P (tem))
18ae1560 4546 tem = copy_addr_to_reg (tem);
28f4ec01
BS
4547 return tem;
4548 }
4549}
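/* Illustrative sketch (not part of builtins.c): source-level uses of the
   two builtins handled above.  The argument must be a nonnegative integer
   constant, and any nonzero count triggers the -Wframe-address warning
   emitted above, because outer frames may not be safely reachable.  */

static void *own_return_address (void)
{
  return __builtin_return_address (0);   /* count of 0: always supported */
}

static void *grandparent_frame (void)
{
  /* warns: calling '__builtin_frame_address' with a nonzero argument
     is unsafe [-Wframe-address] */
  return __builtin_frame_address (2);
}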
4550
d3c12306 4551/* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
3a42502d
RH
4552 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4553 is the same as for allocate_dynamic_stack_space. */
d5457140 4554
28f4ec01 4555static rtx
3a42502d 4556expand_builtin_alloca (tree exp, bool cannot_accumulate)
28f4ec01
BS
4557{
4558 rtx op0;
d5457140 4559 rtx result;
13e49da9
TV
4560 bool valid_arglist;
4561 unsigned int align;
4562 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4563 == BUILT_IN_ALLOCA_WITH_ALIGN);
28f4ec01 4564
13e49da9
TV
4565 valid_arglist
4566 = (alloca_with_align
4567 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4568 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4569
4570 if (!valid_arglist)
5039610b 4571 return NULL_RTX;
28f4ec01
BS
4572
4573 /* Compute the argument. */
5039610b 4574 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
28f4ec01 4575
13e49da9
TV
4576 /* Compute the alignment. */
4577 align = (alloca_with_align
4578 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4579 : BIGGEST_ALIGNMENT);
4580
28f4ec01 4581 /* Allocate the desired space. */
13e49da9 4582 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
5ae6cd0d 4583 result = convert_memory_address (ptr_mode, result);
d5457140
RK
4584
4585 return result;
28f4ec01
BS
4586}
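/* Illustrative sketch (not part of builtins.c): __builtin_alloca allocates
   in the calling function's own stack frame, so the memory is released when
   that function returns -- which is why the allocation is expanded inline
   here rather than emitted as a library call.  FROB is a hypothetical
   consumer, present only for illustration.  */

#include <string.h>

static void frob (char *buf, size_t len);

static void with_scratch (const char *s)
{
  size_t len = strlen (s) + 1;
  char *scratch = __builtin_alloca (len);   /* freed on return */
  memcpy (scratch, s, len);
  frob (scratch, len);
}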
4587
ac868f29
EB
4588/* Expand a call to bswap builtin in EXP.
4589 Return NULL_RTX if a normal call should be emitted rather than expanding the
4590 function in-line. If convenient, the result should be placed in TARGET.
4591 SUBTARGET may be used as the target for computing one of EXP's operands. */
167fa32c
EC
4592
4593static rtx
ef4bddc2 4594expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
ac868f29 4595 rtx subtarget)
167fa32c 4596{
167fa32c
EC
4597 tree arg;
4598 rtx op0;
4599
5039610b
SL
4600 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4601 return NULL_RTX;
167fa32c 4602
5039610b 4603 arg = CALL_EXPR_ARG (exp, 0);
ac868f29
EB
4604 op0 = expand_expr (arg,
4605 subtarget && GET_MODE (subtarget) == target_mode
4606 ? subtarget : NULL_RTX,
4607 target_mode, EXPAND_NORMAL);
4608 if (GET_MODE (op0) != target_mode)
4609 op0 = convert_to_mode (target_mode, op0, 1);
167fa32c 4610
ac868f29 4611 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
167fa32c
EC
4612
4613 gcc_assert (target);
4614
ac868f29 4615 return convert_to_mode (target_mode, target, 1);
167fa32c
EC
4616}
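/* Illustrative sketch (not part of builtins.c): the unop expanded above
   reverses the byte order of its operand, e.g.
   __builtin_bswap32 (0x11223344) == 0x44332211.  */

#include <stdint.h>

static uint32_t byte_reverse32 (uint32_t x)
{
  return __builtin_bswap32 (x);
}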
4617
5039610b
SL
4618/* Expand a call to a unary builtin in EXP.
4619 Return NULL_RTX if a normal call should be emitted rather than expanding the
28f4ec01
BS
4620 function in-line. If convenient, the result should be placed in TARGET.
4621 SUBTARGET may be used as the target for computing one of EXP's operands. */
d5457140 4622
28f4ec01 4623static rtx
ef4bddc2 4624expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4682ae04 4625 rtx subtarget, optab op_optab)
28f4ec01
BS
4626{
4627 rtx op0;
5039610b
SL
4628
4629 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4630 return NULL_RTX;
28f4ec01
BS
4631
4632 /* Compute the argument. */
4359dc2a
JJ
4633 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4634 (subtarget
4635 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4636 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
49452c07 4637 VOIDmode, EXPAND_NORMAL);
2928cd7a 4638 /* Compute op, into TARGET if possible.
28f4ec01 4639 Set TARGET to wherever the result comes back. */
5039610b 4640 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
146aef0b 4641 op_optab, op0, target, op_optab != clrsb_optab);
298e6adc 4642 gcc_assert (target);
5906d013 4643
6c537d03 4644 return convert_to_mode (target_mode, target, 0);
28f4ec01 4645}
994a57cd 4646
b8698a0f 4647/* Expand a call to __builtin_expect. We just return our argument
ef950eba
JH
4648 as the builtin_expect semantic should've been already executed by
4649 tree branch prediction pass. */
994a57cd
RH
4650
4651static rtx
5039610b 4652expand_builtin_expect (tree exp, rtx target)
994a57cd 4653{
451409e4 4654 tree arg;
994a57cd 4655
5039610b 4656 if (call_expr_nargs (exp) < 2)
994a57cd 4657 return const0_rtx;
5039610b 4658 arg = CALL_EXPR_ARG (exp, 0);
994a57cd 4659
5039610b 4660 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
ef950eba 4661 /* When guessing was done, the hints should be already stripped away. */
1d8381f1 4662 gcc_assert (!flag_guess_branch_prob
1da2ed5f 4663 || optimize == 0 || seen_error ());
994a57cd
RH
4664 return target;
4665}
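/* Illustrative sketch (not part of builtins.c): by the time this expander
   runs, the branch prediction pass has already consumed the hint, so only
   the first argument is left to return.  Typical source-level use:  */

#define unlikely(x) __builtin_expect (!!(x), 0)

static int checked_div (int a, int b)
{
  if (unlikely (b == 0))        /* hint: division by zero is rare */
    return 0;
  return a / b;
}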
5f2d6cfa 4666
45d439ac
JJ
4667/* Expand a call to __builtin_assume_aligned. We just return our first
4668 argument as the builtin_assume_aligned semantic should've been already
4669 executed by CCP. */
4670
4671static rtx
4672expand_builtin_assume_aligned (tree exp, rtx target)
4673{
4674 if (call_expr_nargs (exp) < 2)
4675 return const0_rtx;
4676 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4677 EXPAND_NORMAL);
4678 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4679 && (call_expr_nargs (exp) < 3
4680 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4681 return target;
4682}
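/* Illustrative sketch (not part of builtins.c): the alignment promise is
   consumed by CCP as noted above; the expander only forwards argument 0.
   Source-level use:  */

static float sum16 (const float *p)
{
  /* Promise the optimizer that P is 64-byte aligned; the returned pointer
     carries the alignment information.  */
  const float *ap = __builtin_assume_aligned (p, 64);
  float s = 0.0f;
  for (int i = 0; i < 16; i++)
    s += ap[i];
  return s;
}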
4683
1e188d1e 4684void
4682ae04 4685expand_builtin_trap (void)
9602f5a0 4686{
eb6f47fb 4687 if (targetm.have_trap ())
206604dc 4688 {
eb6f47fb 4689 rtx_insn *insn = emit_insn (targetm.gen_trap ());
206604dc
JJ
4690 /* For trap insns when not accumulating outgoing args force
4691 REG_ARGS_SIZE note to prevent crossjumping of calls with
4692 different args sizes. */
4693 if (!ACCUMULATE_OUTGOING_ARGS)
4694 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4695 }
9602f5a0 4696 else
9602f5a0
RH
4697 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4698 emit_barrier ();
4699}
075ec276 4700
468059bc
DD
4701/* Expand a call to __builtin_unreachable. We do nothing except emit
4702 a barrier saying that control flow will not pass here.
4703
4704 It is the responsibility of the program being compiled to ensure
 4705 that control flow never reaches __builtin_unreachable. */
4706static void
4707expand_builtin_unreachable (void)
4708{
4709 emit_barrier ();
4710}
4711
5039610b
SL
4712/* Expand EXP, a call to fabs, fabsf or fabsl.
4713 Return NULL_RTX if a normal call should be emitted rather than expanding
075ec276
RS
4714 the function inline. If convenient, the result should be placed
4715 in TARGET. SUBTARGET may be used as the target for computing
4716 the operand. */
4717
4718static rtx
5039610b 4719expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
075ec276 4720{
ef4bddc2 4721 machine_mode mode;
075ec276
RS
4722 tree arg;
4723 rtx op0;
4724
5039610b
SL
4725 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4726 return NULL_RTX;
075ec276 4727
5039610b 4728 arg = CALL_EXPR_ARG (exp, 0);
4cd8e76f 4729 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
075ec276 4730 mode = TYPE_MODE (TREE_TYPE (arg));
49452c07 4731 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
075ec276
RS
4732 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4733}
4734
5039610b 4735/* Expand EXP, a call to copysign, copysignf, or copysignl.
046625fa
RH
 4736 Return NULL if a normal call should be emitted rather than expanding the
4737 function inline. If convenient, the result should be placed in TARGET.
4738 SUBTARGET may be used as the target for computing the operand. */
4739
4740static rtx
5039610b 4741expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
046625fa
RH
4742{
4743 rtx op0, op1;
4744 tree arg;
4745
5039610b
SL
4746 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4747 return NULL_RTX;
046625fa 4748
5039610b 4749 arg = CALL_EXPR_ARG (exp, 0);
84217346 4750 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
046625fa 4751
5039610b 4752 arg = CALL_EXPR_ARG (exp, 1);
84217346 4753 op1 = expand_normal (arg);
046625fa
RH
4754
4755 return expand_copysign (op0, op1, target);
4756}
4757
677feb77
DD
4758/* Expand a call to __builtin___clear_cache. */
4759
4760static rtx
f2cf13bd 4761expand_builtin___clear_cache (tree exp)
677feb77 4762{
f2cf13bd
RS
4763 if (!targetm.code_for_clear_cache)
4764 {
677feb77 4765#ifdef CLEAR_INSN_CACHE
f2cf13bd
RS
4766 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4767 does something. Just do the default expansion to a call to
4768 __clear_cache(). */
4769 return NULL_RTX;
677feb77 4770#else
f2cf13bd
RS
4771 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4772 does nothing. There is no need to call it. Do nothing. */
4773 return const0_rtx;
677feb77 4774#endif /* CLEAR_INSN_CACHE */
f2cf13bd
RS
4775 }
4776
677feb77
DD
4777 /* We have a "clear_cache" insn, and it will handle everything. */
4778 tree begin, end;
4779 rtx begin_rtx, end_rtx;
677feb77
DD
4780
4781 /* We must not expand to a library call. If we did, any
4782 fallback library function in libgcc that might contain a call to
4783 __builtin___clear_cache() would recurse infinitely. */
4784 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4785 {
4786 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4787 return const0_rtx;
4788 }
4789
f2cf13bd 4790 if (targetm.have_clear_cache ())
677feb77 4791 {
a5c7d693 4792 struct expand_operand ops[2];
677feb77
DD
4793
4794 begin = CALL_EXPR_ARG (exp, 0);
4795 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77
DD
4796
4797 end = CALL_EXPR_ARG (exp, 1);
4798 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
677feb77 4799
a5c7d693
RS
4800 create_address_operand (&ops[0], begin_rtx);
4801 create_address_operand (&ops[1], end_rtx);
f2cf13bd 4802 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
a5c7d693 4803 return const0_rtx;
677feb77
DD
4804 }
4805 return const0_rtx;
677feb77
DD
4806}
4807
6de9cd9a
DN
4808/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4809
4810static rtx
4811round_trampoline_addr (rtx tramp)
4812{
4813 rtx temp, addend, mask;
4814
4815 /* If we don't need too much alignment, we'll have been guaranteed
4816 proper alignment by get_trampoline_type. */
4817 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4818 return tramp;
4819
4820 /* Round address up to desired boundary. */
4821 temp = gen_reg_rtx (Pmode);
2f1cd2eb
RS
4822 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4823 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6de9cd9a
DN
4824
4825 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4826 temp, 0, OPTAB_LIB_WIDEN);
4827 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4828 temp, 0, OPTAB_LIB_WIDEN);
4829
4830 return tramp;
4831}
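/* Illustrative sketch (not part of builtins.c) of the rounding done above:
   add (alignment - 1) in bytes, then mask with the negated alignment.
   With 16-byte alignment, 0x1003 rounds up to 0x1010.  */

#include <stdint.h>

static uintptr_t round_up_to_alignment (uintptr_t addr, uintptr_t align_bytes)
{
  /* ALIGN_BYTES must be a power of two, as TRAMPOLINE_ALIGNMENT is.  */
  return (addr + align_bytes - 1) & -align_bytes;
}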
4832
4833static rtx
183dd130 4834expand_builtin_init_trampoline (tree exp, bool onstack)
6de9cd9a
DN
4835{
4836 tree t_tramp, t_func, t_chain;
531ca746 4837 rtx m_tramp, r_tramp, r_chain, tmp;
6de9cd9a 4838
5039610b 4839 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6de9cd9a
DN
4840 POINTER_TYPE, VOID_TYPE))
4841 return NULL_RTX;
4842
5039610b
SL
4843 t_tramp = CALL_EXPR_ARG (exp, 0);
4844 t_func = CALL_EXPR_ARG (exp, 1);
4845 t_chain = CALL_EXPR_ARG (exp, 2);
6de9cd9a 4846
84217346 4847 r_tramp = expand_normal (t_tramp);
531ca746
RH
4848 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4849 MEM_NOTRAP_P (m_tramp) = 1;
4850
183dd130
ILT
4851 /* If ONSTACK, the TRAMP argument should be the address of a field
4852 within the local function's FRAME decl. Either way, let's see if
4853 we can fill in the MEM_ATTRs for this memory. */
531ca746 4854 if (TREE_CODE (t_tramp) == ADDR_EXPR)
ad2e5b71 4855 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
531ca746 4856
183dd130
ILT
4857 /* Creator of a heap trampoline is responsible for making sure the
4858 address is aligned to at least STACK_BOUNDARY. Normally malloc
4859 will ensure this anyhow. */
531ca746
RH
4860 tmp = round_trampoline_addr (r_tramp);
4861 if (tmp != r_tramp)
4862 {
4863 m_tramp = change_address (m_tramp, BLKmode, tmp);
4864 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
f5541398 4865 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
531ca746
RH
4866 }
4867
4868 /* The FUNC argument should be the address of the nested function.
4869 Extract the actual function decl to pass to the hook. */
4870 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4871 t_func = TREE_OPERAND (t_func, 0);
4872 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4873
84217346 4874 r_chain = expand_normal (t_chain);
6de9cd9a
DN
4875
4876 /* Generate insns to initialize the trampoline. */
531ca746 4877 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6de9cd9a 4878
183dd130
ILT
4879 if (onstack)
4880 {
4881 trampolines_created = 1;
8ffadef9 4882
183dd130
ILT
4883 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4884 "trampoline generated for nested function %qD", t_func);
4885 }
8ffadef9 4886
6de9cd9a
DN
4887 return const0_rtx;
4888}
4889
4890static rtx
5039610b 4891expand_builtin_adjust_trampoline (tree exp)
6de9cd9a
DN
4892{
4893 rtx tramp;
4894
5039610b 4895 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6de9cd9a
DN
4896 return NULL_RTX;
4897
5039610b 4898 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6de9cd9a 4899 tramp = round_trampoline_addr (tramp);
531ca746
RH
4900 if (targetm.calls.trampoline_adjust_address)
4901 tramp = targetm.calls.trampoline_adjust_address (tramp);
6de9cd9a
DN
4902
4903 return tramp;
4904}
4905
0f67fa83
WG
4906/* Expand the call EXP to the built-in signbit, signbitf or signbitl
4907 function. The function first checks whether the back end provides
4908 an insn to implement signbit for the respective mode. If not, it
4909 checks whether the floating point format of the value is such that
61717a45
FXC
4910 the sign bit can be extracted. If that is not the case, error out.
4911 EXP is the expression that is a call to the builtin function; if
4912 convenient, the result should be placed in TARGET. */
ef79730c
RS
4913static rtx
4914expand_builtin_signbit (tree exp, rtx target)
4915{
4916 const struct real_format *fmt;
ef4bddc2 4917 machine_mode fmode, imode, rmode;
5039610b 4918 tree arg;
e4fbead1 4919 int word, bitpos;
d0c9d431 4920 enum insn_code icode;
ef79730c 4921 rtx temp;
db3927fb 4922 location_t loc = EXPR_LOCATION (exp);
ef79730c 4923
5039610b
SL
4924 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4925 return NULL_RTX;
ef79730c 4926
5039610b 4927 arg = CALL_EXPR_ARG (exp, 0);
ef79730c
RS
4928 fmode = TYPE_MODE (TREE_TYPE (arg));
4929 rmode = TYPE_MODE (TREE_TYPE (exp));
4930 fmt = REAL_MODE_FORMAT (fmode);
4931
0f67fa83
WG
4932 arg = builtin_save_expr (arg);
4933
4934 /* Expand the argument yielding a RTX expression. */
4935 temp = expand_normal (arg);
4936
4937 /* Check if the back end provides an insn that handles signbit for the
4938 argument's mode. */
947131ba 4939 icode = optab_handler (signbit_optab, fmode);
d0c9d431 4940 if (icode != CODE_FOR_nothing)
0f67fa83 4941 {
58f4cf2a 4942 rtx_insn *last = get_last_insn ();
0f67fa83 4943 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8a0b1aa4
MM
4944 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4945 return target;
4946 delete_insns_since (last);
0f67fa83
WG
4947 }
4948
ef79730c
RS
4949 /* For floating point formats without a sign bit, implement signbit
4950 as "ARG < 0.0". */
b87a0206 4951 bitpos = fmt->signbit_ro;
e4fbead1 4952 if (bitpos < 0)
ef79730c
RS
4953 {
4954 /* But we can't do this if the format supports signed zero. */
61717a45 4955 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
ef79730c 4956
db3927fb 4957 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
987b67bc 4958 build_real (TREE_TYPE (arg), dconst0));
ef79730c
RS
4959 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4960 }
4961
e4fbead1 4962 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
ef79730c 4963 {
e4fbead1 4964 imode = int_mode_for_mode (fmode);
61717a45 4965 gcc_assert (imode != BLKmode);
e4fbead1 4966 temp = gen_lowpart (imode, temp);
254878ea
RS
4967 }
4968 else
4969 {
e4fbead1
RS
4970 imode = word_mode;
4971 /* Handle targets with different FP word orders. */
4972 if (FLOAT_WORDS_BIG_ENDIAN)
c22cacf3 4973 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
e4fbead1 4974 else
c22cacf3 4975 word = bitpos / BITS_PER_WORD;
e4fbead1
RS
4976 temp = operand_subword_force (temp, word, fmode);
4977 bitpos = bitpos % BITS_PER_WORD;
4978 }
4979
210e1852
RS
4980 /* Force the intermediate word_mode (or narrower) result into a
4981 register. This avoids attempting to create paradoxical SUBREGs
4982 of floating point modes below. */
4983 temp = force_reg (imode, temp);
4984
e4fbead1
RS
4985 /* If the bitpos is within the "result mode" lowpart, the operation
 4986 can be implemented with a single bitwise AND. Otherwise, we need
4987 a right shift and an AND. */
4988
4989 if (bitpos < GET_MODE_BITSIZE (rmode))
4990 {
807e902e 4991 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
ef79730c 4992
515e442a 4993 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
e4fbead1 4994 temp = gen_lowpart (rmode, temp);
254878ea 4995 temp = expand_binop (rmode, and_optab, temp,
807e902e 4996 immed_wide_int_const (mask, rmode),
e4fbead1 4997 NULL_RTX, 1, OPTAB_LIB_WIDEN);
ef79730c 4998 }
e4fbead1
RS
4999 else
5000 {
5001 /* Perform a logical right shift to place the signbit in the least
c22cacf3 5002 significant bit, then truncate the result to the desired mode
e4fbead1 5003 and mask just this bit. */
eb6c3df1 5004 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
e4fbead1
RS
5005 temp = gen_lowpart (rmode, temp);
5006 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5007 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5008 }
5009
ef79730c
RS
5010 return temp;
5011}
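/* Illustrative sketch (not part of builtins.c), assuming an IEEE binary64
   double with the sign in bit 63: the "shift right, then AND with 1"
   fallback above corresponds to this portable C.  */

#include <stdint.h>
#include <string.h>

static int signbit_of_double (double x)
{
  uint64_t bits;
  memcpy (&bits, &x, sizeof bits);      /* reinterpret the representation */
  return (int) (bits >> 63) & 1;
}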
d1c38823
ZD
5012
5013/* Expand fork or exec calls. TARGET is the desired target of the
5039610b 5014 call. EXP is the call. FN is the
d1c38823
ZD
 5015 identifier of the actual function. IGNORE is nonzero if the
5016 value is to be ignored. */
5017
5018static rtx
5039610b 5019expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
d1c38823
ZD
5020{
5021 tree id, decl;
5022 tree call;
5023
5024 /* If we are not profiling, just call the function. */
5025 if (!profile_arc_flag)
5026 return NULL_RTX;
5027
5028 /* Otherwise call the wrapper. This should be equivalent for the rest of
5029 compiler, so the code does not diverge, and the wrapper may run the
2b8a92de 5030 code necessary for keeping the profiling sane. */
d1c38823
ZD
5031
5032 switch (DECL_FUNCTION_CODE (fn))
5033 {
5034 case BUILT_IN_FORK:
5035 id = get_identifier ("__gcov_fork");
5036 break;
5037
5038 case BUILT_IN_EXECL:
5039 id = get_identifier ("__gcov_execl");
5040 break;
5041
5042 case BUILT_IN_EXECV:
5043 id = get_identifier ("__gcov_execv");
5044 break;
5045
5046 case BUILT_IN_EXECLP:
5047 id = get_identifier ("__gcov_execlp");
5048 break;
5049
5050 case BUILT_IN_EXECLE:
5051 id = get_identifier ("__gcov_execle");
5052 break;
5053
5054 case BUILT_IN_EXECVP:
5055 id = get_identifier ("__gcov_execvp");
5056 break;
5057
5058 case BUILT_IN_EXECVE:
5059 id = get_identifier ("__gcov_execve");
5060 break;
5061
5062 default:
298e6adc 5063 gcc_unreachable ();
d1c38823
ZD
5064 }
5065
c2255bc4
AH
5066 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5067 FUNCTION_DECL, id, TREE_TYPE (fn));
d1c38823
ZD
5068 DECL_EXTERNAL (decl) = 1;
5069 TREE_PUBLIC (decl) = 1;
5070 DECL_ARTIFICIAL (decl) = 1;
5071 TREE_NOTHROW (decl) = 1;
ac382b62
JM
5072 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5073 DECL_VISIBILITY_SPECIFIED (decl) = 1;
db3927fb 5074 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
d1c38823 5075 return expand_call (call, target, ignore);
5039610b 5076 }
b8698a0f 5077
48ae6c13
RH
5078
5079\f
02ee605c
RH
5080/* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5081 the pointer in these functions is void*, the tree optimizers may remove
5082 casts. The mode computed in expand_builtin isn't reliable either, due
5083 to __sync_bool_compare_and_swap.
5084
5085 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5086 group of builtins. This gives us log2 of the mode size. */
5087
ef4bddc2 5088static inline machine_mode
02ee605c
RH
5089get_builtin_sync_mode (int fcode_diff)
5090{
2de0aa52
HPN
5091 /* The size is not negotiable, so ask not to get BLKmode in return
5092 if the target indicates that a smaller size would be better. */
5093 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
02ee605c
RH
5094}
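/* Illustrative sketch (not part of builtins.c): the _1/_2/_4/_8/_16 variants
   of each __sync builtin are numbered consecutively, so FCODE_DIFF is log2
   of the access size in bytes; the _4 variant, for instance, requests a
   BITS_PER_UNIT << 2 = 32-bit integer mode.  */

static unsigned sync_access_bits (unsigned fcode_diff)
{
  return 8u << fcode_diff;      /* assumes 8-bit units, as BITS_PER_UNIT above */
}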
5095
1387fef3
AS
5096/* Expand the memory expression LOC and return the appropriate memory operand
5097 for the builtin_sync operations. */
5098
5099static rtx
ef4bddc2 5100get_builtin_sync_mem (tree loc, machine_mode mode)
1387fef3
AS
5101{
5102 rtx addr, mem;
5103
f46835f5
JJ
5104 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5105 addr = convert_memory_address (Pmode, addr);
1387fef3
AS
5106
5107 /* Note that we explicitly do not want any alias information for this
5108 memory, so that we kill all other live memories. Otherwise we don't
5109 satisfy the full barrier semantics of the intrinsic. */
5110 mem = validize_mem (gen_rtx_MEM (mode, addr));
5111
1be38ccb
RG
5112 /* The alignment needs to be at least according to that of the mode. */
5113 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
0eb77834 5114 get_pointer_alignment (loc)));
9cd9e512 5115 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
1387fef3
AS
5116 MEM_VOLATILE_P (mem) = 1;
5117
5118 return mem;
5119}
5120
86951993
AM
5121/* Make sure an argument is in the right mode.
5122 EXP is the tree argument.
5123 MODE is the mode it should be in. */
5124
5125static rtx
ef4bddc2 5126expand_expr_force_mode (tree exp, machine_mode mode)
86951993
AM
5127{
5128 rtx val;
ef4bddc2 5129 machine_mode old_mode;
86951993
AM
5130
5131 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5132 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5133 of CONST_INTs, where we know the old_mode only from the call argument. */
5134
5135 old_mode = GET_MODE (val);
5136 if (old_mode == VOIDmode)
5137 old_mode = TYPE_MODE (TREE_TYPE (exp));
5138 val = convert_modes (mode, old_mode, val, 1);
5139 return val;
5140}
5141
5142
48ae6c13 5143/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5039610b 5144 EXP is the CALL_EXPR. CODE is the rtx code
48ae6c13
RH
5145 that corresponds to the arithmetic or logical operation from the name;
5146 an exception here is that NOT actually means NAND. TARGET is an optional
5147 place for us to store the results; AFTER is true if this is the
86951993 5148 fetch_and_xxx form. */
48ae6c13
RH
5149
5150static rtx
ef4bddc2 5151expand_builtin_sync_operation (machine_mode mode, tree exp,
02ee605c 5152 enum rtx_code code, bool after,
86951993 5153 rtx target)
48ae6c13 5154{
1387fef3 5155 rtx val, mem;
c2255bc4 5156 location_t loc = EXPR_LOCATION (exp);
48ae6c13 5157
23462d4d
UB
5158 if (code == NOT && warn_sync_nand)
5159 {
5160 tree fndecl = get_callee_fndecl (exp);
5161 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5162
5163 static bool warned_f_a_n, warned_n_a_f;
5164
5165 switch (fcode)
5166 {
e0a8ecf2
AM
5167 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5168 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5169 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5170 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5171 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
23462d4d
UB
5172 if (warned_f_a_n)
5173 break;
5174
e79983f4 5175 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
c2255bc4 5176 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
5177 warned_f_a_n = true;
5178 break;
5179
e0a8ecf2
AM
5180 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5181 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5182 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5183 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5184 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
23462d4d
UB
5185 if (warned_n_a_f)
5186 break;
5187
e79983f4 5188 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
c2255bc4 5189 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
23462d4d
UB
5190 warned_n_a_f = true;
5191 break;
5192
5193 default:
5194 gcc_unreachable ();
5195 }
5196 }
5197
48ae6c13 5198 /* Expand the operands. */
5039610b 5199 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993 5200 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
48ae6c13 5201
46b35980 5202 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
86951993 5203 after);
48ae6c13
RH
5204}
5205
5206/* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5039610b 5207 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
48ae6c13
RH
5208 true if this is the boolean form. TARGET is a place for us to store the
5209 results; this is NOT optional if IS_BOOL is true. */
5210
5211static rtx
ef4bddc2 5212expand_builtin_compare_and_swap (machine_mode mode, tree exp,
02ee605c 5213 bool is_bool, rtx target)
48ae6c13 5214{
1387fef3 5215 rtx old_val, new_val, mem;
f0409b19 5216 rtx *pbool, *poval;
48ae6c13
RH
5217
5218 /* Expand the operands. */
5039610b 5219 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
5220 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5221 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
48ae6c13 5222
f0409b19
RH
5223 pbool = poval = NULL;
5224 if (target != const0_rtx)
5225 {
5226 if (is_bool)
5227 pbool = &target;
5228 else
5229 poval = &target;
5230 }
5231 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
46b35980
AM
5232 false, MEMMODEL_SYNC_SEQ_CST,
5233 MEMMODEL_SYNC_SEQ_CST))
86951993 5234 return NULL_RTX;
5039610b 5235
86951993 5236 return target;
48ae6c13
RH
5237}
5238
5239/* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5240 general form is actually an atomic exchange, and some targets only
5241 support a reduced form with the second argument being a constant 1.
b8698a0f 5242 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5039610b 5243 the results. */
48ae6c13
RH
5244
5245static rtx
ef4bddc2 5246expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
86951993 5247 rtx target)
48ae6c13 5248{
1387fef3 5249 rtx val, mem;
48ae6c13
RH
5250
5251 /* Expand the operands. */
5039610b 5252 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
86951993
AM
5253 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5254
744accb2 5255 return expand_sync_lock_test_and_set (target, mem, val);
86951993
AM
5256}
5257
5258/* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5259
5260static void
ef4bddc2 5261expand_builtin_sync_lock_release (machine_mode mode, tree exp)
86951993
AM
5262{
5263 rtx mem;
5264
5265 /* Expand the operands. */
5266 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5267
46b35980 5268 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
86951993
AM
5269}
5270
5271/* Given an integer representing an ``enum memmodel'', verify its
5272 correctness and return the memory model enum. */
5273
5274static enum memmodel
5275get_memmodel (tree exp)
5276{
5277 rtx op;
5dcfdccd 5278 unsigned HOST_WIDE_INT val;
86951993
AM
5279
5280 /* If the parameter is not a constant, it's a run time value so we'll just
5281 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5282 if (TREE_CODE (exp) != INTEGER_CST)
5283 return MEMMODEL_SEQ_CST;
5284
5285 op = expand_normal (exp);
5dcfdccd
KY
5286
5287 val = INTVAL (op);
5288 if (targetm.memmodel_check)
5289 val = targetm.memmodel_check (val);
5290 else if (val & ~MEMMODEL_MASK)
5291 {
5292 warning (OPT_Winvalid_memory_model,
5293 "Unknown architecture specifier in memory model to builtin.");
5294 return MEMMODEL_SEQ_CST;
5295 }
5296
46b35980
AM
 5297 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5298 if (memmodel_base (val) >= MEMMODEL_LAST)
86951993
AM
5299 {
5300 warning (OPT_Winvalid_memory_model,
5301 "invalid memory model argument to builtin");
5302 return MEMMODEL_SEQ_CST;
5303 }
5dcfdccd 5304
8673b671
AM
5305 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5306 be conservative and promote consume to acquire. */
5307 if (val == MEMMODEL_CONSUME)
5308 val = MEMMODEL_ACQUIRE;
5309
5dcfdccd 5310 return (enum memmodel) val;
86951993
AM
5311}
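/* Illustrative sketch (not part of builtins.c): the value checked above is
   the __ATOMIC_* constant the user passes at the source level, e.g.:  */

static int load_relaxed (int *p)
{
  return __atomic_load_n (p, __ATOMIC_RELAXED);
}

static int load_consume (int *p)
{
  /* Per the PR 59448 workaround above, consume is promoted to acquire.  */
  return __atomic_load_n (p, __ATOMIC_CONSUME);
}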
5312
5313/* Expand the __atomic_exchange intrinsic:
5314 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5315 EXP is the CALL_EXPR.
5316 TARGET is an optional place for us to store the results. */
5317
5318static rtx
ef4bddc2 5319expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
86951993
AM
5320{
5321 rtx val, mem;
5322 enum memmodel model;
5323
5324 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
86951993
AM
5325
5326 if (!flag_inline_atomics)
5327 return NULL_RTX;
5328
5329 /* Expand the operands. */
5330 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5331 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5332
744accb2 5333 return expand_atomic_exchange (target, mem, val, model);
86951993
AM
5334}
5335
5336/* Expand the __atomic_compare_exchange intrinsic:
5337 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5338 TYPE desired, BOOL weak,
5339 enum memmodel success,
5340 enum memmodel failure)
5341 EXP is the CALL_EXPR.
5342 TARGET is an optional place for us to store the results. */
5343
5344static rtx
ef4bddc2 5345expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
86951993
AM
5346 rtx target)
5347{
58f4cf2a
DM
5348 rtx expect, desired, mem, oldval;
5349 rtx_code_label *label;
86951993
AM
5350 enum memmodel success, failure;
5351 tree weak;
5352 bool is_weak;
5353
5354 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5355 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5356
77df5327
AM
5357 if (failure > success)
5358 {
5359 warning (OPT_Winvalid_memory_model,
5360 "failure memory model cannot be stronger than success memory "
5361 "model for %<__atomic_compare_exchange%>");
5362 success = MEMMODEL_SEQ_CST;
5363 }
5364
46b35980 5365 if (is_mm_release (failure) || is_mm_acq_rel (failure))
86951993 5366 {
77df5327
AM
5367 warning (OPT_Winvalid_memory_model,
5368 "invalid failure memory model for "
5369 "%<__atomic_compare_exchange%>");
5370 failure = MEMMODEL_SEQ_CST;
5371 success = MEMMODEL_SEQ_CST;
86951993
AM
5372 }
5373
77df5327 5374
86951993
AM
5375 if (!flag_inline_atomics)
5376 return NULL_RTX;
5377
5378 /* Expand the operands. */
5379 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5380
5381 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5382 expect = convert_memory_address (Pmode, expect);
215770ad 5383 expect = gen_rtx_MEM (mode, expect);
86951993
AM
5384 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5385
5386 weak = CALL_EXPR_ARG (exp, 3);
5387 is_weak = false;
9439e9a1 5388 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
86951993
AM
5389 is_weak = true;
5390
672ce939
RH
5391 if (target == const0_rtx)
5392 target = NULL;
672ce939 5393
2fdc29e8
RH
 5394 /* Lest the rtl backend create a race condition with an improper store
5395 to memory, always create a new pseudo for OLDVAL. */
5396 oldval = NULL;
5397
5398 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
f0409b19 5399 is_weak, success, failure))
86951993
AM
5400 return NULL_RTX;
5401
672ce939
RH
5402 /* Conditionally store back to EXPECT, lest we create a race condition
5403 with an improper store to memory. */
5404 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5405 the normal case where EXPECT is totally private, i.e. a register. At
5406 which point the store can be unconditional. */
5407 label = gen_label_rtx ();
f8940d4a
JG
5408 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5409 GET_MODE (target), 1, label);
672ce939
RH
5410 emit_move_insn (expect, oldval);
5411 emit_label (label);
215770ad 5412
86951993
AM
5413 return target;
5414}
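/* Illustrative sketch (not part of builtins.c): a source-level CAS loop.
   On failure the value observed in *P is written back through the second
   argument, which is what the conditional store after the label above
   implements.  */

static void atomic_add_via_cas (int *p, int delta)
{
  int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  while (!__atomic_compare_exchange_n (p, &expected, expected + delta,
                                       /*weak=*/1,
                                       __ATOMIC_SEQ_CST,   /* success */
                                       __ATOMIC_RELAXED))  /* failure */
    /* EXPECTED was refreshed with the current value of *P; retry.  */;
}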
5415
5416/* Expand the __atomic_load intrinsic:
5417 TYPE __atomic_load (TYPE *object, enum memmodel)
5418 EXP is the CALL_EXPR.
5419 TARGET is an optional place for us to store the results. */
5420
5421static rtx
ef4bddc2 5422expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
86951993
AM
5423{
5424 rtx mem;
5425 enum memmodel model;
5426
5427 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
46b35980 5428 if (is_mm_release (model) || is_mm_acq_rel (model))
86951993 5429 {
77df5327
AM
5430 warning (OPT_Winvalid_memory_model,
5431 "invalid memory model for %<__atomic_load%>");
5432 model = MEMMODEL_SEQ_CST;
86951993
AM
5433 }
5434
5435 if (!flag_inline_atomics)
5436 return NULL_RTX;
5437
5438 /* Expand the operand. */
5439 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5440
5441 return expand_atomic_load (target, mem, model);
5442}
5443
5444
5445/* Expand the __atomic_store intrinsic:
5446 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5447 EXP is the CALL_EXPR.
5448 TARGET is an optional place for us to store the results. */
5449
5450static rtx
ef4bddc2 5451expand_builtin_atomic_store (machine_mode mode, tree exp)
86951993
AM
5452{
5453 rtx mem, val;
5454 enum memmodel model;
5455
5456 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
46b35980
AM
5457 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5458 || is_mm_release (model)))
86951993 5459 {
77df5327
AM
5460 warning (OPT_Winvalid_memory_model,
5461 "invalid memory model for %<__atomic_store%>");
5462 model = MEMMODEL_SEQ_CST;
86951993
AM
5463 }
5464
5465 if (!flag_inline_atomics)
5466 return NULL_RTX;
5467
5468 /* Expand the operands. */
5469 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5470 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5471
0669295b 5472 return expand_atomic_store (mem, val, model, false);
86951993
AM
5473}
5474
5475/* Expand the __atomic_fetch_XXX intrinsic:
5476 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5477 EXP is the CALL_EXPR.
5478 TARGET is an optional place for us to store the results.
5479 CODE is the operation, PLUS, MINUS, ADD, XOR, or IOR.
5480 FETCH_AFTER is true if returning the result of the operation.
5481 FETCH_AFTER is false if returning the value before the operation.
5482 IGNORE is true if the result is not used.
5483 EXT_CALL is the correct builtin for an external call if this cannot be
5484 resolved to an instruction sequence. */
5485
5486static rtx
ef4bddc2 5487expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
86951993
AM
5488 enum rtx_code code, bool fetch_after,
5489 bool ignore, enum built_in_function ext_call)
5490{
5491 rtx val, mem, ret;
5492 enum memmodel model;
5493 tree fndecl;
5494 tree addr;
5495
5496 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5497
5498 /* Expand the operands. */
5499 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5500 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5501
5502 /* Only try generating instructions if inlining is turned on. */
5503 if (flag_inline_atomics)
5504 {
5505 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5506 if (ret)
5507 return ret;
5508 }
5509
5510 /* Return if a different routine isn't needed for the library call. */
5511 if (ext_call == BUILT_IN_NONE)
5512 return NULL_RTX;
5513
5514 /* Change the call to the specified function. */
5515 fndecl = get_callee_fndecl (exp);
5516 addr = CALL_EXPR_FN (exp);
5517 STRIP_NOPS (addr);
5518
5519 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
c3284718 5520 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
86951993
AM
5521
5522 /* Expand the call here so we can emit trailing code. */
5523 ret = expand_call (exp, target, ignore);
5524
5525 /* Replace the original function just in case it matters. */
5526 TREE_OPERAND (addr, 0) = fndecl;
5527
5528 /* Then issue the arithmetic correction to return the right result. */
5529 if (!ignore)
154b68db
AM
5530 {
5531 if (code == NOT)
5532 {
5533 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5534 OPTAB_LIB_WIDEN);
5535 ret = expand_simple_unop (mode, NOT, ret, target, true);
5536 }
5537 else
5538 ret = expand_simple_binop (mode, code, ret, val, target, true,
5539 OPTAB_LIB_WIDEN);
5540 }
86951993
AM
5541 return ret;
5542}
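/* Illustrative sketch (not part of builtins.c) of the trailing correction
   above: when only the fetch-before-op library call is available, the
   "op_fetch" result is recomputed from the returned old value.  */

static unsigned nand_fetch_from_fetch_nand (unsigned old_val, unsigned val)
{
  return ~(old_val & val);      /* the NOT (i.e. NAND) special case */
}

static unsigned add_fetch_from_fetch_add (unsigned old_val, unsigned val)
{
  return old_val + val;         /* the generic CODE case, here PLUS */
}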
5543
d660c35e
AM
5544/* Expand an atomic clear operation.
5545 void _atomic_clear (BOOL *obj, enum memmodel)
5546 EXP is the call expression. */
5547
5548static rtx
5549expand_builtin_atomic_clear (tree exp)
5550{
ef4bddc2 5551 machine_mode mode;
d660c35e
AM
5552 rtx mem, ret;
5553 enum memmodel model;
5554
5555 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5556 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5557 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5558
46b35980 5559 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
d660c35e 5560 {
77df5327
AM
5561 warning (OPT_Winvalid_memory_model,
5562 "invalid memory model for %<__atomic_store%>");
5563 model = MEMMODEL_SEQ_CST;
d660c35e
AM
5564 }
5565
5566 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5567 Failing that, a store is issued by __atomic_store. The only way this can
5568 fail is if the bool type is larger than a word size. Unlikely, but
5569 handle it anyway for completeness. Assume a single threaded model since
5570 there is no atomic support in this case, and no barriers are required. */
5571 ret = expand_atomic_store (mem, const0_rtx, model, true);
5572 if (!ret)
5573 emit_move_insn (mem, const0_rtx);
5574 return const0_rtx;
5575}
5576
5577/* Expand an atomic test_and_set operation.
5578 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5579 EXP is the call expression. */
5580
5581static rtx
744accb2 5582expand_builtin_atomic_test_and_set (tree exp, rtx target)
d660c35e 5583{
744accb2 5584 rtx mem;
d660c35e 5585 enum memmodel model;
ef4bddc2 5586 machine_mode mode;
d660c35e
AM
5587
5588 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5589 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5590 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5591
744accb2 5592 return expand_atomic_test_and_set (target, mem, model);
d660c35e
AM
5593}
5594
5595
86951993
AM
5596/* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5597 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5598
5599static tree
5600fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5601{
5602 int size;
ef4bddc2 5603 machine_mode mode;
86951993
AM
5604 unsigned int mode_align, type_align;
5605
5606 if (TREE_CODE (arg0) != INTEGER_CST)
5607 return NULL_TREE;
48ae6c13 5608
86951993
AM
5609 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5610 mode = mode_for_size (size, MODE_INT, 0);
5611 mode_align = GET_MODE_ALIGNMENT (mode);
5612
310055e7
JW
5613 if (TREE_CODE (arg1) == INTEGER_CST)
5614 {
5615 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5616
5617 /* Either this argument is null, or it's a fake pointer encoding
5618 the alignment of the object. */
5619 val = val & -val;
5620 val *= BITS_PER_UNIT;
5621
5622 if (val == 0 || mode_align < val)
5623 type_align = mode_align;
5624 else
5625 type_align = val;
5626 }
86951993
AM
5627 else
5628 {
5629 tree ttype = TREE_TYPE (arg1);
5630
5631 /* This function is usually invoked and folded immediately by the front
5632 end before anything else has a chance to look at it. The pointer
5633 parameter at this point is usually cast to a void *, so check for that
5634 and look past the cast. */
625a9766 5635 if (CONVERT_EXPR_P (arg1) && POINTER_TYPE_P (ttype)
86951993
AM
5636 && VOID_TYPE_P (TREE_TYPE (ttype)))
5637 arg1 = TREE_OPERAND (arg1, 0);
5638
5639 ttype = TREE_TYPE (arg1);
5640 gcc_assert (POINTER_TYPE_P (ttype));
5641
5642 /* Get the underlying type of the object. */
5643 ttype = TREE_TYPE (ttype);
5644 type_align = TYPE_ALIGN (ttype);
5645 }
5646
026c3cfd 5647 /* If the object has smaller alignment, the lock free routines cannot
86951993
AM
5648 be used. */
5649 if (type_align < mode_align)
58d38fd2 5650 return boolean_false_node;
86951993
AM
5651
5652 /* Check if a compare_and_swap pattern exists for the mode which represents
5653 the required size. The pattern is not allowed to fail, so the existence
5654 of the pattern indicates support is present. */
cedb4a1a 5655 if (can_compare_and_swap_p (mode, true))
58d38fd2 5656 return boolean_true_node;
86951993 5657 else
58d38fd2 5658 return boolean_false_node;
86951993
AM
5659}
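/* Illustrative sketch (not part of builtins.c): when ARG1 is an integer
   constant it is a fake pointer whose lowest set bit encodes the object's
   alignment, extracted above with val & -val.  For example 0x1008 has
   lowest set bit 0x8, i.e. 8-byte (64-bit) alignment.  */

#include <stdint.h>

static uint64_t encoded_alignment_bits (uint64_t fake_ptr)
{
  uint64_t align_bytes = fake_ptr & -fake_ptr;  /* lowest set bit */
  return align_bytes * 8;                       /* BITS_PER_UNIT */
}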
5660
5661/* Return true if the parameters to call EXP represent an object which will
5662 always generate lock free instructions. The first argument represents the
5663 size of the object, and the second parameter is a pointer to the object
5664 itself. If NULL is passed for the object, then the result is based on
5665 typical alignment for an object of the specified size. Otherwise return
5666 false. */
5667
5668static rtx
5669expand_builtin_atomic_always_lock_free (tree exp)
5670{
5671 tree size;
5672 tree arg0 = CALL_EXPR_ARG (exp, 0);
5673 tree arg1 = CALL_EXPR_ARG (exp, 1);
5674
5675 if (TREE_CODE (arg0) != INTEGER_CST)
5676 {
5677 error ("non-constant argument 1 to __atomic_always_lock_free");
5678 return const0_rtx;
5679 }
5680
5681 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
58d38fd2 5682 if (size == boolean_true_node)
86951993
AM
5683 return const1_rtx;
5684 return const0_rtx;
5685}
5686
 5687/* Return one or zero if it can be determined that object ARG1 of size ARG0
5688 is lock free on this architecture. */
5689
5690static tree
5691fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5692{
5693 if (!flag_inline_atomics)
5694 return NULL_TREE;
5695
5696 /* If it isn't always lock free, don't generate a result. */
58d38fd2
JJ
5697 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5698 return boolean_true_node;
86951993
AM
5699
5700 return NULL_TREE;
5701}
5702
5703/* Return true if the parameters to call EXP represent an object which will
5704 always generate lock free instructions. The first argument represents the
5705 size of the object, and the second parameter is a pointer to the object
5706 itself. If NULL is passed for the object, then the result is based on
5707 typical alignment for an object of the specified size. Otherwise return
 5708 NULL. */
5709
5710static rtx
5711expand_builtin_atomic_is_lock_free (tree exp)
5712{
5713 tree size;
5714 tree arg0 = CALL_EXPR_ARG (exp, 0);
5715 tree arg1 = CALL_EXPR_ARG (exp, 1);
5716
5717 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5718 {
5719 error ("non-integer argument 1 to __atomic_is_lock_free");
5720 return NULL_RTX;
5721 }
5722
5723 if (!flag_inline_atomics)
5724 return NULL_RTX;
5725
5726 /* If the value is known at compile time, return the RTX for it. */
5727 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
58d38fd2 5728 if (size == boolean_true_node)
86951993
AM
5729 return const1_rtx;
5730
5731 return NULL_RTX;
5732}
5733
86951993
AM
5734/* Expand the __atomic_thread_fence intrinsic:
5735 void __atomic_thread_fence (enum memmodel)
5736 EXP is the CALL_EXPR. */
5737
5738static void
5739expand_builtin_atomic_thread_fence (tree exp)
5740{
c39169c8
RH
5741 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5742 expand_mem_thread_fence (model);
86951993
AM
5743}
5744
5745/* Expand the __atomic_signal_fence intrinsic:
5746 void __atomic_signal_fence (enum memmodel)
5747 EXP is the CALL_EXPR. */
5748
5749static void
5750expand_builtin_atomic_signal_fence (tree exp)
5751{
c39169c8
RH
5752 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5753 expand_mem_signal_fence (model);
48ae6c13
RH
5754}
5755
5756/* Expand the __sync_synchronize intrinsic. */
5757
5758static void
e0a8ecf2 5759expand_builtin_sync_synchronize (void)
48ae6c13 5760{
46b35980 5761 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
48ae6c13
RH
5762}
5763
f959607b
CLT
5764static rtx
5765expand_builtin_thread_pointer (tree exp, rtx target)
5766{
5767 enum insn_code icode;
5768 if (!validate_arglist (exp, VOID_TYPE))
5769 return const0_rtx;
5770 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5771 if (icode != CODE_FOR_nothing)
5772 {
5773 struct expand_operand op;
b8a542c6
AP
 5774 /* If the target is not suitable then create a new target. */
5775 if (target == NULL_RTX
5776 || !REG_P (target)
5777 || GET_MODE (target) != Pmode)
f959607b
CLT
5778 target = gen_reg_rtx (Pmode);
5779 create_output_operand (&op, target, Pmode);
5780 expand_insn (icode, 1, &op);
5781 return target;
5782 }
5783 error ("__builtin_thread_pointer is not supported on this target");
5784 return const0_rtx;
5785}
5786
5787static void
5788expand_builtin_set_thread_pointer (tree exp)
5789{
5790 enum insn_code icode;
5791 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5792 return;
5793 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5794 if (icode != CODE_FOR_nothing)
5795 {
5796 struct expand_operand op;
5797 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5798 Pmode, EXPAND_NORMAL);
5440a1b0 5799 create_input_operand (&op, val, Pmode);
f959607b
CLT
5800 expand_insn (icode, 1, &op);
5801 return;
5802 }
5803 error ("__builtin_set_thread_pointer is not supported on this target");
5804}
5805
28f4ec01 5806\f
862d0b35
DN
5807/* Emit code to restore the current value of stack. */
5808
5809static void
5810expand_stack_restore (tree var)
5811{
58f4cf2a
DM
5812 rtx_insn *prev;
5813 rtx sa = expand_normal (var);
862d0b35
DN
5814
5815 sa = convert_memory_address (Pmode, sa);
5816
5817 prev = get_last_insn ();
5818 emit_stack_restore (SAVE_BLOCK, sa);
d33606c3
EB
5819
5820 record_new_stack_level ();
5821
862d0b35
DN
5822 fixup_args_size_notes (prev, get_last_insn (), 0);
5823}
5824
862d0b35
DN
5825/* Emit code to save the current value of stack. */
5826
5827static rtx
5828expand_stack_save (void)
5829{
5830 rtx ret = NULL_RTX;
5831
862d0b35
DN
5832 emit_stack_save (SAVE_BLOCK, &ret);
5833 return ret;
5834}
5835
41dbbb37 5836
28f4ec01
BS
5837/* Expand an expression EXP that calls a built-in function,
5838 with result going to TARGET if that's convenient
5839 (and in mode MODE if that's convenient).
5840 SUBTARGET may be used as the target for computing one of EXP's operands.
5841 IGNORE is nonzero if the value is to be ignored. */
5842
5843rtx
ef4bddc2 5844expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
4682ae04 5845 int ignore)
28f4ec01 5846{
2f503025 5847 tree fndecl = get_callee_fndecl (exp);
28f4ec01 5848 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
ef4bddc2 5849 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9e3920e9 5850 int flags;
28f4ec01 5851
d51151b2
JJ
5852 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5853 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5854
bdea98ca
MO
5855 /* When ASan is enabled, we don't want to expand some memory/string
5856 builtins and rely on libsanitizer's hooks. This allows us to avoid
 5857 redundant checks and be sure that possible overflow will be detected
5858 by ASan. */
5859
5860 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5861 return expand_call (exp, target, ignore);
5862
28f4ec01
BS
5863 /* When not optimizing, generate calls to library functions for a certain
5864 set of builtins. */
d25225de 5865 if (!optimize
48ae6c13 5866 && !called_as_built_in (fndecl)
63bf9a90
JH
5867 && fcode != BUILT_IN_FORK
5868 && fcode != BUILT_IN_EXECL
5869 && fcode != BUILT_IN_EXECV
5870 && fcode != BUILT_IN_EXECLP
5871 && fcode != BUILT_IN_EXECLE
5872 && fcode != BUILT_IN_EXECVP
5873 && fcode != BUILT_IN_EXECVE
f9555f40 5874 && fcode != BUILT_IN_ALLOCA
13e49da9 5875 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
d5e254e1
IE
5876 && fcode != BUILT_IN_FREE
5877 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5878 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5879 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5880 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5881 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5882 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5883 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5884 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5885 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5886 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5887 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5888 && fcode != BUILT_IN_CHKP_BNDRET)
d25225de 5889 return expand_call (exp, target, ignore);
28f4ec01 5890
0a45ec5c
RS
5891 /* The built-in function expanders test for target == const0_rtx
5892 to determine whether the function's result will be ignored. */
5893 if (ignore)
5894 target = const0_rtx;
5895
5896 /* If the result of a pure or const built-in function is ignored, and
5897 none of its arguments are volatile, we can avoid expanding the
5898 built-in call and just evaluate the arguments for side-effects. */
5899 if (target == const0_rtx
9e3920e9
JJ
5900 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5901 && !(flags & ECF_LOOPING_CONST_OR_PURE))
0a45ec5c
RS
5902 {
5903 bool volatilep = false;
5904 tree arg;
5039610b 5905 call_expr_arg_iterator iter;
0a45ec5c 5906
5039610b
SL
5907 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5908 if (TREE_THIS_VOLATILE (arg))
0a45ec5c
RS
5909 {
5910 volatilep = true;
5911 break;
5912 }
5913
5914 if (! volatilep)
5915 {
5039610b
SL
5916 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5917 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
0a45ec5c
RS
5918 return const0_rtx;
5919 }
5920 }
5921
edcf72f3
IE
5922 /* expand_builtin_with_bounds is supposed to be used for
5923 instrumented builtin calls. */
d5e254e1
IE
5924 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5925
28f4ec01
BS
5926 switch (fcode)
5927 {
ea6a6627 5928 CASE_FLT_FN (BUILT_IN_FABS):
e2323f5b
PB
5929 case BUILT_IN_FABSD32:
5930 case BUILT_IN_FABSD64:
5931 case BUILT_IN_FABSD128:
5039610b 5932 target = expand_builtin_fabs (exp, target, subtarget);
075ec276 5933 if (target)
c22cacf3 5934 return target;
075ec276
RS
5935 break;
5936
ea6a6627 5937 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5039610b 5938 target = expand_builtin_copysign (exp, target, subtarget);
046625fa
RH
5939 if (target)
5940 return target;
5941 break;
5942
5906d013
EC
5943 /* Just do a normal library call if we were unable to fold
5944 the values. */
ea6a6627 5945 CASE_FLT_FN (BUILT_IN_CABS):
075ec276 5946 break;
28f4ec01 5947
ea6a6627
VR
5948 CASE_FLT_FN (BUILT_IN_EXP):
5949 CASE_FLT_FN (BUILT_IN_EXP10):
5950 CASE_FLT_FN (BUILT_IN_POW10):
5951 CASE_FLT_FN (BUILT_IN_EXP2):
5952 CASE_FLT_FN (BUILT_IN_EXPM1):
5953 CASE_FLT_FN (BUILT_IN_LOGB):
ea6a6627
VR
5954 CASE_FLT_FN (BUILT_IN_LOG):
5955 CASE_FLT_FN (BUILT_IN_LOG10):
5956 CASE_FLT_FN (BUILT_IN_LOG2):
5957 CASE_FLT_FN (BUILT_IN_LOG1P):
5958 CASE_FLT_FN (BUILT_IN_TAN):
5959 CASE_FLT_FN (BUILT_IN_ASIN):
5960 CASE_FLT_FN (BUILT_IN_ACOS):
5961 CASE_FLT_FN (BUILT_IN_ATAN):
dc6707b8 5962 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
de6c5979
BL
5963 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5964 because of possible accuracy problems. */
5965 if (! flag_unsafe_math_optimizations)
28f4ec01 5966 break;
ea6a6627
VR
5967 CASE_FLT_FN (BUILT_IN_SQRT):
5968 CASE_FLT_FN (BUILT_IN_FLOOR):
5969 CASE_FLT_FN (BUILT_IN_CEIL):
5970 CASE_FLT_FN (BUILT_IN_TRUNC):
5971 CASE_FLT_FN (BUILT_IN_ROUND):
5972 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5973 CASE_FLT_FN (BUILT_IN_RINT):
28f4ec01
BS
5974 target = expand_builtin_mathfn (exp, target, subtarget);
5975 if (target)
5976 return target;
5977 break;
5978
1b1562a5
MM
5979 CASE_FLT_FN (BUILT_IN_FMA):
5980 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5981 if (target)
5982 return target;
5983 break;
5984
eaee4464
UB
5985 CASE_FLT_FN (BUILT_IN_ILOGB):
5986 if (! flag_unsafe_math_optimizations)
5987 break;
9ed4207f 5988 CASE_FLT_FN (BUILT_IN_ISINF):
0c8d3c2b
KG
5989 CASE_FLT_FN (BUILT_IN_FINITE):
5990 case BUILT_IN_ISFINITE:
8a91c45b 5991 case BUILT_IN_ISNORMAL:
4359dc2a 5992 target = expand_builtin_interclass_mathfn (exp, target);
eaee4464
UB
5993 if (target)
5994 return target;
5995 break;
5996
6c32ee74 5997 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
5998 CASE_FLT_FN (BUILT_IN_LCEIL):
5999 CASE_FLT_FN (BUILT_IN_LLCEIL):
6000 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 6001 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 6002 CASE_FLT_FN (BUILT_IN_LLFLOOR):
1856c8dc 6003 target = expand_builtin_int_roundingfn (exp, target);
d8b42d06
UB
6004 if (target)
6005 return target;
6006 break;
6007
6c32ee74 6008 CASE_FLT_FN (BUILT_IN_IRINT):
0bfa1541
RG
6009 CASE_FLT_FN (BUILT_IN_LRINT):
6010 CASE_FLT_FN (BUILT_IN_LLRINT):
6c32ee74 6011 CASE_FLT_FN (BUILT_IN_IROUND):
4d81bf84
RG
6012 CASE_FLT_FN (BUILT_IN_LROUND):
6013 CASE_FLT_FN (BUILT_IN_LLROUND):
1856c8dc 6014 target = expand_builtin_int_roundingfn_2 (exp, target);
0bfa1541
RG
6015 if (target)
6016 return target;
6017 break;
6018
ea6a6627 6019 CASE_FLT_FN (BUILT_IN_POWI):
4359dc2a 6020 target = expand_builtin_powi (exp, target);
17684d46
RG
6021 if (target)
6022 return target;
6023 break;
6024
ea6a6627
VR
6025 CASE_FLT_FN (BUILT_IN_ATAN2):
6026 CASE_FLT_FN (BUILT_IN_LDEXP):
0c0d910d
KG
6027 CASE_FLT_FN (BUILT_IN_SCALB):
6028 CASE_FLT_FN (BUILT_IN_SCALBN):
6029 CASE_FLT_FN (BUILT_IN_SCALBLN):
b5e01d4b
RS
6030 if (! flag_unsafe_math_optimizations)
6031 break;
17b98269
UB
6032
6033 CASE_FLT_FN (BUILT_IN_FMOD):
6034 CASE_FLT_FN (BUILT_IN_REMAINDER):
6035 CASE_FLT_FN (BUILT_IN_DREM):
3906ea1b 6036 CASE_FLT_FN (BUILT_IN_POW):
b5e01d4b
RS
6037 target = expand_builtin_mathfn_2 (exp, target, subtarget);
6038 if (target)
6039 return target;
6040 break;
6041
75c7c595 6042 CASE_FLT_FN (BUILT_IN_CEXPI):
4359dc2a 6043 target = expand_builtin_cexpi (exp, target);
75c7c595
RG
6044 gcc_assert (target);
6045 return target;
6046
ea6a6627
VR
6047 CASE_FLT_FN (BUILT_IN_SIN):
6048 CASE_FLT_FN (BUILT_IN_COS):
6c7cf1f0
UB
6049 if (! flag_unsafe_math_optimizations)
6050 break;
6051 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6052 if (target)
6053 return target;
6054 break;
6055
403e54f0
RG
6056 CASE_FLT_FN (BUILT_IN_SINCOS):
6057 if (! flag_unsafe_math_optimizations)
6058 break;
6059 target = expand_builtin_sincos (exp);
6060 if (target)
6061 return target;
6062 break;
6063
28f4ec01
BS
6064 case BUILT_IN_APPLY_ARGS:
6065 return expand_builtin_apply_args ();
6066
6067 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6068 FUNCTION with a copy of the parameters described by
6069 ARGUMENTS, and ARGSIZE. It returns a block of memory
 6070 allocated on the stack into which are stored all the registers
6071 that might possibly be used for returning the result of a
6072 function. ARGUMENTS is the value returned by
6073 __builtin_apply_args. ARGSIZE is the number of bytes of
6074 arguments that must be copied. ??? How should this value be
6075 computed? We'll also need a safe worst case value for varargs
6076 functions. */
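  /* An illustrative sketch of the documented trio (OTHER_FN and the
     worst-case size of 64 bytes are assumptions, not from the original
     source):

       void *args = __builtin_apply_args ();
       void *ret  = __builtin_apply ((void (*) ()) other_fn, args, 64);
       __builtin_return (ret);
  */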
6077 case BUILT_IN_APPLY:
5039610b 6078 if (!validate_arglist (exp, POINTER_TYPE,
019fa094 6079 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5039610b 6080 && !validate_arglist (exp, REFERENCE_TYPE,
019fa094 6081 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01
BS
6082 return const0_rtx;
6083 else
6084 {
28f4ec01
BS
6085 rtx ops[3];
6086
5039610b
SL
6087 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6088 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6089 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
28f4ec01
BS
6090
6091 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6092 }
6093
6094 /* __builtin_return (RESULT) causes the function to return the
6095 value described by RESULT. RESULT is address of the block of
6096 memory returned by __builtin_apply. */
6097 case BUILT_IN_RETURN:
5039610b
SL
6098 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6099 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
28f4ec01
BS
6100 return const0_rtx;
6101
6102 case BUILT_IN_SAVEREGS:
d3707adb 6103 return expand_builtin_saveregs ();
28f4ec01 6104
6ef5231b
JJ
6105 case BUILT_IN_VA_ARG_PACK:
6106 /* All valid uses of __builtin_va_arg_pack () are removed during
6107 inlining. */
c94ed7a1 6108 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6ef5231b
JJ
6109 return const0_rtx;
6110
ab0e176c
JJ
6111 case BUILT_IN_VA_ARG_PACK_LEN:
6112 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6113 inlining. */
c94ed7a1 6114 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
ab0e176c
JJ
6115 return const0_rtx;
6116
28f4ec01
BS
6117 /* Return the address of the first anonymous stack arg. */
6118 case BUILT_IN_NEXT_ARG:
5039610b 6119 if (fold_builtin_next_arg (exp, false))
c22cacf3 6120 return const0_rtx;
8870e212 6121 return expand_builtin_next_arg ();
28f4ec01 6122
677feb77
DD
6123 case BUILT_IN_CLEAR_CACHE:
6124 target = expand_builtin___clear_cache (exp);
6125 if (target)
6126 return target;
6127 break;
6128
28f4ec01 6129 case BUILT_IN_CLASSIFY_TYPE:
5039610b 6130 return expand_builtin_classify_type (exp);
28f4ec01
BS
6131
6132 case BUILT_IN_CONSTANT_P:
6de9cd9a 6133 return const0_rtx;
28f4ec01
BS
6134
6135 case BUILT_IN_FRAME_ADDRESS:
6136 case BUILT_IN_RETURN_ADDRESS:
5039610b 6137 return expand_builtin_frame_address (fndecl, exp);
28f4ec01
BS
6138
6139 /* Returns the address of the area where the structure is returned.
6140 0 otherwise. */
6141 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5039610b 6142 if (call_expr_nargs (exp) != 0
ca7fd9cd 6143 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3c0cb5de 6144 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
ca7fd9cd 6145 return const0_rtx;
28f4ec01 6146 else
ca7fd9cd 6147 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
28f4ec01
BS
6148
6149 case BUILT_IN_ALLOCA:
13e49da9 6150 case BUILT_IN_ALLOCA_WITH_ALIGN:
d3c12306
EB
6151 /* If the allocation stems from the declaration of a variable-sized
6152 object, it cannot accumulate. */
63d2a353 6153 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
28f4ec01
BS
6154 if (target)
6155 return target;
6156 break;
6157
6de9cd9a
DN
6158 case BUILT_IN_STACK_SAVE:
6159 return expand_stack_save ();
6160
6161 case BUILT_IN_STACK_RESTORE:
5039610b 6162 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6de9cd9a
DN
6163 return const0_rtx;
6164
ac868f29 6165 case BUILT_IN_BSWAP16:
167fa32c
EC
6166 case BUILT_IN_BSWAP32:
6167 case BUILT_IN_BSWAP64:
ac868f29 6168 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
167fa32c
EC
6169 if (target)
6170 return target;
6171 break;
6172
ea6a6627 6173 CASE_INT_FN (BUILT_IN_FFS):
5039610b 6174 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 6175 subtarget, ffs_optab);
2928cd7a
RH
6176 if (target)
6177 return target;
6178 break;
6179
ea6a6627 6180 CASE_INT_FN (BUILT_IN_CLZ):
5039610b 6181 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 6182 subtarget, clz_optab);
2928cd7a
RH
6183 if (target)
6184 return target;
6185 break;
6186
ea6a6627 6187 CASE_INT_FN (BUILT_IN_CTZ):
5039610b 6188 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 6189 subtarget, ctz_optab);
2928cd7a
RH
6190 if (target)
6191 return target;
6192 break;
6193
3801c801 6194 CASE_INT_FN (BUILT_IN_CLRSB):
3801c801
BS
6195 target = expand_builtin_unop (target_mode, exp, target,
6196 subtarget, clrsb_optab);
6197 if (target)
6198 return target;
6199 break;
6200
ea6a6627 6201 CASE_INT_FN (BUILT_IN_POPCOUNT):
5039610b 6202 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 6203 subtarget, popcount_optab);
2928cd7a
RH
6204 if (target)
6205 return target;
6206 break;
6207
ea6a6627 6208 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 6209 target = expand_builtin_unop (target_mode, exp, target,
6c537d03 6210 subtarget, parity_optab);
28f4ec01
BS
6211 if (target)
6212 return target;
6213 break;
6214
6215 case BUILT_IN_STRLEN:
5039610b 6216 target = expand_builtin_strlen (exp, target, target_mode);
28f4ec01
BS
6217 if (target)
6218 return target;
6219 break;
6220
6221 case BUILT_IN_STRCPY:
44e10129 6222 target = expand_builtin_strcpy (exp, target);
28f4ec01
BS
6223 if (target)
6224 return target;
6225 break;
8d51ecf8 6226
da9e9f08 6227 case BUILT_IN_STRNCPY:
44e10129 6228 target = expand_builtin_strncpy (exp, target);
da9e9f08
KG
6229 if (target)
6230 return target;
6231 break;
8d51ecf8 6232
9cb65f92 6233 case BUILT_IN_STPCPY:
609ae0e2 6234 target = expand_builtin_stpcpy (exp, target, mode);
9cb65f92
KG
6235 if (target)
6236 return target;
6237 break;
6238
28f4ec01 6239 case BUILT_IN_MEMCPY:
44e10129 6240 target = expand_builtin_memcpy (exp, target);
9cb65f92
KG
6241 if (target)
6242 return target;
6243 break;
6244
6245 case BUILT_IN_MEMPCPY:
5039610b 6246 target = expand_builtin_mempcpy (exp, target, mode);
28f4ec01
BS
6247 if (target)
6248 return target;
6249 break;
6250
6251 case BUILT_IN_MEMSET:
5039610b 6252 target = expand_builtin_memset (exp, target, mode);
28f4ec01
BS
6253 if (target)
6254 return target;
6255 break;
6256
e3a709be 6257 case BUILT_IN_BZERO:
8148fe65 6258 target = expand_builtin_bzero (exp);
e3a709be
KG
6259 if (target)
6260 return target;
6261 break;
6262
28f4ec01 6263 case BUILT_IN_STRCMP:
44e10129 6264 target = expand_builtin_strcmp (exp, target);
28f4ec01
BS
6265 if (target)
6266 return target;
6267 break;
6268
da9e9f08
KG
6269 case BUILT_IN_STRNCMP:
6270 target = expand_builtin_strncmp (exp, target, mode);
6271 if (target)
6272 return target;
6273 break;
6274
4b2a62db 6275 case BUILT_IN_BCMP:
28f4ec01 6276 case BUILT_IN_MEMCMP:
7f9f48be 6277 target = expand_builtin_memcmp (exp, target);
28f4ec01
BS
6278 if (target)
6279 return target;
6280 break;
28f4ec01
BS
6281
6282 case BUILT_IN_SETJMP:
4f6c2131
EB
6283 /* This should have been lowered to the builtins below. */
6284 gcc_unreachable ();
6285
6286 case BUILT_IN_SETJMP_SETUP:
6287 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6288 and the receiver label. */
5039610b 6289 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4f6c2131 6290 {
5039610b 6291 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
4f6c2131 6292 VOIDmode, EXPAND_NORMAL);
5039610b 6293 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
e67d1102 6294 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
6295
6296 /* This is copied from the handling of non-local gotos. */
6297 expand_builtin_setjmp_setup (buf_addr, label_r);
6298 nonlocal_goto_handler_labels
b5241a5a 6299 = gen_rtx_INSN_LIST (VOIDmode, label_r,
4f6c2131
EB
6300 nonlocal_goto_handler_labels);
6301 /* ??? Do not let expand_label treat us as such since we would
6302 not want to be both on the list of non-local labels and on
6303 the list of forced labels. */
6304 FORCED_LABEL (label) = 0;
6305 return const0_rtx;
6306 }
6307 break;
6308
4f6c2131
EB
6309 case BUILT_IN_SETJMP_RECEIVER:
6310 /* __builtin_setjmp_receiver is passed the receiver label. */
5039610b 6311 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4f6c2131 6312 {
5039610b 6313 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
e67d1102 6314 rtx_insn *label_r = label_rtx (label);
4f6c2131
EB
6315
6316 expand_builtin_setjmp_receiver (label_r);
6317 return const0_rtx;
6318 }
250d07b6 6319 break;
28f4ec01
BS
6320
6321 /* __builtin_longjmp is passed a pointer to an array of five words.
6322 It's similar to the C library longjmp function but works with
6323 __builtin_setjmp above. */
6324 case BUILT_IN_LONGJMP:
5039610b 6325 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
28f4ec01 6326 {
5039610b 6327 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
84217346 6328 VOIDmode, EXPAND_NORMAL);
5039610b 6329 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
28f4ec01
BS
6330
6331 if (value != const1_rtx)
6332 {
9e637a26 6333 error ("%<__builtin_longjmp%> second argument must be 1");
28f4ec01
BS
6334 return const0_rtx;
6335 }
6336
6337 expand_builtin_longjmp (buf_addr, value);
6338 return const0_rtx;
6339 }
4f6c2131 6340 break;
28f4ec01 6341
6de9cd9a 6342 case BUILT_IN_NONLOCAL_GOTO:
5039610b 6343 target = expand_builtin_nonlocal_goto (exp);
6de9cd9a
DN
6344 if (target)
6345 return target;
6346 break;
6347
2b92e7f5
RK
6348 /* This updates the setjmp buffer that is its argument with the value
6349 of the current stack pointer. */
6350 case BUILT_IN_UPDATE_SETJMP_BUF:
5039610b 6351 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2b92e7f5
RK
6352 {
6353 rtx buf_addr
5039610b 6354 = expand_normal (CALL_EXPR_ARG (exp, 0));
2b92e7f5
RK
6355
6356 expand_builtin_update_setjmp_buf (buf_addr);
6357 return const0_rtx;
6358 }
6359 break;
6360
28f4ec01 6361 case BUILT_IN_TRAP:
9602f5a0 6362 expand_builtin_trap ();
28f4ec01
BS
6363 return const0_rtx;
6364
468059bc
DD
6365 case BUILT_IN_UNREACHABLE:
6366 expand_builtin_unreachable ();
6367 return const0_rtx;
6368
ea6a6627 6369 CASE_FLT_FN (BUILT_IN_SIGNBIT):
44aea9ac
JJ
6370 case BUILT_IN_SIGNBITD32:
6371 case BUILT_IN_SIGNBITD64:
6372 case BUILT_IN_SIGNBITD128:
ef79730c
RS
6373 target = expand_builtin_signbit (exp, target);
6374 if (target)
6375 return target;
6376 break;
6377
28f4ec01
BS
6378 /* Various hooks for the DWARF 2 __throw routine. */
6379 case BUILT_IN_UNWIND_INIT:
6380 expand_builtin_unwind_init ();
6381 return const0_rtx;
6382 case BUILT_IN_DWARF_CFA:
6383 return virtual_cfa_rtx;
6384#ifdef DWARF2_UNWIND_INFO
9c80ff25
RH
6385 case BUILT_IN_DWARF_SP_COLUMN:
6386 return expand_builtin_dwarf_sp_column ();
d9d5c9de 6387 case BUILT_IN_INIT_DWARF_REG_SIZES:
5039610b 6388 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
d9d5c9de 6389 return const0_rtx;
28f4ec01
BS
6390#endif
6391 case BUILT_IN_FROB_RETURN_ADDR:
5039610b 6392 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 6393 case BUILT_IN_EXTRACT_RETURN_ADDR:
5039610b 6394 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
28f4ec01 6395 case BUILT_IN_EH_RETURN:
5039610b
SL
6396 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6397 CALL_EXPR_ARG (exp, 1));
28f4ec01 6398 return const0_rtx;
52a11cbf 6399 case BUILT_IN_EH_RETURN_DATA_REGNO:
5039610b 6400 return expand_builtin_eh_return_data_regno (exp);
c76362b4 6401 case BUILT_IN_EXTEND_POINTER:
5039610b 6402 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
1d65f45c
RH
6403 case BUILT_IN_EH_POINTER:
6404 return expand_builtin_eh_pointer (exp);
6405 case BUILT_IN_EH_FILTER:
6406 return expand_builtin_eh_filter (exp);
6407 case BUILT_IN_EH_COPY_VALUES:
6408 return expand_builtin_eh_copy_values (exp);
c76362b4 6409
6c535c69 6410 case BUILT_IN_VA_START:
5039610b 6411 return expand_builtin_va_start (exp);
d3707adb 6412 case BUILT_IN_VA_END:
5039610b 6413 return expand_builtin_va_end (exp);
d3707adb 6414 case BUILT_IN_VA_COPY:
5039610b 6415 return expand_builtin_va_copy (exp);
994a57cd 6416 case BUILT_IN_EXPECT:
5039610b 6417 return expand_builtin_expect (exp, target);
45d439ac
JJ
6418 case BUILT_IN_ASSUME_ALIGNED:
6419 return expand_builtin_assume_aligned (exp, target);
a9ccbb60 6420 case BUILT_IN_PREFETCH:
5039610b 6421 expand_builtin_prefetch (exp);
a9ccbb60
JJ
6422 return const0_rtx;
6423
6de9cd9a 6424 case BUILT_IN_INIT_TRAMPOLINE:
183dd130
ILT
6425 return expand_builtin_init_trampoline (exp, true);
6426 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6427 return expand_builtin_init_trampoline (exp, false);
6de9cd9a 6428 case BUILT_IN_ADJUST_TRAMPOLINE:
5039610b 6429 return expand_builtin_adjust_trampoline (exp);
6de9cd9a 6430
d1c38823
ZD
6431 case BUILT_IN_FORK:
6432 case BUILT_IN_EXECL:
6433 case BUILT_IN_EXECV:
6434 case BUILT_IN_EXECLP:
6435 case BUILT_IN_EXECLE:
6436 case BUILT_IN_EXECVP:
6437 case BUILT_IN_EXECVE:
5039610b 6438 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
d1c38823
ZD
6439 if (target)
6440 return target;
6441 break;
28f4ec01 6442
e0a8ecf2
AM
6443 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6444 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6445 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6446 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6447 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
86951993 6449 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
48ae6c13
RH
6450 if (target)
6451 return target;
6452 break;
6453
e0a8ecf2
AM
6454 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6455 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6456 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6457 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6458 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
86951993 6460 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
48ae6c13
RH
6461 if (target)
6462 return target;
6463 break;
6464
e0a8ecf2
AM
6465 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6466 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6467 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6468 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6469 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
86951993 6471 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
48ae6c13
RH
6472 if (target)
6473 return target;
6474 break;
6475
e0a8ecf2
AM
6476 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6477 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6478 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6479 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6480 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
86951993 6482 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
48ae6c13
RH
6483 if (target)
6484 return target;
6485 break;
6486
e0a8ecf2
AM
6487 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6488 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6489 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6490 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6491 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
86951993 6493 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
48ae6c13
RH
6494 if (target)
6495 return target;
6496 break;
6497
e0a8ecf2
AM
6498 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6499 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6500 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6501 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6502 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6503 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
86951993 6504 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
48ae6c13
RH
6505 if (target)
6506 return target;
6507 break;
6508
e0a8ecf2
AM
6509 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6510 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6511 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6512 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6513 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6514 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
86951993 6515 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
48ae6c13
RH
6516 if (target)
6517 return target;
6518 break;
6519
e0a8ecf2
AM
6520 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6521 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6522 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6523 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6524 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6525 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
86951993 6526 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
48ae6c13
RH
6527 if (target)
6528 return target;
6529 break;
6530
e0a8ecf2
AM
6531 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6532 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6533 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6534 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6535 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6536 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
86951993 6537 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
48ae6c13
RH
6538 if (target)
6539 return target;
6540 break;
6541
e0a8ecf2
AM
6542 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6543 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6544 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6545 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6546 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6547 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
86951993 6548 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
48ae6c13
RH
6549 if (target)
6550 return target;
6551 break;
6552
e0a8ecf2
AM
6553 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6554 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6555 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6556 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6557 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6558 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
86951993 6559 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
48ae6c13
RH
6560 if (target)
6561 return target;
6562 break;
6563
e0a8ecf2
AM
6564 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6565 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6566 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6567 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6568 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6569 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
86951993 6570 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
48ae6c13
RH
6571 if (target)
6572 return target;
6573 break;
6574
e0a8ecf2
AM
6575 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6576 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6577 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6578 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6579 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
5b5513d0
RH
6580 if (mode == VOIDmode)
6581 mode = TYPE_MODE (boolean_type_node);
48ae6c13
RH
6582 if (!target || !register_operand (target, mode))
6583 target = gen_reg_rtx (mode);
02ee605c 6584
e0a8ecf2
AM
6585 mode = get_builtin_sync_mode
6586 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
5039610b 6587 target = expand_builtin_compare_and_swap (mode, exp, true, target);
48ae6c13
RH
6588 if (target)
6589 return target;
6590 break;
6591
e0a8ecf2
AM
6592 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6593 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6594 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6595 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6596 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6597 mode = get_builtin_sync_mode
6598 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
5039610b 6599 target = expand_builtin_compare_and_swap (mode, exp, false, target);
48ae6c13
RH
6600 if (target)
6601 return target;
6602 break;
6603
e0a8ecf2
AM
6604 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6605 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6606 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6607 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6608 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6609 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6610 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
48ae6c13
RH
6611 if (target)
6612 return target;
6613 break;
6614
e0a8ecf2
AM
6615 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6616 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6617 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6618 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6619 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6620 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6621 expand_builtin_sync_lock_release (mode, exp);
48ae6c13
RH
6622 return const0_rtx;
6623
e0a8ecf2
AM
6624 case BUILT_IN_SYNC_SYNCHRONIZE:
6625 expand_builtin_sync_synchronize ();
48ae6c13
RH
6626 return const0_rtx;
6627
86951993
AM
6628 case BUILT_IN_ATOMIC_EXCHANGE_1:
6629 case BUILT_IN_ATOMIC_EXCHANGE_2:
6630 case BUILT_IN_ATOMIC_EXCHANGE_4:
6631 case BUILT_IN_ATOMIC_EXCHANGE_8:
6632 case BUILT_IN_ATOMIC_EXCHANGE_16:
6633 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6634 target = expand_builtin_atomic_exchange (mode, exp, target);
6635 if (target)
6636 return target;
6637 break;
6638
6639 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6640 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6641 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6642 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6643 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
e351ae85
AM
6644 {
6645 unsigned int nargs, z;
9771b263 6646 vec<tree, va_gc> *vec;
e351ae85
AM
6647
6648 mode =
6649 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6650 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6651 if (target)
6652 return target;
6653
6654 /* If this is turned into an external library call, the weak parameter
6655 must be dropped to match the expected parameter list. */
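	/* Illustratively (a sketch, not from the original source), a call of
	   the form
	     __atomic_compare_exchange_4 (ptr, expected, desired,
					  weak, success_order, failure_order)
	   is rebuilt below without argument 3 (WEAK), so that it matches the
	   parameter list of the external library routine.  */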
6656 nargs = call_expr_nargs (exp);
9771b263 6657 vec_alloc (vec, nargs - 1);
e351ae85 6658 for (z = 0; z < 3; z++)
9771b263 6659 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
6660 /* Skip the boolean weak parameter. */
6661 for (z = 4; z < 6; z++)
9771b263 6662 vec->quick_push (CALL_EXPR_ARG (exp, z));
e351ae85
AM
6663 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6664 break;
6665 }
86951993
AM
6666
6667 case BUILT_IN_ATOMIC_LOAD_1:
6668 case BUILT_IN_ATOMIC_LOAD_2:
6669 case BUILT_IN_ATOMIC_LOAD_4:
6670 case BUILT_IN_ATOMIC_LOAD_8:
6671 case BUILT_IN_ATOMIC_LOAD_16:
6672 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6673 target = expand_builtin_atomic_load (mode, exp, target);
6674 if (target)
6675 return target;
6676 break;
6677
6678 case BUILT_IN_ATOMIC_STORE_1:
6679 case BUILT_IN_ATOMIC_STORE_2:
6680 case BUILT_IN_ATOMIC_STORE_4:
6681 case BUILT_IN_ATOMIC_STORE_8:
6682 case BUILT_IN_ATOMIC_STORE_16:
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6684 target = expand_builtin_atomic_store (mode, exp);
6685 if (target)
6686 return const0_rtx;
6687 break;
6688
6689 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6690 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6691 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6692 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6693 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6694 {
6695 enum built_in_function lib;
6696 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6697 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6698 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6699 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6700 ignore, lib);
6701 if (target)
6702 return target;
6703 break;
6704 }
6705 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6706 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6707 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6708 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6709 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6710 {
6711 enum built_in_function lib;
6712 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6713 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6714 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6715 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6716 ignore, lib);
6717 if (target)
6718 return target;
6719 break;
6720 }
6721 case BUILT_IN_ATOMIC_AND_FETCH_1:
6722 case BUILT_IN_ATOMIC_AND_FETCH_2:
6723 case BUILT_IN_ATOMIC_AND_FETCH_4:
6724 case BUILT_IN_ATOMIC_AND_FETCH_8:
6725 case BUILT_IN_ATOMIC_AND_FETCH_16:
6726 {
6727 enum built_in_function lib;
6728 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6729 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6730 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6731 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6732 ignore, lib);
6733 if (target)
6734 return target;
6735 break;
6736 }
6737 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6738 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6739 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6740 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6741 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6742 {
6743 enum built_in_function lib;
6744 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6745 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6746 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6747 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6748 ignore, lib);
6749 if (target)
6750 return target;
6751 break;
6752 }
6753 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6754 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6755 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6756 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6757 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6758 {
6759 enum built_in_function lib;
6760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6761 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6762 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6763 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6764 ignore, lib);
6765 if (target)
6766 return target;
6767 break;
6768 }
6769 case BUILT_IN_ATOMIC_OR_FETCH_1:
6770 case BUILT_IN_ATOMIC_OR_FETCH_2:
6771 case BUILT_IN_ATOMIC_OR_FETCH_4:
6772 case BUILT_IN_ATOMIC_OR_FETCH_8:
6773 case BUILT_IN_ATOMIC_OR_FETCH_16:
6774 {
6775 enum built_in_function lib;
6776 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6777 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6778 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6779 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6780 ignore, lib);
6781 if (target)
6782 return target;
6783 break;
6784 }
6785 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6786 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6787 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6788 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6789 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6790 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6791 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6792 ignore, BUILT_IN_NONE);
6793 if (target)
6794 return target;
6795 break;
6796
6797 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6798 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6799 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6800 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6801 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6802 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6803 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6804 ignore, BUILT_IN_NONE);
6805 if (target)
6806 return target;
6807 break;
6808
6809 case BUILT_IN_ATOMIC_FETCH_AND_1:
6810 case BUILT_IN_ATOMIC_FETCH_AND_2:
6811 case BUILT_IN_ATOMIC_FETCH_AND_4:
6812 case BUILT_IN_ATOMIC_FETCH_AND_8:
6813 case BUILT_IN_ATOMIC_FETCH_AND_16:
6814 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6815 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6816 ignore, BUILT_IN_NONE);
6817 if (target)
6818 return target;
6819 break;
6820
6821 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6822 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6823 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6824 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6825 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6826 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6827 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6828 ignore, BUILT_IN_NONE);
6829 if (target)
6830 return target;
6831 break;
6832
6833 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6834 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6835 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6836 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6837 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6838 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6839 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6840 ignore, BUILT_IN_NONE);
6841 if (target)
6842 return target;
6843 break;
6844
6845 case BUILT_IN_ATOMIC_FETCH_OR_1:
6846 case BUILT_IN_ATOMIC_FETCH_OR_2:
6847 case BUILT_IN_ATOMIC_FETCH_OR_4:
6848 case BUILT_IN_ATOMIC_FETCH_OR_8:
6849 case BUILT_IN_ATOMIC_FETCH_OR_16:
6850 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6851 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6852 ignore, BUILT_IN_NONE);
6853 if (target)
6854 return target;
6855 break;
d660c35e
AM
6856
6857 case BUILT_IN_ATOMIC_TEST_AND_SET:
744accb2 6858 return expand_builtin_atomic_test_and_set (exp, target);
d660c35e
AM
6859
6860 case BUILT_IN_ATOMIC_CLEAR:
6861 return expand_builtin_atomic_clear (exp);
86951993
AM
6862
6863 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6864 return expand_builtin_atomic_always_lock_free (exp);
6865
6866 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6867 target = expand_builtin_atomic_is_lock_free (exp);
6868 if (target)
6869 return target;
6870 break;
6871
6872 case BUILT_IN_ATOMIC_THREAD_FENCE:
6873 expand_builtin_atomic_thread_fence (exp);
6874 return const0_rtx;
6875
6876 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6877 expand_builtin_atomic_signal_fence (exp);
6878 return const0_rtx;
6879
10a0d495
JJ
6880 case BUILT_IN_OBJECT_SIZE:
6881 return expand_builtin_object_size (exp);
6882
6883 case BUILT_IN_MEMCPY_CHK:
6884 case BUILT_IN_MEMPCPY_CHK:
6885 case BUILT_IN_MEMMOVE_CHK:
6886 case BUILT_IN_MEMSET_CHK:
6887 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6888 if (target)
6889 return target;
6890 break;
6891
6892 case BUILT_IN_STRCPY_CHK:
6893 case BUILT_IN_STPCPY_CHK:
6894 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 6895 case BUILT_IN_STPNCPY_CHK:
10a0d495 6896 case BUILT_IN_STRCAT_CHK:
1c2fc017 6897 case BUILT_IN_STRNCAT_CHK:
10a0d495
JJ
6898 case BUILT_IN_SNPRINTF_CHK:
6899 case BUILT_IN_VSNPRINTF_CHK:
6900 maybe_emit_chk_warning (exp, fcode);
6901 break;
6902
6903 case BUILT_IN_SPRINTF_CHK:
6904 case BUILT_IN_VSPRINTF_CHK:
6905 maybe_emit_sprintf_chk_warning (exp, fcode);
6906 break;
6907
f9555f40 6908 case BUILT_IN_FREE:
a3a704a4
MH
6909 if (warn_free_nonheap_object)
6910 maybe_emit_free_warning (exp);
f9555f40
JJ
6911 break;
6912
f959607b
CLT
6913 case BUILT_IN_THREAD_POINTER:
6914 return expand_builtin_thread_pointer (exp, target);
6915
6916 case BUILT_IN_SET_THREAD_POINTER:
6917 expand_builtin_set_thread_pointer (exp);
6918 return const0_rtx;
6919
939b37da
BI
6920 case BUILT_IN_CILK_DETACH:
6921 expand_builtin_cilk_detach (exp);
6922 return const0_rtx;
6923
6924 case BUILT_IN_CILK_POP_FRAME:
6925 expand_builtin_cilk_pop_frame (exp);
6926 return const0_rtx;
6927
d5e254e1
IE
6928 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6929 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6930 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6931 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6932 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6933 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6934 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6935 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6936 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6937 case BUILT_IN_CHKP_GET_PTR_LBOUND:
6938 case BUILT_IN_CHKP_GET_PTR_UBOUND:
 6939 /* We allow user CHKP builtins if the Pointer Bounds
6940 Checker is off. */
6941 if (!chkp_function_instrumented_p (current_function_decl))
6942 {
6943 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6944 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6945 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6946 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6947 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6948 return expand_normal (CALL_EXPR_ARG (exp, 0));
6949 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6950 return expand_normal (size_zero_node);
6951 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6952 return expand_normal (size_int (-1));
6953 else
6954 return const0_rtx;
6955 }
6956 /* FALLTHROUGH */
6957
6958 case BUILT_IN_CHKP_BNDMK:
6959 case BUILT_IN_CHKP_BNDSTX:
6960 case BUILT_IN_CHKP_BNDCL:
6961 case BUILT_IN_CHKP_BNDCU:
6962 case BUILT_IN_CHKP_BNDLDX:
6963 case BUILT_IN_CHKP_BNDRET:
6964 case BUILT_IN_CHKP_INTERSECT:
6965 case BUILT_IN_CHKP_NARROW:
6966 case BUILT_IN_CHKP_EXTRACT_LOWER:
6967 case BUILT_IN_CHKP_EXTRACT_UPPER:
6968 /* Software implementation of Pointer Bounds Checker is NYI.
6969 Target support is required. */
 6970 error ("your target platform does not support %<-fcheck-pointer-bounds%>");
6971 break;
6972
41dbbb37 6973 case BUILT_IN_ACC_ON_DEVICE:
164453bb
NS
 6974 /* Do a library call if we failed to expand the builtin when
6975 folding. */
41dbbb37
TS
6976 break;
6977
e62f4abc 6978 default: /* Just do a library call if this is an unknown builtin. */
84b8b0e0 6979 break;
28f4ec01
BS
6980 }
6981
6982 /* The switch statement above can drop through to cause the function
6983 to be called normally. */
6984 return expand_call (exp, target, ignore);
6985}
b0b3afb2 6986
edcf72f3
IE
6987/* Similar to expand_builtin but is used for instrumented calls. */
6988
6989rtx
6990expand_builtin_with_bounds (tree exp, rtx target,
6991 rtx subtarget ATTRIBUTE_UNUSED,
6992 machine_mode mode, int ignore)
6993{
6994 tree fndecl = get_callee_fndecl (exp);
6995 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6996
6997 gcc_assert (CALL_WITH_BOUNDS_P (exp));
6998
6999 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7000 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7001
7002 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7003 && fcode < END_CHKP_BUILTINS);
7004
7005 switch (fcode)
7006 {
7007 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7008 target = expand_builtin_memcpy_with_bounds (exp, target);
7009 if (target)
7010 return target;
7011 break;
7012
7013 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7014 target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
7015 if (target)
7016 return target;
7017 break;
7018
7019 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7020 target = expand_builtin_memset_with_bounds (exp, target, mode);
7021 if (target)
7022 return target;
7023 break;
7024
7025 default:
7026 break;
7027 }
7028
7029 /* The switch statement above can drop through to cause the function
7030 to be called normally. */
7031 return expand_call (exp, target, ignore);
7032}
7033
4977bab6 7034/* Determine whether a tree node represents a call to a built-in
feda1845
RS
7035 function. If the tree T is a call to a built-in function with
7036 the right number of arguments of the appropriate types, return
7037 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7038 Otherwise the return value is END_BUILTINS. */
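/* A sketch of typical use by a caller (the CALL variable is hypothetical,
   not from the original source):

     switch (builtin_mathfn_code (call))
       {
       CASE_FLT_FN (BUILT_IN_SQRT):
         (sqrt, sqrtf and sqrtl all land here, argument types checked)
         break;
       default:
         break;
       }
*/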
4682ae04 7039
4977bab6 7040enum built_in_function
fa233e34 7041builtin_mathfn_code (const_tree t)
4977bab6 7042{
fa233e34
KG
7043 const_tree fndecl, arg, parmlist;
7044 const_tree argtype, parmtype;
7045 const_call_expr_arg_iterator iter;
4977bab6
ZW
7046
7047 if (TREE_CODE (t) != CALL_EXPR
5039610b 7048 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
4977bab6
ZW
7049 return END_BUILTINS;
7050
2f503025
JM
7051 fndecl = get_callee_fndecl (t);
7052 if (fndecl == NULL_TREE
feda1845 7053 || TREE_CODE (fndecl) != FUNCTION_DECL
4977bab6
ZW
7054 || ! DECL_BUILT_IN (fndecl)
7055 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7056 return END_BUILTINS;
7057
feda1845 7058 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
fa233e34 7059 init_const_call_expr_arg_iterator (t, &iter);
feda1845 7060 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
c0a47a61 7061 {
feda1845
RS
7062 /* If a function doesn't take a variable number of arguments,
7063 the last element in the list will have type `void'. */
7064 parmtype = TREE_VALUE (parmlist);
7065 if (VOID_TYPE_P (parmtype))
7066 {
fa233e34 7067 if (more_const_call_expr_args_p (&iter))
feda1845
RS
7068 return END_BUILTINS;
7069 return DECL_FUNCTION_CODE (fndecl);
7070 }
7071
fa233e34 7072 if (! more_const_call_expr_args_p (&iter))
c0a47a61 7073 return END_BUILTINS;
b8698a0f 7074
fa233e34 7075 arg = next_const_call_expr_arg (&iter);
5039610b 7076 argtype = TREE_TYPE (arg);
feda1845
RS
7077
7078 if (SCALAR_FLOAT_TYPE_P (parmtype))
7079 {
7080 if (! SCALAR_FLOAT_TYPE_P (argtype))
7081 return END_BUILTINS;
7082 }
7083 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7084 {
7085 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7086 return END_BUILTINS;
7087 }
7088 else if (POINTER_TYPE_P (parmtype))
7089 {
7090 if (! POINTER_TYPE_P (argtype))
7091 return END_BUILTINS;
7092 }
7093 else if (INTEGRAL_TYPE_P (parmtype))
7094 {
7095 if (! INTEGRAL_TYPE_P (argtype))
7096 return END_BUILTINS;
7097 }
7098 else
c0a47a61 7099 return END_BUILTINS;
c0a47a61
RS
7100 }
7101
feda1845 7102 /* Variable-length argument list. */
4977bab6
ZW
7103 return DECL_FUNCTION_CODE (fndecl);
7104}
7105
5039610b
SL
7106/* Fold a call to __builtin_constant_p, if we know its argument ARG will
7107 evaluate to a constant. */
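/* Illustrative outcomes (a sketch, not from the original source):

     __builtin_constant_p (3 * 7)   folds to 1: a literal constant
     __builtin_constant_p ("abc")   folds to 1: address of a string literal
     __builtin_constant_p (ptr)     folds to 0 for a pointer variable,
                                    since only literals are wanted here
     __builtin_constant_p (i)       stays unfolded (NULL_TREE) for a plain
                                    integer variable, so RTL optimizers can
                                    still resolve it later  */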
b0b3afb2
BS
7108
7109static tree
5039610b 7110fold_builtin_constant_p (tree arg)
b0b3afb2 7111{
b0b3afb2
BS
7112 /* We return 1 for a numeric type that's known to be a constant
7113 value at compile-time or for an aggregate type that's a
7114 literal constant. */
5039610b 7115 STRIP_NOPS (arg);
b0b3afb2
BS
7116
7117 /* If we know this is a constant, emit the constant of one. */
5039610b
SL
7118 if (CONSTANT_CLASS_P (arg)
7119 || (TREE_CODE (arg) == CONSTRUCTOR
7120 && TREE_CONSTANT (arg)))
b0b3afb2 7121 return integer_one_node;
5039610b 7122 if (TREE_CODE (arg) == ADDR_EXPR)
fb664a2c 7123 {
5039610b 7124 tree op = TREE_OPERAND (arg, 0);
fb664a2c
RG
7125 if (TREE_CODE (op) == STRING_CST
7126 || (TREE_CODE (op) == ARRAY_REF
7127 && integer_zerop (TREE_OPERAND (op, 1))
7128 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7129 return integer_one_node;
7130 }
b0b3afb2 7131
0dcd3840
RH
7132 /* If this expression has side effects, show we don't know it to be a
7133 constant. Likewise if it's a pointer or aggregate type since in
 7134 those cases we only want literals, since those are only optimized
13104975
ZW
7135 when generating RTL, not later.
7136 And finally, if we are compiling an initializer, not code, we
7137 need to return a definite result now; there's not going to be any
7138 more optimization done. */
5039610b
SL
7139 if (TREE_SIDE_EFFECTS (arg)
7140 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7141 || POINTER_TYPE_P (TREE_TYPE (arg))
63b48197 7142 || cfun == 0
4e7d7b3d
JJ
7143 || folding_initializer
7144 || force_folding_builtin_constant_p)
b0b3afb2
BS
7145 return integer_zero_node;
7146
5039610b 7147 return NULL_TREE;
b0b3afb2
BS
7148}
7149
419ce103
AN
7150/* Create builtin_expect with PRED and EXPECTED as its arguments and
7151 return it as a truthvalue. */
6de9cd9a
DN
7152
7153static tree
ed9c79e1
JJ
7154build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7155 tree predictor)
6de9cd9a 7156{
419ce103 7157 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6de9cd9a 7158
e79983f4 7159 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
419ce103
AN
7160 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7161 ret_type = TREE_TYPE (TREE_TYPE (fn));
7162 pred_type = TREE_VALUE (arg_types);
7163 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7164
db3927fb
AH
7165 pred = fold_convert_loc (loc, pred_type, pred);
7166 expected = fold_convert_loc (loc, expected_type, expected);
ed9c79e1
JJ
7167 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7168 predictor);
419ce103
AN
7169
7170 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7171 build_int_cst (ret_type, 0));
7172}
7173
 7174/* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7175 NULL_TREE if no simplification is possible. */
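/* An illustrative distribution over a short-circuit operator (a sketch,
   not from the original source):

     __builtin_expect (a && b, 1)

   is rewritten into roughly

     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so that the prediction reaches each operand separately.  */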
7176
ed9c79e1
JJ
7177tree
7178fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
419ce103 7179{
be31603a 7180 tree inner, fndecl, inner_arg0;
419ce103
AN
7181 enum tree_code code;
7182
be31603a
KT
7183 /* Distribute the expected value over short-circuiting operators.
7184 See through the cast from truthvalue_type_node to long. */
7185 inner_arg0 = arg0;
625a9766 7186 while (CONVERT_EXPR_P (inner_arg0)
be31603a
KT
7187 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7188 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7189 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7190
419ce103
AN
7191 /* If this is a builtin_expect within a builtin_expect keep the
7192 inner one. See through a comparison against a constant. It
 7193 might have been added to create a truthvalue. */
be31603a
KT
7194 inner = inner_arg0;
7195
419ce103
AN
7196 if (COMPARISON_CLASS_P (inner)
7197 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7198 inner = TREE_OPERAND (inner, 0);
7199
7200 if (TREE_CODE (inner) == CALL_EXPR
7201 && (fndecl = get_callee_fndecl (inner))
7202 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7203 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7204 return arg0;
7205
be31603a 7206 inner = inner_arg0;
419ce103
AN
7207 code = TREE_CODE (inner);
7208 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7209 {
7210 tree op0 = TREE_OPERAND (inner, 0);
7211 tree op1 = TREE_OPERAND (inner, 1);
7212
ed9c79e1
JJ
7213 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7214 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
419ce103
AN
7215 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7216
db3927fb 7217 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
419ce103
AN
7218 }
7219
7220 /* If the argument isn't invariant then there's nothing else we can do. */
be31603a 7221 if (!TREE_CONSTANT (inner_arg0))
5039610b 7222 return NULL_TREE;
6de9cd9a 7223
419ce103
AN
7224 /* If we expect that a comparison against the argument will fold to
7225 a constant return the constant. In practice, this means a true
7226 constant or the address of a non-weak symbol. */
be31603a 7227 inner = inner_arg0;
6de9cd9a
DN
7228 STRIP_NOPS (inner);
7229 if (TREE_CODE (inner) == ADDR_EXPR)
7230 {
7231 do
7232 {
7233 inner = TREE_OPERAND (inner, 0);
7234 }
7235 while (TREE_CODE (inner) == COMPONENT_REF
7236 || TREE_CODE (inner) == ARRAY_REF);
5f26a230
JJ
7237 if ((TREE_CODE (inner) == VAR_DECL
7238 || TREE_CODE (inner) == FUNCTION_DECL)
7239 && DECL_WEAK (inner))
5039610b 7240 return NULL_TREE;
6de9cd9a
DN
7241 }
7242
419ce103
AN
7243 /* Otherwise, ARG0 already has the proper type for the return value. */
7244 return arg0;
6de9cd9a
DN
7245}
7246
5039610b 7247/* Fold a call to __builtin_classify_type with argument ARG. */
5197bd50 7248
ad82abb8 7249static tree
5039610b 7250fold_builtin_classify_type (tree arg)
ad82abb8 7251{
5039610b 7252 if (arg == 0)
45a2c477 7253 return build_int_cst (integer_type_node, no_type_class);
ad82abb8 7254
45a2c477 7255 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
ad82abb8
ZW
7256}
7257
5039610b 7258/* Fold a call to __builtin_strlen with argument ARG. */
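/* Illustrative fold (a sketch, not from the original source):

     __builtin_strlen ("hello")  ->  5

   whenever c_strlen can compute the length at compile time.  */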
667bbbbb
EC
7259
7260static tree
ab996409 7261fold_builtin_strlen (location_t loc, tree type, tree arg)
667bbbbb 7262{
5039610b 7263 if (!validate_arg (arg, POINTER_TYPE))
667bbbbb
EC
7264 return NULL_TREE;
7265 else
7266 {
5039610b 7267 tree len = c_strlen (arg, 0);
667bbbbb
EC
7268
7269 if (len)
ab996409 7270 return fold_convert_loc (loc, type, len);
667bbbbb
EC
7271
7272 return NULL_TREE;
7273 }
7274}
7275
ab5e2615
RH
7276/* Fold a call to __builtin_inf or __builtin_huge_val. */
7277
7278static tree
db3927fb 7279fold_builtin_inf (location_t loc, tree type, int warn)
ab5e2615 7280{
efdc7e19
RH
7281 REAL_VALUE_TYPE real;
7282
6d84156b
JM
7283 /* __builtin_inff is intended to be usable to define INFINITY on all
7284 targets. If an infinity is not available, INFINITY expands "to a
7285 positive constant of type float that overflows at translation
7286 time", footnote "In this case, using INFINITY will violate the
7287 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7288 Thus we pedwarn to ensure this constraint violation is
7289 diagnosed. */
ab5e2615 7290 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
db3927fb 7291 pedwarn (loc, 0, "target format does not support infinity");
ab5e2615 7292
efdc7e19
RH
7293 real_inf (&real);
7294 return build_real (type, real);
ab5e2615
RH
7295}
7296
5039610b 7297/* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
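/* Illustrative folds (a sketch, not from the original source):

     __builtin_nan ("")   ->  a quiet NaN constant of the call's type
     __builtin_nans ("")  ->  the signalling variant

   A non-literal or unparsable string argument leaves the call alone.  */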
1472e41c
RH
7298
7299static tree
5039610b 7300fold_builtin_nan (tree arg, tree type, int quiet)
1472e41c
RH
7301{
7302 REAL_VALUE_TYPE real;
7303 const char *str;
7304
5039610b
SL
7305 if (!validate_arg (arg, POINTER_TYPE))
7306 return NULL_TREE;
7307 str = c_getstr (arg);
1472e41c 7308 if (!str)
5039610b 7309 return NULL_TREE;
1472e41c
RH
7310
7311 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
5039610b 7312 return NULL_TREE;
1472e41c
RH
7313
7314 return build_real (type, real);
7315}
7316
5039610b
SL
 7317/* FNDECL is assumed to be a builtin which can narrow the FP type of
7318 the argument, for instance lround((double)f) -> lroundf (f).
7319 Do the transformation for a call with argument ARG. */
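/* Further illustrative canonicalizations performed below (a sketch):

     iceil (x)    ->  lceil (x)    when int and long have the same precision
     llround (x)  ->  lround (x)   when long long and long have the same
                                   precision  */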
ca3df643
KG
7320
7321static tree
db3927fb 7322fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
ca3df643 7323{
ca3df643 7324 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
ca3df643 7325
5039610b
SL
7326 if (!validate_arg (arg, REAL_TYPE))
7327 return NULL_TREE;
ca3df643
KG
7328
7329 /* If argument is already integer valued, and we don't need to worry
7330 about setting errno, there's no need to perform rounding. */
7331 if (! flag_errno_math && integer_valued_real_p (arg))
db3927fb
AH
7332 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7333 TREE_TYPE (TREE_TYPE (fndecl)), arg);
ca3df643
KG
7334
7335 if (optimize)
7336 {
7337 tree ftype = TREE_TYPE (arg);
7338 tree arg0 = strip_float_extensions (arg);
7339 tree newtype = TREE_TYPE (arg0);
7340 tree decl;
7341
7342 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7343 && (decl = mathfn_built_in (newtype, fcode)))
db3927fb
AH
7344 return build_call_expr_loc (loc, decl, 1,
7345 fold_convert_loc (loc, newtype, arg0));
ca3df643 7346 }
482c6ce8 7347
6c32ee74
UB
7348 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7349 sizeof (int) == sizeof (long). */
7350 if (TYPE_PRECISION (integer_type_node)
7351 == TYPE_PRECISION (long_integer_type_node))
7352 {
7353 tree newfn = NULL_TREE;
7354 switch (fcode)
7355 {
7356 CASE_FLT_FN (BUILT_IN_ICEIL):
7357 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7358 break;
7359
7360 CASE_FLT_FN (BUILT_IN_IFLOOR):
7361 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7362 break;
7363
7364 CASE_FLT_FN (BUILT_IN_IROUND):
7365 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7366 break;
7367
7368 CASE_FLT_FN (BUILT_IN_IRINT):
7369 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7370 break;
7371
7372 default:
7373 break;
7374 }
7375
7376 if (newfn)
7377 {
7378 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7379 return fold_convert_loc (loc,
7380 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7381 }
7382 }
7383
482c6ce8
RS
7384 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7385 sizeof (long long) == sizeof (long). */
7386 if (TYPE_PRECISION (long_long_integer_type_node)
7387 == TYPE_PRECISION (long_integer_type_node))
7388 {
7389 tree newfn = NULL_TREE;
7390 switch (fcode)
7391 {
7392 CASE_FLT_FN (BUILT_IN_LLCEIL):
7393 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7394 break;
7395
7396 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7397 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7398 break;
7399
7400 CASE_FLT_FN (BUILT_IN_LLROUND):
7401 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7402 break;
7403
7404 CASE_FLT_FN (BUILT_IN_LLRINT):
7405 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7406 break;
7407
7408 default:
7409 break;
7410 }
7411
7412 if (newfn)
7413 {
db3927fb
AH
7414 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7415 return fold_convert_loc (loc,
7416 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
482c6ce8
RS
7417 }
7418 }
7419
5039610b 7420 return NULL_TREE;
ca3df643
KG
7421}
7422
75c7c595
RG
7423/* Fold function call to builtin sincos, sincosf, or sincosl. Return
7424 NULL_TREE if no simplification can be made. */
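/* When the C99 complex math runtime is available, the canonicalization
   below corresponds roughly to rewriting (an illustrative sketch):

     sincos (x, &s, &c);

   as

     __complex__ double tmp = cexpi (x);
     s = __imag__ tmp;
     c = __real__ tmp;

   where cexpi is GCC's internal cos + i*sin builtin.  */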
7425
7426static tree
db3927fb
AH
7427fold_builtin_sincos (location_t loc,
7428 tree arg0, tree arg1, tree arg2)
75c7c595 7429{
5039610b 7430 tree type;
75c7c595
RG
7431 tree res, fn, call;
7432
5039610b
SL
7433 if (!validate_arg (arg0, REAL_TYPE)
7434 || !validate_arg (arg1, POINTER_TYPE)
7435 || !validate_arg (arg2, POINTER_TYPE))
75c7c595
RG
7436 return NULL_TREE;
7437
75c7c595 7438 type = TREE_TYPE (arg0);
75c7c595
RG
7439
7440 /* Calculate the result when the argument is a constant. */
7441 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7442 return res;
7443
7444 /* Canonicalize sincos to cexpi. */
d33d9e47 7445 if (!targetm.libc_has_function (function_c99_math_complex))
2d38026b 7446 return NULL_TREE;
75c7c595
RG
7447 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7448 if (!fn)
7449 return NULL_TREE;
7450
db3927fb 7451 call = build_call_expr_loc (loc, fn, 1, arg0);
75c7c595
RG
7452 call = builtin_save_expr (call);
7453
928c19bb 7454 return build2 (COMPOUND_EXPR, void_type_node,
75c7c595 7455 build2 (MODIFY_EXPR, void_type_node,
db3927fb 7456 build_fold_indirect_ref_loc (loc, arg1),
75c7c595
RG
7457 build1 (IMAGPART_EXPR, type, call)),
7458 build2 (MODIFY_EXPR, void_type_node,
db3927fb 7459 build_fold_indirect_ref_loc (loc, arg2),
75c7c595
RG
7460 build1 (REALPART_EXPR, type, call)));
7461}
7462
28f4586b
RG
7463/* Fold function call to builtin cexp, cexpf, or cexpl. Return
7464 NULL_TREE if no simplification can be made. */
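/* Illustrative sketches of the simplifications below:

     cexp (z)  ->  cexpi (__imag__ z)
                     when __real__ z is known to be zero
     cexp (z)  ->  exp (__real__ z) * cexpi (__imag__ z)
                     with -funsafe-math-optimizations

   where cexpi is GCC's internal cos + i*sin builtin.  */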
7465
7466static tree
db3927fb 7467fold_builtin_cexp (location_t loc, tree arg0, tree type)
28f4586b 7468{
5039610b 7469 tree rtype;
28f4586b 7470 tree realp, imagp, ifn;
c128599a 7471 tree res;
28f4586b 7472
c128599a 7473 if (!validate_arg (arg0, COMPLEX_TYPE)
376da68e 7474 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
28f4586b
RG
7475 return NULL_TREE;
7476
c128599a
KG
7477 /* Calculate the result when the argument is a constant. */
7478 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7479 return res;
b8698a0f 7480
28f4586b
RG
7481 rtype = TREE_TYPE (TREE_TYPE (arg0));
7482
7483 /* In case we can figure out the real part of arg0 and it is constant zero
7484 fold to cexpi. */
d33d9e47 7485 if (!targetm.libc_has_function (function_c99_math_complex))
2d38026b 7486 return NULL_TREE;
28f4586b
RG
7487 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7488 if (!ifn)
7489 return NULL_TREE;
7490
db3927fb 7491 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
28f4586b
RG
7492 && real_zerop (realp))
7493 {
db3927fb
AH
7494 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7495 return build_call_expr_loc (loc, ifn, 1, narg);
28f4586b
RG
7496 }
7497
7498 /* In case we can easily decompose real and imaginary parts split cexp
7499 to exp (r) * cexpi (i). */
7500 if (flag_unsafe_math_optimizations
7501 && realp)
7502 {
7503 tree rfn, rcall, icall;
7504
7505 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7506 if (!rfn)
7507 return NULL_TREE;
7508
db3927fb 7509 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
28f4586b
RG
7510 if (!imagp)
7511 return NULL_TREE;
7512
db3927fb 7513 icall = build_call_expr_loc (loc, ifn, 1, imagp);
28f4586b 7514 icall = builtin_save_expr (icall);
db3927fb 7515 rcall = build_call_expr_loc (loc, rfn, 1, realp);
28f4586b 7516 rcall = builtin_save_expr (rcall);
db3927fb
AH
7517 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7518 fold_build2_loc (loc, MULT_EXPR, rtype,
111f1fca 7519 rcall,
db3927fb
AH
7520 fold_build1_loc (loc, REALPART_EXPR,
7521 rtype, icall)),
7522 fold_build2_loc (loc, MULT_EXPR, rtype,
111f1fca 7523 rcall,
db3927fb
AH
7524 fold_build1_loc (loc, IMAGPART_EXPR,
7525 rtype, icall)));
28f4586b
RG
7526 }
7527
7528 return NULL_TREE;
7529}
7530
3bf05748 7531/* Fold function call to builtin lround, lroundf or lroundl (or the
5039610b
SL
7532 corresponding long long versions) and other rounding functions. ARG
7533 is the argument to the call. Return NULL_TREE if no simplification
7534 can be made. */
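/* Illustrative folds performed here (a sketch, not from the original
   source):

     lround (2.5)  ->  3         (rounds to nearest, ties away from zero)
     lceil (-0.5)  ->  0
     lfloor (x)    ->  (long) x  when x is known to be nonnegative  */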
3bf05748
KG
7535
7536static tree
db3927fb 7537fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
3bf05748 7538{
5039610b
SL
7539 if (!validate_arg (arg, REAL_TYPE))
7540 return NULL_TREE;
3bf05748
KG
7541
7542 /* Optimize lround of constant value. */
455f14dd 7543 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
3bf05748
KG
7544 {
7545 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
7546
4c8c70e0 7547 if (real_isfinite (&x))
3bf05748 7548 {
b87e6936 7549 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
2b60792f 7550 tree ftype = TREE_TYPE (arg);
3bf05748 7551 REAL_VALUE_TYPE r;
807e902e 7552 bool fail = false;
3bf05748 7553
d8b42d06
UB
7554 switch (DECL_FUNCTION_CODE (fndecl))
7555 {
6c32ee74 7556 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627
VR
7557 CASE_FLT_FN (BUILT_IN_LFLOOR):
7558 CASE_FLT_FN (BUILT_IN_LLFLOOR):
d8b42d06
UB
7559 real_floor (&r, TYPE_MODE (ftype), &x);
7560 break;
7561
6c32ee74 7562 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
7563 CASE_FLT_FN (BUILT_IN_LCEIL):
7564 CASE_FLT_FN (BUILT_IN_LLCEIL):
f94b1661
UB
7565 real_ceil (&r, TYPE_MODE (ftype), &x);
7566 break;
7567
6c32ee74 7568 CASE_FLT_FN (BUILT_IN_IROUND):
ea6a6627
VR
7569 CASE_FLT_FN (BUILT_IN_LROUND):
7570 CASE_FLT_FN (BUILT_IN_LLROUND):
d8b42d06
UB
7571 real_round (&r, TYPE_MODE (ftype), &x);
7572 break;
7573
7574 default:
7575 gcc_unreachable ();
7576 }
7577
807e902e
KZ
7578 wide_int val = real_to_integer (&r, &fail, TYPE_PRECISION (itype));
7579 if (!fail)
7580 return wide_int_to_tree (itype, val);
3bf05748
KG
7581 }
7582 }
7583
e1502f6e
RG
7584 switch (DECL_FUNCTION_CODE (fndecl))
7585 {
7586 CASE_FLT_FN (BUILT_IN_LFLOOR):
7587 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7588 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
7589 if (tree_expr_nonnegative_p (arg))
db3927fb
AH
7590 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7591 TREE_TYPE (TREE_TYPE (fndecl)), arg);
e1502f6e
RG
7592 break;
7593 default:;
7594 }
7595
db3927fb 7596 return fold_fixed_mathfn (loc, fndecl, arg);
3bf05748
KG
7597}
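/* Illustrative sketch, not part of builtins.c: a standalone program showing
   the library semantics the constant folds above reproduce (real_round
   rounds halfway cases away from zero, matching lround).  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (lround (2.5) == 3);          /* ties round away from zero */
  assert (lround (-2.5) == -3);
  assert ((long) floor (2.9) == 2);    /* what the lfloor folds compute */
  assert ((long) ceil (2.1) == 3);     /* what the lceil folds compute */
  return 0;
}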
7598
cf42869d 7599/* Fold function call to builtin ffs, clz, ctz, popcount and parity
5039610b
SL
 7600 and their long and long long variants (e.g. ffsl and ffsll). ARG is
7601 the argument to the call. Return NULL_TREE if no simplification can
7602 be made. */
cf42869d
RS
7603
7604static tree
5039610b 7605fold_builtin_bitop (tree fndecl, tree arg)
cf42869d 7606{
5039610b 7607 if (!validate_arg (arg, INTEGER_TYPE))
cf42869d
RS
7608 return NULL_TREE;
7609
7610 /* Optimize for constant argument. */
455f14dd 7611 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
cf42869d 7612 {
807e902e
KZ
7613 tree type = TREE_TYPE (arg);
7614 int result;
cf42869d
RS
7615
7616 switch (DECL_FUNCTION_CODE (fndecl))
7617 {
ea6a6627 7618 CASE_INT_FN (BUILT_IN_FFS):
807e902e 7619 result = wi::ffs (arg);
cf42869d
RS
7620 break;
7621
ea6a6627 7622 CASE_INT_FN (BUILT_IN_CLZ):
807e902e
KZ
7623 if (wi::ne_p (arg, 0))
7624 result = wi::clz (arg);
cf42869d 7625 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
807e902e 7626 result = TYPE_PRECISION (type);
cf42869d
RS
7627 break;
7628
ea6a6627 7629 CASE_INT_FN (BUILT_IN_CTZ):
807e902e
KZ
7630 if (wi::ne_p (arg, 0))
7631 result = wi::ctz (arg);
cf42869d 7632 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
807e902e 7633 result = TYPE_PRECISION (type);
cf42869d
RS
7634 break;
7635
146aef0b 7636 CASE_INT_FN (BUILT_IN_CLRSB):
807e902e 7637 result = wi::clrsb (arg);
146aef0b
JJ
7638 break;
7639
ea6a6627 7640 CASE_INT_FN (BUILT_IN_POPCOUNT):
807e902e 7641 result = wi::popcount (arg);
cf42869d
RS
7642 break;
7643
ea6a6627 7644 CASE_INT_FN (BUILT_IN_PARITY):
807e902e 7645 result = wi::parity (arg);
cf42869d
RS
7646 break;
7647
7648 default:
298e6adc 7649 gcc_unreachable ();
cf42869d
RS
7650 }
7651
b87e6936 7652 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
cf42869d
RS
7653 }
7654
7655 return NULL_TREE;
7656}
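/* Illustrative sketch, not part of builtins.c: the values the constant
   folding above produces match the documented builtin semantics (the clz
   result assumes a 32-bit unsigned int).  */
#include <assert.h>

int
main (void)
{
  assert (__builtin_ffs (0x10) == 5);         /* 1-based index of lowest set bit */
  assert (__builtin_clz (1u) == 31);          /* assumes 32-bit unsigned int */
  assert (__builtin_ctz (8u) == 3);
  assert (__builtin_popcount (0xffu) == 8);
  assert (__builtin_parity (0x7u) == 1);      /* odd number of set bits */
  return 0;
}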
7657
ac868f29 7658/* Fold function call to builtin_bswap and the short, long and long long
167fa32c
EC
7659 variants. Return NULL_TREE if no simplification can be made. */
7660static tree
5039610b 7661fold_builtin_bswap (tree fndecl, tree arg)
167fa32c 7662{
5039610b
SL
7663 if (! validate_arg (arg, INTEGER_TYPE))
7664 return NULL_TREE;
167fa32c
EC
7665
7666 /* Optimize constant value. */
455f14dd 7667 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
167fa32c 7668 {
ac868f29 7669 tree type = TREE_TYPE (TREE_TYPE (fndecl));
167fa32c 7670
167fa32c
EC
7671 switch (DECL_FUNCTION_CODE (fndecl))
7672 {
ac868f29 7673 case BUILT_IN_BSWAP16:
167fa32c
EC
7674 case BUILT_IN_BSWAP32:
7675 case BUILT_IN_BSWAP64:
7676 {
807e902e
KZ
7677 signop sgn = TYPE_SIGN (type);
7678 tree result =
7679 wide_int_to_tree (type,
7680 wide_int::from (arg, TYPE_PRECISION (type),
7681 sgn).bswap ());
7682 return result;
167fa32c 7683 }
167fa32c
EC
7684 default:
7685 gcc_unreachable ();
7686 }
167fa32c
EC
7687 }
7688
7689 return NULL_TREE;
7690}
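/* Illustrative sketch, not part of builtins.c: constant bswap folding
   yields the byte-reversed value.  */
#include <assert.h>

int
main (void)
{
  assert (__builtin_bswap16 (0x1234) == 0x3412);
  assert (__builtin_bswap32 (0x12345678u) == 0x78563412u);
  assert (__builtin_bswap64 (0x0102030405060708ull) == 0x0807060504030201ull);
  return 0;
}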
5039610b 7691
4413d881
KG
7692/* Fold a builtin function call to hypot, hypotf, or hypotl. Return
7693 NULL_TREE if no simplification can be made. */
7694
7695static tree
4adfc9a5 7696fold_builtin_hypot (location_t loc, tree arg0, tree arg1, tree type)
4413d881 7697{
4adfc9a5 7698 tree res;
4413d881 7699
5039610b
SL
7700 if (!validate_arg (arg0, REAL_TYPE)
7701 || !validate_arg (arg1, REAL_TYPE))
4413d881
KG
7702 return NULL_TREE;
7703
 7704 /* Calculate the result when the arguments are constants. */
7705 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
7706 return res;
b8698a0f 7707
4413d881
KG
7708 /* If either argument is zero, hypot is fabs of the other. */
7709 if (real_zerop (arg0))
db3927fb 7710 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
4413d881 7711 else if (real_zerop (arg1))
db3927fb 7712 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
b8698a0f 7713
012c5368
KG
7714 /* hypot(x,x) -> fabs(x)*sqrt(2). */
7715 if (flag_unsafe_math_optimizations
7716 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
73463c5e
RS
7717 return fold_build2_loc (loc, MULT_EXPR, type,
7718 fold_build1_loc (loc, ABS_EXPR, type, arg0),
7719 build_real_truncate (type, dconst_sqrt2 ()));
4413d881 7720
4413d881
KG
7721 return NULL_TREE;
7722}
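/* Illustrative sketch, not part of builtins.c: the identities used above,
   hypot(0,y) == fabs(y) and (under -funsafe-math-optimizations)
   hypot(x,x) == fabs(x)*sqrt(2).  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (hypot (0.0, -3.0) == 3.0);
  assert (fabs (hypot (5.0, 5.0) - 5.0 * sqrt (2.0)) < 1e-9);
  return 0;
}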
7723
7724
667bbbbb
EC
7725/* Fold a builtin function call to pow, powf, or powl. Return
7726 NULL_TREE if no simplification can be made. */
7727static tree
db3927fb 7728fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
667bbbbb 7729{
4413d881 7730 tree res;
667bbbbb 7731
5039610b
SL
7732 if (!validate_arg (arg0, REAL_TYPE)
7733 || !validate_arg (arg1, REAL_TYPE))
667bbbbb
EC
7734 return NULL_TREE;
7735
4413d881
KG
 7736 /* Calculate the result when the arguments are constants. */
7737 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
7738 return res;
7739
667bbbbb
EC
7740 /* Optimize pow(1.0,y) = 1.0. */
7741 if (real_onep (arg0))
db3927fb 7742 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
667bbbbb
EC
7743
7744 if (TREE_CODE (arg1) == REAL_CST
455f14dd 7745 && !TREE_OVERFLOW (arg1))
667bbbbb 7746 {
e3bb43c0 7747 REAL_VALUE_TYPE cint;
667bbbbb 7748 REAL_VALUE_TYPE c;
e3bb43c0
RS
7749 HOST_WIDE_INT n;
7750
667bbbbb
EC
7751 c = TREE_REAL_CST (arg1);
7752
7753 /* Optimize pow(x,0.0) = 1.0. */
624d31fe 7754 if (real_equal (&c, &dconst0))
db3927fb 7755 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
667bbbbb
EC
7756 arg0);
7757
7758 /* Optimize pow(x,1.0) = x. */
624d31fe 7759 if (real_equal (&c, &dconst1))
667bbbbb
EC
7760 return arg0;
7761
7762 /* Optimize pow(x,-1.0) = 1.0/x. */
624d31fe 7763 if (real_equal (&c, &dconstm1))
db3927fb 7764 return fold_build2_loc (loc, RDIV_EXPR, type,
987b67bc 7765 build_real (type, dconst1), arg0);
667bbbbb
EC
7766
7767 /* Optimize pow(x,0.5) = sqrt(x). */
7768 if (flag_unsafe_math_optimizations
624d31fe 7769 && real_equal (&c, &dconsthalf))
667bbbbb
EC
7770 {
7771 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7772
7773 if (sqrtfn != NULL_TREE)
db3927fb 7774 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
667bbbbb
EC
7775 }
7776
495ed96c
UB
7777 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
7778 if (flag_unsafe_math_optimizations)
7779 {
7780 const REAL_VALUE_TYPE dconstroot
9c02cf68 7781 = real_value_truncate (TYPE_MODE (type), dconst_third ());
495ed96c 7782
624d31fe 7783 if (real_equal (&c, &dconstroot))
495ed96c
UB
7784 {
7785 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
7786 if (cbrtfn != NULL_TREE)
db3927fb 7787 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
495ed96c
UB
7788 }
7789 }
7790
e3bb43c0
RS
7791 /* Check for an integer exponent. */
7792 n = real_to_integer (&c);
807e902e 7793 real_from_integer (&cint, VOIDmode, n, SIGNED);
e3bb43c0 7794 if (real_identical (&c, &cint))
667bbbbb 7795 {
17372f8c
DJ
7796 /* Attempt to evaluate pow at compile-time, unless this should
7797 raise an exception. */
e3bb43c0 7798 if (TREE_CODE (arg0) == REAL_CST
17372f8c
DJ
7799 && !TREE_OVERFLOW (arg0)
7800 && (n > 0
7801 || (!flag_trapping_math && !flag_errno_math)
624d31fe 7802 || !real_equal (&TREE_REAL_CST (arg0), &dconst0)))
667bbbbb
EC
7803 {
7804 REAL_VALUE_TYPE x;
7805 bool inexact;
7806
7807 x = TREE_REAL_CST (arg0);
7808 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
7809 if (flag_unsafe_math_optimizations || !inexact)
7810 return build_real (type, x);
7811 }
7812 }
7813 }
7814
d86dc303 7815 if (flag_unsafe_math_optimizations)
667bbbbb 7816 {
d86dc303 7817 const enum built_in_function fcode = builtin_mathfn_code (arg0);
667bbbbb 7818
d86dc303
KG
7819 /* Optimize pow(expN(x),y) = expN(x*y). */
7820 if (BUILTIN_EXPONENT_P (fcode))
c22cacf3 7821 {
5039610b
SL
7822 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
7823 tree arg = CALL_EXPR_ARG (arg0, 0);
db3927fb
AH
7824 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
7825 return build_call_expr_loc (loc, expfn, 1, arg);
d86dc303 7826 }
667bbbbb 7827
d86dc303
KG
7828 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
7829 if (BUILTIN_SQRT_P (fcode))
c22cacf3 7830 {
5039610b 7831 tree narg0 = CALL_EXPR_ARG (arg0, 0);
db3927fb 7832 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
987b67bc 7833 build_real (type, dconsthalf));
db3927fb 7834 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
d86dc303
KG
7835 }
7836
7837 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
7838 if (BUILTIN_CBRT_P (fcode))
c22cacf3 7839 {
5039610b 7840 tree arg = CALL_EXPR_ARG (arg0, 0);
d86dc303
KG
7841 if (tree_expr_nonnegative_p (arg))
7842 {
73463c5e
RS
7843 tree c = build_real_truncate (type, dconst_third ());
7844 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1, c);
db3927fb 7845 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
d86dc303
KG
7846 }
7847 }
c22cacf3 7848
776e7174 7849 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
5039610b
SL
7850 if (fcode == BUILT_IN_POW
7851 || fcode == BUILT_IN_POWF
7852 || fcode == BUILT_IN_POWL)
c22cacf3 7853 {
5039610b 7854 tree arg00 = CALL_EXPR_ARG (arg0, 0);
776e7174
RG
7855 if (tree_expr_nonnegative_p (arg00))
7856 {
7857 tree arg01 = CALL_EXPR_ARG (arg0, 1);
7858 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
7859 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
7860 }
d86dc303 7861 }
667bbbbb 7862 }
d86dc303 7863
667bbbbb
EC
7864 return NULL_TREE;
7865}
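/* Illustrative sketch, not part of builtins.c: the special-case exponents
   handled above.  The exact-equality asserts assume a faithfully rounded
   libm (they hold for glibc); pow(x,0.5) -> sqrt(x) is only done under
   -funsafe-math-optimizations.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double x = 4.0;
  assert (pow (1.0, 7.5) == 1.0);       /* pow(1,y) -> 1 */
  assert (pow (x, 0.0) == 1.0);         /* pow(x,0) -> 1 */
  assert (pow (x, 1.0) == x);           /* pow(x,1) -> x */
  assert (pow (x, -1.0) == 1.0 / x);    /* pow(x,-1) -> 1/x */
  assert (pow (x, 0.5) == sqrt (x));    /* pow(x,0.5) -> sqrt(x) */
  return 0;
}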
7866
5039610b
SL
7867/* Fold a builtin function call to powi, powif, or powil with argument ARG.
7868 Return NULL_TREE if no simplification can be made. */
ba78d452 7869static tree
db3927fb 7870fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
5039610b 7871 tree arg0, tree arg1, tree type)
ba78d452 7872{
5039610b
SL
7873 if (!validate_arg (arg0, REAL_TYPE)
7874 || !validate_arg (arg1, INTEGER_TYPE))
ba78d452
RG
7875 return NULL_TREE;
7876
7877 /* Optimize pow(1.0,y) = 1.0. */
7878 if (real_onep (arg0))
db3927fb 7879 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
ba78d452 7880
9541ffee 7881 if (tree_fits_shwi_p (arg1))
ba78d452 7882 {
eb1ce453 7883 HOST_WIDE_INT c = tree_to_shwi (arg1);
ba78d452
RG
7884
7885 /* Evaluate powi at compile-time. */
7886 if (TREE_CODE (arg0) == REAL_CST
455f14dd 7887 && !TREE_OVERFLOW (arg0))
ba78d452
RG
7888 {
7889 REAL_VALUE_TYPE x;
7890 x = TREE_REAL_CST (arg0);
7891 real_powi (&x, TYPE_MODE (type), &x, c);
7892 return build_real (type, x);
7893 }
7894
7895 /* Optimize pow(x,0) = 1.0. */
7896 if (c == 0)
db3927fb 7897 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
ba78d452
RG
7898 arg0);
7899
7900 /* Optimize pow(x,1) = x. */
7901 if (c == 1)
7902 return arg0;
7903
7904 /* Optimize pow(x,-1) = 1.0/x. */
7905 if (c == -1)
db3927fb 7906 return fold_build2_loc (loc, RDIV_EXPR, type,
987b67bc 7907 build_real (type, dconst1), arg0);
ba78d452
RG
7908 }
7909
7910 return NULL_TREE;
7911}
7912
f7657db9 7913/* A subroutine of fold_builtin to fold the various exponent
5039610b 7914 functions. Return NULL_TREE if no simplification can be made.
b52dd66c 7915 FUNC is the corresponding MPFR exponent function. */
f7657db9
KG
7916
7917static tree
db3927fb 7918fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
b52dd66c 7919 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
f7657db9 7920{
5039610b 7921 if (validate_arg (arg, REAL_TYPE))
f7657db9 7922 {
f7657db9 7923 tree type = TREE_TYPE (TREE_TYPE (fndecl));
cf1491f0 7924 tree res;
b8698a0f 7925
b52dd66c 7926 /* Calculate the result when the argument is a constant. */
b53fed56 7927 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
b52dd66c 7928 return res;
f7657db9
KG
7929
7930 /* Optimize expN(logN(x)) = x. */
7931 if (flag_unsafe_math_optimizations)
c22cacf3 7932 {
f7657db9
KG
7933 const enum built_in_function fcode = builtin_mathfn_code (arg);
7934
b52dd66c 7935 if ((func == mpfr_exp
f7657db9
KG
7936 && (fcode == BUILT_IN_LOG
7937 || fcode == BUILT_IN_LOGF
7938 || fcode == BUILT_IN_LOGL))
b52dd66c 7939 || (func == mpfr_exp2
f7657db9
KG
7940 && (fcode == BUILT_IN_LOG2
7941 || fcode == BUILT_IN_LOG2F
7942 || fcode == BUILT_IN_LOG2L))
b52dd66c 7943 || (func == mpfr_exp10
f7657db9
KG
7944 && (fcode == BUILT_IN_LOG10
7945 || fcode == BUILT_IN_LOG10F
7946 || fcode == BUILT_IN_LOG10L)))
db3927fb 7947 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
f7657db9
KG
7948 }
7949 }
7950
5039610b 7951 return NULL_TREE;
f7657db9
KG
7952}
7953
2a5fce6d
PC
7954/* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
7955 arguments to the call, and TYPE is its return type.
7956 Return NULL_TREE if no simplification can be made. */
7957
7958static tree
db3927fb 7959fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
2a5fce6d
PC
7960{
7961 if (!validate_arg (arg1, POINTER_TYPE)
7962 || !validate_arg (arg2, INTEGER_TYPE)
7963 || !validate_arg (len, INTEGER_TYPE))
7964 return NULL_TREE;
7965 else
7966 {
7967 const char *p1;
7968
7969 if (TREE_CODE (arg2) != INTEGER_CST
cc269bb6 7970 || !tree_fits_uhwi_p (len))
2a5fce6d
PC
7971 return NULL_TREE;
7972
7973 p1 = c_getstr (arg1);
7974 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7975 {
7976 char c;
7977 const char *r;
7978 tree tem;
7979
7980 if (target_char_cast (arg2, &c))
7981 return NULL_TREE;
7982
ae7e9ddd 7983 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
2a5fce6d
PC
7984
7985 if (r == NULL)
7986 return build_int_cst (TREE_TYPE (arg1), 0);
7987
5d49b6a7 7988 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
db3927fb 7989 return fold_convert_loc (loc, type, tem);
2a5fce6d
PC
7990 }
7991 return NULL_TREE;
7992 }
7993}
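/* Illustrative sketch, not part of builtins.c: what the constant
   evaluation above computes for a literal haystack.  */
#include <assert.h>
#include <string.h>

int
main (void)
{
  const char *s = "hello";
  assert (memchr (s, 'l', 6) == s + 2);   /* pointer to the first match */
  assert (memchr (s, 'z', 6) == NULL);    /* no match -> null pointer */
  return 0;
}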
7994
5039610b
SL
7995/* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7996 Return NULL_TREE if no simplification can be made. */
5bb650ec
RS
7997
7998static tree
db3927fb 7999fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
5bb650ec 8000{
01847e9d 8001 const char *p1, *p2;
5bb650ec 8002
5039610b
SL
8003 if (!validate_arg (arg1, POINTER_TYPE)
8004 || !validate_arg (arg2, POINTER_TYPE)
8005 || !validate_arg (len, INTEGER_TYPE))
8006 return NULL_TREE;
5bb650ec
RS
8007
8008 /* If the LEN parameter is zero, return zero. */
8009 if (integer_zerop (len))
db3927fb 8010 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
01847e9d 8011 arg1, arg2);
5bb650ec
RS
8012
8013 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8014 if (operand_equal_p (arg1, arg2, 0))
db3927fb 8015 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
01847e9d
RS
8016
8017 p1 = c_getstr (arg1);
8018 p2 = c_getstr (arg2);
8019
8020 /* If all arguments are constant, and the value of len is not greater
8021 than the lengths of arg1 and arg2, evaluate at compile-time. */
cc269bb6 8022 if (tree_fits_uhwi_p (len) && p1 && p2
01847e9d
RS
8023 && compare_tree_int (len, strlen (p1) + 1) <= 0
8024 && compare_tree_int (len, strlen (p2) + 1) <= 0)
8025 {
ae7e9ddd 8026 const int r = memcmp (p1, p2, tree_to_uhwi (len));
01847e9d
RS
8027
8028 if (r > 0)
8029 return integer_one_node;
8030 else if (r < 0)
8031 return integer_minus_one_node;
8032 else
8033 return integer_zero_node;
8034 }
8035
8036 /* If len parameter is one, return an expression corresponding to
 8037 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
ae7e9ddd 8038 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
01847e9d
RS
8039 {
8040 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8041 tree cst_uchar_ptr_node
8042 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8043
db3927fb
AH
8044 tree ind1
8045 = fold_convert_loc (loc, integer_type_node,
8046 build1 (INDIRECT_REF, cst_uchar_node,
8047 fold_convert_loc (loc,
8048 cst_uchar_ptr_node,
01847e9d 8049 arg1)));
db3927fb
AH
8050 tree ind2
8051 = fold_convert_loc (loc, integer_type_node,
8052 build1 (INDIRECT_REF, cst_uchar_node,
8053 fold_convert_loc (loc,
8054 cst_uchar_ptr_node,
01847e9d 8055 arg2)));
db3927fb 8056 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
01847e9d 8057 }
5bb650ec 8058
5039610b 8059 return NULL_TREE;
5bb650ec
RS
8060}
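/* Illustrative sketch, not part of builtins.c: the three cases folded
   above -- zero length, identical constant data, and the len == 1 byte
   difference.  */
#include <assert.h>
#include <string.h>

int
main (void)
{
  assert (memcmp ("abc", "xyz", 0) == 0);     /* len 0 -> 0 */
  assert (memcmp ("abc", "abc", 3) == 0);     /* equal data -> 0 */
  assert ((memcmp ("b", "a", 1) > 0)
          == ((unsigned char) 'b' - (unsigned char) 'a' > 0));  /* len 1 */
  return 0;
}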
8061
5039610b
SL
8062/* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
8063 Return NULL_TREE if no simplification can be made. */
5bb650ec
RS
8064
8065static tree
db3927fb 8066fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
5bb650ec 8067{
5bb650ec
RS
8068 const char *p1, *p2;
8069
5039610b
SL
8070 if (!validate_arg (arg1, POINTER_TYPE)
8071 || !validate_arg (arg2, POINTER_TYPE))
8072 return NULL_TREE;
5bb650ec
RS
8073
8074 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8075 if (operand_equal_p (arg1, arg2, 0))
01847e9d 8076 return integer_zero_node;
5bb650ec
RS
8077
8078 p1 = c_getstr (arg1);
8079 p2 = c_getstr (arg2);
8080
8081 if (p1 && p2)
8082 {
5bb650ec
RS
8083 const int i = strcmp (p1, p2);
8084 if (i < 0)
01847e9d 8085 return integer_minus_one_node;
5bb650ec 8086 else if (i > 0)
01847e9d 8087 return integer_one_node;
5bb650ec 8088 else
01847e9d
RS
8089 return integer_zero_node;
8090 }
8091
8092 /* If the second arg is "", return *(const unsigned char*)arg1. */
8093 if (p2 && *p2 == '\0')
8094 {
8095 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8096 tree cst_uchar_ptr_node
8097 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8098
db3927fb
AH
8099 return fold_convert_loc (loc, integer_type_node,
8100 build1 (INDIRECT_REF, cst_uchar_node,
8101 fold_convert_loc (loc,
8102 cst_uchar_ptr_node,
8103 arg1)));
01847e9d
RS
8104 }
8105
8106 /* If the first arg is "", return -*(const unsigned char*)arg2. */
8107 if (p1 && *p1 == '\0')
8108 {
8109 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8110 tree cst_uchar_ptr_node
8111 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8112
db3927fb
AH
8113 tree temp
8114 = fold_convert_loc (loc, integer_type_node,
8115 build1 (INDIRECT_REF, cst_uchar_node,
8116 fold_convert_loc (loc,
8117 cst_uchar_ptr_node,
01847e9d 8118 arg2)));
db3927fb 8119 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
5bb650ec
RS
8120 }
8121
5039610b 8122 return NULL_TREE;
5bb650ec
RS
8123}
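/* Illustrative sketch, not part of builtins.c: strcmp against an empty
   string reduces to inspecting the first byte, as folded above.  */
#include <assert.h>
#include <string.h>

int
main (void)
{
  const char *s = "abc";
  assert ((strcmp (s, "") > 0) == (*(const unsigned char *) s > 0));
  assert (strcmp ("", s) < 0);
  assert (strcmp (s, s) == 0);
  return 0;
}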
8124
5039610b
SL
8125/* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
8126 Return NULL_TREE if no simplification can be made. */
5bb650ec
RS
8127
8128static tree
db3927fb 8129fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
5bb650ec 8130{
5bb650ec
RS
8131 const char *p1, *p2;
8132
5039610b
SL
8133 if (!validate_arg (arg1, POINTER_TYPE)
8134 || !validate_arg (arg2, POINTER_TYPE)
8135 || !validate_arg (len, INTEGER_TYPE))
8136 return NULL_TREE;
5bb650ec
RS
8137
8138 /* If the LEN parameter is zero, return zero. */
8139 if (integer_zerop (len))
db3927fb 8140 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
01847e9d 8141 arg1, arg2);
5bb650ec
RS
8142
8143 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8144 if (operand_equal_p (arg1, arg2, 0))
db3927fb 8145 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
5bb650ec
RS
8146
8147 p1 = c_getstr (arg1);
8148 p2 = c_getstr (arg2);
8149
cc269bb6 8150 if (tree_fits_uhwi_p (len) && p1 && p2)
5bb650ec 8151 {
ae7e9ddd 8152 const int i = strncmp (p1, p2, tree_to_uhwi (len));
01847e9d
RS
8153 if (i > 0)
8154 return integer_one_node;
8155 else if (i < 0)
8156 return integer_minus_one_node;
5bb650ec 8157 else
01847e9d
RS
8158 return integer_zero_node;
8159 }
8160
8161 /* If the second arg is "", and the length is greater than zero,
8162 return *(const unsigned char*)arg1. */
8163 if (p2 && *p2 == '\0'
8164 && TREE_CODE (len) == INTEGER_CST
8165 && tree_int_cst_sgn (len) == 1)
8166 {
8167 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8168 tree cst_uchar_ptr_node
8169 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8170
db3927fb
AH
8171 return fold_convert_loc (loc, integer_type_node,
8172 build1 (INDIRECT_REF, cst_uchar_node,
8173 fold_convert_loc (loc,
8174 cst_uchar_ptr_node,
8175 arg1)));
01847e9d
RS
8176 }
8177
8178 /* If the first arg is "", and the length is greater than zero,
8179 return -*(const unsigned char*)arg2. */
8180 if (p1 && *p1 == '\0'
8181 && TREE_CODE (len) == INTEGER_CST
8182 && tree_int_cst_sgn (len) == 1)
8183 {
8184 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8185 tree cst_uchar_ptr_node
8186 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8187
db3927fb
AH
8188 tree temp = fold_convert_loc (loc, integer_type_node,
8189 build1 (INDIRECT_REF, cst_uchar_node,
8190 fold_convert_loc (loc,
8191 cst_uchar_ptr_node,
8192 arg2)));
8193 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
01847e9d
RS
8194 }
8195
8196 /* If len parameter is one, return an expression corresponding to
 8197 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
ae7e9ddd 8198 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
01847e9d
RS
8199 {
8200 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
baab454a
UW
8201 tree cst_uchar_ptr_node
8202 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8203
db3927fb
AH
8204 tree ind1 = fold_convert_loc (loc, integer_type_node,
8205 build1 (INDIRECT_REF, cst_uchar_node,
8206 fold_convert_loc (loc,
8207 cst_uchar_ptr_node,
8208 arg1)));
8209 tree ind2 = fold_convert_loc (loc, integer_type_node,
8210 build1 (INDIRECT_REF, cst_uchar_node,
8211 fold_convert_loc (loc,
8212 cst_uchar_ptr_node,
8213 arg2)));
8214 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
5bb650ec
RS
8215 }
8216
5039610b 8217 return NULL_TREE;
5bb650ec
RS
8218}
8219
5039610b
SL
8220/* Fold function call to builtin signbit, signbitf or signbitl with argument
8221 ARG. Return NULL_TREE if no simplification can be made. */
ef79730c
RS
8222
8223static tree
db3927fb 8224fold_builtin_signbit (location_t loc, tree arg, tree type)
ef79730c 8225{
5039610b 8226 if (!validate_arg (arg, REAL_TYPE))
ef79730c
RS
8227 return NULL_TREE;
8228
ef79730c
RS
8229 /* If ARG is a compile-time constant, determine the result. */
8230 if (TREE_CODE (arg) == REAL_CST
455f14dd 8231 && !TREE_OVERFLOW (arg))
ef79730c
RS
8232 {
8233 REAL_VALUE_TYPE c;
8234
8235 c = TREE_REAL_CST (arg);
e8160c9a
NF
8236 return (REAL_VALUE_NEGATIVE (c)
8237 ? build_one_cst (type)
8238 : build_zero_cst (type));
ef79730c
RS
8239 }
8240
8241 /* If ARG is non-negative, the result is always zero. */
8242 if (tree_expr_nonnegative_p (arg))
db3927fb 8243 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
ef79730c
RS
8244
8245 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
3d3dbadd 8246 if (!HONOR_SIGNED_ZEROS (arg))
96d03496
RG
8247 return fold_convert (type,
8248 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
8249 build_real (TREE_TYPE (arg), dconst0)));
ef79730c
RS
8250
8251 return NULL_TREE;
8252}
8253
5039610b
SL
8254/* Fold function call to builtin copysign, copysignf or copysignl with
8255 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
8256 be made. */
67057c53
RS
8257
8258static tree
4adfc9a5 8259fold_builtin_copysign (location_t loc, tree arg1, tree arg2, tree type)
67057c53 8260{
5039610b
SL
8261 if (!validate_arg (arg1, REAL_TYPE)
8262 || !validate_arg (arg2, REAL_TYPE))
67057c53
RS
8263 return NULL_TREE;
8264
67057c53
RS
8265 /* copysign(X,X) is X. */
8266 if (operand_equal_p (arg1, arg2, 0))
db3927fb 8267 return fold_convert_loc (loc, type, arg1);
67057c53
RS
8268
8269 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
8270 if (TREE_CODE (arg1) == REAL_CST
8271 && TREE_CODE (arg2) == REAL_CST
455f14dd
RS
8272 && !TREE_OVERFLOW (arg1)
8273 && !TREE_OVERFLOW (arg2))
67057c53
RS
8274 {
8275 REAL_VALUE_TYPE c1, c2;
8276
8277 c1 = TREE_REAL_CST (arg1);
8278 c2 = TREE_REAL_CST (arg2);
8acb1b3d 8279 /* c1.sign := c2.sign. */
67057c53
RS
8280 real_copysign (&c1, &c2);
8281 return build_real (type, c1);
67057c53
RS
8282 }
8283
8284 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
8285 Remember to evaluate Y for side-effects. */
8286 if (tree_expr_nonnegative_p (arg2))
db3927fb
AH
8287 return omit_one_operand_loc (loc, type,
8288 fold_build1_loc (loc, ABS_EXPR, type, arg1),
67057c53
RS
8289 arg2);
8290
8291 return NULL_TREE;
8292}
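/* Illustrative sketch, not part of builtins.c: the constant fold and the
   "arg2 non-negative -> fabs(arg1)" case above.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (copysign (3.0, -1.0) == -3.0);   /* magnitude of arg1, sign of arg2 */
  assert (copysign (-3.0, 2.0) == 3.0);    /* same as fabs when arg2 >= 0 */
  assert (copysign (5.0, 5.0) == 5.0);     /* copysign(X,X) == X */
  return 0;
}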
8293
5039610b 8294/* Fold a call to builtin isascii with argument ARG. */
df0785d6
KG
8295
8296static tree
db3927fb 8297fold_builtin_isascii (location_t loc, tree arg)
df0785d6 8298{
5039610b
SL
8299 if (!validate_arg (arg, INTEGER_TYPE))
8300 return NULL_TREE;
df0785d6
KG
8301 else
8302 {
8303 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
6728ee79 8304 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8305 build_int_cst (integer_type_node,
6728ee79 8306 ~ (unsigned HOST_WIDE_INT) 0x7f));
db3927fb 8307 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
45a2c477 8308 arg, integer_zero_node);
df0785d6
KG
8309 }
8310}
8311
5039610b 8312/* Fold a call to builtin toascii with argument ARG. */
df0785d6
KG
8313
8314static tree
db3927fb 8315fold_builtin_toascii (location_t loc, tree arg)
df0785d6 8316{
5039610b
SL
8317 if (!validate_arg (arg, INTEGER_TYPE))
8318 return NULL_TREE;
b8698a0f 8319
5039610b 8320 /* Transform toascii(c) -> (c & 0x7f). */
db3927fb 8321 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
45a2c477 8322 build_int_cst (integer_type_node, 0x7f));
df0785d6
KG
8323}
8324
5039610b 8325/* Fold a call to builtin isdigit with argument ARG. */
61218d19
KG
8326
8327static tree
db3927fb 8328fold_builtin_isdigit (location_t loc, tree arg)
61218d19 8329{
5039610b
SL
8330 if (!validate_arg (arg, INTEGER_TYPE))
8331 return NULL_TREE;
61218d19
KG
8332 else
8333 {
8334 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
c5ff069d
ZW
8335 /* According to the C standard, isdigit is unaffected by locale.
8336 However, it definitely is affected by the target character set. */
c5ff069d
ZW
8337 unsigned HOST_WIDE_INT target_digit0
8338 = lang_hooks.to_target_charset ('0');
8339
8340 if (target_digit0 == 0)
8341 return NULL_TREE;
8342
db3927fb 8343 arg = fold_convert_loc (loc, unsigned_type_node, arg);
6728ee79
MM
8344 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8345 build_int_cst (unsigned_type_node, target_digit0));
db3927fb 8346 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
5cdc4a26 8347 build_int_cst (unsigned_type_node, 9));
61218d19
KG
8348 }
8349}
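/* Illustrative sketch, not part of builtins.c: the character-class
   rewrites above (isascii, toascii, isdigit) written as plain C
   identities; the digit check only needs '0'..'9' to be contiguous,
   which the C standard guarantees.  */
#include <assert.h>

int
main (void)
{
  for (int c = 0; c < 256; c++)
    {
      assert (((c & ~0x7f) == 0) == (c <= 0x7f));                    /* isascii */
      assert ((c & 0x7f) == c % 128);                                /* toascii */
      assert (((unsigned) c - '0' <= 9) == (c >= '0' && c <= '9'));  /* isdigit */
    }
  return 0;
}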
ef79730c 8350
5039610b 8351/* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9655d83b
RS
8352
8353static tree
db3927fb 8354fold_builtin_fabs (location_t loc, tree arg, tree type)
9655d83b 8355{
5039610b
SL
8356 if (!validate_arg (arg, REAL_TYPE))
8357 return NULL_TREE;
9655d83b 8358
db3927fb 8359 arg = fold_convert_loc (loc, type, arg);
9655d83b
RS
8360 if (TREE_CODE (arg) == REAL_CST)
8361 return fold_abs_const (arg, type);
db3927fb 8362 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8363}
8364
5039610b 8365/* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9655d83b
RS
8366
8367static tree
db3927fb 8368fold_builtin_abs (location_t loc, tree arg, tree type)
9655d83b 8369{
5039610b
SL
8370 if (!validate_arg (arg, INTEGER_TYPE))
8371 return NULL_TREE;
9655d83b 8372
db3927fb 8373 arg = fold_convert_loc (loc, type, arg);
9655d83b
RS
8374 if (TREE_CODE (arg) == INTEGER_CST)
8375 return fold_abs_const (arg, type);
db3927fb 8376 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9655d83b
RS
8377}
8378
16949072
RG
8379/* Fold a fma operation with arguments ARG[012]. */
8380
8381tree
8382fold_fma (location_t loc ATTRIBUTE_UNUSED,
8383 tree type, tree arg0, tree arg1, tree arg2)
8384{
8385 if (TREE_CODE (arg0) == REAL_CST
8386 && TREE_CODE (arg1) == REAL_CST
8387 && TREE_CODE (arg2) == REAL_CST)
8388 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
8389
8390 return NULL_TREE;
8391}
8392
8393/* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8394
8395static tree
8396fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8397{
8398 if (validate_arg (arg0, REAL_TYPE)
c3284718
RS
8399 && validate_arg (arg1, REAL_TYPE)
8400 && validate_arg (arg2, REAL_TYPE))
16949072
RG
8401 {
8402 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
8403 if (tem)
8404 return tem;
8405
8406 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8407 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8408 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8409 }
8410 return NULL_TREE;
8411}
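/* Illustrative sketch, not part of builtins.c: fma computes a*b+c with a
   single rounding, which the constant folding above reproduces through
   mpfr_fma.  The second assert assumes a correctly rounded runtime fma
   (e.g. hardware FMA or glibc).  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (fma (2.0, 3.0, 4.0) == 10.0);   /* exact operands, exact result */
  /* The single rounding lets fma recover the rounding error of x*x,
     here 2^-54 rather than 0.  */
  double x = 1.0 + 0x1p-27;
  assert (fma (x, x, -(x * x)) == 0x1p-54);
  return 0;
}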
8412
b64d949c
KG
8413/* Fold a call to builtin fmin or fmax. */
8414
8415static tree
db3927fb
AH
8416fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
8417 tree type, bool max)
b64d949c 8418{
5039610b 8419 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
b64d949c 8420 {
b64d949c
KG
 8421 /* Calculate the result when the arguments are constants. */
8422 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
8423
8424 if (res)
8425 return res;
8426
a8e3bad4
KG
8427 /* If either argument is NaN, return the other one. Avoid the
8428 transformation if we get (and honor) a signalling NaN. Using
8429 omit_one_operand() ensures we create a non-lvalue. */
8430 if (TREE_CODE (arg0) == REAL_CST
8431 && real_isnan (&TREE_REAL_CST (arg0))
3d3dbadd 8432 && (! HONOR_SNANS (arg0)
a8e3bad4 8433 || ! TREE_REAL_CST (arg0).signalling))
db3927fb 8434 return omit_one_operand_loc (loc, type, arg1, arg0);
a8e3bad4
KG
8435 if (TREE_CODE (arg1) == REAL_CST
8436 && real_isnan (&TREE_REAL_CST (arg1))
3d3dbadd 8437 && (! HONOR_SNANS (arg1)
a8e3bad4 8438 || ! TREE_REAL_CST (arg1).signalling))
db3927fb 8439 return omit_one_operand_loc (loc, type, arg0, arg1);
a8e3bad4 8440
b64d949c
KG
8441 /* Transform fmin/fmax(x,x) -> x. */
8442 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
db3927fb 8443 return omit_one_operand_loc (loc, type, arg0, arg1);
b8698a0f 8444
b64d949c
KG
8445 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
8446 functions to return the numeric arg if the other one is NaN.
8447 These tree codes don't honor that, so only transform if
8448 -ffinite-math-only is set. C99 doesn't require -0.0 to be
8449 handled, so we don't have to worry about it either. */
8450 if (flag_finite_math_only)
db3927fb
AH
8451 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
8452 fold_convert_loc (loc, type, arg0),
8453 fold_convert_loc (loc, type, arg1));
b64d949c
KG
8454 }
8455 return NULL_TREE;
8456}
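/* Illustrative sketch, not part of builtins.c: the NaN rule that blocks a
   plain MIN_EXPR/MAX_EXPR rewrite -- fmin/fmax return the numeric operand
   when the other one is a quiet NaN.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (fmax (NAN, 2.0) == 2.0);
  assert (fmin (2.0, NAN) == 2.0);
  assert (fmax (1.0, 2.0) == 2.0);
  return 0;
}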
8457
527cab20
KG
8458/* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8459
8460static tree
db3927fb 8461fold_builtin_carg (location_t loc, tree arg, tree type)
527cab20 8462{
c128599a
KG
8463 if (validate_arg (arg, COMPLEX_TYPE)
8464 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
527cab20
KG
8465 {
8466 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
b8698a0f 8467
527cab20
KG
8468 if (atan2_fn)
8469 {
5039610b 8470 tree new_arg = builtin_save_expr (arg);
db3927fb
AH
8471 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8472 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8473 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
527cab20
KG
8474 }
8475 }
b8698a0f 8476
527cab20
KG
8477 return NULL_TREE;
8478}
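/* Illustrative sketch, not part of builtins.c: carg(a+bi) == atan2(b,a),
   the rewrite performed above (compared with a small tolerance, since the
   two calls need not be bit-identical on every libm).  */
#include <assert.h>
#include <complex.h>
#include <math.h>

int
main (void)
{
  double complex z = -1.0 + 1.0 * I;
  assert (fabs (carg (z) - atan2 (1.0, -1.0)) < 1e-15);   /* both ~ 3*pi/4 */
  return 0;
}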
8479
6351a719
KG
8480/* Fold a call to builtin logb/ilogb. */
8481
8482static tree
db3927fb 8483fold_builtin_logb (location_t loc, tree arg, tree rettype)
6351a719
KG
8484{
8485 if (! validate_arg (arg, REAL_TYPE))
8486 return NULL_TREE;
b8698a0f 8487
6351a719 8488 STRIP_NOPS (arg);
b8698a0f 8489
6351a719
KG
8490 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8491 {
8492 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
b8698a0f 8493
6351a719
KG
8494 switch (value->cl)
8495 {
8496 case rvc_nan:
8497 case rvc_inf:
8498 /* If arg is Inf or NaN and we're logb, return it. */
8499 if (TREE_CODE (rettype) == REAL_TYPE)
ea28bb0b
MP
8500 {
8501 /* For logb(-Inf) we have to return +Inf. */
8502 if (real_isinf (value) && real_isneg (value))
8503 {
8504 REAL_VALUE_TYPE tem;
8505 real_inf (&tem);
8506 return build_real (rettype, tem);
8507 }
8508 return fold_convert_loc (loc, rettype, arg);
8509 }
6351a719
KG
8510 /* Fall through... */
8511 case rvc_zero:
8512 /* Zero may set errno and/or raise an exception for logb, also
8513 for ilogb we don't know FP_ILOGB0. */
8514 return NULL_TREE;
8515 case rvc_normal:
8516 /* For normal numbers, proceed iff radix == 2. In GCC,
8517 normalized significands are in the range [0.5, 1.0). We
8518 want the exponent as if they were [1.0, 2.0) so get the
8519 exponent and subtract 1. */
8520 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
db3927fb 8521 return fold_convert_loc (loc, rettype,
45a2c477 8522 build_int_cst (integer_type_node,
db3927fb 8523 REAL_EXP (value)-1));
6351a719
KG
8524 break;
8525 }
8526 }
b8698a0f 8527
6351a719
KG
8528 return NULL_TREE;
8529}
8530
8531/* Fold a call to builtin significand, if radix == 2. */
8532
8533static tree
db3927fb 8534fold_builtin_significand (location_t loc, tree arg, tree rettype)
6351a719
KG
8535{
8536 if (! validate_arg (arg, REAL_TYPE))
8537 return NULL_TREE;
b8698a0f 8538
6351a719 8539 STRIP_NOPS (arg);
b8698a0f 8540
6351a719
KG
8541 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
8542 {
8543 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
b8698a0f 8544
6351a719
KG
8545 switch (value->cl)
8546 {
8547 case rvc_zero:
8548 case rvc_nan:
8549 case rvc_inf:
8550 /* If arg is +-0, +-Inf or +-NaN, then return it. */
db3927fb 8551 return fold_convert_loc (loc, rettype, arg);
6351a719
KG
8552 case rvc_normal:
8553 /* For normal numbers, proceed iff radix == 2. */
8554 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
8555 {
8556 REAL_VALUE_TYPE result = *value;
8557 /* In GCC, normalized significands are in the range [0.5,
8558 1.0). We want them to be [1.0, 2.0) so set the
8559 exponent to 1. */
8560 SET_REAL_EXP (&result, 1);
8561 return build_real (rettype, result);
8562 }
8563 break;
8564 }
8565 }
b8698a0f 8566
6351a719
KG
8567 return NULL_TREE;
8568}
8569
7a2a25ab
KG
8570/* Fold a call to builtin frexp, we can assume the base is 2. */
8571
8572static tree
db3927fb 8573fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7a2a25ab
KG
8574{
8575 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8576 return NULL_TREE;
b8698a0f 8577
7a2a25ab 8578 STRIP_NOPS (arg0);
b8698a0f 8579
7a2a25ab
KG
8580 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8581 return NULL_TREE;
b8698a0f 8582
db3927fb 8583 arg1 = build_fold_indirect_ref_loc (loc, arg1);
7a2a25ab
KG
8584
8585 /* Proceed if a valid pointer type was passed in. */
8586 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8587 {
8588 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8589 tree frac, exp;
b8698a0f 8590
7a2a25ab
KG
8591 switch (value->cl)
8592 {
8593 case rvc_zero:
8594 /* For +-0, return (*exp = 0, +-0). */
8595 exp = integer_zero_node;
8596 frac = arg0;
8597 break;
8598 case rvc_nan:
8599 case rvc_inf:
8600 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
db3927fb 8601 return omit_one_operand_loc (loc, rettype, arg0, arg1);
7a2a25ab
KG
8602 case rvc_normal:
8603 {
8604 /* Since the frexp function always expects base 2, and in
8605 GCC normalized significands are already in the range
8606 [0.5, 1.0), we have exactly what frexp wants. */
8607 REAL_VALUE_TYPE frac_rvt = *value;
8608 SET_REAL_EXP (&frac_rvt, 0);
8609 frac = build_real (rettype, frac_rvt);
45a2c477 8610 exp = build_int_cst (integer_type_node, REAL_EXP (value));
7a2a25ab
KG
8611 }
8612 break;
8613 default:
8614 gcc_unreachable ();
8615 }
b8698a0f 8616
7a2a25ab 8617 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 8618 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7a2a25ab 8619 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 8620 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7a2a25ab
KG
8621 }
8622
8623 return NULL_TREE;
8624}
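/* Illustrative sketch, not part of builtins.c: frexp splits a value into
   a fraction in [0.5, 1.0) and a power-of-two exponent, exactly the pair
   built above from the constant's significand and REAL_EXP.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  int e;
  double f = frexp (12.0, &e);
  assert (f == 0.75 && e == 4);    /* 12 = 0.75 * 2^4 */
  f = frexp (0.0, &e);
  assert (f == 0.0 && e == 0);     /* +-0 -> (*exp = 0, +-0) */
  return 0;
}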
8625
2b5e5642
KG
8626/* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
8627 then we can assume the base is two. If it's false, then we have to
8628 check the mode of the TYPE parameter in certain cases. */
8629
8630static tree
db3927fb
AH
8631fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
8632 tree type, bool ldexp)
2b5e5642
KG
8633{
8634 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
8635 {
8636 STRIP_NOPS (arg0);
8637 STRIP_NOPS (arg1);
8638
8639 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
8640 if (real_zerop (arg0) || integer_zerop (arg1)
8641 || (TREE_CODE (arg0) == REAL_CST
4c8c70e0 8642 && !real_isfinite (&TREE_REAL_CST (arg0))))
db3927fb 8643 return omit_one_operand_loc (loc, type, arg0, arg1);
b8698a0f 8644
2b5e5642
KG
8645 /* If both arguments are constant, then try to evaluate it. */
8646 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
8647 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9541ffee 8648 && tree_fits_shwi_p (arg1))
2b5e5642
KG
8649 {
8650 /* Bound the maximum adjustment to twice the range of the
8651 mode's valid exponents. Use abs to ensure the range is
8652 positive as a sanity check. */
b8698a0f 8653 const long max_exp_adj = 2 *
2b5e5642
KG
8654 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
8655 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
8656
8657 /* Get the user-requested adjustment. */
9439e9a1 8658 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
b8698a0f 8659
2b5e5642
KG
8660 /* The requested adjustment must be inside this range. This
8661 is a preliminary cap to avoid things like overflow, we
8662 may still fail to compute the result for other reasons. */
8663 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
8664 {
8665 REAL_VALUE_TYPE initial_result;
b8698a0f 8666
2b5e5642
KG
8667 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
8668
8669 /* Ensure we didn't overflow. */
8670 if (! real_isinf (&initial_result))
8671 {
8672 const REAL_VALUE_TYPE trunc_result
8673 = real_value_truncate (TYPE_MODE (type), initial_result);
b8698a0f 8674
2b5e5642
KG
8675 /* Only proceed if the target mode can hold the
8676 resulting value. */
624d31fe 8677 if (real_equal (&initial_result, &trunc_result))
2b5e5642
KG
8678 return build_real (type, trunc_result);
8679 }
8680 }
8681 }
8682 }
8683
8684 return NULL_TREE;
8685}
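/* Illustrative sketch, not part of builtins.c: ldexp/scalbn on constants,
   i.e. the real_ldexp evaluation above (both are base 2 on a binary
   target, which is why the fold can treat them alike).  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  assert (ldexp (0.75, 4) == 12.0);
  assert (scalbn (1.0, -3) == 0.125);
  assert (ldexp (5.0, 0) == 5.0);    /* arg1 == 0 -> arg0 */
  return 0;
}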
8686
3d577eaf
KG
8687/* Fold a call to builtin modf. */
8688
8689static tree
db3927fb 8690fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
3d577eaf
KG
8691{
8692 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8693 return NULL_TREE;
b8698a0f 8694
3d577eaf 8695 STRIP_NOPS (arg0);
b8698a0f 8696
3d577eaf
KG
8697 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8698 return NULL_TREE;
b8698a0f 8699
db3927fb 8700 arg1 = build_fold_indirect_ref_loc (loc, arg1);
3d577eaf
KG
8701
8702 /* Proceed if a valid pointer type was passed in. */
8703 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8704 {
8705 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8706 REAL_VALUE_TYPE trunc, frac;
8707
8708 switch (value->cl)
8709 {
8710 case rvc_nan:
8711 case rvc_zero:
8712 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8713 trunc = frac = *value;
8714 break;
8715 case rvc_inf:
8716 /* For +-Inf, return (*arg1 = arg0, +-0). */
8717 frac = dconst0;
8718 frac.sign = value->sign;
8719 trunc = *value;
8720 break;
8721 case rvc_normal:
8722 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8723 real_trunc (&trunc, VOIDmode, value);
8724 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8725 /* If the original number was negative and already
8726 integral, then the fractional part is -0.0. */
8727 if (value->sign && frac.cl == rvc_zero)
8728 frac.sign = value->sign;
8729 break;
8730 }
b8698a0f 8731
3d577eaf 8732 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
db3927fb 8733 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
3d577eaf
KG
8734 build_real (rettype, trunc));
8735 TREE_SIDE_EFFECTS (arg1) = 1;
db3927fb 8736 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
3d577eaf
KG
8737 build_real (rettype, frac));
8738 }
b8698a0f 8739
3d577eaf
KG
8740 return NULL_TREE;
8741}
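/* Illustrative sketch, not part of builtins.c: modf splits a value into
   integral and fractional parts, the pair the constant folding above
   builds with real_trunc and real_arithmetic; note the -0.0 fractional
   part for a negative integral input.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double ipart;
  double frac = modf (3.25, &ipart);
  assert (ipart == 3.0 && frac == 0.25);
  frac = modf (-2.0, &ipart);
  assert (ipart == -2.0 && frac == 0.0 && signbit (frac));
  return 0;
}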
8742
44e10129
MM
8743/* Given a location LOC, an interclass builtin function decl FNDECL
 8744 and its single argument ARG, return a folded expression computing
8745 the same, or NULL_TREE if we either couldn't or didn't want to fold
 8746 (the latter happens if there's an RTL instruction available). */
8747
8748static tree
8749fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8750{
ef4bddc2 8751 machine_mode mode;
44e10129
MM
8752
8753 if (!validate_arg (arg, REAL_TYPE))
8754 return NULL_TREE;
8755
8756 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8757 return NULL_TREE;
8758
8759 mode = TYPE_MODE (TREE_TYPE (arg));
8760
8761 /* If there is no optab, try generic code. */
8762 switch (DECL_FUNCTION_CODE (fndecl))
8763 {
8764 tree result;
8765
8766 CASE_FLT_FN (BUILT_IN_ISINF):
8767 {
8768 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
e79983f4 8769 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
44e10129
MM
8770 tree const type = TREE_TYPE (arg);
8771 REAL_VALUE_TYPE r;
8772 char buf[128];
8773
8774 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8775 real_from_string (&r, buf);
8776 result = build_call_expr (isgr_fn, 2,
8777 fold_build1_loc (loc, ABS_EXPR, type, arg),
8778 build_real (type, r));
8779 return result;
8780 }
8781 CASE_FLT_FN (BUILT_IN_FINITE):
8782 case BUILT_IN_ISFINITE:
8783 {
8784 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
e79983f4 8785 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
44e10129
MM
8786 tree const type = TREE_TYPE (arg);
8787 REAL_VALUE_TYPE r;
8788 char buf[128];
8789
8790 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8791 real_from_string (&r, buf);
8792 result = build_call_expr (isle_fn, 2,
8793 fold_build1_loc (loc, ABS_EXPR, type, arg),
8794 build_real (type, r));
8795 /*result = fold_build2_loc (loc, UNGT_EXPR,
8796 TREE_TYPE (TREE_TYPE (fndecl)),
8797 fold_build1_loc (loc, ABS_EXPR, type, arg),
8798 build_real (type, r));
8799 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8800 TREE_TYPE (TREE_TYPE (fndecl)),
8801 result);*/
8802 return result;
8803 }
8804 case BUILT_IN_ISNORMAL:
8805 {
8806 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8807 islessequal(fabs(x),DBL_MAX). */
e79983f4
MM
8808 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8809 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
44e10129
MM
8810 tree const type = TREE_TYPE (arg);
8811 REAL_VALUE_TYPE rmax, rmin;
8812 char buf[128];
8813
8814 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8815 real_from_string (&rmax, buf);
8816 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8817 real_from_string (&rmin, buf);
8818 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8819 result = build_call_expr (isle_fn, 2, arg,
8820 build_real (type, rmax));
8821 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
8822 build_call_expr (isge_fn, 2, arg,
8823 build_real (type, rmin)));
8824 return result;
8825 }
8826 default:
8827 break;
8828 }
8829
8830 return NULL_TREE;
8831}
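/* Illustrative sketch, not part of builtins.c: the generic rewrites above
   written as plain C -- isinf as a compare against DBL_MAX and isnormal
   as a range check on fabs(x).  Plain <, > stand in for the unordered
   comparison builtins, which only differ for NaN operands.  */
#include <assert.h>
#include <float.h>
#include <math.h>

int
main (void)
{
  double x = INFINITY, y = 1.0, z = DBL_MIN / 4.0;   /* z is subnormal */
  assert ((fabs (x) > DBL_MAX) == (isinf (x) != 0));
  assert ((fabs (y) > DBL_MAX) == (isinf (y) != 0));
  assert ((fabs (y) <= DBL_MAX && fabs (y) >= DBL_MIN) == (isnormal (y) != 0));
  assert ((fabs (z) <= DBL_MAX && fabs (z) >= DBL_MIN) == (isnormal (z) != 0));
  return 0;
}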
8832
64a9295a 8833/* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
5039610b 8834 ARG is the argument for the call. */
64a9295a
PB
8835
8836static tree
db3927fb 8837fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
64a9295a 8838{
64a9295a 8839 tree type = TREE_TYPE (TREE_TYPE (fndecl));
64a9295a
PB
8840 REAL_VALUE_TYPE r;
8841
5039610b 8842 if (!validate_arg (arg, REAL_TYPE))
83322951 8843 return NULL_TREE;
64a9295a 8844
64a9295a
PB
8845 switch (builtin_index)
8846 {
8847 case BUILT_IN_ISINF:
3d3dbadd 8848 if (!HONOR_INFINITIES (arg))
db3927fb 8849 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
64a9295a
PB
8850
8851 if (TREE_CODE (arg) == REAL_CST)
8852 {
8853 r = TREE_REAL_CST (arg);
8854 if (real_isinf (&r))
8855 return real_compare (GT_EXPR, &r, &dconst0)
8856 ? integer_one_node : integer_minus_one_node;
8857 else
8858 return integer_zero_node;
8859 }
8860
8861 return NULL_TREE;
8862
05f41289
KG
8863 case BUILT_IN_ISINF_SIGN:
8864 {
8865 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8866 /* In a boolean context, GCC will fold the inner COND_EXPR to
8867 1. So e.g. "if (isinf_sign(x))" would be folded to just
8868 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8869 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
e79983f4 8870 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
05f41289
KG
8871 tree tmp = NULL_TREE;
8872
8873 arg = builtin_save_expr (arg);
8874
8875 if (signbit_fn && isinf_fn)
8876 {
db3927fb
AH
8877 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8878 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
05f41289 8879
db3927fb 8880 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8881 signbit_call, integer_zero_node);
db3927fb 8882 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
05f41289 8883 isinf_call, integer_zero_node);
b8698a0f 8884
db3927fb 8885 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
05f41289 8886 integer_minus_one_node, integer_one_node);
db3927fb
AH
8887 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8888 isinf_call, tmp,
05f41289
KG
8889 integer_zero_node);
8890 }
8891
8892 return tmp;
8893 }
8894
0c8d3c2b 8895 case BUILT_IN_ISFINITE:
1b457aa4 8896 if (!HONOR_NANS (arg)
3d3dbadd 8897 && !HONOR_INFINITIES (arg))
db3927fb 8898 return omit_one_operand_loc (loc, type, integer_one_node, arg);
64a9295a
PB
8899
8900 if (TREE_CODE (arg) == REAL_CST)
8901 {
8902 r = TREE_REAL_CST (arg);
4c8c70e0 8903 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
64a9295a
PB
8904 }
8905
8906 return NULL_TREE;
8907
8908 case BUILT_IN_ISNAN:
1b457aa4 8909 if (!HONOR_NANS (arg))
db3927fb 8910 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
64a9295a
PB
8911
8912 if (TREE_CODE (arg) == REAL_CST)
8913 {
8914 r = TREE_REAL_CST (arg);
8915 return real_isnan (&r) ? integer_one_node : integer_zero_node;
8916 }
8917
8918 arg = builtin_save_expr (arg);
db3927fb 8919 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
64a9295a
PB
8920
8921 default:
298e6adc 8922 gcc_unreachable ();
64a9295a
PB
8923 }
8924}
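/* Illustrative sketch, not part of builtins.c: the isnan fallback above is
   the classic unordered self-comparison -- NaN is the only value that
   compares unequal to itself.  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double n = NAN, x = 1.0;
  assert ((n != n) == (isnan (n) != 0));
  assert ((x != x) == (isnan (x) != 0));
  return 0;
}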
8925
3bf5906b
KG
8926/* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8927 This builtin will generate code to return the appropriate floating
8928 point classification depending on the value of the floating point
8929 number passed in. The possible return values must be supplied as
32101f99 8930 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
3bf5906b
KG
 8931 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
8932 one floating point argument which is "type generic". */
8933
8934static tree
a6a0570f 8935fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
3bf5906b 8936{
32101f99
KG
8937 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8938 arg, type, res, tmp;
ef4bddc2 8939 machine_mode mode;
3bf5906b
KG
8940 REAL_VALUE_TYPE r;
8941 char buf[128];
b8698a0f 8942
3bf5906b 8943 /* Verify the required arguments in the original call. */
a6a0570f
RB
8944 if (nargs != 6
8945 || !validate_arg (args[0], INTEGER_TYPE)
8946 || !validate_arg (args[1], INTEGER_TYPE)
8947 || !validate_arg (args[2], INTEGER_TYPE)
8948 || !validate_arg (args[3], INTEGER_TYPE)
8949 || !validate_arg (args[4], INTEGER_TYPE)
8950 || !validate_arg (args[5], REAL_TYPE))
3bf5906b 8951 return NULL_TREE;
b8698a0f 8952
a6a0570f
RB
8953 fp_nan = args[0];
8954 fp_infinite = args[1];
8955 fp_normal = args[2];
8956 fp_subnormal = args[3];
8957 fp_zero = args[4];
8958 arg = args[5];
3bf5906b
KG
8959 type = TREE_TYPE (arg);
8960 mode = TYPE_MODE (type);
db3927fb 8961 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
3bf5906b 8962
b8698a0f 8963 /* fpclassify(x) ->
3bf5906b 8964 isnan(x) ? FP_NAN :
32101f99 8965 (fabs(x) == Inf ? FP_INFINITE :
3bf5906b
KG
8966 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8967 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
b8698a0f 8968
db3927fb 8969 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
3bf5906b 8970 build_real (type, dconst0));
db3927fb
AH
8971 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8972 tmp, fp_zero, fp_subnormal);
3bf5906b
KG
8973
8974 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8975 real_from_string (&r, buf);
db3927fb
AH
8976 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8977 arg, build_real (type, r));
8978 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
b8698a0f 8979
3bf5906b
KG
8980 if (HONOR_INFINITIES (mode))
8981 {
8982 real_inf (&r);
db3927fb 8983 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
3bf5906b 8984 build_real (type, r));
db3927fb
AH
8985 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8986 fp_infinite, res);
3bf5906b
KG
8987 }
8988
8989 if (HONOR_NANS (mode))
8990 {
db3927fb
AH
8991 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8992 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
3bf5906b 8993 }
b8698a0f 8994
3bf5906b
KG
8995 return res;
8996}
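/* Illustrative sketch, not part of builtins.c: the nested conditional
   built above, written as a plain C expression and checked against the
   library fpclassify macro.  */
#include <assert.h>
#include <float.h>
#include <math.h>

static int
classify (double x)
{
  return isnan (x) ? FP_NAN
         : fabs (x) == INFINITY ? FP_INFINITE
         : fabs (x) >= DBL_MIN ? FP_NORMAL
         : x == 0.0 ? FP_ZERO : FP_SUBNORMAL;
}

int
main (void)
{
  assert (classify (NAN) == fpclassify (NAN));
  assert (classify (INFINITY) == fpclassify (INFINITY));
  assert (classify (1.0) == fpclassify (1.0));
  assert (classify (0.0) == fpclassify (0.0));
  assert (classify (DBL_MIN / 4.0) == fpclassify (DBL_MIN / 4.0));
  return 0;
}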
8997
08039bd8 8998/* Fold a call to an unordered comparison function such as
a35da91f 8999 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
5039610b 9000 being called and ARG0 and ARG1 are the arguments for the call.
64a9295a
PB
9001 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9002 the opposite of the desired result. UNORDERED_CODE is used
9003 for modes that can hold NaNs and ORDERED_CODE is used for
9004 the rest. */
08039bd8
RS
9005
9006static tree
db3927fb 9007fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
08039bd8
RS
9008 enum tree_code unordered_code,
9009 enum tree_code ordered_code)
9010{
14f661f1 9011 tree type = TREE_TYPE (TREE_TYPE (fndecl));
08039bd8 9012 enum tree_code code;
1aeaea8d
GK
9013 tree type0, type1;
9014 enum tree_code code0, code1;
9015 tree cmp_type = NULL_TREE;
08039bd8 9016
1aeaea8d
GK
9017 type0 = TREE_TYPE (arg0);
9018 type1 = TREE_TYPE (arg1);
c22cacf3 9019
1aeaea8d
GK
9020 code0 = TREE_CODE (type0);
9021 code1 = TREE_CODE (type1);
c22cacf3 9022
1aeaea8d
GK
9023 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9024 /* Choose the wider of two real types. */
9025 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9026 ? type0 : type1;
9027 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9028 cmp_type = type0;
9029 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9030 cmp_type = type1;
c22cacf3 9031
db3927fb
AH
9032 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9033 arg1 = fold_convert_loc (loc, cmp_type, arg1);
14f661f1
RS
9034
9035 if (unordered_code == UNORDERED_EXPR)
9036 {
1b457aa4 9037 if (!HONOR_NANS (arg0))
db3927fb
AH
9038 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9039 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
14f661f1 9040 }
08039bd8 9041
1b457aa4 9042 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
db3927fb
AH
9043 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9044 fold_build2_loc (loc, code, type, arg0, arg1));
08039bd8
RS
9045}
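/* Illustrative sketch, not part of builtins.c: each unordered comparison
   macro is folded above as the negation of the opposite ordered
   comparison; with a NaN operand the result is simply false (or true for
   isunordered).  */
#include <assert.h>
#include <math.h>

int
main (void)
{
  double n = NAN;
  assert (isgreater (2.0, 1.0) == 1);
  assert (isgreater (n, 1.0) == 0);                   /* unordered -> false */
  assert (isunordered (n, 1.0) == 1);
  assert (islessequal (1.0, 2.0) == !(1.0 > 2.0));    /* !(x > y) when ordered */
  return 0;
}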
9046
1304953e
JJ
9047/* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
 9048 arithmetic if it can never overflow, or into internal functions that
 9049 return both the result of the arithmetic and an overflow flag in
9050 a complex integer result, or some other check for overflow. */
9051
9052static tree
9053fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9054 tree arg0, tree arg1, tree arg2)
9055{
9056 enum internal_fn ifn = IFN_LAST;
9057 tree type = TREE_TYPE (TREE_TYPE (arg2));
9058 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9059 switch (fcode)
9060 {
9061 case BUILT_IN_ADD_OVERFLOW:
9062 case BUILT_IN_SADD_OVERFLOW:
9063 case BUILT_IN_SADDL_OVERFLOW:
9064 case BUILT_IN_SADDLL_OVERFLOW:
9065 case BUILT_IN_UADD_OVERFLOW:
9066 case BUILT_IN_UADDL_OVERFLOW:
9067 case BUILT_IN_UADDLL_OVERFLOW:
9068 ifn = IFN_ADD_OVERFLOW;
9069 break;
9070 case BUILT_IN_SUB_OVERFLOW:
9071 case BUILT_IN_SSUB_OVERFLOW:
9072 case BUILT_IN_SSUBL_OVERFLOW:
9073 case BUILT_IN_SSUBLL_OVERFLOW:
9074 case BUILT_IN_USUB_OVERFLOW:
9075 case BUILT_IN_USUBL_OVERFLOW:
9076 case BUILT_IN_USUBLL_OVERFLOW:
9077 ifn = IFN_SUB_OVERFLOW;
9078 break;
9079 case BUILT_IN_MUL_OVERFLOW:
9080 case BUILT_IN_SMUL_OVERFLOW:
9081 case BUILT_IN_SMULL_OVERFLOW:
9082 case BUILT_IN_SMULLL_OVERFLOW:
9083 case BUILT_IN_UMUL_OVERFLOW:
9084 case BUILT_IN_UMULL_OVERFLOW:
9085 case BUILT_IN_UMULLL_OVERFLOW:
9086 ifn = IFN_MUL_OVERFLOW;
9087 break;
9088 default:
9089 gcc_unreachable ();
9090 }
9091 tree ctype = build_complex_type (type);
9092 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
9093 2, arg0, arg1);
9094 tree tgt = save_expr (call);
9095 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9096 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9097 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9098 tree store
9099 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9100 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9101}
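/* Illustrative sketch, not part of builtins.c: behaviour of the overflow
   builtins folded above -- they store the wrapped result through the
   pointer and return whether the infinite-precision result did not fit.  */
#include <assert.h>
#include <limits.h>

int
main (void)
{
  int r;
  unsigned u;
  assert (!__builtin_add_overflow (2, 3, &r) && r == 5);
  assert (__builtin_add_overflow (INT_MAX, 1, &r));          /* overflows */
  assert (!__builtin_mul_overflow (6u, 7u, &u) && u == 42u);
  return 0;
}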
9102
5039610b 9103/* Fold a call to built-in function FNDECL with 0 arguments.
2625bb5d 9104 This function returns NULL_TREE if no simplification was possible. */
b0b3afb2 9105
6de9cd9a 9106static tree
2625bb5d 9107fold_builtin_0 (location_t loc, tree fndecl)
b0b3afb2 9108{
c0a47a61 9109 tree type = TREE_TYPE (TREE_TYPE (fndecl));
5039610b 9110 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
a0d2281e 9111 switch (fcode)
b0b3afb2 9112 {
5039610b
SL
9113 CASE_FLT_FN (BUILT_IN_INF):
9114 case BUILT_IN_INFD32:
9115 case BUILT_IN_INFD64:
9116 case BUILT_IN_INFD128:
db3927fb 9117 return fold_builtin_inf (loc, type, true);
d3147f64 9118
5039610b 9119 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
db3927fb 9120 return fold_builtin_inf (loc, type, false);
d3147f64 9121
5039610b
SL
9122 case BUILT_IN_CLASSIFY_TYPE:
9123 return fold_builtin_classify_type (NULL_TREE);
d3147f64 9124
5039610b
SL
9125 default:
9126 break;
9127 }
9128 return NULL_TREE;
9129}
d3147f64 9130
5039610b 9131/* Fold a call to built-in function FNDECL with 1 argument, ARG0.
2625bb5d 9132 This function returns NULL_TREE if no simplification was possible. */
d3147f64 9133
5039610b 9134static tree
2625bb5d 9135fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
5039610b
SL
9136{
9137 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9138 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9139 switch (fcode)
9140 {
b0b3afb2 9141 case BUILT_IN_CONSTANT_P:
d3147f64 9142 {
5039610b 9143 tree val = fold_builtin_constant_p (arg0);
d3147f64 9144
d3147f64
EC
9145 /* Gimplification will pull the CALL_EXPR for the builtin out of
9146 an if condition. When not optimizing, we'll not CSE it back.
9147 To avoid link error types of regressions, return false now. */
9148 if (!val && !optimize)
9149 val = integer_zero_node;
9150
9151 return val;
9152 }
b0b3afb2 9153
ad82abb8 9154 case BUILT_IN_CLASSIFY_TYPE:
5039610b 9155 return fold_builtin_classify_type (arg0);
ad82abb8 9156
b0b3afb2 9157 case BUILT_IN_STRLEN:
ab996409 9158 return fold_builtin_strlen (loc, type, arg0);
b0b3afb2 9159
ea6a6627 9160 CASE_FLT_FN (BUILT_IN_FABS):
e2323f5b
PB
9161 case BUILT_IN_FABSD32:
9162 case BUILT_IN_FABSD64:
9163 case BUILT_IN_FABSD128:
db3927fb 9164 return fold_builtin_fabs (loc, arg0, type);
9655d83b
RS
9165
9166 case BUILT_IN_ABS:
9167 case BUILT_IN_LABS:
9168 case BUILT_IN_LLABS:
9169 case BUILT_IN_IMAXABS:
db3927fb 9170 return fold_builtin_abs (loc, arg0, type);
07bae5ad 9171
ea6a6627 9172 CASE_FLT_FN (BUILT_IN_CONJ):
c128599a 9173 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9174 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9175 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
5039610b 9176 break;
aa6c7c3a 9177
ea6a6627 9178 CASE_FLT_FN (BUILT_IN_CREAL):
c128599a 9179 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9180 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
6f3d1a5e 9181 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
5039610b 9182 break;
aa6c7c3a 9183
ea6a6627 9184 CASE_FLT_FN (BUILT_IN_CIMAG):
376da68e
KG
9185 if (validate_arg (arg0, COMPLEX_TYPE)
9186 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
db3927fb 9187 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
5039610b 9188 break;
aa6c7c3a 9189
4b26d10b 9190 CASE_FLT_FN (BUILT_IN_CCOS):
4adfc9a5
RS
9191 if (validate_arg (arg0, COMPLEX_TYPE)
9192 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9193 return do_mpc_arg1 (arg0, type, mpc_cos);
9194 break;
b8698a0f 9195
4b26d10b 9196 CASE_FLT_FN (BUILT_IN_CCOSH):
4adfc9a5
RS
9197 if (validate_arg (arg0, COMPLEX_TYPE)
9198 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9199 return do_mpc_arg1 (arg0, type, mpc_cosh);
9200 break;
b8698a0f 9201
43272bf5 9202 CASE_FLT_FN (BUILT_IN_CPROJ):
92c52eab
RS
9203 if (TREE_CODE (arg0) == COMPLEX_CST
9204 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9205 {
9206 const REAL_VALUE_TYPE *real
9207 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
9208 const REAL_VALUE_TYPE *imag
9209 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
9210
9211 if (real_isinf (real) || real_isinf (imag))
9212 return build_complex_inf (type, imag->sign);
9213 else
9214 return arg0;
9215 }
9216 break;
43272bf5 9217
c128599a
KG
9218 CASE_FLT_FN (BUILT_IN_CSIN):
9219 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9220 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
c128599a 9221 return do_mpc_arg1 (arg0, type, mpc_sin);
5039610b 9222 break;
b8698a0f 9223
c128599a
KG
9224 CASE_FLT_FN (BUILT_IN_CSINH):
9225 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9226 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
c128599a
KG
9227 return do_mpc_arg1 (arg0, type, mpc_sinh);
9228 break;
b8698a0f 9229
c128599a
KG
9230 CASE_FLT_FN (BUILT_IN_CTAN):
9231 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9232 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
c128599a
KG
9233 return do_mpc_arg1 (arg0, type, mpc_tan);
9234 break;
b8698a0f 9235
c128599a
KG
9236 CASE_FLT_FN (BUILT_IN_CTANH):
9237 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9238 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
c128599a
KG
9239 return do_mpc_arg1 (arg0, type, mpc_tanh);
9240 break;
b8698a0f 9241
c128599a
KG
9242 CASE_FLT_FN (BUILT_IN_CLOG):
9243 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9244 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
c128599a
KG
9245 return do_mpc_arg1 (arg0, type, mpc_log);
9246 break;
b8698a0f 9247
c128599a
KG
9248 CASE_FLT_FN (BUILT_IN_CSQRT):
9249 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9250 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
c128599a
KG
9251 return do_mpc_arg1 (arg0, type, mpc_sqrt);
9252 break;
b8698a0f 9253
7610abd8
KG
9254 CASE_FLT_FN (BUILT_IN_CASIN):
9255 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9256 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7610abd8
KG
9257 return do_mpc_arg1 (arg0, type, mpc_asin);
9258 break;
b8698a0f 9259
7610abd8
KG
9260 CASE_FLT_FN (BUILT_IN_CACOS):
9261 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9262 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7610abd8
KG
9263 return do_mpc_arg1 (arg0, type, mpc_acos);
9264 break;
b8698a0f 9265
7610abd8
KG
9266 CASE_FLT_FN (BUILT_IN_CATAN):
9267 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9268 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7610abd8
KG
9269 return do_mpc_arg1 (arg0, type, mpc_atan);
9270 break;
b8698a0f 9271
7610abd8
KG
9272 CASE_FLT_FN (BUILT_IN_CASINH):
9273 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9274 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7610abd8
KG
9275 return do_mpc_arg1 (arg0, type, mpc_asinh);
9276 break;
b8698a0f 9277
7610abd8
KG
9278 CASE_FLT_FN (BUILT_IN_CACOSH):
9279 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9280 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7610abd8
KG
9281 return do_mpc_arg1 (arg0, type, mpc_acosh);
9282 break;
b8698a0f 9283
7610abd8
KG
9284 CASE_FLT_FN (BUILT_IN_CATANH):
9285 if (validate_arg (arg0, COMPLEX_TYPE)
b8698a0f 9286 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
7610abd8
KG
9287 return do_mpc_arg1 (arg0, type, mpc_atanh);
9288 break;
b8698a0f 9289
ea6a6627 9290 CASE_FLT_FN (BUILT_IN_CABS):
abcc43f5
RS
9291 if (TREE_CODE (arg0) == COMPLEX_CST
9292 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9293 return do_mpfr_arg2 (TREE_REALPART (arg0), TREE_IMAGPART (arg0),
9294 type, mpfr_hypot);
9295 break;
07bae5ad 9296
527cab20 9297 CASE_FLT_FN (BUILT_IN_CARG):
db3927fb 9298 return fold_builtin_carg (loc, arg0, type);
527cab20 9299
ea6a6627 9300 CASE_FLT_FN (BUILT_IN_SQRT):
4835c978
RS
9301 if (validate_arg (arg0, REAL_TYPE))
9302 return do_mpfr_arg1 (arg0, type, mpfr_sqrt, &dconst0, NULL, true);
9303 break;
4977bab6 9304
ea6a6627 9305 CASE_FLT_FN (BUILT_IN_CBRT):
4835c978
RS
9306 if (validate_arg (arg0, REAL_TYPE))
9307 return do_mpfr_arg1 (arg0, type, mpfr_cbrt, NULL, NULL, 0);
9308 break;
e19f6bde 9309
b53fed56 9310 CASE_FLT_FN (BUILT_IN_ASIN):
5039610b
SL
9311 if (validate_arg (arg0, REAL_TYPE))
9312 return do_mpfr_arg1 (arg0, type, mpfr_asin,
b53fed56
KG
9313 &dconstm1, &dconst1, true);
9314 break;
9315
9316 CASE_FLT_FN (BUILT_IN_ACOS):
5039610b
SL
9317 if (validate_arg (arg0, REAL_TYPE))
9318 return do_mpfr_arg1 (arg0, type, mpfr_acos,
b53fed56
KG
9319 &dconstm1, &dconst1, true);
9320 break;
9321
9322 CASE_FLT_FN (BUILT_IN_ATAN):
5039610b
SL
9323 if (validate_arg (arg0, REAL_TYPE))
9324 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
b53fed56
KG
9325 break;
9326
9327 CASE_FLT_FN (BUILT_IN_ASINH):
5039610b
SL
9328 if (validate_arg (arg0, REAL_TYPE))
9329 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
b53fed56
KG
9330 break;
9331
9332 CASE_FLT_FN (BUILT_IN_ACOSH):
5039610b
SL
9333 if (validate_arg (arg0, REAL_TYPE))
9334 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
b53fed56
KG
9335 &dconst1, NULL, true);
9336 break;
9337
9338 CASE_FLT_FN (BUILT_IN_ATANH):
5039610b
SL
9339 if (validate_arg (arg0, REAL_TYPE))
9340 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
b53fed56
KG
9341 &dconstm1, &dconst1, false);
9342 break;
9343
ea6a6627 9344 CASE_FLT_FN (BUILT_IN_SIN):
5039610b
SL
9345 if (validate_arg (arg0, REAL_TYPE))
9346 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
b53fed56 9347 break;
03f2ea93 9348
ea6a6627 9349 CASE_FLT_FN (BUILT_IN_COS):
4adfc9a5
RS
9350 if (validate_arg (arg0, REAL_TYPE))
9351 return do_mpfr_arg1 (arg0, type, mpfr_cos, NULL, NULL, 0);
9352 break;
03f2ea93 9353
b53fed56 9354 CASE_FLT_FN (BUILT_IN_TAN):
cfed37a0
RS
9355 if (validate_arg (arg0, REAL_TYPE))
9356 return do_mpfr_arg1 (arg0, type, mpfr_tan, NULL, NULL, 0);
9357 break;
75c7c595 9358
28f4586b 9359 CASE_FLT_FN (BUILT_IN_CEXP):
db3927fb 9360 return fold_builtin_cexp (loc, arg0, type);
28f4586b 9361
75c7c595 9362 CASE_FLT_FN (BUILT_IN_CEXPI):
5039610b
SL
9363 if (validate_arg (arg0, REAL_TYPE))
9364 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
9365 break;
b68bcfff 9366
b53fed56 9367 CASE_FLT_FN (BUILT_IN_SINH):
5039610b
SL
9368 if (validate_arg (arg0, REAL_TYPE))
9369 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
b53fed56
KG
9370 break;
9371
9372 CASE_FLT_FN (BUILT_IN_COSH):
4adfc9a5
RS
9373 if (validate_arg (arg0, REAL_TYPE))
9374 return do_mpfr_arg1 (arg0, type, mpfr_cosh, NULL, NULL, 0);
9375 break;
b53fed56
KG
9376
9377 CASE_FLT_FN (BUILT_IN_TANH):
5039610b
SL
9378 if (validate_arg (arg0, REAL_TYPE))
9379 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
b53fed56
KG
9380 break;
9381
cf1491f0 9382 CASE_FLT_FN (BUILT_IN_ERF):
5039610b
SL
9383 if (validate_arg (arg0, REAL_TYPE))
9384 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
cf1491f0
KG
9385 break;
9386
9387 CASE_FLT_FN (BUILT_IN_ERFC):
5039610b
SL
9388 if (validate_arg (arg0, REAL_TYPE))
9389 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
cf1491f0
KG
9390 break;
9391
61fb309f 9392 CASE_FLT_FN (BUILT_IN_TGAMMA):
5039610b
SL
9393 if (validate_arg (arg0, REAL_TYPE))
9394 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
61fb309f 9395 break;
b8698a0f 9396
ea6a6627 9397 CASE_FLT_FN (BUILT_IN_EXP):
db3927fb 9398 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
67057c53 9399
ea6a6627 9400 CASE_FLT_FN (BUILT_IN_EXP2):
db3927fb 9401 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
67057c53 9402
ea6a6627
VR
9403 CASE_FLT_FN (BUILT_IN_EXP10):
9404 CASE_FLT_FN (BUILT_IN_POW10):
db3927fb 9405 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
67057c53 9406
cf1491f0 9407 CASE_FLT_FN (BUILT_IN_EXPM1):
5039610b
SL
9408 if (validate_arg (arg0, REAL_TYPE))
9409 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
e18c1d66 9410 break;
b8698a0f 9411
ea6a6627 9412 CASE_FLT_FN (BUILT_IN_LOG):
e18c1d66
RB
9413 if (validate_arg (arg0, REAL_TYPE))
9414 return do_mpfr_arg1 (arg0, type, mpfr_log, &dconst0, NULL, false);
9415 break;
67057c53 9416
ea6a6627 9417 CASE_FLT_FN (BUILT_IN_LOG2):
e18c1d66
RB
9418 if (validate_arg (arg0, REAL_TYPE))
9419 return do_mpfr_arg1 (arg0, type, mpfr_log2, &dconst0, NULL, false);
9420 break;
67057c53 9421
ea6a6627 9422 CASE_FLT_FN (BUILT_IN_LOG10):
e18c1d66
RB
9423 if (validate_arg (arg0, REAL_TYPE))
9424 return do_mpfr_arg1 (arg0, type, mpfr_log10, &dconst0, NULL, false);
9425 break;
cf1491f0
KG
9426
9427 CASE_FLT_FN (BUILT_IN_LOG1P):
5039610b
SL
9428 if (validate_arg (arg0, REAL_TYPE))
9429 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
cf1491f0
KG
9430 &dconstm1, NULL, false);
9431 break;
4977bab6 9432
550b3187
KG
9433 CASE_FLT_FN (BUILT_IN_J0):
9434 if (validate_arg (arg0, REAL_TYPE))
9435 return do_mpfr_arg1 (arg0, type, mpfr_j0,
9436 NULL, NULL, 0);
9437 break;
9438
9439 CASE_FLT_FN (BUILT_IN_J1):
9440 if (validate_arg (arg0, REAL_TYPE))
9441 return do_mpfr_arg1 (arg0, type, mpfr_j1,
9442 NULL, NULL, 0);
9443 break;
fd2ef596
KG
9444
9445 CASE_FLT_FN (BUILT_IN_Y0):
9446 if (validate_arg (arg0, REAL_TYPE))
9447 return do_mpfr_arg1 (arg0, type, mpfr_y0,
9448 &dconst0, NULL, false);
9449 break;
9450
9451 CASE_FLT_FN (BUILT_IN_Y1):
9452 if (validate_arg (arg0, REAL_TYPE))
9453 return do_mpfr_arg1 (arg0, type, mpfr_y1,
9454 &dconst0, NULL, false);
9455 break;
550b3187 9456
ea6a6627 9457 CASE_FLT_FN (BUILT_IN_NAN):
9a8ce21f
JG
9458 case BUILT_IN_NAND32:
9459 case BUILT_IN_NAND64:
9460 case BUILT_IN_NAND128:
5039610b 9461 return fold_builtin_nan (arg0, type, true);
1472e41c 9462
ea6a6627 9463 CASE_FLT_FN (BUILT_IN_NANS):
5039610b 9464 return fold_builtin_nan (arg0, type, false);
1472e41c 9465
ea6a6627 9466 CASE_FLT_FN (BUILT_IN_FLOOR):
67dbe582
RS
9467 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9468 {
9469 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9470 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
9471 {
9472 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9473 REAL_VALUE_TYPE r;
9474 real_floor (&r, TYPE_MODE (type), &x);
9475 return build_real (type, r);
9476 }
9477 }
9478 break;
0a9530a9 9479
ea6a6627 9480 CASE_FLT_FN (BUILT_IN_CEIL):
67dbe582
RS
9481 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9482 {
9483 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9484 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
9485 {
9486 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9487 REAL_VALUE_TYPE r;
9488 real_ceil (&r, TYPE_MODE (type), &x);
9489 return build_real (type, r);
9490 }
9491 }
9492 break;
0a9530a9 9493
ea6a6627 9494 CASE_FLT_FN (BUILT_IN_TRUNC):
67dbe582
RS
9495 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9496 {
9497 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9498 REAL_VALUE_TYPE r;
9499 real_trunc (&r, TYPE_MODE (type), &x);
9500 return build_real (type, r);
9501 }
9502 break;
0a9530a9 9503
ea6a6627 9504 CASE_FLT_FN (BUILT_IN_ROUND):
67dbe582
RS
9505 if (TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0))
9506 {
9507 REAL_VALUE_TYPE x = TREE_REAL_CST (arg0);
9508 if (!REAL_VALUE_ISNAN (x) || !flag_errno_math)
9509 {
9510 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9511 REAL_VALUE_TYPE r;
9512 real_round (&r, TYPE_MODE (type), &x);
9513 return build_real (type, r);
9514 }
9515 }
9516 break;
27a6aa72 9517
6c32ee74 9518 CASE_FLT_FN (BUILT_IN_ICEIL):
ea6a6627
VR
9519 CASE_FLT_FN (BUILT_IN_LCEIL):
9520 CASE_FLT_FN (BUILT_IN_LLCEIL):
9521 CASE_FLT_FN (BUILT_IN_LFLOOR):
6c32ee74 9522 CASE_FLT_FN (BUILT_IN_IFLOOR):
ea6a6627 9523 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6c32ee74 9524 CASE_FLT_FN (BUILT_IN_IROUND):
c22cacf3 9525 CASE_FLT_FN (BUILT_IN_LROUND):
ea6a6627 9526 CASE_FLT_FN (BUILT_IN_LLROUND):
db3927fb 9527 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
3bf05748 9528
6c32ee74 9529 CASE_FLT_FN (BUILT_IN_IRINT):
ea6a6627
VR
9530 CASE_FLT_FN (BUILT_IN_LRINT):
9531 CASE_FLT_FN (BUILT_IN_LLRINT):
db3927fb 9532 return fold_fixed_mathfn (loc, fndecl, arg0);
ca3df643 9533
ac868f29 9534 case BUILT_IN_BSWAP16:
167fa32c
EC
9535 case BUILT_IN_BSWAP32:
9536 case BUILT_IN_BSWAP64:
5039610b 9537 return fold_builtin_bswap (fndecl, arg0);
167fa32c 9538
ea6a6627
VR
9539 CASE_INT_FN (BUILT_IN_FFS):
9540 CASE_INT_FN (BUILT_IN_CLZ):
9541 CASE_INT_FN (BUILT_IN_CTZ):
146aef0b 9542 CASE_INT_FN (BUILT_IN_CLRSB):
ea6a6627
VR
9543 CASE_INT_FN (BUILT_IN_POPCOUNT):
9544 CASE_INT_FN (BUILT_IN_PARITY):
5039610b 9545 return fold_builtin_bitop (fndecl, arg0);
5bb650ec 9546
ea6a6627 9547 CASE_FLT_FN (BUILT_IN_SIGNBIT):
db3927fb 9548 return fold_builtin_signbit (loc, arg0, type);
ef79730c 9549
6351a719 9550 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
db3927fb 9551 return fold_builtin_significand (loc, arg0, type);
6351a719
KG
9552
9553 CASE_FLT_FN (BUILT_IN_ILOGB):
9554 CASE_FLT_FN (BUILT_IN_LOGB):
db3927fb 9555 return fold_builtin_logb (loc, arg0, type);
6351a719 9556
df0785d6 9557 case BUILT_IN_ISASCII:
db3927fb 9558 return fold_builtin_isascii (loc, arg0);
df0785d6
KG
9559
9560 case BUILT_IN_TOASCII:
db3927fb 9561 return fold_builtin_toascii (loc, arg0);
df0785d6 9562
61218d19 9563 case BUILT_IN_ISDIGIT:
db3927fb 9564 return fold_builtin_isdigit (loc, arg0);
67057c53 9565
ea6a6627 9566 CASE_FLT_FN (BUILT_IN_FINITE):
9a8ce21f
JG
9567 case BUILT_IN_FINITED32:
9568 case BUILT_IN_FINITED64:
9569 case BUILT_IN_FINITED128:
0c8d3c2b 9570 case BUILT_IN_ISFINITE:
44e10129
MM
9571 {
9572 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9573 if (ret)
9574 return ret;
9575 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9576 }
64a9295a 9577
ea6a6627 9578 CASE_FLT_FN (BUILT_IN_ISINF):
9a8ce21f
JG
9579 case BUILT_IN_ISINFD32:
9580 case BUILT_IN_ISINFD64:
9581 case BUILT_IN_ISINFD128:
44e10129
MM
9582 {
9583 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9584 if (ret)
9585 return ret;
9586 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9587 }
9588
9589 case BUILT_IN_ISNORMAL:
9590 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
64a9295a 9591
05f41289 9592 case BUILT_IN_ISINF_SIGN:
db3927fb 9593 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
05f41289 9594
ea6a6627 9595 CASE_FLT_FN (BUILT_IN_ISNAN):
9a8ce21f
JG
9596 case BUILT_IN_ISNAND32:
9597 case BUILT_IN_ISNAND64:
9598 case BUILT_IN_ISNAND128:
db3927fb 9599 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
5039610b 9600
06468254
RG
9601 case BUILT_IN_FREE:
9602 if (integer_zerop (arg0))
9603 return build_empty_stmt (loc);
9604 break;
9605
5039610b
SL
9606 default:
9607 break;
9608 }
9609
9610 return NULL_TREE;
9611
9612}
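/* Illustrative aside (not part of builtins.c): two user-visible effects of
   the single-argument folder above.  __builtin_constant_p of a literal
   collapses to 1 (and, as the comment in the CONSTANT_P case notes, to 0
   early when not optimizing), and a constant argument to a math builtin is
   evaluated at compile time through MPFR.  A hedged sketch: */

#include <assert.h>

int
main (void)
{
  /* A literal is a compile-time constant ...  */
  assert (__builtin_constant_p (42));

  /* ... and sqrt of a positive REAL_CST folds to a constant, here 3.0.  */
  double r = __builtin_sqrt (9.0);
  assert (r == 3.0);
  return 0;
}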
9613
9614/* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
2625bb5d 9615 This function returns NULL_TREE if no simplification was possible. */
5039610b
SL
9616
9617static tree
2625bb5d 9618fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
5039610b
SL
9619{
9620 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9621 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9622
9623 switch (fcode)
9624 {
550b3187
KG
9625 CASE_FLT_FN (BUILT_IN_JN):
9626 if (validate_arg (arg0, INTEGER_TYPE)
9627 && validate_arg (arg1, REAL_TYPE))
9628 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
9629 break;
fd2ef596
KG
9630
9631 CASE_FLT_FN (BUILT_IN_YN):
9632 if (validate_arg (arg0, INTEGER_TYPE)
9633 && validate_arg (arg1, REAL_TYPE))
9634 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
9635 &dconst0, false);
9636 break;
ea91f957
KG
9637
9638 CASE_FLT_FN (BUILT_IN_DREM):
9639 CASE_FLT_FN (BUILT_IN_REMAINDER):
9640 if (validate_arg (arg0, REAL_TYPE)
c3284718 9641 && validate_arg (arg1, REAL_TYPE))
ea91f957
KG
9642 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
9643 break;
752b7d38
KG
9644
9645 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9646 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9647 if (validate_arg (arg0, REAL_TYPE)
c3284718 9648 && validate_arg (arg1, POINTER_TYPE))
752b7d38
KG
9649 return do_mpfr_lgamma_r (arg0, arg1, type);
9650 break;
5039610b
SL
9651
9652 CASE_FLT_FN (BUILT_IN_ATAN2):
9653 if (validate_arg (arg0, REAL_TYPE)
c3284718 9654 && validate_arg (arg1, REAL_TYPE))
5039610b
SL
9655 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
9656 break;
9657
9658 CASE_FLT_FN (BUILT_IN_FDIM):
9659 if (validate_arg (arg0, REAL_TYPE)
c3284718 9660 && validate_arg (arg1, REAL_TYPE))
5039610b
SL
9661 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
9662 break;
9663
9664 CASE_FLT_FN (BUILT_IN_HYPOT):
4adfc9a5 9665 return fold_builtin_hypot (loc, arg0, arg1, type);
5039610b 9666
a41d064d
KG
9667 CASE_FLT_FN (BUILT_IN_CPOW):
9668 if (validate_arg (arg0, COMPLEX_TYPE)
9669 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
9670 && validate_arg (arg1, COMPLEX_TYPE)
b8698a0f 9671 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
ca75b926 9672 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
a41d064d 9673 break;
a41d064d 9674
2b5e5642 9675 CASE_FLT_FN (BUILT_IN_LDEXP):
db3927fb 9676 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
2b5e5642
KG
9677 CASE_FLT_FN (BUILT_IN_SCALBN):
9678 CASE_FLT_FN (BUILT_IN_SCALBLN):
db3927fb
AH
9679 return fold_builtin_load_exponent (loc, arg0, arg1,
9680 type, /*ldexp=*/false);
2b5e5642 9681
7a2a25ab 9682 CASE_FLT_FN (BUILT_IN_FREXP):
db3927fb 9683 return fold_builtin_frexp (loc, arg0, arg1, type);
7a2a25ab 9684
3d577eaf 9685 CASE_FLT_FN (BUILT_IN_MODF):
db3927fb 9686 return fold_builtin_modf (loc, arg0, arg1, type);
3d577eaf 9687
5039610b 9688 case BUILT_IN_STRSTR:
db3927fb 9689 return fold_builtin_strstr (loc, arg0, arg1, type);
5039610b 9690
5039610b 9691 case BUILT_IN_STRSPN:
db3927fb 9692 return fold_builtin_strspn (loc, arg0, arg1);
5039610b
SL
9693
9694 case BUILT_IN_STRCSPN:
db3927fb 9695 return fold_builtin_strcspn (loc, arg0, arg1);
5039610b
SL
9696
9697 case BUILT_IN_STRCHR:
9698 case BUILT_IN_INDEX:
db3927fb 9699 return fold_builtin_strchr (loc, arg0, arg1, type);
5039610b
SL
9700
9701 case BUILT_IN_STRRCHR:
9702 case BUILT_IN_RINDEX:
db3927fb 9703 return fold_builtin_strrchr (loc, arg0, arg1, type);
5039610b 9704
5039610b 9705 case BUILT_IN_STRCMP:
db3927fb 9706 return fold_builtin_strcmp (loc, arg0, arg1);
5039610b
SL
9707
9708 case BUILT_IN_STRPBRK:
db3927fb 9709 return fold_builtin_strpbrk (loc, arg0, arg1, type);
5039610b
SL
9710
9711 case BUILT_IN_EXPECT:
ed9c79e1 9712 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
5039610b
SL
9713
9714 CASE_FLT_FN (BUILT_IN_POW):
db3927fb 9715 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
5039610b
SL
9716
9717 CASE_FLT_FN (BUILT_IN_POWI):
db3927fb 9718 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
5039610b
SL
9719
9720 CASE_FLT_FN (BUILT_IN_COPYSIGN):
4adfc9a5 9721 return fold_builtin_copysign (loc, arg0, arg1, type);
5039610b
SL
9722
9723 CASE_FLT_FN (BUILT_IN_FMIN):
db3927fb 9724 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
5039610b
SL
9725
9726 CASE_FLT_FN (BUILT_IN_FMAX):
db3927fb 9727 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
64a9295a 9728
08039bd8 9729 case BUILT_IN_ISGREATER:
db3927fb
AH
9730 return fold_builtin_unordered_cmp (loc, fndecl,
9731 arg0, arg1, UNLE_EXPR, LE_EXPR);
08039bd8 9732 case BUILT_IN_ISGREATEREQUAL:
db3927fb
AH
9733 return fold_builtin_unordered_cmp (loc, fndecl,
9734 arg0, arg1, UNLT_EXPR, LT_EXPR);
08039bd8 9735 case BUILT_IN_ISLESS:
db3927fb
AH
9736 return fold_builtin_unordered_cmp (loc, fndecl,
9737 arg0, arg1, UNGE_EXPR, GE_EXPR);
08039bd8 9738 case BUILT_IN_ISLESSEQUAL:
db3927fb
AH
9739 return fold_builtin_unordered_cmp (loc, fndecl,
9740 arg0, arg1, UNGT_EXPR, GT_EXPR);
08039bd8 9741 case BUILT_IN_ISLESSGREATER:
db3927fb
AH
9742 return fold_builtin_unordered_cmp (loc, fndecl,
9743 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
08039bd8 9744 case BUILT_IN_ISUNORDERED:
db3927fb
AH
9745 return fold_builtin_unordered_cmp (loc, fndecl,
9746 arg0, arg1, UNORDERED_EXPR,
a35da91f 9747 NOP_EXPR);
08039bd8 9748
d3147f64
EC
9749 /* We do the folding for va_start in the expander. */
9750 case BUILT_IN_VA_START:
9751 break;
a32e70c3 9752
10a0d495 9753 case BUILT_IN_OBJECT_SIZE:
5039610b 9754 return fold_builtin_object_size (arg0, arg1);
10a0d495 9755
86951993
AM
9756 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9757 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9758
9759 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9760 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9761
5039610b
SL
9762 default:
9763 break;
9764 }
9765 return NULL_TREE;
9766}
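/* Illustrative aside (not part of builtins.c): among the two-argument
   cases above, __builtin_isgreater (x, y) is folded through the inverted
   UNLE_EXPR comparison, so a NaN operand makes the result false rather
   than true.  A small sketch of that documented behaviour: */

#include <assert.h>

int
main (void)
{
  double qnan = __builtin_nan ("");          /* quiet NaN */
  assert (!__builtin_isgreater (qnan, 1.0)); /* NaN compares as "not greater" */
  assert (__builtin_isgreater (2.0, 1.0));
  return 0;
}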
9767
9768/* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
2625bb5d 9769 and ARG2.
5039610b
SL
9770 This function returns NULL_TREE if no simplification was possible. */
9771
9772static tree
db3927fb 9773fold_builtin_3 (location_t loc, tree fndecl,
2625bb5d 9774 tree arg0, tree arg1, tree arg2)
5039610b
SL
9775{
9776 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9777 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9778 switch (fcode)
9779 {
9780
9781 CASE_FLT_FN (BUILT_IN_SINCOS):
db3927fb 9782 return fold_builtin_sincos (loc, arg0, arg1, arg2);
5039610b
SL
9783
9784 CASE_FLT_FN (BUILT_IN_FMA):
16949072 9785 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
5039610b
SL
9786 break;
9787
ea91f957
KG
9788 CASE_FLT_FN (BUILT_IN_REMQUO):
9789 if (validate_arg (arg0, REAL_TYPE)
c3284718
RS
9790 && validate_arg (arg1, REAL_TYPE)
9791 && validate_arg (arg2, POINTER_TYPE))
ea91f957
KG
9792 return do_mpfr_remquo (arg0, arg1, arg2);
9793 break;
ea91f957 9794
5039610b 9795 case BUILT_IN_STRNCMP:
db3927fb 9796 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
5039610b 9797
2a5fce6d 9798 case BUILT_IN_MEMCHR:
db3927fb 9799 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
2a5fce6d 9800
5039610b
SL
9801 case BUILT_IN_BCMP:
9802 case BUILT_IN_MEMCMP:
db3927fb 9803 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
5039610b 9804
ed9c79e1
JJ
9805 case BUILT_IN_EXPECT:
9806 return fold_builtin_expect (loc, arg0, arg1, arg2);
9807
1304953e
JJ
9808 case BUILT_IN_ADD_OVERFLOW:
9809 case BUILT_IN_SUB_OVERFLOW:
9810 case BUILT_IN_MUL_OVERFLOW:
9811 case BUILT_IN_SADD_OVERFLOW:
9812 case BUILT_IN_SADDL_OVERFLOW:
9813 case BUILT_IN_SADDLL_OVERFLOW:
9814 case BUILT_IN_SSUB_OVERFLOW:
9815 case BUILT_IN_SSUBL_OVERFLOW:
9816 case BUILT_IN_SSUBLL_OVERFLOW:
9817 case BUILT_IN_SMUL_OVERFLOW:
9818 case BUILT_IN_SMULL_OVERFLOW:
9819 case BUILT_IN_SMULLL_OVERFLOW:
9820 case BUILT_IN_UADD_OVERFLOW:
9821 case BUILT_IN_UADDL_OVERFLOW:
9822 case BUILT_IN_UADDLL_OVERFLOW:
9823 case BUILT_IN_USUB_OVERFLOW:
9824 case BUILT_IN_USUBL_OVERFLOW:
9825 case BUILT_IN_USUBLL_OVERFLOW:
9826 case BUILT_IN_UMUL_OVERFLOW:
9827 case BUILT_IN_UMULL_OVERFLOW:
9828 case BUILT_IN_UMULLL_OVERFLOW:
9829 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9830
b0b3afb2
BS
9831 default:
9832 break;
9833 }
5039610b
SL
9834 return NULL_TREE;
9835}
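/* Illustrative aside (not part of builtins.c): with all-constant arguments
   the three-argument string/memory cases above can fold away entirely,
   e.g. memcmp of two string literals becomes an integer constant.  Either
   way the observable result is the same, which a short program can check: */

#include <assert.h>
#include <string.h>

int
main (void)
{
  assert (memcmp ("abc", "abd", 3) < 0);
  assert (strncmp ("hello", "help", 3) == 0);
  return 0;
}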
b0b3afb2 9836
5039610b 9837/* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
a6a0570f
RB
 9838 arguments. The trailing bool parameter (the old IGNORE flag) is
 9839 accepted but no longer used. This function returns NULL_TREE if no
9840 simplification was possible. */
b8698a0f 9841
3d2cf79f 9842tree
2625bb5d 9843fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
5039610b
SL
9844{
9845 tree ret = NULL_TREE;
f4577fcd 9846
5039610b
SL
9847 switch (nargs)
9848 {
9849 case 0:
2625bb5d 9850 ret = fold_builtin_0 (loc, fndecl);
5039610b
SL
9851 break;
9852 case 1:
2625bb5d 9853 ret = fold_builtin_1 (loc, fndecl, args[0]);
5039610b
SL
9854 break;
9855 case 2:
2625bb5d 9856 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
5039610b
SL
9857 break;
9858 case 3:
2625bb5d 9859 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
5039610b 9860 break;
5039610b 9861 default:
2625bb5d 9862 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
5039610b
SL
9863 break;
9864 }
9865 if (ret)
9866 {
726a989a 9867 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
db3927fb 9868 SET_EXPR_LOCATION (ret, loc);
5039610b
SL
9869 TREE_NO_WARNING (ret) = 1;
9870 return ret;
9871 }
9872 return NULL_TREE;
9873}
9874
862d0b35
DN
9875/* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9876 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9877 of arguments in ARGS to be omitted. OLDNARGS is the number of
9878 elements in ARGS. */
5039610b
SL
9879
9880static tree
862d0b35
DN
9881rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9882 int skip, tree fndecl, int n, va_list newargs)
5039610b 9883{
862d0b35
DN
9884 int nargs = oldnargs - skip + n;
9885 tree *buffer;
5039610b 9886
862d0b35 9887 if (n > 0)
5039610b 9888 {
862d0b35 9889 int i, j;
5039610b 9890
862d0b35
DN
9891 buffer = XALLOCAVEC (tree, nargs);
9892 for (i = 0; i < n; i++)
9893 buffer[i] = va_arg (newargs, tree);
9894 for (j = skip; j < oldnargs; j++, i++)
9895 buffer[i] = args[j];
9896 }
9897 else
9898 buffer = args + skip;
3bf5906b 9899
862d0b35
DN
9900 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9901}
5039610b 9902
0889e9bc
JJ
9903/* Return true if FNDECL shouldn't be folded right now.
 9904 If a built-in function has an always_inline wrapper, defer
 9905 folding it until after always_inline functions have been
 9906 inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
9907 might not be performed. */
9908
e7f9dae0 9909bool
0889e9bc
JJ
9910avoid_folding_inline_builtin (tree fndecl)
9911{
9912 return (DECL_DECLARED_INLINE_P (fndecl)
9913 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9914 && cfun
9915 && !cfun->always_inline_functions_inlined
9916 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9917}
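/* Illustrative aside (not part of builtins.c): the kind of declaration the
   check above is protecting.  Fortified headers wrap some builtins in
   always_inline functions roughly like the hypothetical sketch below;
   folding the inner call before the wrapper is inlined could bypass the
   object-size check.  Names here are invented for illustration and are not
   glibc's actual macros.  */

#include <string.h>

extern inline __attribute__ ((__always_inline__, __gnu_inline__)) void *
my_fortified_memcpy (void *dst, const void *src, size_t n)
{
  /* Check against the destination's known object size, then delegate.  */
  return __builtin___memcpy_chk (dst, src, n, __builtin_object_size (dst, 0));
}

int
main (void)
{
  char buf[8];
  my_fortified_memcpy (buf, "1234567", 8);   /* 8 bytes fit exactly */
  return buf[0] == '1' ? 0 : 1;
}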
9918
6de9cd9a 9919/* A wrapper function for builtin folding that prevents warnings for
caf93cb0 9920 "statement without effect" and the like, caused by removing the
6de9cd9a
DN
9921 call node earlier than the warning is generated. */
9922
9923tree
db3927fb 9924fold_call_expr (location_t loc, tree exp, bool ignore)
6de9cd9a 9925{
5039610b
SL
9926 tree ret = NULL_TREE;
9927 tree fndecl = get_callee_fndecl (exp);
9928 if (fndecl
9929 && TREE_CODE (fndecl) == FUNCTION_DECL
6ef5231b
JJ
9930 && DECL_BUILT_IN (fndecl)
9931 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9932 yet. Defer folding until we see all the arguments
9933 (after inlining). */
9934 && !CALL_EXPR_VA_ARG_PACK (exp))
9935 {
9936 int nargs = call_expr_nargs (exp);
9937
9938 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
 9939 instead the last argument is __builtin_va_arg_pack (). Defer folding
9940 even in that case, until arguments are finalized. */
9941 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9942 {
9943 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9944 if (fndecl2
9945 && TREE_CODE (fndecl2) == FUNCTION_DECL
9946 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9947 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9948 return NULL_TREE;
9949 }
9950
0889e9bc
JJ
9951 if (avoid_folding_inline_builtin (fndecl))
9952 return NULL_TREE;
9953
5039610b 9954 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
f311c3b4
NF
9955 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9956 CALL_EXPR_ARGP (exp), ignore);
5039610b
SL
9957 else
9958 {
a6a0570f
RB
9959 tree *args = CALL_EXPR_ARGP (exp);
9960 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
5039610b 9961 if (ret)
db3927fb 9962 return ret;
5039610b 9963 }
6de9cd9a 9964 }
5039610b
SL
9965 return NULL_TREE;
9966}
b8698a0f 9967
a6a0570f
RB
9968/* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9969 N arguments are passed in the array ARGARRAY. Return a folded
9970 expression or NULL_TREE if no simplification was possible. */
4977bab6
ZW
9971
9972tree
a6a0570f 9973fold_builtin_call_array (location_t loc, tree,
94a0dd7b
SL
9974 tree fn,
9975 int n,
9976 tree *argarray)
6385a28f 9977{
a6a0570f
RB
9978 if (TREE_CODE (fn) != ADDR_EXPR)
9979 return NULL_TREE;
5039610b 9980
a6a0570f
RB
9981 tree fndecl = TREE_OPERAND (fn, 0);
9982 if (TREE_CODE (fndecl) == FUNCTION_DECL
9983 && DECL_BUILT_IN (fndecl))
9984 {
9985 /* If last argument is __builtin_va_arg_pack (), arguments to this
9986 function are not finalized yet. Defer folding until they are. */
9987 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9988 {
9989 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9990 if (fndecl2
9991 && TREE_CODE (fndecl2) == FUNCTION_DECL
9992 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9993 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9994 return NULL_TREE;
9995 }
9996 if (avoid_folding_inline_builtin (fndecl))
9997 return NULL_TREE;
9998 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9999 return targetm.fold_builtin (fndecl, n, argarray, false);
10000 else
10001 return fold_builtin_n (loc, fndecl, argarray, n, false);
10002 }
5039610b 10003
a6a0570f 10004 return NULL_TREE;
5039610b
SL
10005}
10006
43ea30dc
NF
10007/* Construct a new CALL_EXPR using the tail of the argument list of EXP
10008 along with N new arguments specified as the "..." parameters. SKIP
10009 is the number of arguments in EXP to be omitted. This function is used
10010 to do varargs-to-varargs transformations. */
10011
10012static tree
10013rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10014{
10015 va_list ap;
10016 tree t;
10017
10018 va_start (ap, n);
10019 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10020 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10021 va_end (ap);
5039610b 10022
43ea30dc 10023 return t;
5039610b
SL
10024}
10025
10026/* Validate a single argument ARG against a tree code CODE representing
10027 a type. */
b8698a0f 10028
5039610b 10029static bool
fa233e34 10030validate_arg (const_tree arg, enum tree_code code)
5039610b
SL
10031{
10032 if (!arg)
10033 return false;
10034 else if (code == POINTER_TYPE)
10035 return POINTER_TYPE_P (TREE_TYPE (arg));
4cd8e76f
RG
10036 else if (code == INTEGER_TYPE)
10037 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
5039610b 10038 return code == TREE_CODE (TREE_TYPE (arg));
6385a28f 10039}
019fa094 10040
726a989a
RB
10041/* This function validates the types of a function call argument list
10042 against a specified list of tree_codes. If the last specifier is a 0,
 10043 that represents an ellipsis; otherwise the last specifier must be a
10044 VOID_TYPE.
10045
10046 This is the GIMPLE version of validate_arglist. Eventually we want to
10047 completely convert builtins.c to work from GIMPLEs and the tree based
10048 validate_arglist will then be removed. */
10049
10050bool
538dd0b7 10051validate_gimple_arglist (const gcall *call, ...)
726a989a
RB
10052{
10053 enum tree_code code;
10054 bool res = 0;
10055 va_list ap;
10056 const_tree arg;
10057 size_t i;
10058
10059 va_start (ap, call);
10060 i = 0;
10061
10062 do
10063 {
72b5577d 10064 code = (enum tree_code) va_arg (ap, int);
726a989a
RB
10065 switch (code)
10066 {
10067 case 0:
 10068 /* This signifies an ellipsis; any further arguments are all ok. */
10069 res = true;
10070 goto end;
10071 case VOID_TYPE:
 10072 /* This signifies an endlink: if no arguments remain, return
 10073 true; otherwise return false. */
10074 res = (i == gimple_call_num_args (call));
10075 goto end;
10076 default:
10077 /* If no parameters remain or the parameter's code does not
10078 match the specified code, return false. Otherwise continue
10079 checking any remaining arguments. */
10080 arg = gimple_call_arg (call, i++);
10081 if (!validate_arg (arg, code))
10082 goto end;
10083 break;
10084 }
10085 }
10086 while (1);
10087
10088 /* We need gotos here since we can only have one VA_CLOSE in a
10089 function. */
10090 end: ;
10091 va_end (ap);
10092
10093 return res;
10094}
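/* Illustrative aside (not part of builtins.c): a self-contained analog of
   the specifier protocol documented above, with 0 standing for "..." and a
   final END code standing in for VOID_TYPE.  All names below are invented
   for illustration only.  */

#include <stdarg.h>
#include <stdbool.h>
#include <assert.h>

enum code { END = 1, PTR, INT };   /* 0 is reserved for the ellipsis */

static bool
check_args (const enum code *actual, int nargs, ...)
{
  va_list ap;
  int i = 0;
  bool ok = false;

  va_start (ap, nargs);
  for (;;)
    {
      int spec = va_arg (ap, int);
      if (spec == 0)                 /* "...": anything further is fine */
        { ok = true; break; }
      if (spec == END)               /* end marker: no arguments may remain */
        { ok = (i == nargs); break; }
      if (i >= nargs || actual[i++] != (enum code) spec)
        break;                       /* too few arguments, or a mismatch */
    }
  va_end (ap);
  return ok;
}

int
main (void)
{
  enum code call[] = { PTR, INT };
  assert (check_args (call, 2, PTR, INT, END));   /* exact match */
  assert (check_args (call, 2, PTR, 0));          /* trailing ellipsis */
  assert (!check_args (call, 2, PTR, END));       /* unexpected extra argument */
  return 0;
}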
10095
f6155fda
SS
10096/* Default target-specific builtin expander that does nothing. */
10097
10098rtx
4682ae04
AJ
10099default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10100 rtx target ATTRIBUTE_UNUSED,
10101 rtx subtarget ATTRIBUTE_UNUSED,
ef4bddc2 10102 machine_mode mode ATTRIBUTE_UNUSED,
4682ae04 10103 int ignore ATTRIBUTE_UNUSED)
f6155fda
SS
10104{
10105 return NULL_RTX;
10106}
34ee7f82 10107
7dc61d6c
KG
 10108/* Returns true if EXP represents data that would potentially reside
10109 in a readonly section. */
10110
fef5a0d9 10111bool
7dc61d6c
KG
10112readonly_data_expr (tree exp)
10113{
10114 STRIP_NOPS (exp);
10115
aef0afc4
UW
10116 if (TREE_CODE (exp) != ADDR_EXPR)
10117 return false;
10118
10119 exp = get_base_address (TREE_OPERAND (exp, 0));
10120 if (!exp)
10121 return false;
10122
10123 /* Make sure we call decl_readonly_section only for trees it
10124 can handle (since it returns true for everything it doesn't
10125 understand). */
caf93cb0 10126 if (TREE_CODE (exp) == STRING_CST
aef0afc4
UW
10127 || TREE_CODE (exp) == CONSTRUCTOR
10128 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
10129 return decl_readonly_section (exp, 0);
7dc61d6c
KG
10130 else
10131 return false;
10132}
6de9cd9a 10133
5039610b
SL
10134/* Simplify a call to the strstr builtin. S1 and S2 are the arguments
10135 to the call, and TYPE is its return type.
6de9cd9a 10136
5039610b 10137 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10138 simplified form of the call as a tree.
10139
10140 The simplified form may be a constant or other expression which
10141 computes the same value, but in a more efficient manner (including
10142 calls to other builtin functions).
10143
10144 The call may contain arguments which need to be evaluated, but
10145 which are not useful to determine the result of the call. In
10146 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10147 COMPOUND_EXPR will be an argument which must be evaluated.
10148 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10149 COMPOUND_EXPR in the chain will contain the tree for the simplified
10150 form of the builtin function call. */
10151
10152static tree
db3927fb 10153fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
6de9cd9a 10154{
5039610b
SL
10155 if (!validate_arg (s1, POINTER_TYPE)
10156 || !validate_arg (s2, POINTER_TYPE))
10157 return NULL_TREE;
6de9cd9a
DN
10158 else
10159 {
6de9cd9a
DN
10160 tree fn;
10161 const char *p1, *p2;
10162
10163 p2 = c_getstr (s2);
10164 if (p2 == NULL)
5039610b 10165 return NULL_TREE;
6de9cd9a
DN
10166
10167 p1 = c_getstr (s1);
10168 if (p1 != NULL)
10169 {
10170 const char *r = strstr (p1, p2);
5fcfe0b2 10171 tree tem;
6de9cd9a 10172
6de9cd9a 10173 if (r == NULL)
5212068f 10174 return build_int_cst (TREE_TYPE (s1), 0);
b953ebd6
RS
10175
10176 /* Return an offset into the constant string argument. */
5d49b6a7 10177 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
db3927fb 10178 return fold_convert_loc (loc, type, tem);
6de9cd9a
DN
10179 }
10180
817f9ef2
JW
10181 /* The argument is const char *, and the result is char *, so we need
10182 a type conversion here to avoid a warning. */
6de9cd9a 10183 if (p2[0] == '\0')
db3927fb 10184 return fold_convert_loc (loc, type, s1);
6de9cd9a
DN
10185
10186 if (p2[1] != '\0')
5039610b 10187 return NULL_TREE;
6de9cd9a 10188
e79983f4 10189 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
6de9cd9a 10190 if (!fn)
5039610b 10191 return NULL_TREE;
6de9cd9a
DN
10192
10193 /* New argument list transforming strstr(s1, s2) to
10194 strchr(s1, s2[0]). */
45a2c477
RG
10195 return build_call_expr_loc (loc, fn, 2, s1,
10196 build_int_cst (integer_type_node, p2[0]));
6de9cd9a
DN
10197 }
10198}
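/* Illustrative aside (not part of builtins.c): the observable effect of
   the transformation above.  With a known single-character needle,
   strstr (s, "l") may be emitted as strchr (s, 'l'), and an empty needle
   yields the first argument itself; either way the result is the same,
   which a short program can check: */

#include <assert.h>
#include <string.h>

int
main (void)
{
  const char *s = "hello";
  assert (strstr (s, "l") == strchr (s, 'l'));   /* both point at s + 2 */
  assert (strstr (s, "") == s);                  /* empty needle: s itself */
  return 0;
}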
10199
5039610b
SL
10200/* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
10201 the call, and TYPE is its return type.
6de9cd9a 10202
5039610b 10203 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10204 simplified form of the call as a tree.
10205
10206 The simplified form may be a constant or other expression which
10207 computes the same value, but in a more efficient manner (including
10208 calls to other builtin functions).
10209
10210 The call may contain arguments which need to be evaluated, but
10211 which are not useful to determine the result of the call. In
10212 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10213 COMPOUND_EXPR will be an argument which must be evaluated.
10214 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10215 COMPOUND_EXPR in the chain will contain the tree for the simplified
10216 form of the builtin function call. */
10217
10218static tree
db3927fb 10219fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
6de9cd9a 10220{
5039610b
SL
10221 if (!validate_arg (s1, POINTER_TYPE)
10222 || !validate_arg (s2, INTEGER_TYPE))
10223 return NULL_TREE;
6de9cd9a
DN
10224 else
10225 {
6de9cd9a
DN
10226 const char *p1;
10227
10228 if (TREE_CODE (s2) != INTEGER_CST)
5039610b 10229 return NULL_TREE;
6de9cd9a
DN
10230
10231 p1 = c_getstr (s1);
10232 if (p1 != NULL)
10233 {
10234 char c;
10235 const char *r;
5fcfe0b2 10236 tree tem;
6de9cd9a
DN
10237
10238 if (target_char_cast (s2, &c))
5039610b 10239 return NULL_TREE;
6de9cd9a
DN
10240
10241 r = strchr (p1, c);
10242
10243 if (r == NULL)
5212068f 10244 return build_int_cst (TREE_TYPE (s1), 0);
6de9cd9a
DN
10245
10246 /* Return an offset into the constant string argument. */
5d49b6a7 10247 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
db3927fb 10248 return fold_convert_loc (loc, type, tem);
6de9cd9a 10249 }
5039610b 10250 return NULL_TREE;
6de9cd9a
DN
10251 }
10252}
10253
5039610b
SL
10254/* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
10255 the call, and TYPE is its return type.
6de9cd9a 10256
5039610b 10257 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10258 simplified form of the call as a tree.
10259
10260 The simplified form may be a constant or other expression which
10261 computes the same value, but in a more efficient manner (including
10262 calls to other builtin functions).
10263
10264 The call may contain arguments which need to be evaluated, but
10265 which are not useful to determine the result of the call. In
10266 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10267 COMPOUND_EXPR will be an argument which must be evaluated.
10268 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10269 COMPOUND_EXPR in the chain will contain the tree for the simplified
10270 form of the builtin function call. */
10271
10272static tree
db3927fb 10273fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
6de9cd9a 10274{
5039610b
SL
10275 if (!validate_arg (s1, POINTER_TYPE)
10276 || !validate_arg (s2, INTEGER_TYPE))
10277 return NULL_TREE;
6de9cd9a
DN
10278 else
10279 {
6de9cd9a
DN
10280 tree fn;
10281 const char *p1;
10282
10283 if (TREE_CODE (s2) != INTEGER_CST)
5039610b 10284 return NULL_TREE;
6de9cd9a
DN
10285
10286 p1 = c_getstr (s1);
10287 if (p1 != NULL)
10288 {
10289 char c;
10290 const char *r;
5fcfe0b2 10291 tree tem;
6de9cd9a
DN
10292
10293 if (target_char_cast (s2, &c))
5039610b 10294 return NULL_TREE;
6de9cd9a
DN
10295
10296 r = strrchr (p1, c);
10297
10298 if (r == NULL)
5212068f 10299 return build_int_cst (TREE_TYPE (s1), 0);
6de9cd9a
DN
10300
10301 /* Return an offset into the constant string argument. */
5d49b6a7 10302 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
db3927fb 10303 return fold_convert_loc (loc, type, tem);
6de9cd9a
DN
10304 }
10305
10306 if (! integer_zerop (s2))
5039610b 10307 return NULL_TREE;
6de9cd9a 10308
e79983f4 10309 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
6de9cd9a 10310 if (!fn)
5039610b 10311 return NULL_TREE;
6de9cd9a
DN
10312
10313 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
db3927fb 10314 return build_call_expr_loc (loc, fn, 2, s1, s2);
6de9cd9a
DN
10315 }
10316}
10317
5039610b
SL
10318/* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10319 to the call, and TYPE is its return type.
6de9cd9a 10320
5039610b 10321 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10322 simplified form of the call as a tree.
10323
10324 The simplified form may be a constant or other expression which
10325 computes the same value, but in a more efficient manner (including
10326 calls to other builtin functions).
10327
10328 The call may contain arguments which need to be evaluated, but
10329 which are not useful to determine the result of the call. In
10330 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10331 COMPOUND_EXPR will be an argument which must be evaluated.
10332 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10333 COMPOUND_EXPR in the chain will contain the tree for the simplified
10334 form of the builtin function call. */
10335
10336static tree
db3927fb 10337fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
6de9cd9a 10338{
5039610b
SL
10339 if (!validate_arg (s1, POINTER_TYPE)
10340 || !validate_arg (s2, POINTER_TYPE))
10341 return NULL_TREE;
6de9cd9a
DN
10342 else
10343 {
6de9cd9a
DN
10344 tree fn;
10345 const char *p1, *p2;
10346
10347 p2 = c_getstr (s2);
10348 if (p2 == NULL)
5039610b 10349 return NULL_TREE;
6de9cd9a
DN
10350
10351 p1 = c_getstr (s1);
10352 if (p1 != NULL)
10353 {
10354 const char *r = strpbrk (p1, p2);
5fcfe0b2 10355 tree tem;
6de9cd9a
DN
10356
10357 if (r == NULL)
5212068f 10358 return build_int_cst (TREE_TYPE (s1), 0);
6de9cd9a
DN
10359
10360 /* Return an offset into the constant string argument. */
5d49b6a7 10361 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
db3927fb 10362 return fold_convert_loc (loc, type, tem);
6de9cd9a
DN
10363 }
10364
10365 if (p2[0] == '\0')
d6dc556b
RS
10366 /* strpbrk(x, "") == NULL.
10367 Evaluate and ignore s1 in case it had side-effects. */
db3927fb 10368 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
6de9cd9a
DN
10369
10370 if (p2[1] != '\0')
5039610b 10371 return NULL_TREE; /* Really call strpbrk. */
6de9cd9a 10372
e79983f4 10373 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
6de9cd9a 10374 if (!fn)
5039610b 10375 return NULL_TREE;
6de9cd9a
DN
10376
10377 /* New argument list transforming strpbrk(s1, s2) to
10378 strchr(s1, s2[0]). */
45a2c477
RG
10379 return build_call_expr_loc (loc, fn, 2, s1,
10380 build_int_cst (integer_type_node, p2[0]));
6de9cd9a
DN
10381 }
10382}
10383
5039610b
SL
10384/* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10385 to the call.
6de9cd9a 10386
5039610b 10387 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10388 simplified form of the call as a tree.
10389
10390 The simplified form may be a constant or other expression which
10391 computes the same value, but in a more efficient manner (including
10392 calls to other builtin functions).
10393
10394 The call may contain arguments which need to be evaluated, but
10395 which are not useful to determine the result of the call. In
10396 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10397 COMPOUND_EXPR will be an argument which must be evaluated.
10398 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10399 COMPOUND_EXPR in the chain will contain the tree for the simplified
10400 form of the builtin function call. */
10401
10402static tree
db3927fb 10403fold_builtin_strspn (location_t loc, tree s1, tree s2)
6de9cd9a 10404{
5039610b
SL
10405 if (!validate_arg (s1, POINTER_TYPE)
10406 || !validate_arg (s2, POINTER_TYPE))
10407 return NULL_TREE;
6de9cd9a
DN
10408 else
10409 {
6de9cd9a
DN
10410 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10411
10412 /* If both arguments are constants, evaluate at compile-time. */
10413 if (p1 && p2)
10414 {
10415 const size_t r = strspn (p1, p2);
854f9272 10416 return build_int_cst (size_type_node, r);
6de9cd9a
DN
10417 }
10418
5039610b 10419 /* If either argument is "", the result is 0. */
6de9cd9a 10420 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
08039bd8
RS
10421 /* Evaluate and ignore both arguments in case either one has
10422 side-effects. */
db3927fb 10423 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
08039bd8 10424 s1, s2);
5039610b 10425 return NULL_TREE;
6de9cd9a
DN
10426 }
10427}
10428
5039610b
SL
10429/* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10430 to the call.
6de9cd9a 10431
5039610b 10432 Return NULL_TREE if no simplification was possible, otherwise return the
6de9cd9a
DN
10433 simplified form of the call as a tree.
10434
10435 The simplified form may be a constant or other expression which
10436 computes the same value, but in a more efficient manner (including
10437 calls to other builtin functions).
10438
10439 The call may contain arguments which need to be evaluated, but
10440 which are not useful to determine the result of the call. In
10441 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10442 COMPOUND_EXPR will be an argument which must be evaluated.
10443 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10444 COMPOUND_EXPR in the chain will contain the tree for the simplified
10445 form of the builtin function call. */
10446
10447static tree
db3927fb 10448fold_builtin_strcspn (location_t loc, tree s1, tree s2)
6de9cd9a 10449{
5039610b
SL
10450 if (!validate_arg (s1, POINTER_TYPE)
10451 || !validate_arg (s2, POINTER_TYPE))
10452 return NULL_TREE;
6de9cd9a
DN
10453 else
10454 {
6de9cd9a
DN
10455 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10456
10457 /* If both arguments are constants, evaluate at compile-time. */
10458 if (p1 && p2)
10459 {
10460 const size_t r = strcspn (p1, p2);
854f9272 10461 return build_int_cst (size_type_node, r);
6de9cd9a
DN
10462 }
10463
5039610b 10464 /* If the first argument is "", the result is 0. */
6de9cd9a
DN
10465 if (p1 && *p1 == '\0')
10466 {
10467 /* Evaluate and ignore argument s2 in case it has
10468 side-effects. */
db3927fb 10469 return omit_one_operand_loc (loc, size_type_node,
002bd9f0 10470 size_zero_node, s2);
6de9cd9a
DN
10471 }
10472
10473 /* If the second argument is "", return __builtin_strlen(s1). */
10474 if (p2 && *p2 == '\0')
10475 {
e79983f4 10476 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
6de9cd9a
DN
10477
10478 /* If the replacement _DECL isn't initialized, don't do the
10479 transformation. */
10480 if (!fn)
5039610b 10481 return NULL_TREE;
6de9cd9a 10482
db3927fb 10483 return build_call_expr_loc (loc, fn, 1, s1);
6de9cd9a 10484 }
5039610b 10485 return NULL_TREE;
6de9cd9a
DN
10486 }
10487}
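/* Illustrative aside (not part of builtins.c): the strcspn special cases
   above, seen from user code.  With two constant strings the call can be
   evaluated at compile time, and with an empty reject set it degenerates
   to strlen of the first argument; the run-time result is identical either
   way.  A hedged check: */

#include <assert.h>
#include <string.h>

int
main (void)
{
  assert (strcspn ("builtins.c", ".") == 8);    /* both arguments constant */
  assert (strcspn ("builtins.c", "") == strlen ("builtins.c"));
  return 0;
}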
10488
5039610b 10489/* Fold the next_arg or va_start call EXP. Returns true if there was an error
2efcfa4e
AP
10490 produced. False otherwise. This is done so that we don't output the error
10491 or warning twice or three times. */
726a989a 10492
2efcfa4e 10493bool
5039610b 10494fold_builtin_next_arg (tree exp, bool va_start_p)
6de9cd9a
DN
10495{
10496 tree fntype = TREE_TYPE (current_function_decl);
5039610b
SL
10497 int nargs = call_expr_nargs (exp);
10498 tree arg;
34c88790
DS
10499 /* There is good chance the current input_location points inside the
10500 definition of the va_start macro (perhaps on the token for
10501 builtin) in a system header, so warnings will not be emitted.
10502 Use the location in real source code. */
10503 source_location current_location =
10504 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10505 NULL);
6de9cd9a 10506
f38958e8 10507 if (!stdarg_p (fntype))
2efcfa4e
AP
10508 {
10509 error ("%<va_start%> used in function with fixed args");
10510 return true;
10511 }
5039610b
SL
10512
10513 if (va_start_p)
8870e212 10514 {
5039610b
SL
10515 if (va_start_p && (nargs != 2))
10516 {
10517 error ("wrong number of arguments to function %<va_start%>");
10518 return true;
10519 }
10520 arg = CALL_EXPR_ARG (exp, 1);
8870e212
JJ
10521 }
10522 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
 10523 once we have checked the arguments and, if needed, issued a warning. */
5039610b 10524 else
6de9cd9a 10525 {
5039610b
SL
10526 if (nargs == 0)
10527 {
10528 /* Evidently an out of date version of <stdarg.h>; can't validate
10529 va_start's second argument, but can still work as intended. */
34c88790 10530 warning_at (current_location,
b9c8da34
DS
10531 OPT_Wvarargs,
10532 "%<__builtin_next_arg%> called without an argument");
5039610b
SL
10533 return true;
10534 }
10535 else if (nargs > 1)
c22cacf3 10536 {
5039610b 10537 error ("wrong number of arguments to function %<__builtin_next_arg%>");
c22cacf3
MS
10538 return true;
10539 }
5039610b
SL
10540 arg = CALL_EXPR_ARG (exp, 0);
10541 }
10542
4e3825db
MM
10543 if (TREE_CODE (arg) == SSA_NAME)
10544 arg = SSA_NAME_VAR (arg);
10545
5039610b 10546 /* We destructively modify the call to be __builtin_va_start (ap, 0)
b8698a0f 10547 or __builtin_next_arg (0) the first time we see it, after checking
5039610b
SL
10548 the arguments and if needed issuing a warning. */
10549 if (!integer_zerop (arg))
10550 {
10551 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8870e212 10552
6de9cd9a
DN
10553 /* Strip off all nops for the sake of the comparison. This
10554 is not quite the same as STRIP_NOPS. It does more.
10555 We must also strip off INDIRECT_EXPR for C++ reference
10556 parameters. */
1043771b 10557 while (CONVERT_EXPR_P (arg)
6de9cd9a
DN
10558 || TREE_CODE (arg) == INDIRECT_REF)
10559 arg = TREE_OPERAND (arg, 0);
10560 if (arg != last_parm)
c22cacf3 10561 {
118f3b19
KH
 10562 /* FIXME: Sometimes the tree optimizers give us something other
 10563 than the last argument even though the user used the last
10564 argument. We just warn and set the arg to be the last
10565 argument so that we will get wrong-code because of
10566 it. */
34c88790 10567 warning_at (current_location,
b9c8da34 10568 OPT_Wvarargs,
34c88790 10569 "second parameter of %<va_start%> not last named argument");
2efcfa4e 10570 }
2985f531
MLI
10571
10572 /* Undefined by C99 7.15.1.4p4 (va_start):
10573 "If the parameter parmN is declared with the register storage
10574 class, with a function or array type, or with a type that is
10575 not compatible with the type that results after application of
10576 the default argument promotions, the behavior is undefined."
10577 */
10578 else if (DECL_REGISTER (arg))
34c88790
DS
10579 {
10580 warning_at (current_location,
b9c8da34 10581 OPT_Wvarargs,
34c88790
DS
10582 "undefined behaviour when second parameter of "
10583 "%<va_start%> is declared with %<register%> storage");
10584 }
2985f531 10585
8870e212 10586 /* We want to verify the second parameter just once before the tree
c22cacf3
MS
10587 optimizers are run and then avoid keeping it in the tree,
10588 as otherwise we could warn even for correct code like:
10589 void foo (int i, ...)
10590 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
5039610b
SL
10591 if (va_start_p)
10592 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10593 else
10594 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
2efcfa4e
AP
10595 }
10596 return false;
6de9cd9a
DN
10597}
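/* Illustrative aside (not part of builtins.c): what the checks above
   enforce at the source level.  va_start must name the last named
   parameter; naming an earlier one, or a parameter with register storage,
   draws the -Wvarargs diagnostics issued above.  A well-formed use: */

#include <stdarg.h>

static int
sum (int count, ...)
{
  va_list ap;
  int total = 0;

  va_start (ap, count);        /* COUNT is the last named parameter: OK */
  for (int i = 0; i < count; i++)
    total += va_arg (ap, int);
  va_end (ap);
  return total;
}

int
main (void)
{
  return sum (3, 1, 2, 3) == 6 ? 0 : 1;
}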
10598
10599
5039610b 10600/* Expand a call EXP to __builtin_object_size. */
10a0d495 10601
9b2b7279 10602static rtx
10a0d495
JJ
10603expand_builtin_object_size (tree exp)
10604{
10605 tree ost;
10606 int object_size_type;
10607 tree fndecl = get_callee_fndecl (exp);
10a0d495 10608
5039610b 10609 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10a0d495 10610 {
c94ed7a1
JJ
10611 error ("%Kfirst argument of %D must be a pointer, second integer constant",
10612 exp, fndecl);
10a0d495
JJ
10613 expand_builtin_trap ();
10614 return const0_rtx;
10615 }
10616
5039610b 10617 ost = CALL_EXPR_ARG (exp, 1);
10a0d495
JJ
10618 STRIP_NOPS (ost);
10619
10620 if (TREE_CODE (ost) != INTEGER_CST
10621 || tree_int_cst_sgn (ost) < 0
10622 || compare_tree_int (ost, 3) > 0)
10623 {
c94ed7a1
JJ
10624 error ("%Klast argument of %D is not integer constant between 0 and 3",
10625 exp, fndecl);
10a0d495
JJ
10626 expand_builtin_trap ();
10627 return const0_rtx;
10628 }
10629
9439e9a1 10630 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
10631
10632 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10633}
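/* Illustrative aside (not part of builtins.c): the fallback expansion
   above returns (size_t) -1 for object-size types 0 and 1, and 0 for
   types 2 and 3, when nothing better is known; for a visible object GCC
   can usually do better.  A hedged sketch (exact results can depend on
   optimization level): */

#include <stdio.h>
#include <stddef.h>

static size_t __attribute__ ((noinline))
max_size (char *p)
{
  /* With no knowledge of what P points to, type 0 typically yields -1.  */
  return __builtin_object_size (p, 0);
}

static char buf[32];

int
main (void)
{
  printf ("%zu\n", __builtin_object_size (buf, 0));   /* typically 32 */
  printf ("%zu\n", max_size (buf));                   /* typically (size_t) -1 */
  return 0;
}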
10634
10635/* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10636 FCODE is the BUILT_IN_* to use.
5039610b 10637 Return NULL_RTX if we failed; the caller should emit a normal call,
10a0d495
JJ
10638 otherwise try to get the result in TARGET, if convenient (and in
10639 mode MODE if that's convenient). */
10640
10641static rtx
ef4bddc2 10642expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10a0d495
JJ
10643 enum built_in_function fcode)
10644{
10a0d495
JJ
10645 tree dest, src, len, size;
10646
5039610b 10647 if (!validate_arglist (exp,
10a0d495
JJ
10648 POINTER_TYPE,
10649 fcode == BUILT_IN_MEMSET_CHK
10650 ? INTEGER_TYPE : POINTER_TYPE,
10651 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5039610b 10652 return NULL_RTX;
10a0d495 10653
5039610b
SL
10654 dest = CALL_EXPR_ARG (exp, 0);
10655 src = CALL_EXPR_ARG (exp, 1);
10656 len = CALL_EXPR_ARG (exp, 2);
10657 size = CALL_EXPR_ARG (exp, 3);
10a0d495 10658
cc269bb6 10659 if (! tree_fits_uhwi_p (size))
5039610b 10660 return NULL_RTX;
10a0d495 10661
cc269bb6 10662 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10a0d495
JJ
10663 {
10664 tree fn;
10665
10666 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
10667 {
9bd9f738
RG
10668 warning_at (tree_nonartificial_location (exp),
10669 0, "%Kcall to %D will always overflow destination buffer",
10670 exp, get_callee_fndecl (exp));
5039610b 10671 return NULL_RTX;
10a0d495
JJ
10672 }
10673
10a0d495
JJ
10674 fn = NULL_TREE;
10675 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10676 mem{cpy,pcpy,move,set} is available. */
10677 switch (fcode)
10678 {
10679 case BUILT_IN_MEMCPY_CHK:
e79983f4 10680 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10a0d495
JJ
10681 break;
10682 case BUILT_IN_MEMPCPY_CHK:
e79983f4 10683 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10a0d495
JJ
10684 break;
10685 case BUILT_IN_MEMMOVE_CHK:
e79983f4 10686 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10a0d495
JJ
10687 break;
10688 case BUILT_IN_MEMSET_CHK:
e79983f4 10689 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10a0d495
JJ
10690 break;
10691 default:
10692 break;
10693 }
10694
10695 if (! fn)
5039610b 10696 return NULL_RTX;
10a0d495 10697
aa493694 10698 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
44e10129
MM
10699 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10700 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
10701 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10702 }
10703 else if (fcode == BUILT_IN_MEMSET_CHK)
5039610b 10704 return NULL_RTX;
10a0d495
JJ
10705 else
10706 {
0eb77834 10707 unsigned int dest_align = get_pointer_alignment (dest);
10a0d495
JJ
10708
10709 /* If DEST is not a pointer type, call the normal function. */
10710 if (dest_align == 0)
5039610b 10711 return NULL_RTX;
10a0d495
JJ
10712
10713 /* If SRC and DEST are the same (and not volatile), do nothing. */
10714 if (operand_equal_p (src, dest, 0))
10715 {
10716 tree expr;
10717
10718 if (fcode != BUILT_IN_MEMPCPY_CHK)
10719 {
10720 /* Evaluate and ignore LEN in case it has side-effects. */
10721 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10722 return expand_expr (dest, target, mode, EXPAND_NORMAL);
10723 }
10724
5d49b6a7 10725 expr = fold_build_pointer_plus (dest, len);
10a0d495
JJ
10726 return expand_expr (expr, target, mode, EXPAND_NORMAL);
10727 }
10728
10729 /* __memmove_chk special case. */
10730 if (fcode == BUILT_IN_MEMMOVE_CHK)
10731 {
0eb77834 10732 unsigned int src_align = get_pointer_alignment (src);
10a0d495
JJ
10733
10734 if (src_align == 0)
5039610b 10735 return NULL_RTX;
10a0d495
JJ
10736
 10737	  /* If SRC refers to data in a read-only section, we can use the
 10738	     normal __memcpy_chk.  */
10739 if (readonly_data_expr (src))
10740 {
e79983f4 10741 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10a0d495 10742 if (!fn)
5039610b 10743 return NULL_RTX;
aa493694
JJ
10744 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10745 dest, src, len, size);
44e10129
MM
10746 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10747 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10a0d495
JJ
10748 return expand_expr (fn, target, mode, EXPAND_NORMAL);
10749 }
10750 }
5039610b 10751 return NULL_RTX;
10a0d495
JJ
10752 }
10753}
10754
10755/* Emit warning if a buffer overflow is detected at compile time. */
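/* For instance (names illustrative):

     char d[4];
     __builtin___strcpy_chk (d, "abcdef", __builtin_object_size (d, 1));

   has a source length of 6, which is not smaller than the object size 4,
   so the "will always overflow" warning below fires.  */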
10756
10757static void
10758maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10759{
5039610b 10760 int is_strlen = 0;
10a0d495 10761 tree len, size;
9bd9f738 10762 location_t loc = tree_nonartificial_location (exp);
10a0d495
JJ
10763
10764 switch (fcode)
10765 {
10766 case BUILT_IN_STRCPY_CHK:
10767 case BUILT_IN_STPCPY_CHK:
10768 /* For __strcat_chk the warning will be emitted only if overflowing
10769 by at least strlen (dest) + 1 bytes. */
10770 case BUILT_IN_STRCAT_CHK:
5039610b
SL
10771 len = CALL_EXPR_ARG (exp, 1);
10772 size = CALL_EXPR_ARG (exp, 2);
10a0d495
JJ
10773 is_strlen = 1;
10774 break;
1c2fc017 10775 case BUILT_IN_STRNCAT_CHK:
10a0d495 10776 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 10777 case BUILT_IN_STPNCPY_CHK:
5039610b
SL
10778 len = CALL_EXPR_ARG (exp, 2);
10779 size = CALL_EXPR_ARG (exp, 3);
10a0d495
JJ
10780 break;
10781 case BUILT_IN_SNPRINTF_CHK:
10782 case BUILT_IN_VSNPRINTF_CHK:
5039610b
SL
10783 len = CALL_EXPR_ARG (exp, 1);
10784 size = CALL_EXPR_ARG (exp, 3);
10a0d495
JJ
10785 break;
10786 default:
10787 gcc_unreachable ();
10788 }
10789
10a0d495
JJ
10790 if (!len || !size)
10791 return;
10792
cc269bb6 10793 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10a0d495
JJ
10794 return;
10795
10796 if (is_strlen)
10797 {
10798 len = c_strlen (len, 1);
cc269bb6 10799 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
10a0d495
JJ
10800 return;
10801 }
1c2fc017
JJ
10802 else if (fcode == BUILT_IN_STRNCAT_CHK)
10803 {
5039610b 10804 tree src = CALL_EXPR_ARG (exp, 1);
cc269bb6 10805 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
1c2fc017
JJ
10806 return;
10807 src = c_strlen (src, 1);
cc269bb6 10808 if (! src || ! tree_fits_uhwi_p (src))
1c2fc017 10809 {
9bd9f738
RG
10810 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
10811 exp, get_callee_fndecl (exp));
1c2fc017
JJ
10812 return;
10813 }
10814 else if (tree_int_cst_lt (src, size))
10815 return;
10816 }
cc269bb6 10817 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
10a0d495
JJ
10818 return;
10819
9bd9f738
RG
10820 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
10821 exp, get_callee_fndecl (exp));
10a0d495
JJ
10822}
10823
10824/* Emit warning if a buffer overflow is detected at compile time
10825 in __sprintf_chk/__vsprintf_chk calls. */
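/* Illustrative case:

     char d[4];
     __builtin___sprintf_chk (d, 1, __builtin_object_size (d, 1),
                              "%s", "hello");

   The known result length 5 is not smaller than the object size 4, so the
   warning is emitted.  A format containing no '%' at all is handled the
   same way using the length of the format string itself.  */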
10826
10827static void
10828maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10829{
451409e4 10830 tree size, len, fmt;
10a0d495 10831 const char *fmt_str;
5039610b 10832 int nargs = call_expr_nargs (exp);
10a0d495
JJ
10833
10834 /* Verify the required arguments in the original call. */
b8698a0f 10835
5039610b 10836 if (nargs < 4)
10a0d495 10837 return;
5039610b
SL
10838 size = CALL_EXPR_ARG (exp, 2);
10839 fmt = CALL_EXPR_ARG (exp, 3);
10a0d495 10840
cc269bb6 10841 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10a0d495
JJ
10842 return;
10843
10844 /* Check whether the format is a literal string constant. */
10845 fmt_str = c_getstr (fmt);
10846 if (fmt_str == NULL)
10847 return;
10848
62e5bf5d 10849 if (!init_target_chars ())
000ba23d
KG
10850 return;
10851
10a0d495 10852 /* If the format doesn't contain % args or %%, we know its size. */
000ba23d 10853 if (strchr (fmt_str, target_percent) == 0)
10a0d495
JJ
10854 len = build_int_cstu (size_type_node, strlen (fmt_str));
10855 /* If the format is "%s" and first ... argument is a string literal,
10856 we know it too. */
5039610b
SL
10857 else if (fcode == BUILT_IN_SPRINTF_CHK
10858 && strcmp (fmt_str, target_percent_s) == 0)
10a0d495
JJ
10859 {
10860 tree arg;
10861
5039610b 10862 if (nargs < 5)
10a0d495 10863 return;
5039610b 10864 arg = CALL_EXPR_ARG (exp, 4);
10a0d495
JJ
10865 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10866 return;
10867
10868 len = c_strlen (arg, 1);
cc269bb6 10869 if (!len || ! tree_fits_uhwi_p (len))
10a0d495
JJ
10870 return;
10871 }
10872 else
10873 return;
10874
10875 if (! tree_int_cst_lt (len, size))
9bd9f738
RG
10876 warning_at (tree_nonartificial_location (exp),
10877 0, "%Kcall to %D will always overflow destination buffer",
10878 exp, get_callee_fndecl (exp));
10a0d495
JJ
10879}
10880
f9555f40
JJ
10881/* Emit warning if a free is called with address of a variable. */
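/* E.g. (illustrative)

     int x;
     free (&x);

   is diagnosed with -Wfree-nonheap-object, because the argument is the
   address of a declared object rather than a heap pointer.  */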
10882
10883static void
10884maybe_emit_free_warning (tree exp)
10885{
10886 tree arg = CALL_EXPR_ARG (exp, 0);
10887
10888 STRIP_NOPS (arg);
10889 if (TREE_CODE (arg) != ADDR_EXPR)
10890 return;
10891
10892 arg = get_base_address (TREE_OPERAND (arg, 0));
70f34814 10893 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
f9555f40
JJ
10894 return;
10895
10896 if (SSA_VAR_P (arg))
a3a704a4
MH
10897 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10898 "%Kattempt to free a non-heap object %qD", exp, arg);
f9555f40 10899 else
a3a704a4
MH
10900 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10901 "%Kattempt to free a non-heap object", exp);
f9555f40
JJ
10902}
10903
5039610b
SL
10904/* Fold a call to __builtin_object_size with arguments PTR and OST,
10905 if possible. */
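/* For example (illustrative), given

     char buf[64];

   __builtin_object_size (&buf[16], 0) folds to 48 here.  If the size is
   not known yet, the folding is delayed (NULL_TREE is returned), and a
   pointer argument with side effects folds directly to (size_t) -1 for
   types 0 and 1 and to 0 for types 2 and 3.  */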
10a0d495 10906
9b2b7279 10907static tree
5039610b 10908fold_builtin_object_size (tree ptr, tree ost)
10a0d495 10909{
88e06841 10910 unsigned HOST_WIDE_INT bytes;
10a0d495
JJ
10911 int object_size_type;
10912
5039610b
SL
10913 if (!validate_arg (ptr, POINTER_TYPE)
10914 || !validate_arg (ost, INTEGER_TYPE))
10915 return NULL_TREE;
10a0d495 10916
10a0d495
JJ
10917 STRIP_NOPS (ost);
10918
10919 if (TREE_CODE (ost) != INTEGER_CST
10920 || tree_int_cst_sgn (ost) < 0
10921 || compare_tree_int (ost, 3) > 0)
5039610b 10922 return NULL_TREE;
10a0d495 10923
9439e9a1 10924 object_size_type = tree_to_shwi (ost);
10a0d495
JJ
10925
10926 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10927 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10928 and (size_t) 0 for types 2 and 3. */
10929 if (TREE_SIDE_EFFECTS (ptr))
2ac7cbb5 10930 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10a0d495
JJ
10931
10932 if (TREE_CODE (ptr) == ADDR_EXPR)
88e06841
AS
10933 {
10934 bytes = compute_builtin_object_size (ptr, object_size_type);
807e902e 10935 if (wi::fits_to_tree_p (bytes, size_type_node))
88e06841
AS
10936 return build_int_cstu (size_type_node, bytes);
10937 }
10a0d495
JJ
10938 else if (TREE_CODE (ptr) == SSA_NAME)
10939 {
10a0d495
JJ
10940 /* If object size is not known yet, delay folding until
10941 later. Maybe subsequent passes will help determining
10942 it. */
10943 bytes = compute_builtin_object_size (ptr, object_size_type);
88e06841 10944 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
807e902e 10945 && wi::fits_to_tree_p (bytes, size_type_node))
88e06841 10946 return build_int_cstu (size_type_node, bytes);
10a0d495
JJ
10947 }
10948
88e06841 10949 return NULL_TREE;
10a0d495
JJ
10950}
10951
862d0b35
DN
10952/* Builtins with folding operations that operate on "..." arguments
10953 need special handling; we need to store the arguments in a convenient
10954 data structure before attempting any folding. Fortunately there are
10955 only a few builtins that fall into this category. FNDECL is the
2625bb5d 10956	   function; ARGS and NARGS describe the arguments of the call.  */
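/* For instance, the FPCLASSIFY case below lets a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, 2.5)

   collapse to the FP_NORMAL value once the constant argument has been
   classified (a rough illustration; the actual work is done by
   fold_builtin_fpclassify).  */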
862d0b35
DN
10957
10958static tree
2625bb5d 10959fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
862d0b35
DN
10960{
10961 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10962 tree ret = NULL_TREE;
10963
10964 switch (fcode)
10965 {
862d0b35 10966 case BUILT_IN_FPCLASSIFY:
a6a0570f 10967 ret = fold_builtin_fpclassify (loc, args, nargs);
862d0b35
DN
10968 break;
10969
10970 default:
10971 break;
10972 }
10973 if (ret)
10974 {
10975 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10976 SET_EXPR_LOCATION (ret, loc);
10977 TREE_NO_WARNING (ret) = 1;
10978 return ret;
10979 }
10980 return NULL_TREE;
10981}
10982
000ba23d
KG
10983/* Initialize format string characters in the target charset. */
10984
fef5a0d9 10985bool
000ba23d
KG
10986init_target_chars (void)
10987{
10988 static bool init;
10989 if (!init)
10990 {
10991 target_newline = lang_hooks.to_target_charset ('\n');
10992 target_percent = lang_hooks.to_target_charset ('%');
10993 target_c = lang_hooks.to_target_charset ('c');
10994 target_s = lang_hooks.to_target_charset ('s');
10995 if (target_newline == 0 || target_percent == 0 || target_c == 0
10996 || target_s == 0)
10997 return false;
10998
10999 target_percent_c[0] = target_percent;
11000 target_percent_c[1] = target_c;
11001 target_percent_c[2] = '\0';
11002
11003 target_percent_s[0] = target_percent;
11004 target_percent_s[1] = target_s;
11005 target_percent_s[2] = '\0';
11006
11007 target_percent_s_newline[0] = target_percent;
11008 target_percent_s_newline[1] = target_s;
11009 target_percent_s_newline[2] = target_newline;
11010 target_percent_s_newline[3] = '\0';
c22cacf3 11011
000ba23d
KG
11012 init = true;
11013 }
11014 return true;
11015}
1f3f1f68 11016
4413d881
KG
11017/* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11018 and no overflow/underflow occurred. INEXACT is true if M was not
2f8e468b 11019 exactly calculated. TYPE is the tree type for the result. This
4413d881
KG
11020 function assumes that you cleared the MPFR flags and then
11021 calculated M to see if anything subsequently set a flag prior to
11022 entering this function. Return NULL_TREE if any checks fail. */
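/* The callers below all follow the same pattern:

     mpfr_clear_flags ();
     inexact = func (m, m, rnd);
     result = do_mpfr_ckconv (m, type, inexact);

   i.e. clear the MPFR flags, perform the computation, then let this
   routine validate the flags and convert the result to a REAL_CST.  */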
11023
11024static tree
62e5bf5d 11025do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
4413d881
KG
11026{
11027 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11028 overflow/underflow occurred. If -frounding-math, proceed iff the
11029 result of calling FUNC was exact. */
62e5bf5d 11030 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
4413d881
KG
11031 && (!flag_rounding_math || !inexact))
11032 {
11033 REAL_VALUE_TYPE rr;
11034
205a4d09 11035 real_from_mpfr (&rr, m, type, GMP_RNDN);
4413d881
KG
11036 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11037 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11038	 but the mpfr_t is not, then we underflowed in the
11039 conversion. */
4c8c70e0 11040 if (real_isfinite (&rr)
4413d881
KG
11041 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11042 {
11043 REAL_VALUE_TYPE rmode;
11044
11045 real_convert (&rmode, TYPE_MODE (type), &rr);
11046 /* Proceed iff the specified mode can hold the value. */
11047 if (real_identical (&rmode, &rr))
11048 return build_real (type, rmode);
11049 }
11050 }
11051 return NULL_TREE;
11052}
11053
c128599a
KG
11054/* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11055 number and no overflow/underflow occurred. INEXACT is true if M
11056 was not exactly calculated. TYPE is the tree type for the result.
11057 This function assumes that you cleared the MPFR flags and then
11058 calculated M to see if anything subsequently set a flag prior to
ca75b926
KG
11059 entering this function. Return NULL_TREE if any checks fail, if
11060 FORCE_CONVERT is true, then bypass the checks. */
c128599a
KG
11061
11062static tree
ca75b926 11063do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
c128599a
KG
11064{
11065 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11066 overflow/underflow occurred. If -frounding-math, proceed iff the
11067 result of calling FUNC was exact. */
ca75b926
KG
11068 if (force_convert
11069 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11070 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11071 && (!flag_rounding_math || !inexact)))
c128599a
KG
11072 {
11073 REAL_VALUE_TYPE re, im;
11074
14aa6352
DE
11075 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
11076 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
c128599a
KG
11077 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11078 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
 11080	 but the mpfr_t is not, then we underflowed in the
11080 conversion. */
ca75b926
KG
11081 if (force_convert
11082 || (real_isfinite (&re) && real_isfinite (&im)
11083 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11084 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
c128599a
KG
11085 {
11086 REAL_VALUE_TYPE re_mode, im_mode;
11087
11088 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11089 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11090 /* Proceed iff the specified mode can hold the value. */
ca75b926
KG
11091 if (force_convert
11092 || (real_identical (&re_mode, &re)
11093 && real_identical (&im_mode, &im)))
c128599a
KG
11094 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11095 build_real (TREE_TYPE (type), im_mode));
11096 }
11097 }
11098 return NULL_TREE;
11099}
c128599a 11100
1f3f1f68
KG
11101/* If argument ARG is a REAL_CST, call the one-argument mpfr function
11102 FUNC on it and return the resulting value as a tree with type TYPE.
b53fed56
KG
11103 If MIN and/or MAX are not NULL, then the supplied ARG must be
11104 within those bounds. If INCLUSIVE is true, then MIN/MAX are
11105 acceptable values, otherwise they are not. The mpfr precision is
11106 set to the precision of TYPE. We assume that function FUNC returns
11107 zero if the result could be calculated exactly within the requested
11108 precision. */
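/* For example, the math-builtin folders use this roughly as

     do_mpfr_arg1 (arg, type, mpfr_sin, NULL, NULL, false);

   for sin, while bounded functions such as asin pass [-1, 1] via
   MIN/MAX.  */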
1f3f1f68
KG
11109
11110static tree
b53fed56
KG
11111do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
11112 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
11113 bool inclusive)
1f3f1f68
KG
11114{
11115 tree result = NULL_TREE;
b8698a0f 11116
1f3f1f68
KG
11117 STRIP_NOPS (arg);
11118
5f641bd8
KG
11119 /* To proceed, MPFR must exactly represent the target floating point
11120 format, which only happens when the target base equals two. */
11121 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
455f14dd 11122 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
1f3f1f68 11123 {
4413d881 11124 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
1f3f1f68 11125
4c8c70e0 11126 if (real_isfinite (ra)
4413d881
KG
11127 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
11128 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
1f3f1f68 11129 {
3e479de3
UW
11130 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11131 const int prec = fmt->p;
11132 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
b52dd66c 11133 int inexact;
1f3f1f68
KG
11134 mpfr_t m;
11135
11136 mpfr_init2 (m, prec);
205a4d09 11137 mpfr_from_real (m, ra, GMP_RNDN);
62e5bf5d 11138 mpfr_clear_flags ();
3e479de3 11139 inexact = func (m, m, rnd);
4413d881 11140 result = do_mpfr_ckconv (m, type, inexact);
1f3f1f68
KG
11141 mpfr_clear (m);
11142 }
11143 }
b8698a0f 11144
1f3f1f68
KG
11145 return result;
11146}
4413d881
KG
11147
11148/* If argument ARG is a REAL_CST, call the two-argument mpfr function
11149 FUNC on it and return the resulting value as a tree with type TYPE.
11150 The mpfr precision is set to the precision of TYPE. We assume that
11151 function FUNC returns zero if the result could be calculated
11152 exactly within the requested precision. */
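/* Two-operand math builtins (for example atan2 or pow) are folded
   through this helper when both arguments are REAL_CSTs; e.g.
   pow (2.0, 10.0) becomes the constant 1024.0.  */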
11153
11154static tree
11155do_mpfr_arg2 (tree arg1, tree arg2, tree type,
11156 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11157{
11158 tree result = NULL_TREE;
b8698a0f 11159
4413d881
KG
11160 STRIP_NOPS (arg1);
11161 STRIP_NOPS (arg2);
11162
5f641bd8
KG
11163 /* To proceed, MPFR must exactly represent the target floating point
11164 format, which only happens when the target base equals two. */
11165 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
455f14dd
RS
11166 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11167 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
4413d881
KG
11168 {
11169 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11170 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11171
4c8c70e0 11172 if (real_isfinite (ra1) && real_isfinite (ra2))
4413d881 11173 {
3e479de3
UW
11174 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11175 const int prec = fmt->p;
11176 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
4413d881
KG
11177 int inexact;
11178 mpfr_t m1, m2;
11179
11180 mpfr_inits2 (prec, m1, m2, NULL);
205a4d09
BM
11181 mpfr_from_real (m1, ra1, GMP_RNDN);
11182 mpfr_from_real (m2, ra2, GMP_RNDN);
62e5bf5d 11183 mpfr_clear_flags ();
3e479de3 11184 inexact = func (m1, m1, m2, rnd);
4413d881
KG
11185 result = do_mpfr_ckconv (m1, type, inexact);
11186 mpfr_clears (m1, m2, NULL);
11187 }
11188 }
b8698a0f 11189
4413d881
KG
11190 return result;
11191}
b68bcfff 11192
e61e5ddc
KG
11193/* If argument ARG is a REAL_CST, call the three-argument mpfr function
11194 FUNC on it and return the resulting value as a tree with type TYPE.
11195 The mpfr precision is set to the precision of TYPE. We assume that
11196 function FUNC returns zero if the result could be calculated
11197 exactly within the requested precision. */
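/* This serves fma-style folding: e.g. fma (2.0, 3.0, 1.0) becomes the
   constant 7.0 when folded through mpfr_fma.  */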
11198
11199static tree
11200do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
11201 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
11202{
11203 tree result = NULL_TREE;
b8698a0f 11204
e61e5ddc
KG
11205 STRIP_NOPS (arg1);
11206 STRIP_NOPS (arg2);
11207 STRIP_NOPS (arg3);
11208
5f641bd8
KG
11209 /* To proceed, MPFR must exactly represent the target floating point
11210 format, which only happens when the target base equals two. */
11211 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
455f14dd
RS
11212 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
11213 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
11214 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
e61e5ddc
KG
11215 {
11216 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
11217 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
11218 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
11219
4c8c70e0 11220 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
e61e5ddc 11221 {
3e479de3
UW
11222 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11223 const int prec = fmt->p;
11224 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
e61e5ddc
KG
11225 int inexact;
11226 mpfr_t m1, m2, m3;
11227
11228 mpfr_inits2 (prec, m1, m2, m3, NULL);
205a4d09
BM
11229 mpfr_from_real (m1, ra1, GMP_RNDN);
11230 mpfr_from_real (m2, ra2, GMP_RNDN);
11231 mpfr_from_real (m3, ra3, GMP_RNDN);
62e5bf5d 11232 mpfr_clear_flags ();
3e479de3 11233 inexact = func (m1, m1, m2, m3, rnd);
e61e5ddc
KG
11234 result = do_mpfr_ckconv (m1, type, inexact);
11235 mpfr_clears (m1, m2, m3, NULL);
11236 }
11237 }
b8698a0f 11238
e61e5ddc
KG
11239 return result;
11240}
11241
b68bcfff
KG
11242/* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
11243 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
75c7c595
RG
11244 If ARG_SINP and ARG_COSP are NULL then the result is returned
11245 as a complex value.
b68bcfff
KG
11246 The type is taken from the type of ARG and is used for setting the
11247 precision of the calculation and results. */
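/* So sincos (x, &s, &c) with a constant X folds to the two assignments
   through the dereferenced pointers combined in a COMPOUND_EXPR, while a
   caller passing NULL pointers (e.g. a cexpi-style fold) gets
   cos (X) + sin (X)*i back as a COMPLEX_CST.  */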
11248
11249static tree
11250do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
11251{
5f641bd8 11252 tree const type = TREE_TYPE (arg);
b68bcfff 11253 tree result = NULL_TREE;
b8698a0f 11254
b68bcfff 11255 STRIP_NOPS (arg);
b8698a0f 11256
5f641bd8
KG
11257 /* To proceed, MPFR must exactly represent the target floating point
11258 format, which only happens when the target base equals two. */
11259 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
455f14dd
RS
11260 && TREE_CODE (arg) == REAL_CST
11261 && !TREE_OVERFLOW (arg))
b68bcfff
KG
11262 {
11263 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
11264
4c8c70e0 11265 if (real_isfinite (ra))
b68bcfff 11266 {
3e479de3
UW
11267 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11268 const int prec = fmt->p;
11269 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
b68bcfff
KG
11270 tree result_s, result_c;
11271 int inexact;
11272 mpfr_t m, ms, mc;
11273
11274 mpfr_inits2 (prec, m, ms, mc, NULL);
205a4d09 11275 mpfr_from_real (m, ra, GMP_RNDN);
62e5bf5d 11276 mpfr_clear_flags ();
3e479de3 11277 inexact = mpfr_sin_cos (ms, mc, m, rnd);
b68bcfff
KG
11278 result_s = do_mpfr_ckconv (ms, type, inexact);
11279 result_c = do_mpfr_ckconv (mc, type, inexact);
11280 mpfr_clears (m, ms, mc, NULL);
11281 if (result_s && result_c)
11282 {
75c7c595
RG
11283 /* If we are to return in a complex value do so. */
11284 if (!arg_sinp && !arg_cosp)
11285 return build_complex (build_complex_type (type),
11286 result_c, result_s);
11287
b68bcfff
KG
11288 /* Dereference the sin/cos pointer arguments. */
11289 arg_sinp = build_fold_indirect_ref (arg_sinp);
11290 arg_cosp = build_fold_indirect_ref (arg_cosp);
 11291		  /* Proceed iff valid pointer types were passed in.  */
11292 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
11293 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
11294 {
11295 /* Set the values. */
939409af 11296 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
07beea0d 11297 result_s);
b68bcfff 11298 TREE_SIDE_EFFECTS (result_s) = 1;
939409af 11299 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
07beea0d 11300 result_c);
b68bcfff
KG
11301 TREE_SIDE_EFFECTS (result_c) = 1;
11302 /* Combine the assignments into a compound expr. */
11303 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11304 result_s, result_c));
11305 }
11306 }
11307 }
11308 }
11309 return result;
11310}
550b3187 11311
550b3187
KG
11312/* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
11313 two-argument mpfr order N Bessel function FUNC on them and return
11314 the resulting value as a tree with type TYPE. The mpfr precision
11315 is set to the precision of TYPE. We assume that function FUNC
11316 returns zero if the result could be calculated exactly within the
11317 requested precision. */
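/* E.g. jn (0, 0.0) folds to 1.0 here, since J0 (0) == 1; the MIN bound
   is used by callers that must keep the argument in a restricted
   domain.  */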
11318static tree
11319do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
11320 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
11321 const REAL_VALUE_TYPE *min, bool inclusive)
11322{
11323 tree result = NULL_TREE;
11324
11325 STRIP_NOPS (arg1);
11326 STRIP_NOPS (arg2);
11327
11328 /* To proceed, MPFR must exactly represent the target floating point
11329 format, which only happens when the target base equals two. */
11330 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
9541ffee 11331 && tree_fits_shwi_p (arg1)
550b3187
KG
11332 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
11333 {
9439e9a1 11334 const HOST_WIDE_INT n = tree_to_shwi (arg1);
550b3187
KG
11335 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
11336
11337 if (n == (long)n
4c8c70e0 11338 && real_isfinite (ra)
550b3187
KG
11339 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
11340 {
3e479de3
UW
11341 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11342 const int prec = fmt->p;
11343 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
550b3187
KG
11344 int inexact;
11345 mpfr_t m;
11346
11347 mpfr_init2 (m, prec);
11348 mpfr_from_real (m, ra, GMP_RNDN);
11349 mpfr_clear_flags ();
3e479de3 11350 inexact = func (m, n, m, rnd);
550b3187
KG
11351 result = do_mpfr_ckconv (m, type, inexact);
11352 mpfr_clear (m);
11353 }
11354 }
b8698a0f 11355
550b3187
KG
11356 return result;
11357}
ea91f957
KG
11358
11359/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11360 the pointer *(ARG_QUO) and return the result. The type is taken
11361 from the type of ARG0 and is used for setting the precision of the
11362 calculation and results. */
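/* E.g. remquo (5.0, 3.0, &q) folds to -1.0 with *q set to 2: the
   quotient rounded to nearest is 2 and 5 - 2*3 = -1.  The quotient is
   reduced below so that it always fits in the target int.  */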
11363
11364static tree
11365do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11366{
11367 tree const type = TREE_TYPE (arg0);
11368 tree result = NULL_TREE;
b8698a0f 11369
ea91f957
KG
11370 STRIP_NOPS (arg0);
11371 STRIP_NOPS (arg1);
b8698a0f 11372
ea91f957
KG
11373 /* To proceed, MPFR must exactly represent the target floating point
11374 format, which only happens when the target base equals two. */
11375 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11376 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11377 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11378 {
11379 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11380 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11381
4c8c70e0 11382 if (real_isfinite (ra0) && real_isfinite (ra1))
ea91f957 11383 {
3e479de3
UW
11384 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11385 const int prec = fmt->p;
11386 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
ea91f957
KG
11387 tree result_rem;
11388 long integer_quo;
11389 mpfr_t m0, m1;
11390
11391 mpfr_inits2 (prec, m0, m1, NULL);
11392 mpfr_from_real (m0, ra0, GMP_RNDN);
11393 mpfr_from_real (m1, ra1, GMP_RNDN);
11394 mpfr_clear_flags ();
3e479de3 11395 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
ea91f957
KG
11396 /* Remquo is independent of the rounding mode, so pass
11397 inexact=0 to do_mpfr_ckconv(). */
11398 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11399 mpfr_clears (m0, m1, NULL);
11400 if (result_rem)
11401 {
11402 /* MPFR calculates quo in the host's long so it may
11403 return more bits in quo than the target int can hold
11404 if sizeof(host long) > sizeof(target int). This can
11405 happen even for native compilers in LP64 mode. In
11406 these cases, modulo the quo value with the largest
11407 number that the target int can hold while leaving one
11408 bit for the sign. */
11409 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11410 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11411
11412 /* Dereference the quo pointer argument. */
11413 arg_quo = build_fold_indirect_ref (arg_quo);
11414 /* Proceed iff a valid pointer type was passed in. */
11415 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11416 {
11417 /* Set the value. */
45a2c477
RG
11418 tree result_quo
11419 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11420 build_int_cst (TREE_TYPE (arg_quo),
11421 integer_quo));
ea91f957
KG
11422 TREE_SIDE_EFFECTS (result_quo) = 1;
11423 /* Combine the quo assignment with the rem. */
11424 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11425 result_quo, result_rem));
11426 }
11427 }
11428 }
11429 }
11430 return result;
11431}
752b7d38
KG
11432
11433/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11434 resulting value as a tree with type TYPE. The mpfr precision is
11435 set to the precision of TYPE. We assume that this mpfr function
11436 returns zero if the result could be calculated exactly within the
11437 requested precision. In addition, the integer pointer represented
11438 by ARG_SG will be dereferenced and set to the appropriate signgam
11439 (-1,1) value. */
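/* E.g. lgamma_r (3.0, &sg) folds to log (2.0) with *sg set to 1, since
   gamma (3) == 2 is positive.  */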
11440
11441static tree
11442do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11443{
11444 tree result = NULL_TREE;
11445
11446 STRIP_NOPS (arg);
b8698a0f 11447
752b7d38
KG
11448 /* To proceed, MPFR must exactly represent the target floating point
11449 format, which only happens when the target base equals two. Also
11450 verify ARG is a constant and that ARG_SG is an int pointer. */
11451 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11452 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11453 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11454 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11455 {
11456 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11457
11458 /* In addition to NaN and Inf, the argument cannot be zero or a
11459 negative integer. */
4c8c70e0 11460 if (real_isfinite (ra)
752b7d38 11461 && ra->cl != rvc_zero
c3284718 11462 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
752b7d38 11463 {
3e479de3
UW
11464 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11465 const int prec = fmt->p;
11466 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
752b7d38
KG
11467 int inexact, sg;
11468 mpfr_t m;
11469 tree result_lg;
11470
11471 mpfr_init2 (m, prec);
11472 mpfr_from_real (m, ra, GMP_RNDN);
11473 mpfr_clear_flags ();
3e479de3 11474 inexact = mpfr_lgamma (m, &sg, m, rnd);
752b7d38
KG
11475 result_lg = do_mpfr_ckconv (m, type, inexact);
11476 mpfr_clear (m);
11477 if (result_lg)
11478 {
11479 tree result_sg;
11480
11481 /* Dereference the arg_sg pointer argument. */
11482 arg_sg = build_fold_indirect_ref (arg_sg);
11483 /* Assign the signgam value into *arg_sg. */
11484 result_sg = fold_build2 (MODIFY_EXPR,
11485 TREE_TYPE (arg_sg), arg_sg,
45a2c477 11486 build_int_cst (TREE_TYPE (arg_sg), sg));
752b7d38
KG
11487 TREE_SIDE_EFFECTS (result_sg) = 1;
11488 /* Combine the signgam assignment with the lgamma result. */
11489 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11490 result_sg, result_lg));
11491 }
11492 }
11493 }
11494
11495 return result;
11496}
726a989a 11497
c128599a
KG
11498/* If argument ARG is a COMPLEX_CST, call the one-argument mpc
11499 function FUNC on it and return the resulting value as a tree with
11500 type TYPE. The mpfr precision is set to the precision of TYPE. We
11501 assume that function FUNC returns zero if the result could be
11502 calculated exactly within the requested precision. */
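/* The complex analogue of do_mpfr_arg1; e.g. folding csqrt (-1.0 + 0.0i)
   through mpc_sqrt yields the constant 0.0 + 1.0i, provided both parts
   of the argument are finite.  */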
11503
11504static tree
11505do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
11506{
11507 tree result = NULL_TREE;
b8698a0f 11508
c128599a
KG
11509 STRIP_NOPS (arg);
11510
11511 /* To proceed, MPFR must exactly represent the target floating point
11512 format, which only happens when the target base equals two. */
11513 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
11514 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
11515 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
11516 {
11517 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
11518 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
11519
11520 if (real_isfinite (re) && real_isfinite (im))
11521 {
11522 const struct real_format *const fmt =
11523 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11524 const int prec = fmt->p;
11525 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
bbb9d91f 11526 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
c128599a
KG
11527 int inexact;
11528 mpc_t m;
b8698a0f 11529
c128599a 11530 mpc_init2 (m, prec);
c3284718
RS
11531 mpfr_from_real (mpc_realref (m), re, rnd);
11532 mpfr_from_real (mpc_imagref (m), im, rnd);
c128599a 11533 mpfr_clear_flags ();
bbb9d91f 11534 inexact = func (m, m, crnd);
ca75b926 11535 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
c128599a
KG
11536 mpc_clear (m);
11537 }
11538 }
11539
11540 return result;
11541}
a41d064d
KG
11542
11543/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
11544 mpc function FUNC on it and return the resulting value as a tree
11545 with type TYPE. The mpfr precision is set to the precision of
11546 TYPE. We assume that function FUNC returns zero if the result
ca75b926
KG
11547 could be calculated exactly within the requested precision. If
11548 DO_NONFINITE is true, then fold expressions containing Inf or NaN
11549 in the arguments and/or results. */
a41d064d 11550
2f440f6a 11551tree
ca75b926 11552do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
a41d064d
KG
11553 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
11554{
11555 tree result = NULL_TREE;
b8698a0f 11556
a41d064d
KG
11557 STRIP_NOPS (arg0);
11558 STRIP_NOPS (arg1);
11559
11560 /* To proceed, MPFR must exactly represent the target floating point
11561 format, which only happens when the target base equals two. */
11562 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
11563 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
11564 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
11565 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
11566 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
11567 {
11568 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
11569 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
11570 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
11571 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
11572
ca75b926
KG
11573 if (do_nonfinite
11574 || (real_isfinite (re0) && real_isfinite (im0)
11575 && real_isfinite (re1) && real_isfinite (im1)))
a41d064d
KG
11576 {
11577 const struct real_format *const fmt =
11578 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
11579 const int prec = fmt->p;
11580 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
11581 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
11582 int inexact;
11583 mpc_t m0, m1;
b8698a0f 11584
a41d064d
KG
11585 mpc_init2 (m0, prec);
11586 mpc_init2 (m1, prec);
c3284718
RS
11587 mpfr_from_real (mpc_realref (m0), re0, rnd);
11588 mpfr_from_real (mpc_imagref (m0), im0, rnd);
11589 mpfr_from_real (mpc_realref (m1), re1, rnd);
11590 mpfr_from_real (mpc_imagref (m1), im1, rnd);
a41d064d
KG
11591 mpfr_clear_flags ();
11592 inexact = func (m0, m0, m1, crnd);
ca75b926 11593 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
a41d064d
KG
11594 mpc_clear (m0);
11595 mpc_clear (m1);
11596 }
11597 }
11598
11599 return result;
11600}
c128599a 11601
726a989a
RB
11602/* A wrapper function for builtin folding that prevents warnings for
11603 "statement without effect" and the like, caused by removing the
11604 call node earlier than the warning is generated. */
11605
11606tree
538dd0b7 11607fold_call_stmt (gcall *stmt, bool ignore)
726a989a
RB
11608{
11609 tree ret = NULL_TREE;
11610 tree fndecl = gimple_call_fndecl (stmt);
db3927fb 11611 location_t loc = gimple_location (stmt);
726a989a
RB
11612 if (fndecl
11613 && TREE_CODE (fndecl) == FUNCTION_DECL
11614 && DECL_BUILT_IN (fndecl)
11615 && !gimple_call_va_arg_pack_p (stmt))
11616 {
11617 int nargs = gimple_call_num_args (stmt);
8897c9ce
NF
11618 tree *args = (nargs > 0
11619 ? gimple_call_arg_ptr (stmt, 0)
11620 : &error_mark_node);
726a989a 11621
0889e9bc
JJ
11622 if (avoid_folding_inline_builtin (fndecl))
11623 return NULL_TREE;
726a989a
RB
11624 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11625 {
8897c9ce 11626 return targetm.fold_builtin (fndecl, nargs, args, ignore);
726a989a
RB
11627 }
11628 else
11629 {
a6a0570f 11630 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
726a989a
RB
11631 if (ret)
11632 {
11633 /* Propagate location information from original call to
11634 expansion of builtin. Otherwise things like
11635 maybe_emit_chk_warning, that operate on the expansion
11636 of a builtin, will use the wrong location information. */
11637 if (gimple_has_location (stmt))
11638 {
11639 tree realret = ret;
11640 if (TREE_CODE (ret) == NOP_EXPR)
11641 realret = TREE_OPERAND (ret, 0);
11642 if (CAN_HAVE_LOCATION_P (realret)
11643 && !EXPR_HAS_LOCATION (realret))
db3927fb 11644 SET_EXPR_LOCATION (realret, loc);
726a989a
RB
11645 return realret;
11646 }
11647 return ret;
11648 }
11649 }
11650 }
11651 return NULL_TREE;
11652}
d7f09764 11653
e79983f4 11654/* Look up the function in builtin_decl that corresponds to DECL
d7f09764
DN
11655 and set ASMSPEC as its user assembler name. DECL must be a
11656 function decl that declares a builtin. */
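/* E.g. a declaration such as

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("__my_memcpy");

   redirects the builtin's assembler name and the block-move libcall to
   __my_memcpy (an illustrative name).  */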
11657
11658void
11659set_builtin_user_assembler_name (tree decl, const char *asmspec)
11660{
11661 tree builtin;
11662 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
11663 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
11664 && asmspec != 0);
11665
e79983f4 11666 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
ce835863 11667 set_user_assembler_name (builtin, asmspec);
d7f09764
DN
11668 switch (DECL_FUNCTION_CODE (decl))
11669 {
11670 case BUILT_IN_MEMCPY:
11671 init_block_move_fn (asmspec);
11672 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
11673 break;
11674 case BUILT_IN_MEMSET:
11675 init_block_clear_fn (asmspec);
11676 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
11677 break;
11678 case BUILT_IN_MEMMOVE:
11679 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
11680 break;
11681 case BUILT_IN_MEMCMP:
11682 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
11683 break;
11684 case BUILT_IN_ABORT:
11685 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
11686 break;
cbb1ab10
JJ
11687 case BUILT_IN_FFS:
11688 if (INT_TYPE_SIZE < BITS_PER_WORD)
11689 {
11690 set_user_assembler_libfunc ("ffs", asmspec);
11691 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
11692 MODE_INT, 0), "ffs");
11693 }
11694 break;
d7f09764
DN
11695 default:
11696 break;
11697 }
11698}
bec922f0
SL
11699
11700/* Return true if DECL is a builtin that expands to a constant or similarly
11701 simple code. */
11702bool
11703is_simple_builtin (tree decl)
11704{
11705 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11706 switch (DECL_FUNCTION_CODE (decl))
11707 {
11708 /* Builtins that expand to constants. */
11709 case BUILT_IN_CONSTANT_P:
11710 case BUILT_IN_EXPECT:
11711 case BUILT_IN_OBJECT_SIZE:
11712 case BUILT_IN_UNREACHABLE:
11713 /* Simple register moves or loads from stack. */
45d439ac 11714 case BUILT_IN_ASSUME_ALIGNED:
bec922f0
SL
11715 case BUILT_IN_RETURN_ADDRESS:
11716 case BUILT_IN_EXTRACT_RETURN_ADDR:
11717 case BUILT_IN_FROB_RETURN_ADDR:
11718 case BUILT_IN_RETURN:
11719 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
11720 case BUILT_IN_FRAME_ADDRESS:
11721 case BUILT_IN_VA_END:
11722 case BUILT_IN_STACK_SAVE:
11723 case BUILT_IN_STACK_RESTORE:
11724 /* Exception state returns or moves registers around. */
11725 case BUILT_IN_EH_FILTER:
11726 case BUILT_IN_EH_POINTER:
11727 case BUILT_IN_EH_COPY_VALUES:
11728 return true;
11729
11730 default:
11731 return false;
11732 }
11733
11734 return false;
11735}
11736
11737/* Return true if DECL is a builtin that is not expensive, i.e., they are
11738 most probably expanded inline into reasonably simple code. This is a
11739 superset of is_simple_builtin. */
11740bool
11741is_inexpensive_builtin (tree decl)
11742{
11743 if (!decl)
11744 return false;
11745 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
11746 return true;
11747 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
11748 switch (DECL_FUNCTION_CODE (decl))
11749 {
11750 case BUILT_IN_ABS:
11751 case BUILT_IN_ALLOCA:
13e49da9 11752 case BUILT_IN_ALLOCA_WITH_ALIGN:
ac868f29 11753 case BUILT_IN_BSWAP16:
bec922f0
SL
11754 case BUILT_IN_BSWAP32:
11755 case BUILT_IN_BSWAP64:
11756 case BUILT_IN_CLZ:
11757 case BUILT_IN_CLZIMAX:
11758 case BUILT_IN_CLZL:
11759 case BUILT_IN_CLZLL:
11760 case BUILT_IN_CTZ:
11761 case BUILT_IN_CTZIMAX:
11762 case BUILT_IN_CTZL:
11763 case BUILT_IN_CTZLL:
11764 case BUILT_IN_FFS:
11765 case BUILT_IN_FFSIMAX:
11766 case BUILT_IN_FFSL:
11767 case BUILT_IN_FFSLL:
11768 case BUILT_IN_IMAXABS:
11769 case BUILT_IN_FINITE:
11770 case BUILT_IN_FINITEF:
11771 case BUILT_IN_FINITEL:
11772 case BUILT_IN_FINITED32:
11773 case BUILT_IN_FINITED64:
11774 case BUILT_IN_FINITED128:
11775 case BUILT_IN_FPCLASSIFY:
11776 case BUILT_IN_ISFINITE:
11777 case BUILT_IN_ISINF_SIGN:
11778 case BUILT_IN_ISINF:
11779 case BUILT_IN_ISINFF:
11780 case BUILT_IN_ISINFL:
11781 case BUILT_IN_ISINFD32:
11782 case BUILT_IN_ISINFD64:
11783 case BUILT_IN_ISINFD128:
11784 case BUILT_IN_ISNAN:
11785 case BUILT_IN_ISNANF:
11786 case BUILT_IN_ISNANL:
11787 case BUILT_IN_ISNAND32:
11788 case BUILT_IN_ISNAND64:
11789 case BUILT_IN_ISNAND128:
11790 case BUILT_IN_ISNORMAL:
11791 case BUILT_IN_ISGREATER:
11792 case BUILT_IN_ISGREATEREQUAL:
11793 case BUILT_IN_ISLESS:
11794 case BUILT_IN_ISLESSEQUAL:
11795 case BUILT_IN_ISLESSGREATER:
11796 case BUILT_IN_ISUNORDERED:
11797 case BUILT_IN_VA_ARG_PACK:
11798 case BUILT_IN_VA_ARG_PACK_LEN:
11799 case BUILT_IN_VA_COPY:
11800 case BUILT_IN_TRAP:
11801 case BUILT_IN_SAVEREGS:
11802 case BUILT_IN_POPCOUNTL:
11803 case BUILT_IN_POPCOUNTLL:
11804 case BUILT_IN_POPCOUNTIMAX:
11805 case BUILT_IN_POPCOUNT:
11806 case BUILT_IN_PARITYL:
11807 case BUILT_IN_PARITYLL:
11808 case BUILT_IN_PARITYIMAX:
11809 case BUILT_IN_PARITY:
11810 case BUILT_IN_LABS:
11811 case BUILT_IN_LLABS:
11812 case BUILT_IN_PREFETCH:
41dbbb37 11813 case BUILT_IN_ACC_ON_DEVICE:
bec922f0
SL
11814 return true;
11815
11816 default:
11817 return is_simple_builtin (decl);
11818 }
11819
11820 return false;
11821}